gt
stringclasses
1 value
context
stringlengths
2.05k
161k
// Copyright 2018 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.chrome.browser.autofill_assistant.header; import android.content.Context; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.widget.ImageView; import android.widget.PopupMenu; import android.widget.TextView; import androidx.annotation.Nullable; import androidx.recyclerview.widget.DefaultItemAnimator; import androidx.recyclerview.widget.RecyclerView; import org.chromium.base.task.PostTask; import org.chromium.chrome.autofill_assistant.R; import org.chromium.chrome.browser.autofill_assistant.AssistantTagsForTesting; import org.chromium.chrome.browser.autofill_assistant.AssistantTextUtils; import org.chromium.chrome.browser.autofill_assistant.AutofillAssistantPreferenceFragment; import org.chromium.chrome.browser.autofill_assistant.carousel.AssistantChipAdapter; import org.chromium.chrome.browser.settings.SettingsLauncherImpl; import org.chromium.components.browser_ui.settings.SettingsLauncher; import org.chromium.components.browser_ui.widget.textbubble.TextBubble; import org.chromium.content_public.browser.UiThreadTaskTraits; import org.chromium.ui.modelutil.PropertyKey; import org.chromium.ui.modelutil.PropertyModelChangeProcessor; import org.chromium.ui.util.AccessibilityUtil; import org.chromium.ui.widget.ViewRectProvider; /** * This class is responsible for pushing updates to the Autofill Assistant header view. These * updates are pulled from the {@link AssistantHeaderModel} when a notification of an update is * received. 
*/ class AssistantHeaderViewBinder implements PropertyModelChangeProcessor.ViewBinder<AssistantHeaderModel, AssistantHeaderViewBinder.ViewHolder, PropertyKey> { /** The amount of space to put between the top of the sheet and the bottom of the bubble.*/ private static final int TEXT_BUBBLE_PIXELS_ABOVE_SHEET = 4; private final AccessibilityUtil mAccessibilityUtil; public AssistantHeaderViewBinder(AccessibilityUtil accessibilityUtil) { mAccessibilityUtil = accessibilityUtil; } /** * A wrapper class that holds the different views of the header. */ static class ViewHolder { final Context mContext; final AnimatedPoodle mPoodle; final ViewGroup mHeader; final TextView mStatusMessage; final AssistantStepProgressBar mStepProgressBar; final ImageView mTtsButton; final View mProfileIconView; final PopupMenu mProfileIconMenu; final MenuItem mProfileIconMenuSettingsMessage; final MenuItem mProfileIconMenuSendFeedbackMessage; final RecyclerView mChipsContainer; @Nullable TextBubble mTextBubble; ViewHolder(Context context, ViewGroup headerView, AnimatedPoodle poodle, RecyclerView chipsContainer) { mContext = context; mPoodle = poodle; mHeader = headerView; mStatusMessage = headerView.findViewById(R.id.status_message); mStepProgressBar = new AssistantStepProgressBar(headerView.findViewById(R.id.step_progress_bar)); mTtsButton = (ImageView) headerView.findViewById(R.id.tts_button); mProfileIconView = headerView.findViewById(R.id.profile_image); mProfileIconMenu = new PopupMenu(context, mProfileIconView); mProfileIconMenu.inflate(R.menu.profile_icon_menu); mProfileIconMenuSettingsMessage = mProfileIconMenu.getMenu().findItem(R.id.settings); mProfileIconMenuSendFeedbackMessage = mProfileIconMenu.getMenu().findItem(R.id.send_feedback); mProfileIconView.setOnClickListener(unusedView -> mProfileIconMenu.show()); mChipsContainer = chipsContainer; } void disableAnimations(boolean disable) { mStepProgressBar.disableAnimations(disable); // Hiding the animated poodle seems to be the easiest 
way to disable its animation since // {@link LogoView#setAnimationEnabled(boolean)} is private. mPoodle.getView().setVisibility(View.INVISIBLE); ((DefaultItemAnimator) mChipsContainer.getItemAnimator()) .setSupportsChangeAnimations(!disable); } void updateProgressBarVisibility(boolean visible) { mStepProgressBar.setVisible(visible); } } @Override public void bind(AssistantHeaderModel model, ViewHolder view, PropertyKey propertyKey) { if (AssistantHeaderModel.STATUS_MESSAGE == propertyKey) { String message = model.get(AssistantHeaderModel.STATUS_MESSAGE); AssistantTextUtils.applyVisualAppearanceTags(view.mStatusMessage, message, null); view.mStatusMessage.announceForAccessibility(view.mStatusMessage.getText()); } else if (AssistantHeaderModel.PROFILE_ICON_MENU_SETTINGS_MESSAGE == propertyKey) { view.mProfileIconMenuSettingsMessage.setTitle( model.get(AssistantHeaderModel.PROFILE_ICON_MENU_SETTINGS_MESSAGE)); } else if (AssistantHeaderModel.PROFILE_ICON_MENU_SEND_FEEDBACK_MESSAGE == propertyKey) { view.mProfileIconMenuSendFeedbackMessage.setTitle( model.get(AssistantHeaderModel.PROFILE_ICON_MENU_SEND_FEEDBACK_MESSAGE)); } else if (AssistantHeaderModel.PROGRESS_ACTIVE_STEP == propertyKey) { int activeStep = model.get(AssistantHeaderModel.PROGRESS_ACTIVE_STEP); if (activeStep >= 0) { view.mStepProgressBar.setActiveStep(activeStep); } } else if (AssistantHeaderModel.PROGRESS_BAR_ERROR == propertyKey) { view.mStepProgressBar.setError(model.get(AssistantHeaderModel.PROGRESS_BAR_ERROR)); } else if (AssistantHeaderModel.PROGRESS_VISIBLE == propertyKey) { view.updateProgressBarVisibility(model.get(AssistantHeaderModel.PROGRESS_VISIBLE)); } else if (AssistantHeaderModel.STEP_PROGRESS_BAR_ICONS == propertyKey) { view.mStepProgressBar.setSteps(model.get(AssistantHeaderModel.STEP_PROGRESS_BAR_ICONS)); view.mStepProgressBar.disableAnimations( model.get(AssistantHeaderModel.DISABLE_ANIMATIONS_FOR_TESTING)); } else if (AssistantHeaderModel.SPIN_POODLE == propertyKey) { 
view.mPoodle.setSpinEnabled(model.get(AssistantHeaderModel.SPIN_POODLE)); } else if (AssistantHeaderModel.FEEDBACK_BUTTON_CALLBACK == propertyKey) { setProfileMenuListener(view, model.get(AssistantHeaderModel.FEEDBACK_BUTTON_CALLBACK)); } else if (AssistantHeaderModel.CHIPS == propertyKey) { view.mChipsContainer.invalidateItemDecorations(); ((AssistantChipAdapter) view.mChipsContainer.getAdapter()) .setChips(model.get(AssistantHeaderModel.CHIPS)); maybeShowChips(model, view); } else if (AssistantHeaderModel.CHIPS_VISIBLE == propertyKey) { maybeShowChips(model, view); } else if (AssistantHeaderModel.BUBBLE_MESSAGE == propertyKey) { showOrDismissBubble(model, view); } else if (AssistantHeaderModel.TTS_BUTTON_VISIBLE == propertyKey) { showOrHideTtsButton(model, view); } else if (AssistantHeaderModel.TTS_BUTTON_STATE == propertyKey) { setTtsButtonState(view, model.get(AssistantHeaderModel.TTS_BUTTON_STATE)); } else if (AssistantHeaderModel.TTS_BUTTON_CALLBACK == propertyKey) { setTtsButtonClickListener(view, model.get(AssistantHeaderModel.TTS_BUTTON_CALLBACK)); } else if (AssistantHeaderModel.DISABLE_ANIMATIONS_FOR_TESTING == propertyKey) { view.disableAnimations(model.get(AssistantHeaderModel.DISABLE_ANIMATIONS_FOR_TESTING)); } else { assert false : "Unhandled property detected in AssistantHeaderViewBinder!"; } } private void maybeShowChips(AssistantHeaderModel model, ViewHolder view) { // The PostTask is necessary as a workaround for the sticky button occasionally not showing, // this makes sure that the change happens after any possibly clashing animation currently // happening. // TODO(b/164389932): Figure out a better fix that doesn't require issuing the change in the // following UI iteration. 
PostTask.postTask(UiThreadTaskTraits.DEFAULT, () -> { if (model.get(AssistantHeaderModel.CHIPS_VISIBLE) && !model.get(AssistantHeaderModel.CHIPS).isEmpty()) { view.mChipsContainer.setVisibility(View.VISIBLE); view.mProfileIconView.setVisibility(View.GONE); } else { view.mChipsContainer.setVisibility(View.GONE); view.mProfileIconView.setVisibility(View.VISIBLE); } }); } private void setProfileMenuListener(ViewHolder view, @Nullable Runnable feedbackCallback) { view.mProfileIconMenu.setOnMenuItemClickListener(item -> { int itemId = item.getItemId(); if (itemId == R.id.settings) { SettingsLauncher settingsLauncher = new SettingsLauncherImpl(); settingsLauncher.launchSettingsActivity( view.mHeader.getContext(), AutofillAssistantPreferenceFragment.class); return true; } else if (itemId == R.id.send_feedback) { if (feedbackCallback != null) { feedbackCallback.run(); } return true; } return false; }); } private void showOrDismissBubble(AssistantHeaderModel model, ViewHolder view) { String message = model.get(AssistantHeaderModel.BUBBLE_MESSAGE); if (message.isEmpty() && view.mTextBubble == null) { return; } if (message.isEmpty() && view.mTextBubble != null) { view.mTextBubble.dismiss(); view.mTextBubble = null; return; } View poodle = view.mPoodle.getView(); ViewRectProvider anchorRectProvider = new ViewRectProvider(poodle); int topOffset = view.mContext.getResources().getDimensionPixelSize( R.dimen.autofill_assistant_root_view_top_padding) + TEXT_BUBBLE_PIXELS_ABOVE_SHEET; anchorRectProvider.setInsetPx(0, -topOffset, 0, 0); view.mTextBubble = new TextBubble( /*context = */ view.mContext, /*rootView = */ poodle, /*contentString = */ message, /*accessibilityString = */ message, /*showArrow = */ true, /*anchorRectProvider = */ anchorRectProvider, mAccessibilityUtil.isAccessibilityEnabled()); view.mTextBubble.setDismissOnTouchInteraction(true); view.mTextBubble.show(); } private void showOrHideTtsButton(AssistantHeaderModel model, ViewHolder view) { if 
(model.get(AssistantHeaderModel.TTS_BUTTON_VISIBLE)) { view.mTtsButton.setVisibility(View.VISIBLE); } else { view.mTtsButton.setVisibility(View.GONE); } } private void setTtsButtonClickListener(ViewHolder view, @Nullable Runnable ttsButtonCallback) { view.mTtsButton.setOnClickListener(unusedView -> { if (ttsButtonCallback != null) { ttsButtonCallback.run(); } }); } private void setTtsButtonState(ViewHolder view, @AssistantTtsButtonState int state) { switch (state) { case AssistantTtsButtonState.DEFAULT: view.mTtsButton.setImageResource(R.drawable.ic_volume_on_white_24dp); view.mTtsButton.setTag(AssistantTagsForTesting.TTS_ENABLED_ICON_TAG); break; case AssistantTtsButtonState.PLAYING: view.mTtsButton.setImageResource(R.drawable.ic_volume_on_white_24dp); view.mTtsButton.setTag(AssistantTagsForTesting.TTS_PLAYING_ICON_TAG); break; case AssistantTtsButtonState.DISABLED: view.mTtsButton.setImageResource(R.drawable.ic_volume_off_white_24dp); view.mTtsButton.setTag(AssistantTagsForTesting.TTS_DISABLED_ICON_TAG); break; } } }
/* * Copyright 2011 Mikhail Lopatkin * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.bitbucket.mlopatkin.android.logviewer; import java.awt.Color; import java.io.File; import java.io.FileInputStream; import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; import java.io.UnsupportedEncodingException; import java.io.Writer; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.EnumSet; import java.util.List; import java.util.Properties; import java.util.TreeMap; import org.apache.commons.lang3.BooleanUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.SystemUtils; import org.apache.log4j.BasicConfigurator; import org.apache.log4j.ConsoleAppender; import org.apache.log4j.Logger; import org.apache.log4j.PatternLayout; import org.apache.log4j.PropertyConfigurator; import org.bitbucket.mlopatkin.android.liblogcat.LogRecord.Buffer; import org.bitbucket.mlopatkin.android.liblogcat.LogRecord.Priority; import org.bitbucket.mlopatkin.utils.MyStringUtils; public class Configuration { private static final boolean DEBUG_MODE = System.getProperty("logview.debug") != null; public static class ui { private static final String PREFIX = "ui."; private static List<String> columns_; private static EnumSet<Buffer> buffers_; private static void initColumns() { String columnsValue = instance.properties.getProperty(PREFIX + "columns", "time, pid, priority, tag, message"); columns_ = 
splitCommaSeparatedValues(columnsValue); } private static void initBuffers() { buffers_ = EnumSet.noneOf(Buffer.class); String columnsValue = instance.properties.getProperty(PREFIX + "buffers", "MAIN"); for (String bufferName : splitCommaSeparatedValues(columnsValue)) { buffers_.add(Buffer.valueOf(bufferName.toUpperCase())); } } public synchronized static List<String> columns() { if (columns_ == null) { initColumns(); } return columns_; } public static int tooltipMaxWidth() { return parseInt(PREFIX + "tooltip_max_width", 120); } public static int autoscrollThreshold() { return parseInt(PREFIX + "autoscroll_threshold", 20); } public static Color priorityColor(Priority p) { String priorityName = p.name().toLowerCase(); return parseColor(PREFIX + "priority_color." + priorityName, Color.BLACK); } public static Color bookmarkBackground() { Color defaultColor = Color.decode("#D0F0C0"); return parseColor(PREFIX + "bookmark_background", defaultColor); } public static Color bookmarkedForeground() { return parseColor(PREFIX + "bookmark_foreground", null); } private static void initHighlightColors() { Color defaultColor = Color.decode("#D0F0C0"); String prefix = PREFIX + "highlight_color."; TreeMap<Integer, Color> colors = new TreeMap<Integer, Color>(); for (String param : instance.properties.stringPropertyNames()) { if (param.startsWith(prefix)) { int id = Integer.parseInt(param.substring(prefix.length())); colors.put(id, parseColor(param, defaultColor)); } } _highlightColors = new Color[colors.size()]; int i = 0; for (Color color : colors.values()) { _highlightColors[i++] = color; } } private static Color[] _highlightColors; public static Color[] highlightColors() { if (_highlightColors == null) { initHighlightColors(); } return _highlightColors; } public static Color backgroundColor() { return parseColor(PREFIX + "background_color", Color.WHITE); } public static boolean bufferEnabled(Buffer buffer) { if (buffers_ == null) { initBuffers(); } return 
buffers_.contains(buffer); } public static boolean hideLoggingProcesses() { return parseBoolean(PREFIX + "hide_logging_processes", true); } } public static class adb { private static final String PREFIX = "adb."; public static final String DEFAULT_EXECUTABLE = ((SystemUtils.IS_OS_WINDOWS) ? "adb.exe" : "adb").intern(); public static String commandline() { return instance.properties.getProperty(PREFIX + "commandline", "logcat -v threadtime"); } public static String bufferswitch() { return instance.properties.getProperty(PREFIX + "bufferswitch", "-b"); } public static String bufferName(Buffer buffer) { return instance.properties.getProperty(PREFIX + "buffer." + buffer.toString()); } public static String psCommandLine() { return "ps -P"; } public static String executable() { return instance.properties.getProperty(PREFIX + "executable", DEFAULT_EXECUTABLE); } public static void executable(String newExecutable) { instance.properties.setProperty(PREFIX + "executable", newExecutable); } public static boolean showSetupDialog() { return parseBoolean(PREFIX + "show_setup_dialog", true); } public static void showSetupDialog(boolean value) { instance.properties.setProperty(PREFIX + "show_setup_dialog", BooleanUtils .toStringTrueFalse(value)); } } public static class dump { private static final String PREFIX = "dump."; public static String bufferHeader(Buffer buffer) { return instance.properties.getProperty(PREFIX + "buffer." 
+ buffer.toString()); } } private static final Logger logger = Logger.getLogger(Configuration.class); private Properties properties = new Properties(); private void setUpDefaults() { // set up default logging configuration BasicConfigurator.configure(new ConsoleAppender(new PatternLayout( PatternLayout.TTCC_CONVERSION_PATTERN), ConsoleAppender.SYSTEM_ERR)); } private Properties loadFromResources() { Properties result = new Properties(); try { InputStream in = getClass().getResourceAsStream("/" + CONFIG_FILE_NAME); if (in == null) { logger.error("Missing configuration file in resources - broken package?"); return result; } try { result.load(in); } finally { in.close(); } } catch (IOException e) { logger.error("Unexpected error when parsing properties", e); } return result; } private Properties loadFromFile(String fileName) { Properties result = new Properties(); File configFile = new File(fileName); if (configFile.exists()) { try { InputStream in = new FileInputStream(configFile); try { result.load(in); } finally { in.close(); } } catch (IOException e) { logger.error("Unexpected error when parsing properties", e); } } return result; } private Configuration() { if (DEBUG_MODE) { System.err.println("DEBUG MODE ENABLED!"); } setUpDefaults(); properties.putAll(loadFromResources()); properties.putAll(loadFromFile(getConfigFileName())); PropertyConfigurator.configure(properties); } private static Configuration instance = new Configuration(); private static List<String> splitCommaSeparatedValues(String valuesString) { String[] values = StringUtils.split(valuesString, ","); List<String> result = new ArrayList<String>(); for (String s : values) { result.add(s.toLowerCase().trim()); } return Collections.unmodifiableList(result); } private static int parseInt(String key, int defaultValue) { String widthValue = instance.properties.getProperty(key); if (widthValue == null) { return defaultValue; } try { return Integer.parseInt(widthValue.trim()); } catch (NumberFormatException 
e) { logger.warn("Incorrect number in " + key, e); return defaultValue; } } private static boolean parseBoolean(String key, boolean defaultValue) { String boolValue = instance.properties.getProperty(key); if (boolValue != null) { return BooleanUtils.toBoolean(boolValue); } else { return defaultValue; } } private static Color parseColor(String key, Color defaultValue) { String colorValue = instance.properties.getProperty(key); if (colorValue == null) { return defaultValue; } try { return Color.decode(colorValue); } catch (NumberFormatException e) { logger.warn("Incorrect color format in " + key, e); return defaultValue; } } static void forceInit() { Runtime.getRuntime().addShutdownHook(new Thread() { @Override public void run() { Configuration.save(); } }); } private static final String CONFIG_FILE_DIR = ".logview"; private static final String CONFIG_FILE_DIR_WIN = "logview"; private static final String CONFIG_FILE_NAME = "logview.properties"; private String getSystemConfigDir() { if (SystemUtils.IS_OS_WINDOWS) { String appdata = System.getenv("APPDATA"); // dirty hack to get eclipse work properly with the environment // variables // when I start project in Debug under JDK 1.6_22 debug JRE it // receives environment variables in CP866 but thinks that they are // in CP1251. 
My login contains russian letters and APPDATA points // to nowhere :( if (DEBUG_MODE && !(new File(appdata).exists())) { logger.warn("DEBUG_MODE is ON"); logger.warn("Appdata value: " + Arrays.toString(appdata.getBytes())); try { appdata = new String(appdata.getBytes("WINDOWS-1251"), "CP866"); } catch (UnsupportedEncodingException e) { throw new AssertionError(e.toString()); } } return appdata; } else { return SystemUtils.USER_HOME; } } private String getConfigFileDir() { String systemConfig = getSystemConfigDir(); if (systemConfig == null) { return null; } if (SystemUtils.IS_OS_WINDOWS) { return MyStringUtils.joinPath(systemConfig, CONFIG_FILE_DIR_WIN); } else { return MyStringUtils.joinPath(systemConfig, CONFIG_FILE_DIR); } } private String getConfigFileName() { String configDir = getConfigFileDir(); if (configDir == null) { return null; } return MyStringUtils.joinPath(configDir, CONFIG_FILE_NAME); } private void ensureDir() { String dir = getConfigFileDir(); if (dir != null) { File dirFile = new File(dir); if (!dirFile.exists()) { dirFile.mkdirs(); } } } private void saveToFile() { String configFile = getConfigFileName(); if (configFile == null) { logger.error("Could not obtain system config file dir"); return; } File file = new File(configFile); ensureDir(); try { Writer writer = new FileWriter(file); try { properties .store(writer, "Don't edit this file while application is running or your changes will be lost\n"); } finally { writer.close(); } } catch (IOException e) { logger.error("Cannot save properties", e); } } public static void save() { instance.saveToFile(); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.artemis.core.client; import org.apache.activemq.artemis.api.core.ActiveMQExceptionType; import org.apache.activemq.artemis.api.core.Interceptor; import org.apache.activemq.artemis.core.protocol.core.Packet; import org.jboss.logging.BasicLogger; import org.jboss.logging.Logger; import org.jboss.logging.annotations.Cause; import org.jboss.logging.annotations.LogMessage; import org.jboss.logging.annotations.Message; import org.jboss.logging.annotations.MessageLogger; import org.w3c.dom.Node; /** * Logger Code 21 * <p> * Each message id must be 6 digits long starting with 10, the 3rd digit denotes the level so * * <pre> * INFO 1 * WARN 2 * DEBUG 3 * ERROR 4 * TRACE 5 * FATAL 6 * </pre> * * so an INFO message would be 101000 to 101999. * <p> * Once released, methods should not be deleted as they may be referenced by knowledge base * articles. Unused methods should be marked as deprecated. */ @MessageLogger(projectCode = "AMQ") public interface ActiveMQClientLogger extends BasicLogger { /** * The default logger. 
*/ ActiveMQClientLogger LOGGER = Logger.getMessageLogger(ActiveMQClientLogger.class, ActiveMQClientLogger.class.getPackage().getName()); @LogMessage(level = Logger.Level.INFO) @Message(id = 211000, value = "**** Dumping session creation stacks ****", format = Message.Format.MESSAGE_FORMAT) void dumpingSessionStacks(); @LogMessage(level = Logger.Level.INFO) @Message(id = 211001, value = "session created", format = Message.Format.MESSAGE_FORMAT) void dumpingSessionStack(@Cause Exception e); @LogMessage(level = Logger.Level.WARN) @Message(id = 212000, value = "{0}", format = Message.Format.MESSAGE_FORMAT) void warn(String message); @LogMessage(level = Logger.Level.WARN) @Message(id = 212001, value = "Error on clearing messages", format = Message.Format.MESSAGE_FORMAT) void errorClearingMessages(@Cause Throwable e); @LogMessage(level = Logger.Level.WARN) @Message(id = 212002, value = "Timed out waiting for handler to complete processing", format = Message.Format.MESSAGE_FORMAT) void timeOutWaitingForProcessing(); @LogMessage(level = Logger.Level.WARN) @Message(id = 212003, value = "Unable to close session", format = Message.Format.MESSAGE_FORMAT) void unableToCloseSession(@Cause Exception e); @LogMessage(level = Logger.Level.WARN) @Message(id = 212004, value = "Failed to connect to server.", format = Message.Format.MESSAGE_FORMAT) void failedToConnectToServer(); @LogMessage(level = Logger.Level.WARN) @Message(id = 212005, value = "Tried {0} times to connect. Now giving up on reconnecting it.", format = Message.Format.MESSAGE_FORMAT) void failedToConnectToServer(Integer reconnectAttempts); @LogMessage(level = Logger.Level.WARN) @Message(id = 212006, value = "Waiting {0} milliseconds before next retry. 
RetryInterval={1} and multiplier={2}", format = Message.Format.MESSAGE_FORMAT) void waitingForRetry(Long interval, Long retryInterval, Double multiplier); @LogMessage(level = Logger.Level.WARN) @Message(id = 212007, value = "connector.create or connectorFactory.createConnector should never throw an exception, implementation is badly behaved, but we will deal with it anyway.", format = Message.Format.MESSAGE_FORMAT) void createConnectorException(@Cause Exception e); @LogMessage(level = Logger.Level.WARN) @Message(id = 212008, value = "I am closing a core ClientSessionFactory you left open. Please make sure you close all ClientSessionFactories explicitly " + "before letting them go out of scope! {0}", format = Message.Format.MESSAGE_FORMAT) void factoryLeftOpen(@Cause Exception e, int i); @LogMessage(level = Logger.Level.WARN) @Message(id = 212009, value = "resetting session after failure", format = Message.Format.MESSAGE_FORMAT) void resettingSessionAfterFailure(); @LogMessage(level = Logger.Level.WARN) @Message(id = 212010, value = "Server is starting, retry to create the session {0}", format = Message.Format.MESSAGE_FORMAT) void retryCreateSessionSeverStarting(String name); @LogMessage(level = Logger.Level.WARN) @Message(id = 212011, value = "committing transaction after failover occurred, any non persistent messages may be lost", format = Message.Format.MESSAGE_FORMAT) void commitAfterFailover(); @LogMessage(level = Logger.Level.WARN) @Message(id = 212012, value = "failover occurred during commit throwing XAException.XA_RETRY", format = Message.Format.MESSAGE_FORMAT) void failoverDuringCommit(); @LogMessage(level = Logger.Level.WARN) @Message(id = 212014, value = "failover occurred during prepare rolling back", format = Message.Format.MESSAGE_FORMAT) void failoverDuringPrepareRollingBack(); @LogMessage(level = Logger.Level.WARN) @Message(id = 212015, value = "failover occurred during prepare rolling back", format = Message.Format.MESSAGE_FORMAT) void 
errorDuringPrepare(@Cause Throwable e); @LogMessage(level = Logger.Level.WARN) @Message(id = 212016, value = "I am closing a core ClientSession you left open. Please make sure you close all ClientSessions explicitly before letting them go out of scope! {0}", format = Message.Format.MESSAGE_FORMAT) void clientSessionNotClosed(@Cause Exception e, int identity); @LogMessage(level = Logger.Level.WARN) @Message(id = 212017, value = "error adding packet", format = Message.Format.MESSAGE_FORMAT) void errorAddingPacket(@Cause Exception e); @LogMessage(level = Logger.Level.WARN) @Message(id = 212018, value = "error calling cancel", format = Message.Format.MESSAGE_FORMAT) void errorCallingCancel(@Cause Exception e); @LogMessage(level = Logger.Level.WARN) @Message(id = 212019, value = "error reading index", format = Message.Format.MESSAGE_FORMAT) void errorReadingIndex(@Cause Exception e); @LogMessage(level = Logger.Level.WARN) @Message(id = 212020, value = "error setting index", format = Message.Format.MESSAGE_FORMAT) void errorSettingIndex(@Cause Exception e); @LogMessage(level = Logger.Level.WARN) @Message(id = 212021, value = "error resetting index", format = Message.Format.MESSAGE_FORMAT) void errorReSettingIndex(@Cause Exception e); @LogMessage(level = Logger.Level.WARN) @Message(id = 212022, value = "error reading LargeMessage file cache", format = Message.Format.MESSAGE_FORMAT) void errorReadingCache(@Cause Exception e); @LogMessage(level = Logger.Level.WARN) @Message(id = 212023, value = "error closing LargeMessage file cache", format = Message.Format.MESSAGE_FORMAT) void errorClosingCache(@Cause Exception e); @LogMessage(level = Logger.Level.WARN) @Message(id = 212024, value = "Exception during finalization for LargeMessage file cache", format = Message.Format.MESSAGE_FORMAT) void errorFinalisingCache(@Cause Exception e); @LogMessage(level = Logger.Level.WARN) @Message(id = 212025, value = "did not connect the cluster connection to other nodes", format = 
Message.Format.MESSAGE_FORMAT) void errorConnectingToNodes(@Cause Exception e); @LogMessage(level = Logger.Level.WARN) @Message(id = 212026, value = "Timed out waiting for pool to terminate", format = Message.Format.MESSAGE_FORMAT) void timedOutWaitingForTermination(); @LogMessage(level = Logger.Level.WARN) @Message(id = 212027, value = "Timed out waiting for scheduled pool to terminate", format = Message.Format.MESSAGE_FORMAT) void timedOutWaitingForScheduledPoolTermination(); @LogMessage(level = Logger.Level.WARN) @Message(id = 212028, value = "error starting server locator", format = Message.Format.MESSAGE_FORMAT) void errorStartingLocator(@Cause Exception e); @LogMessage(level = Logger.Level.WARN) @Message(id = 212029, value = "Closing a Server Locator left open. Please make sure you close all Server Locators explicitly before letting them go out of scope! {0}", format = Message.Format.MESSAGE_FORMAT) void serverLocatorNotClosed(@Cause Exception e, int identity); @LogMessage(level = Logger.Level.WARN) @Message(id = 212030, value = "error sending topology", format = Message.Format.MESSAGE_FORMAT) void errorSendingTopology(@Cause Throwable e); @LogMessage(level = Logger.Level.WARN) @Message(id = 212031, value = "error sending topology", format = Message.Format.MESSAGE_FORMAT) void errorSendingTopologyNodedown(@Cause Throwable e); @LogMessage(level = Logger.Level.WARN) @Message(id = 212032, value = "Timed out waiting to stop discovery thread", format = Message.Format.MESSAGE_FORMAT) void timedOutStoppingDiscovery(); @LogMessage(level = Logger.Level.WARN) @Message(id = 212033, value = "unable to send notification when discovery group is stopped", format = Message.Format.MESSAGE_FORMAT) void errorSendingNotifOnDiscoveryStop(@Cause Throwable e); @LogMessage(level = Logger.Level.WARN) @Message(id = 212034, value = "There are more than one servers on the network broadcasting the same node id. 
" + "You will see this message exactly once (per node) if a node is restarted, in which case it can be safely " + "ignored. But if it is logged continuously it means you really do have more than one node on the same network " + "active concurrently with the same node id. This could occur if you have a backup node active at the same time as " + "its live node. nodeID={0}", format = Message.Format.MESSAGE_FORMAT) void multipleServersBroadcastingSameNode(String nodeId); @LogMessage(level = Logger.Level.WARN) @Message(id = 212035, value = "error receiving packet in discovery", format = Message.Format.MESSAGE_FORMAT) void errorReceivingPAcketInDiscovery(@Cause Throwable e); @LogMessage(level = Logger.Level.WARN) @Message(id = 212036, value = "Can not find packet to clear: {0} last received command id first stored command id {1}", format = Message.Format.MESSAGE_FORMAT) void cannotFindPacketToClear(Integer lastReceivedCommandID, Integer firstStoredCommandID); @LogMessage(level = Logger.Level.WARN) @Message(id = 212037, value = "Connection failure has been detected: {0} [code={1}]", format = Message.Format.MESSAGE_FORMAT) void connectionFailureDetected(String message, ActiveMQExceptionType type); @LogMessage(level = Logger.Level.WARN) @Message(id = 212038, value = "Failure in calling interceptor: {0}", format = Message.Format.MESSAGE_FORMAT) void errorCallingInterceptor(@Cause Throwable e, Interceptor interceptor); @LogMessage(level = Logger.Level.WARN) @Message(id = 212040, value = "Timed out waiting for netty ssl close future to complete", format = Message.Format.MESSAGE_FORMAT) void timeoutClosingSSL(); @LogMessage(level = Logger.Level.WARN) @Message(id = 212041, value = "Timed out waiting for netty channel to close", format = Message.Format.MESSAGE_FORMAT) void timeoutClosingNettyChannel(); @LogMessage(level = Logger.Level.WARN) @Message(id = 212042, value = "Timed out waiting for packet to be flushed", format = Message.Format.MESSAGE_FORMAT) void 
timeoutFlushingPacket(); @LogMessage(level = Logger.Level.WARN) @Message(id = 212043, value = "Property {0} must be an Integer, it is {1}", format = Message.Format.MESSAGE_FORMAT) void propertyNotInteger(String propName, String name); @LogMessage(level = Logger.Level.WARN) @Message(id = 212044, value = "Property {0} must be a Long, it is {1}", format = Message.Format.MESSAGE_FORMAT) void propertyNotLong(String propName, String name); @LogMessage(level = Logger.Level.WARN) @Message(id = 212045, value = "Property {0} must be a Boolean, it is {1}", format = Message.Format.MESSAGE_FORMAT) void propertyNotBoolean(String propName, String name); @LogMessage(level = Logger.Level.WARN) @Message(id = 212046, value = "Cannot find activemq-version.properties on classpath: {0}", format = Message.Format.MESSAGE_FORMAT) void noVersionOnClasspath(String classpath); @LogMessage(level = Logger.Level.WARN) @Message(id = 212047, value = "Warning: JVM allocated more data what would make results invalid {0}:{1}", format = Message.Format.MESSAGE_FORMAT) void jvmAllocatedMoreMemory(Long totalMemory1, Long totalMemory2); @LogMessage(level = Logger.Level.WARN) @Message(id = 212048, value = "local-bind-address specified for broadcast group but no local-bind-port specified so socket will NOT be bound to a local address/port", format = Message.Format.MESSAGE_FORMAT) void broadcastGroupBindError(); @LogMessage(level = Logger.Level.WARN) @Message(id = 212049, value = "Could not bind to {0} ({1} address); " + "make sure your discovery group-address is of the same type as the IP stack (IPv4 or IPv6)." 
+ "\nIgnoring discovery group-address, but this may lead to cross talking.", format = Message.Format.MESSAGE_FORMAT) void ioDiscoveryError(String hostAddress, String s); @LogMessage(level = Logger.Level.WARN) @Message(id = 212050, value = "Compressed large message tried to read {0} bytes from stream {1}", format = Message.Format.MESSAGE_FORMAT) void compressedLargeMessageError(int length, int nReadBytes); @LogMessage(level = Logger.Level.WARN) @Message(id = 212051, value = "Invalid concurrent session usage. Sessions are not supposed to be used by more than one thread concurrently.", format = Message.Format.MESSAGE_FORMAT) void invalidConcurrentSessionUsage(@Cause Throwable t); @LogMessage(level = Logger.Level.WARN) @Message(id = 212052, value = "Packet {0} was answered out of sequence due to a previous server timeout and it''s being ignored", format = Message.Format.MESSAGE_FORMAT) void packetOutOfOrder(Object obj, @Cause Throwable t); /** * Warns about usage of {@link org.apache.activemq.artemis.api.core.client.SendAcknowledgementHandler} or JMS's {@code CompletionWindow} with * confirmations disabled (confirmationWindowSize=-1). */ @LogMessage(level = Logger.Level.WARN) @Message(id = 212053, value = "CompletionListener/SendAcknowledgementHandler used with confirmationWindowSize=-1. Enable confirmationWindowSize to receive acks from server!", format = Message.Format.MESSAGE_FORMAT) void confirmationWindowDisabledWarning(); @LogMessage(level = Logger.Level.WARN) @Message(id = 212054, value = "Destination address={0} is blocked. 
If the system is configured to block make sure you consume messages on this configuration.", format = Message.Format.MESSAGE_FORMAT) void outOfCreditOnFlowControl(String address); @LogMessage(level = Logger.Level.WARN) @Message(id = 212055, value = "Unable to close consumer", format = Message.Format.MESSAGE_FORMAT) void unableToCloseConsumer(@Cause Exception e); @LogMessage(level = Logger.Level.ERROR) @Message(id = 214000, value = "Failed to call onMessage", format = Message.Format.MESSAGE_FORMAT) void onMessageError(@Cause Throwable e); @LogMessage(level = Logger.Level.ERROR) @Message(id = 214001, value = "failed to cleanup session", format = Message.Format.MESSAGE_FORMAT) void failedToCleanupSession(@Cause Exception e); @LogMessage(level = Logger.Level.ERROR) @Message(id = 214002, value = "Failed to execute failure listener", format = Message.Format.MESSAGE_FORMAT) void failedToExecuteListener(@Cause Throwable t); @LogMessage(level = Logger.Level.ERROR) @Message(id = 214003, value = "Failed to handle failover", format = Message.Format.MESSAGE_FORMAT) void failedToHandleFailover(@Cause Throwable t); @LogMessage(level = Logger.Level.ERROR) @Message(id = 214004, value = "XA end operation failed ", format = Message.Format.MESSAGE_FORMAT) void errorCallingEnd(@Cause Throwable t); @LogMessage(level = Logger.Level.ERROR) @Message(id = 214005, value = "XA start operation failed {0} code:{1}", format = Message.Format.MESSAGE_FORMAT) void errorCallingStart(String message, Integer code); @LogMessage(level = Logger.Level.ERROR) @Message(id = 214006, value = "Session is not XA", format = Message.Format.MESSAGE_FORMAT) void sessionNotXA(); @LogMessage(level = Logger.Level.ERROR) @Message(id = 214007, value = "Received exception asynchronously from server", format = Message.Format.MESSAGE_FORMAT) void receivedExceptionAsynchronously(@Cause Exception e); @LogMessage(level = Logger.Level.ERROR) @Message(id = 214008, value = "Failed to handle packet", format = 
Message.Format.MESSAGE_FORMAT) void failedToHandlePacket(@Cause Exception e); @LogMessage(level = Logger.Level.ERROR) @Message(id = 214009, value = "Failed to stop discovery group", format = Message.Format.MESSAGE_FORMAT) void failedToStopDiscovery(@Cause Throwable e); @LogMessage(level = Logger.Level.ERROR) @Message(id = 214010, value = "Failed to receive datagram", format = Message.Format.MESSAGE_FORMAT) void failedToReceiveDatagramInDiscovery(@Cause Throwable e); @LogMessage(level = Logger.Level.ERROR) @Message(id = 214011, value = "Failed to call discovery listener", format = Message.Format.MESSAGE_FORMAT) void failedToCallListenerInDiscovery(@Cause Throwable e); @LogMessage(level = Logger.Level.ERROR) @Message(id = 214012, value = "Unexpected error handling packet {0}", format = Message.Format.MESSAGE_FORMAT) void errorHandlingPacket(@Cause Throwable t, Packet packet); @LogMessage(level = Logger.Level.ERROR) @Message(id = 214013, value = "Failed to decode packet", format = Message.Format.MESSAGE_FORMAT) void errorDecodingPacket(@Cause Exception e); @LogMessage(level = Logger.Level.ERROR) @Message(id = 214014, value = "Failed to execute failure listener", format = Message.Format.MESSAGE_FORMAT) void errorCallingFailureListener(@Cause Throwable e); @LogMessage(level = Logger.Level.ERROR) @Message(id = 214015, value = "Failed to execute connection life cycle listener", format = Message.Format.MESSAGE_FORMAT) void errorCallingLifeCycleListener(@Cause Throwable e); @LogMessage(level = Logger.Level.ERROR) @Message(id = 214016, value = "Failed to create netty connection", format = Message.Format.MESSAGE_FORMAT) void errorCreatingNettyConnection(@Cause Throwable e); @LogMessage(level = Logger.Level.ERROR) @Message(id = 214017, value = "Caught unexpected Throwable", format = Message.Format.MESSAGE_FORMAT) void caughtunexpectedThrowable(@Cause Throwable t); @LogMessage(level = Logger.Level.ERROR) @Message(id = 214018, value = "Failed to invoke getTextContent() on node 
{0}", format = Message.Format.MESSAGE_FORMAT) void errorOnXMLTransform(@Cause Throwable t, Node n); @LogMessage(level = Logger.Level.ERROR) @Message(id = 214019, value = "Invalid configuration", format = Message.Format.MESSAGE_FORMAT) void errorOnXMLTransformInvalidConf(@Cause Throwable t); @LogMessage(level = Logger.Level.ERROR) @Message(id = 214020, value = "Exception happened while stopping Discovery BroadcastEndpoint {0}", format = Message.Format.MESSAGE_FORMAT) void errorStoppingDiscoveryBroadcastEndpoint(Object endpoint, @Cause Throwable t); @LogMessage(level = Logger.Level.ERROR) @Message(id = 214021, value = "Invalid cipher suite specified. Supported cipher suites are: {0}", format = Message.Format.MESSAGE_FORMAT) void invalidCipherSuite(String validSuites); @LogMessage(level = Logger.Level.ERROR) @Message(id = 214022, value = "Invalid protocol specified. Supported protocols are: {0}", format = Message.Format.MESSAGE_FORMAT) void invalidProtocol(String validProtocols); @LogMessage(level = Logger.Level.ERROR) @Message(id = 214023, value = "HTTP Handshake failed, the received accept value %s does not match the expected response %s") void httpHandshakeFailed(String response, String expectedResponse); @LogMessage(level = Logger.Level.ERROR) @Message(id = 214024, value = "HTTP upgrade not supported by remote acceptor") void httpUpgradeNotSupportedByRemoteAcceptor(); @LogMessage(level = Logger.Level.ERROR) @Message(id = 214025, value = "Invalid type {0}, Using default connection factory at {1}", format = Message.Format.MESSAGE_FORMAT) void invalidCFType(String type, String uri); @LogMessage(level = Logger.Level.TRACE) @Message(id = 214026, value = "Failure captured on connectionID={0}, performing failover or reconnection now", format = Message.Format.MESSAGE_FORMAT) void failoverOrReconnect(Object connectionID, @Cause Throwable cause); @LogMessage(level = Logger.Level.DEBUG) @Message(id = 214027, value = "Replaying commands for channelID={0} with lastCommandID 
from the server={1}", format = Message.Format.MESSAGE_FORMAT) void replayingCommands(Object connectionID, int lastConfirmedCommandID); @LogMessage(level = Logger.Level.DEBUG) @Message(id = 214028, value = "Couldn't reattach session {0}, performing as a failover operation now and recreating objects", format = Message.Format.MESSAGE_FORMAT) void reconnectCreatingNewSession(long id); }
/*******************************************************************************
 * * Copyright 2015 Impetus Infotech.
 * *
 * * Licensed under the Apache License, Version 2.0 (the "License");
 * * you may not use this file except in compliance with the License.
 * * You may obtain a copy of the License at
 * *
 * * http://www.apache.org/licenses/LICENSE-2.0
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS,
 * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * * See the License for the specific language governing permissions and
 * * limitations under the License.
 ******************************************************************************/
package com.impetus.client.hbase;

import java.io.IOException;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.impetus.client.hbase.config.HBasePropertyReader;
import com.impetus.client.hbase.schemamanager.HBaseSchemaManager;
import com.impetus.kundera.KunderaException;
import com.impetus.kundera.PersistenceProperties;
import com.impetus.kundera.client.Client;
import com.impetus.kundera.configure.ClientProperties.DataStore.Connection;
import com.impetus.kundera.configure.schema.api.SchemaManager;
import com.impetus.kundera.loader.GenericClientFactory;
import com.impetus.kundera.metadata.KunderaMetadataManager;
import com.impetus.kundera.metadata.model.PersistenceUnitMetadata;

/**
 * HBaseClientFactory, instantiates client for HBase.
 *
 * <p>Builds the Hadoop/HBase {@link Configuration} from persistence-unit and
 * external properties, opens a single shared HBase {@code Connection}, and
 * hands out {@link HBaseClient} instances bound to that connection.</p>
 *
 * @author Devender Yadav
 */
public class HBaseClientFactory extends GenericClientFactory
{
    /** The logger. */
    private static final Logger logger = LoggerFactory.getLogger(HBaseClientFactory.class);

    /** The HBase/Hadoop configuration built during {@link #initialize(Map)}. */
    private Configuration conf;

    /** The shared HBase connection, created lazily in {@link #createPoolOrConnection()}. */
    private org.apache.hadoop.hbase.client.Connection connection;

    /** The Constant DEFAULT_ZOOKEEPER_PORT. */
    private static final String DEFAULT_ZOOKEEPER_PORT = "2181";

    /*
     * (non-Javadoc)
     *
     * @see
     * com.impetus.kundera.loader.GenericClientFactory#initialize(java.util.Map)
     */
    @Override
    public void initialize(Map<String, Object> externalProperty)
    {
        setExternalProperties(externalProperty);
        initializePropertyReader();
        PersistenceUnitMetadata puMetadata = KunderaMetadataManager.getPersistenceUnitMetadata(kunderaMetadata,
                getPersistenceUnit());

        // External properties take precedence over the persistence-unit properties.
        // (The original code also read KUNDERA_POOL_SIZE_MAX_ACTIVE here but never
        // used it; the dead read has been removed.)
        String node = resolveProperty(externalProperty, puMetadata, PersistenceProperties.KUNDERA_NODES);
        String port = resolveProperty(externalProperty, puMetadata, PersistenceProperties.KUNDERA_PORT);

        onValidation(node, port);

        Configuration hadoopConf = new Configuration();
        hadoopConf.set("hbase.master", node + ":" + port);

        Connection conn = HBasePropertyReader.hsmd.getDataStore() != null ? HBasePropertyReader.hsmd.getDataStore()
                .getConnection() : null;
        if (conn != null && conn.getProperties() != null)
        {
            // BUG FIX: the original called trim() on the raw getProperty() result
            // before the null check, so a missing "hbase.zookeeper.quorum" or
            // "hbase.zookeeper.property.clientPort" property threw an NPE and the
            // subsequent "!= null" fallbacks were dead code. Null-check first,
            // trim after, and fall back to the node / default port as intended.
            String zookeeperHost = conn.getProperties().getProperty("hbase.zookeeper.quorum");
            String zookeeperPort = conn.getProperties().getProperty("hbase.zookeeper.property.clientPort");
            hadoopConf.set("hbase.zookeeper.quorum", zookeeperHost != null ? zookeeperHost.trim() : node);
            hadoopConf.set("hbase.zookeeper.property.clientPort",
                    zookeeperPort != null ? zookeeperPort.trim() : DEFAULT_ZOOKEEPER_PORT);
        }
        else
        {
            hadoopConf.set("hbase.zookeeper.quorum", node);
            hadoopConf.set("hbase.zookeeper.property.clientPort", DEFAULT_ZOOKEEPER_PORT);
        }
        conf = HBaseConfiguration.create(hadoopConf);
        reader = new HBaseEntityReader(kunderaMetadata);
    }

    /**
     * Resolves a configuration property, preferring the externally supplied map
     * over the persistence-unit metadata.
     *
     * @param externalProperty externally supplied properties, may be null
     * @param puMetadata persistence-unit metadata used as the fallback source
     * @param key property key to look up
     * @return the resolved value, or null if defined in neither source
     */
    private String resolveProperty(Map<String, Object> externalProperty, PersistenceUnitMetadata puMetadata, String key)
    {
        String value = externalProperty != null ? (String) externalProperty.get(key) : null;
        return value != null ? value : puMetadata.getProperties().getProperty(key);
    }

    /*
     * (non-Javadoc)
     *
     * @see
     * com.impetus.kundera.loader.GenericClientFactory#createPoolOrConnection()
     */
    @Override
    protected Object createPoolOrConnection()
    {
        try
        {
            this.connection = ConnectionFactory.createConnection(conf);
            return connection;
        }
        catch (IOException e)
        {
            logger.error("Connection could not be established", e);
            throw new KunderaException("Connection could not be established", e);
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see
     * com.impetus.kundera.loader.GenericClientFactory#instantiateClient(java
     * .lang.String)
     */
    @Override
    protected Client instantiateClient(String persistenceUnit)
    {
        return new HBaseClient(indexManager, conf, connection, reader, persistenceUnit, externalProperties,
                clientMetadata, kunderaMetadata);
    }

    /*
     * (non-Javadoc)
     *
     * @see com.impetus.kundera.loader.GenericClientFactory#isThreadSafe()
     */
    @Override
    public boolean isThreadSafe()
    {
        return false;
    }

    /*
     * (non-Javadoc)
     *
     * @see com.impetus.kundera.loader.ClientLifeCycleManager#destroy()
     */
    @Override
    public void destroy()
    {
        // BUG FIX: the original dereferenced "connection" unconditionally (NPE if
        // the connection was never created) and skipped connection.close() when
        // dropSchema() threw, leaking the connection. The finally block ensures
        // the connection is released regardless of schema-drop failures.
        try
        {
            if (schemaManager != null)
            {
                schemaManager.dropSchema();
            }
        }
        finally
        {
            externalProperties = null;
            schemaManager = null;
            try
            {
                if (connection != null)
                {
                    connection.close();
                }
            }
            catch (IOException e)
            {
                logger.error("connection already closed", e);
                throw new KunderaException("connection already closed", e);
            }
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see
     * com.impetus.kundera.loader.ClientFactory#getSchemaManager(java.util.Map)
     */
    @Override
    public SchemaManager getSchemaManager(Map<String, Object> externalProperty)
    {
        setExternalProperties(externalProperty);
        if (schemaManager == null)
        {
            initializePropertyReader();
            schemaManager = new HBaseSchemaManager(HBaseClientFactory.class.getName(), externalProperty,
                    kunderaMetadata);
        }
        return schemaManager;
    }

    /**
     * Initialize property reader, lazily, so repeated calls are cheap.
     */
    private void initializePropertyReader()
    {
        if (propertyReader == null)
        {
            propertyReader = new HBasePropertyReader(externalProperties, kunderaMetadata.getApplicationMetadata()
                    .getPersistenceUnitMetadata(getPersistenceUnit()));
            propertyReader.read(getPersistenceUnit());
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see
     * com.impetus.kundera.loader.GenericClientFactory#initializeLoadBalancer
     * (java.lang.String)
     */
    @Override
    protected void initializeLoadBalancer(String loadBalancingPolicyName)
    {
        throw new UnsupportedOperationException("Load balancing feature is not supported in "
                + this.getClass().getSimpleName());
    }
}
import java.io.*;
import java.util.*;
import static java.lang.Math.*;

// Competitive-programming solution for the "alarm" task: reads a weighted tree of n
// nodes, each with a signal "power", computes which nodes can reach which others
// (a signal's remaining budget is refreshed to each visited node's power along the
// way), and prints the number of nodes that must be triggered directly so that
// everything fires. NOTE(review): problem semantics inferred from the code; the
// original statement is not available here — confirm against the contest problem.
public class Main {
    FastScanner in;
    PrintWriter out;

    // Base name for input/output files; when empty, stdin/stdout are used instead
    // (see run()). Reads FILE + ".in", writes FILE + ".out".
    static final String FILE = "alarm";

    int n;                            // number of nodes
    boolean can[][];                  // can[i][j]: a chain started at i reaches j
    int power[];                      // per-node signal power (budget refresh value)
    int biggest[];                    // best budget seen at each node during one dfs (pruning)
    ArrayList<PairInt> neig[];        // adjacency list: (neighbor, edge length) pairs
    ArrayList<Integer> component[];   // for each "strong" node i: mutually reachable peers
    int root;                         // origin node of the current dfs sweep
    boolean used[];                   // visited marker for the component walk

    // Marks v and everything transitively listed in component[] as covered.
    void dfsComponent(int v) {
        if (used[v]) return;
        used[v] = true;
        for (Integer it : component[v]) {
            dfsComponent(it);
        }
    }

    // Propagates a signal from the current root. d is the remaining budget on
    // arrival at v; a negative budget means the edge could not be crossed. The
    // budget is refreshed to v's own power if that is larger, and biggest[]
    // prunes re-visits that arrive with no better budget than before.
    void dfs(int v, int d) {
        if (d < 0) return;
        can[root][v] = true;
        d = max(d, power[v]);
        if (biggest[v] >= d) return;
        biggest[v] = d;
        for (PairInt pair : neig[v]) {
            // pair.a = neighbor, pair.b = edge length; crossing costs pair.b budget.
            dfs(pair.a, d - pair.b);
        }
    }

    public void solve() {
        n = in.nextInt();
        can = new boolean[n][n];
        power = new int[n];
        biggest = new int[n];
        neig = new ArrayList[n];
        component = new ArrayList[n];
        for (int i = 0; i < n; i++) {
            neig[i] = new ArrayList<>();
            component[i] = new ArrayList<>();
        }
        used = new boolean[n];
        for (int i = 0; i < n; i++) power[i] = in.nextInt();
        // n - 1 edges: 1-based endpoints in the input, stored 0-based, undirected.
        for (int i = 0; i < n - 1; i++) {
            int a = in.nextInt() - 1, b = in.nextInt() - 1, len = in.nextInt();
            neig[a].add(new PairInt(b, len));
            neig[b].add(new PairInt(a, len));
        }
        // One dfs sweep per root fills can[root][*]; biggest[] is reset each time.
        // Initial budget 0 is immediately refreshed to power[root] inside dfs.
        for (int i = 0; i < n; i++) {
            Arrays.fill(biggest, 0);
            root = i;
            dfs(root, 0);
        }
        // A node i is "weak" if some z reaches i but i cannot reach z back.
        // For non-weak nodes, record all mutually reachable peers in component[i].
        for (int i = 0; i < n; i++) {
            boolean weak = false;
            for (int z = 0; z < n; z++) {
                if (i == z) continue;
                if (can[z][i] && !can[i][z]) { weak = true; break; }
            }
            if (weak) continue;
            for (int z = 0; z < n; z++) {
                if (z == i) continue;
                if (can[z][i] && can[i][z]) {
                    component[i].add(z);
                }
            }
        }
        // Count one trigger per uncovered non-weak node, then mark its whole
        // mutual-reachability component as covered.
        int ans = 0;
        for (int i = 0; i < n; i++) {
            if (used[i]) continue;
            boolean weak = false;
            for (int z = 0; z < n; z++) {
                if (can[z][i] && !can[i][z]) { weak = true; break; }
            }
            if (weak) continue;
            ans++;
            dfsComponent(i);
        }
        out.print(ans);
    }

    // Wires up file or standard I/O depending on FILE, runs solve(), flushes.
    public void run() {
        if (FILE.equals("")) {
            in = new FastScanner(System.in);
            out = new PrintWriter(System.out);
        } else {
            try {
                in = new FastScanner(new FileInputStream(FILE + ".in"));
                out = new PrintWriter(new FileOutputStream(FILE + ".out"));
            } catch (FileNotFoundException e) {
                e.printStackTrace();
            }
        }
        solve();
        out.close();
    }

    public static void main(String[] args) {
        (new Main()).run();
    }

    // Minimal tokenizing reader over a BufferedReader; standard contest helper.
    class FastScanner {
        BufferedReader br;
        StringTokenizer st;

        public FastScanner(InputStream is) {
            br = new BufferedReader(new InputStreamReader(is));
        }

        // Returns the next whitespace-delimited token, reading more lines as needed.
        public String next() {
            while (st == null || !st.hasMoreTokens()) {
                try {
                    st = new StringTokenizer(br.readLine());
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            return st.nextToken();
        }

        // Returns the rest of the current line (discards any buffered tokens).
        public String nextLine() {
            st = null;
            try {
                return br.readLine();
            } catch (IOException e) {
                e.printStackTrace();
                return "";
            }
        }

        public int nextInt() { return Integer.parseInt(next()); }

        public long nextLong() { return Long.parseLong(next()); }

        public double nextDouble() { return Double.parseDouble(next()); }

        public float nextFloat() { return Float.parseFloat(next()); }
    }

    // Generic comparable pair; compares by a, then b. Note: compareTo returns 1
    // for null/foreign classes rather than throwing, per contest convenience.
    class Pair<A extends Comparable<A>, B extends Comparable<B>> implements Comparable<Pair<A, B>> {
        public A a;
        public B b;

        public Pair(A a, B b) {
            this.a = a;
            this.b = b;
        }

        @Override
        public int compareTo(Pair<A, B> o) {
            if (o == null || o.getClass() != getClass()) return 1;
            int cmp = a.compareTo(o.a);
            if (cmp == 0) return b.compareTo(o.b);
            return cmp;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            Pair<?, ?> pair = (Pair<?, ?>) o;
            if (a != null ? !a.equals(pair.a) : pair.a != null) return false;
            return !(b != null ? !b.equals(pair.b) : pair.b != null);
        }
    }

    // Int pair used for adjacency entries: a = neighbor index, b = edge length.
    class PairInt extends Pair<Integer, Integer> {
        public PairInt(Integer u, Integer v) { super(u, v); }
    }

    class PairLong extends Pair<Long, Long> {
        public PairLong(Long u, Long v) { super(u, v); }
    }
}
package OpenRate.process; import OpenRate.OpenRate; import OpenRate.exception.InitializationException; import OpenRate.exception.ProcessingException; import OpenRate.record.ChargePacket; import OpenRate.record.IRecord; import OpenRate.record.TimePacket; import OpenRate.utils.ConversionUtils; import TestUtils.FrameworkUtils; import TestUtils.TestRatingRecord; import java.net.URL; import java.sql.Connection; import java.sql.SQLException; import java.util.Calendar; import org.junit.*; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; /** * Test the RUM based rating functions. These build on the standard rate * calculation module to give a RUM based rating. There are certain functions * available only in the RUM model, such as automatic multiple rating against * several price models and intelligent time handling. * * @author TGDSPIA1 */ public class AbstractRUMRateCalcTest { private static URL FQConfigFileName; private static AbstractRUMRateCalc instance; // Used for logging and exception handling private static String message; private static OpenRate appl; @BeforeClass public static void setUpClass() throws Exception { FQConfigFileName = new URL("File:src/test/resources/TestRUMRating.properties.xml"); // Set up the OpenRate internal logger - this is normally done by app startup appl = OpenRate.getApplicationInstance(); // Load the properties into the OpenRate object FrameworkUtils.loadProperties(FQConfigFileName); // Get the loggers FrameworkUtils.startupLoggers(); // Get the transaction manager FrameworkUtils.startupTransactionManager(); // Get Data Sources FrameworkUtils.startupDataSources(); // Get a connection Connection JDBCChcon = FrameworkUtils.getDBConnection("RUMRateTestCache"); try { JDBCChcon.prepareStatement("DROP TABLE TEST_PRICE_MODEL").execute(); } catch (SQLException ex) { if ((ex.getMessage().startsWith("Unknown table")) || // Mysql (ex.getMessage().startsWith("user lacks"))) // HSQL { // It's OK } else { // Not 
OK, fail the case message = "Error dropping table TEST_PRICE_MODEL in test <AbstractRUMRateCalcTest>."; Assert.fail(message); } } try { JDBCChcon.prepareStatement("DROP TABLE TEST_RUM_MAP").execute(); } catch (SQLException ex) { if ((ex.getMessage().startsWith("Unknown table")) || // Mysql (ex.getMessage().startsWith("user lacks"))) // HSQL { // It's OK } else { // Not OK, fail the case message = "Error dropping table TEST_RUM_MAP in test <AbstractRUMRateCalcTest>."; Assert.fail(message); } } // ******************************* PRICE MODEL ***************************** // Create the test table JDBCChcon.prepareStatement("CREATE TABLE TEST_PRICE_MODEL (ID int,PRICE_MODEL varchar(64) NOT NULL,STEP int DEFAULT 0 NOT NULL,TIER_FROM int,TIER_TO int,BEAT int,FACTOR double,CHARGE_BASE int,VALID_FROM DATE)").execute(); // Simplest linear price model possible - 1 (FACTOR) per minute (CHARGE_BASE), with a charge increment of 1 (BEAT) = "per second rating" JDBCChcon.prepareStatement("INSERT INTO TEST_PRICE_MODEL (ID,PRICE_MODEL,STEP,TIER_FROM,TIER_TO,BEAT,FACTOR,CHARGE_BASE,VALID_FROM) values (1,'TestModel1',1,0,999999,60,1,60,'2000-01-01')").execute(); // Two model RUM group - one with a setup price model and one with a scaled price model JDBCChcon.prepareStatement("INSERT INTO TEST_PRICE_MODEL (ID,PRICE_MODEL,STEP,TIER_FROM,TIER_TO,BEAT,FACTOR,CHARGE_BASE,VALID_FROM) values (1,'TestModel2a',1,0,0,60,1,60,'2000-01-01')").execute(); JDBCChcon.prepareStatement("INSERT INTO TEST_PRICE_MODEL (ID,PRICE_MODEL,STEP,TIER_FROM,TIER_TO,BEAT,FACTOR,CHARGE_BASE,VALID_FROM) values (1,'TestModel2b',1,0,999999,60,1,60,'2000-01-01')").execute(); // Event price model - charges 1 per event JDBCChcon.prepareStatement("INSERT INTO TEST_PRICE_MODEL (ID,PRICE_MODEL,STEP,TIER_FROM,TIER_TO,BEAT,FACTOR,CHARGE_BASE,VALID_FROM) values (1,'TestModel3',1,0,999999,60,1,60,'2000-01-01')").execute(); // Threshold model, charges 1 per minute for charges under 60 seconds, otherwise 0.1 per minute 
JDBCChcon.prepareStatement("INSERT INTO TEST_PRICE_MODEL (ID,PRICE_MODEL,STEP,TIER_FROM,TIER_TO,BEAT,FACTOR,CHARGE_BASE,VALID_FROM) values (1,'TestModel4',1,0,60,60,1,60,'2000-01-01')").execute(); JDBCChcon.prepareStatement("INSERT INTO TEST_PRICE_MODEL (ID,PRICE_MODEL,STEP,TIER_FROM,TIER_TO,BEAT,FACTOR,CHARGE_BASE,VALID_FROM) values (1,'TestModel4',2,60,999999,60,0.1,60,'2000-01-01')").execute(); // Super nasty model. This causes a non-obvious charge packet expansion when rating over a time zone // change. There is a RUM expansion into 2 price models for the off-peak portion, but none in the // peak portion. // Tiered model, charges 1 per minute for charges under 60 seconds, otherwise 0.1 per minute PEAK // charges 0.5 per minute for charges under 60 seconds, otherwise 0.05 per minute OFF-PEAK JDBCChcon.prepareStatement("INSERT INTO TEST_PRICE_MODEL (ID,PRICE_MODEL,STEP,TIER_FROM,TIER_TO,BEAT,FACTOR,CHARGE_BASE,VALID_FROM) values (1,'TestModel5a1',1,0,60,60,1,60,'2000-01-01')").execute(); JDBCChcon.prepareStatement("INSERT INTO TEST_PRICE_MODEL (ID,PRICE_MODEL,STEP,TIER_FROM,TIER_TO,BEAT,FACTOR,CHARGE_BASE,VALID_FROM) values (1,'TestModel5a1',2,60,999999,60,0.1,60,'2000-01-01')").execute(); JDBCChcon.prepareStatement("INSERT INTO TEST_PRICE_MODEL (ID,PRICE_MODEL,STEP,TIER_FROM,TIER_TO,BEAT,FACTOR,CHARGE_BASE,VALID_FROM) values (1,'TestModel5b1',1,0,60,60,0.5,60,'2000-01-01')").execute(); JDBCChcon.prepareStatement("INSERT INTO TEST_PRICE_MODEL (ID,PRICE_MODEL,STEP,TIER_FROM,TIER_TO,BEAT,FACTOR,CHARGE_BASE,VALID_FROM) values (1,'TestModel5b1',2,60,999999,60,0.05,60,'2000-01-01')").execute(); JDBCChcon.prepareStatement("INSERT INTO TEST_PRICE_MODEL (ID,PRICE_MODEL,STEP,TIER_FROM,TIER_TO,BEAT,FACTOR,CHARGE_BASE,VALID_FROM) values (1,'TestModel5b2',1,0,0,60,1,60,'2000-01-01')").execute(); // Tiered beat rounding model. 
Changes beat between first and second step JDBCChcon.prepareStatement("INSERT INTO TEST_PRICE_MODEL (ID,PRICE_MODEL,STEP,TIER_FROM,TIER_TO,BEAT,FACTOR,CHARGE_BASE,VALID_FROM) values (1,'TestModel6a',1,0,999999,60,2,60,'2000-01-01')").execute(); JDBCChcon.prepareStatement("INSERT INTO TEST_PRICE_MODEL (ID,PRICE_MODEL,STEP,TIER_FROM,TIER_TO,BEAT,FACTOR,CHARGE_BASE,VALID_FROM) values (1,'TestModel6b',1,0,999999,30,1,60,'2000-01-01')").execute(); // Model with a setup step JDBCChcon.prepareStatement("INSERT INTO TEST_PRICE_MODEL (ID,PRICE_MODEL,STEP,TIER_FROM,TIER_TO,BEAT,FACTOR,CHARGE_BASE,VALID_FROM) values (1,'TestModel7a1',1,0,0,1,10,1,'2000-01-01')").execute(); JDBCChcon.prepareStatement("INSERT INTO TEST_PRICE_MODEL (ID,PRICE_MODEL,STEP,TIER_FROM,TIER_TO,BEAT,FACTOR,CHARGE_BASE,VALID_FROM) values (1,'TestModel7a1',2,0,999999,60,0.35,60,'2000-01-01')").execute(); JDBCChcon.prepareStatement("INSERT INTO TEST_PRICE_MODEL (ID,PRICE_MODEL,STEP,TIER_FROM,TIER_TO,BEAT,FACTOR,CHARGE_BASE,VALID_FROM) values (1,'TestModel7b1',1,0,0,1,10,1,'2000-01-01')").execute(); JDBCChcon.prepareStatement("INSERT INTO TEST_PRICE_MODEL (ID,PRICE_MODEL,STEP,TIER_FROM,TIER_TO,BEAT,FACTOR,CHARGE_BASE,VALID_FROM) values (1,'TestModel7b1',2,0,999999,60,0.16875,60,'2000-01-01')").execute(); // ********************************** RUM MAP ****************************** // Create the test table JDBCChcon.prepareStatement("CREATE TABLE TEST_RUM_MAP (ID int, PRICE_GROUP varchar(24), STEP int, PRICE_MODEL varchar(24), RUM varchar(24), RESOURCE varchar(24), RESOURCE_ID int, RUM_TYPE varchar(24), CONSUME_FLAG int)").execute(); // Simplest price model possible - 1 (FACTOR) per minute (CHARGE_BASE), with a charge increment of 1 (BEAT) = "per second rating" JDBCChcon.prepareStatement("INSERT INTO TEST_RUM_MAP (ID,PRICE_GROUP,STEP,PRICE_MODEL,RUM,RESOURCE,RESOURCE_ID,RUM_TYPE,CONSUME_FLAG) VALUES (1,'TestModel1',1,'TestModel1','DUR','EUR',978,'TIERED',0)").execute(); // Two model RUM group - one with a 
setup price model and one with a scaled price model JDBCChcon.prepareStatement("INSERT INTO TEST_RUM_MAP (ID,PRICE_GROUP,STEP,PRICE_MODEL,RUM,RESOURCE,RESOURCE_ID,RUM_TYPE,CONSUME_FLAG) VALUES (1,'TestModel2',1,'TestModel2a','DUR','EUR',978,'TIERED',0)").execute(); JDBCChcon.prepareStatement("INSERT INTO TEST_RUM_MAP (ID,PRICE_GROUP,STEP,PRICE_MODEL,RUM,RESOURCE,RESOURCE_ID,RUM_TYPE,CONSUME_FLAG) VALUES (1,'TestModel2',1,'TestModel2b','DUR','EUR',978,'TIERED',0)").execute(); // Event price model JDBCChcon.prepareStatement("INSERT INTO TEST_RUM_MAP (ID,PRICE_GROUP,STEP,PRICE_MODEL,RUM,RESOURCE,RESOURCE_ID,RUM_TYPE,CONSUME_FLAG) VALUES (1,'TestModel3',1,'TestModel3','EVT','EUR',978,'EVENT',0)").execute(); // Threshold model JDBCChcon.prepareStatement("INSERT INTO TEST_RUM_MAP (ID,PRICE_GROUP,STEP,PRICE_MODEL,RUM,RESOURCE,RESOURCE_ID,RUM_TYPE,CONSUME_FLAG) VALUES (1,'TestModel4',1,'TestModel4','DUR','EUR',978,'THRESHOLD',0)").execute(); // Super nasty model JDBCChcon.prepareStatement("INSERT INTO TEST_RUM_MAP (ID,PRICE_GROUP,STEP,PRICE_MODEL,RUM,RESOURCE,RESOURCE_ID,RUM_TYPE,CONSUME_FLAG) VALUES (1,'TestModel5a',1,'TestModel5a1','DUR','EUR',978,'TIERED',0)").execute(); JDBCChcon.prepareStatement("INSERT INTO TEST_RUM_MAP (ID,PRICE_GROUP,STEP,PRICE_MODEL,RUM,RESOURCE,RESOURCE_ID,RUM_TYPE,CONSUME_FLAG) VALUES (1,'TestModel5b',1,'TestModel5b1','DUR','EUR',978,'TIERED',0)").execute(); JDBCChcon.prepareStatement("INSERT INTO TEST_RUM_MAP (ID,PRICE_GROUP,STEP,PRICE_MODEL,RUM,RESOURCE,RESOURCE_ID,RUM_TYPE,CONSUME_FLAG) VALUES (1,'TestModel5b',1,'TestModel5b2','DUR','EUR',978,'TIERED',0)").execute(); // Tiered beat rounding model. 
Changes beat between first and second step JDBCChcon.prepareStatement("INSERT INTO TEST_RUM_MAP (ID,PRICE_GROUP,STEP,PRICE_MODEL,RUM,RESOURCE,RESOURCE_ID,RUM_TYPE,CONSUME_FLAG) VALUES (1,'TestModel6a',1,'TestModel6a','DUR','EUR',978,'TIERED',0)").execute(); JDBCChcon.prepareStatement("INSERT INTO TEST_RUM_MAP (ID,PRICE_GROUP,STEP,PRICE_MODEL,RUM,RESOURCE,RESOURCE_ID,RUM_TYPE,CONSUME_FLAG) VALUES (1,'TestModel6b',1,'TestModel6b','DUR','EUR',978,'TIERED',0)").execute(); // Model with a setup step JDBCChcon.prepareStatement("INSERT INTO TEST_RUM_MAP (ID,PRICE_GROUP,STEP,PRICE_MODEL,RUM,RESOURCE,RESOURCE_ID,RUM_TYPE,CONSUME_FLAG) VALUES (1,'TestModel7a',1,'TestModel7a1','DUR','EUR',978,'TIERED',0)").execute(); JDBCChcon.prepareStatement("INSERT INTO TEST_RUM_MAP (ID,PRICE_GROUP,STEP,PRICE_MODEL,RUM,RESOURCE,RESOURCE_ID,RUM_TYPE,CONSUME_FLAG) VALUES (1,'TestModel7b',1,'TestModel7b1','DUR','EUR',978,'TIERED',0)").execute(); // Get the caches that we are using FrameworkUtils.startupCaches(); } @AfterClass public static void tearDownClass() throws Exception { OpenRate.getApplicationInstance().finaliseApplication(); } @Before public void setUp() { getInstance(); } @After public void tearDown() { releaseInstance(); } /** * Test of the main performRating method, of class AbstractRUMRateCalc. Uses a * simple linear price model. For each non-zero rated value we expect a beat * rounded per minute cost of 1. 
 *
 * @throws java.lang.Exception
 */
@Test
public void testPerformRatingTieredNonTimeBoundNonTiered() throws Exception {
  TestRatingRecord ratingRecord;
  double expResult = 0.0;
  System.out.println("testPerformRatingTieredNonTimeBoundNonTiered");

  // All records in this test are rated at one fixed, known CDR date.
  ConversionUtils conv = ConversionUtils.getConversionUtilsObject();
  conv.setInputDateFormat("yyyy-MM-dd hh:mm:ss");
  long CDRDate = conv.convertInputDateToUTC("2010-01-23 00:00:00");

  // zero value to rate
  ratingRecord = getNewRatingRecordDUR(CDRDate, "TestModel1", 0);
  instance.performRating(ratingRecord);
  assertEquals(1, ratingRecord.getChargePacketCount());
  assertEquals(expResult, getRollUp(ratingRecord), 0.00001);

  // intra-beat 1st beat - try all integer values
  // (beat is 60 s: every duration 1..59 rounds up to one full beat)
  expResult = 1.0;
  for (int seconds = 1; seconds < 60; seconds++) {
    ratingRecord = getNewRatingRecordDUR(CDRDate, "TestModel1", seconds);
    instance.performRating(ratingRecord);
    assertEquals(1, ratingRecord.getChargePacketCount());
    assertEquals(expResult, getRollUp(ratingRecord), 0.00001);
  }

  // intra-beat 2, non integer value
  ratingRecord = getNewRatingRecordDUR(CDRDate, "TestModel1", 2.654);
  instance.performRating(ratingRecord);
  assertEquals(1, ratingRecord.getChargePacketCount());
  assertEquals(expResult, getRollUp(ratingRecord), 0.00001);

  // intra-beat 2nd beat - try all integer values
  expResult = 2.0;
  for (int seconds = 61; seconds < 120; seconds++) {
    ratingRecord = getNewRatingRecordDUR(CDRDate, "TestModel1", seconds);
    instance.performRating(ratingRecord);
    assertEquals(1, ratingRecord.getChargePacketCount());
    assertEquals(expResult, getRollUp(ratingRecord), 0.00001);
  }

  // maximum value (according to price model)
  // 999999 s -> 16667 beats of 60 s at 1 per beat
  expResult = 16667.0;
  ratingRecord = getNewRatingRecordDUR(CDRDate, "TestModel1", 999999);
  instance.performRating(ratingRecord);
  assertEquals(1, ratingRecord.getChargePacketCount());
  assertEquals(expResult, getRollUp(ratingRecord), 0.00001);

  // run off the end of the rating
  // (values beyond the model's last tier keep the maximum charge)
  ratingRecord = getNewRatingRecordDUR(CDRDate, "TestModel1", 1000000);
  instance.performRating(ratingRecord);
  assertEquals(1, ratingRecord.getChargePacketCount());
  assertEquals(expResult, getRollUp(ratingRecord), 0.00001);

  // run off the end of the rating some more
  ratingRecord = getNewRatingRecordDUR(CDRDate, "TestModel1", 1500000);
  instance.performRating(ratingRecord);
  assertEquals(1, ratingRecord.getChargePacketCount());
  assertEquals(expResult, getRollUp(ratingRecord), 0.00001);
}

/**
 * Test of the main performRating method, of class AbstractRUMRateCalc. Uses a
 * simple linear price model, but with a RUM expansion. For each non-zero
 * rated value we expect a setup cost of 1, plus a beat rounded per minute
 * cost of 1.
 *
 * @throws java.lang.Exception
 */
@Test
public void testPerformRatingTieredTwoPriceModelInGroup() throws Exception {
  TestRatingRecord ratingRecord;
  double expResult = 0.0;
  System.out.println("testPerformRatingTieredTwoPriceModelInGroup");
  ConversionUtils conv = ConversionUtils.getConversionUtilsObject();
  conv.setInputDateFormat("yyyy-MM-dd hh:mm:ss");
  long CDRDate = conv.convertInputDateToUTC("2010-01-23 00:00:00");

  // zero value to rate
  // The "TestModel2" price group maps to TWO price models, hence two
  // charge packets on every rated record.
  ratingRecord = getNewRatingRecordDUR(CDRDate, "TestModel2", 0);
  instance.performRating(ratingRecord);
  assertEquals(2, ratingRecord.getChargePacketCount());
  assertEquals(expResult, getRollUp(ratingRecord), 0.00001);

  // intra-beat 1st beat - try all integer values
  // 1 (setup) + 1 (first beat)
  expResult = 2.0;
  for (int seconds = 1; seconds < 60; seconds++) {
    ratingRecord = getNewRatingRecordDUR(CDRDate, "TestModel2", seconds);
    instance.performRating(ratingRecord);
    assertEquals(2, ratingRecord.getChargePacketCount());
    assertEquals(expResult, getRollUp(ratingRecord), 0.00001);
  }

  // intra-beat 2, non integer value
  ratingRecord = getNewRatingRecordDUR(CDRDate, "TestModel2", 2.654);
  instance.performRating(ratingRecord);
  assertEquals(2, ratingRecord.getChargePacketCount());
  assertEquals(expResult, getRollUp(ratingRecord), 0.00001);

  // intra-beat 2nd beat - try all integer values
  expResult = 3.0;
  for (int seconds = 61; seconds < 120; seconds++) {
    ratingRecord = getNewRatingRecordDUR(CDRDate, "TestModel2", seconds);
    instance.performRating(ratingRecord);
    assertEquals(2, ratingRecord.getChargePacketCount());
    assertEquals(expResult, getRollUp(ratingRecord), 0.00001);
  }

  // maximum value (according to price model)
  // 16667 beats + 1 setup
  expResult = 16668.0;
  ratingRecord = getNewRatingRecordDUR(CDRDate, "TestModel2", 999999);
  instance.performRating(ratingRecord);
  assertEquals(2, ratingRecord.getChargePacketCount());
  assertEquals(expResult, getRollUp(ratingRecord), 0.00001);

  // run off the end of the rating
  ratingRecord = getNewRatingRecordDUR(CDRDate, "TestModel2", 1000000);
  instance.performRating(ratingRecord);
  assertEquals(2, ratingRecord.getChargePacketCount());
  assertEquals(expResult, getRollUp(ratingRecord), 0.00001);

  // run off the end of the rating some more
  ratingRecord = getNewRatingRecordDUR(CDRDate, "TestModel2", 1500000);
  instance.performRating(ratingRecord);
  assertEquals(2, ratingRecord.getChargePacketCount());
  assertEquals(expResult, getRollUp(ratingRecord), 0.00001);
}

/**
 * Test of the main performRating method, of class AbstractRUMRateCalc. Uses a
 * simple linear price model. For each non-zero rated value we expect a beat
 * rounded per minute cost of 1.
 *
 * @throws java.lang.Exception
 */
@Test
public void testPerformRatingEvent() throws Exception {
  TestRatingRecord ratingRecord;
  double expResult = 0.0;
  System.out.println("testPerformRatingEvent");
  ConversionUtils conv = ConversionUtils.getConversionUtilsObject();
  conv.setInputDateFormat("yyyy-MM-dd hh:mm:ss");
  long CDRDate = conv.convertInputDateToUTC("2010-01-23 00:00:00");

  // zero value to rate
  // Rates on the "EVT" RUM: the charge tracks the raw event count, not beats.
  ratingRecord = getNewRatingRecordEVT(CDRDate, "TestModel3", 0);
  instance.performRating(ratingRecord);
  assertEquals(1, ratingRecord.getChargePacketCount());
  assertEquals(expResult, getRollUp(ratingRecord), 0.00001);

  // intra-beat 1st beat - try all integer values
  // (charge equals the input value: 1 per event)
  for (int seconds = 1; seconds < 60; seconds++) {
    ratingRecord = getNewRatingRecordEVT(CDRDate, "TestModel3", seconds);
    instance.performRating(ratingRecord);
    assertEquals(1, ratingRecord.getChargePacketCount());
    assertEquals(seconds, getRollUp(ratingRecord), 0.00001);
  }

  // intra-beat 2, non integer value
  // NOTE(review): 2.654 events charging 2.0 implies the event count is rounded
  // down before rating — confirm against the EVENT rating implementation.
  expResult = 2.0;
  ratingRecord = getNewRatingRecordEVT(CDRDate, "TestModel3", 2.654);
  instance.performRating(ratingRecord);
  assertEquals(1, ratingRecord.getChargePacketCount());
  assertEquals(expResult, getRollUp(ratingRecord), 0.00001);

  // intra-beat 2nd beat - try all integer values
  // (comment previously said "1st beat" — copy/paste; this is the 61..119 range)
  for (int seconds = 61; seconds < 120; seconds++) {
    ratingRecord = getNewRatingRecordEVT(CDRDate, "TestModel3", seconds);
    instance.performRating(ratingRecord);
    assertEquals(1, ratingRecord.getChargePacketCount());
    assertEquals(seconds, getRollUp(ratingRecord), 0.00001);
  }

  // maximum value (according to price model)
  expResult = 999999.0;
  ratingRecord = getNewRatingRecordEVT(CDRDate, "TestModel3", 999999);
  instance.performRating(ratingRecord);
  assertEquals(1, ratingRecord.getChargePacketCount());
  assertEquals(expResult, getRollUp(ratingRecord), 0.00001);

  // run off the end of the rating
  ratingRecord = getNewRatingRecordEVT(CDRDate, "TestModel3", 1000000);
  instance.performRating(ratingRecord);
  assertEquals(1, ratingRecord.getChargePacketCount());
  assertEquals(expResult, getRollUp(ratingRecord), 0.00001);

  // run off the end of the rating some more
  ratingRecord = getNewRatingRecordEVT(CDRDate, "TestModel3", 1500000);
  instance.performRating(ratingRecord);
  assertEquals(1, ratingRecord.getChargePacketCount());
  assertEquals(expResult, getRollUp(ratingRecord), 0.00001);
}

/**
 * Test of the main performRating method, of class AbstractRUMRateCalc. Uses a
 * simple linear price model. For each non-zero rated value we expect a beat
 * rounded per minute cost of 1.
 *
 * @throws java.lang.Exception
 */
@Test
public void testPerformRatingThresholdNonTimeBoundNonTiered() throws Exception {
  TestRatingRecord ratingRecord;
  double expResult = 0.0;
  System.out.println("testPerformRatingThresholdNonTimeBoundNonTiered");
  ConversionUtils conv = ConversionUtils.getConversionUtilsObject();
  conv.setInputDateFormat("yyyy-MM-dd hh:mm:ss");
  long CDRDate = conv.convertInputDateToUTC("2010-01-23 00:00:00");

  // zero value to rate
  ratingRecord = getNewRatingRecordDUR(CDRDate, "TestModel4", 0);
  instance.performRating(ratingRecord);
  assertEquals(1, ratingRecord.getChargePacketCount());
  assertEquals(expResult, getRollUp(ratingRecord), 0.00001);

  // intra-beat 1st beat - try all integer values
  expResult = 1.0;
  for (int seconds = 1; seconds < 60; seconds++) {
    ratingRecord = getNewRatingRecordDUR(CDRDate, "TestModel4", seconds);
    instance.performRating(ratingRecord);
    assertEquals(1, ratingRecord.getChargePacketCount());
    assertEquals(expResult, getRollUp(ratingRecord), 0.00001);
  }

  // intra-beat 2, non integer value
  ratingRecord = getNewRatingRecordDUR(CDRDate, "TestModel4", 2.654);
  instance.performRating(ratingRecord);
  assertEquals(1, ratingRecord.getChargePacketCount());
  assertEquals(expResult, getRollUp(ratingRecord), 0.00001);

  // intra-beat 2nd beat - try all integer values
  // THRESHOLD semantics: once the value crosses into the next band the WHOLE
  // duration is re-rated at that band's (cheaper) rate, so the charge drops.
  expResult = 0.2;
  for (int seconds = 61; seconds < 120; seconds++) {
    ratingRecord = getNewRatingRecordDUR(CDRDate, "TestModel4", seconds);
    instance.performRating(ratingRecord);
    assertEquals(1, ratingRecord.getChargePacketCount());
    assertEquals(expResult, getRollUp(ratingRecord), 0.00001);
  }

  // maximum value (according to price model)
  expResult = 1666.7;
  ratingRecord = getNewRatingRecordDUR(CDRDate, "TestModel4", 999999);
  instance.performRating(ratingRecord);
  assertEquals(1, ratingRecord.getChargePacketCount());
  assertEquals(expResult, getRollUp(ratingRecord), 0.00001);

  // run off the end of the rating
  // NOTE(review): unlike the TIERED model, a THRESHOLD value past the last
  // band charges 0 — presumably intended; confirm against the rating engine.
  expResult = 0.0;
  ratingRecord = getNewRatingRecordDUR(CDRDate, "TestModel4", 1000000);
  instance.performRating(ratingRecord);
  assertEquals(1, ratingRecord.getChargePacketCount());
  assertEquals(expResult, getRollUp(ratingRecord), 0.00001);

  // run off the end of the rating some more
  ratingRecord = getNewRatingRecordDUR(CDRDate, "TestModel4", 1500000);
  instance.performRating(ratingRecord);
  assertEquals(1, ratingRecord.getChargePacketCount());
  assertEquals(expResult, getRollUp(ratingRecord), 0.00001);
}

/**
 * Test of the main performRating method, of class AbstractRUMRateCalc. Uses a
 * more complex linear price model. There is a RUM expansion in the second
 * part of the model, but not in the first.
 *
 * The rating model here is:
 *
 * Time Packet 1 1 per minute in the first minute 0.1 per minute for other
 * minutes Time Packet 2 1 set up 0.5 per minute in the first minute 0.05 per
 * minute for other minutes
 *
 * Example: A 70 second call will be: Time packet 1 = 1 (first minute) + 0.1
 * (second minute) Time packet 2 = 1 (set up) + 0.5 (first minute) + 0.05
 * (second minute)
 *
 * --> 2.65
 *
 * @throws java.lang.Exception
 */
@Test
public void testPerformRatingTieredAsymmetricRUMExpansion() throws Exception {
  TestRatingRecord ratingRecord;
  double expResult = 0.0;
  System.out.println("testPerformRatingTieredAsymmetricRUMExpansion");
  ConversionUtils conv = ConversionUtils.getConversionUtilsObject();
  conv.setInputDateFormat("yyyy-MM-dd hh:mm:ss");
  long CDRDate = conv.convertInputDateToUTC("2010-01-23 00:00:00");

  // zero value to rate
  // Two price groups on one charge packet expand to three charge packets.
  ratingRecord = getNewRatingRecordDURTimeSplit(CDRDate, "TestModel5a", "TestModel5b", 0);
  instance.performRating(ratingRecord);
  assertEquals(3, ratingRecord.getChargePacketCount());
  assertEquals(expResult, getRollUp(ratingRecord), 0.00001);

  // intra-beat 1st beat - try all integer values
  // 1 (packet 1, first minute) + 1 (setup) + 0.5 (packet 2, first minute)
  for (int seconds = 1; seconds < 60; seconds++) {
    expResult = 2.5;
    ratingRecord = getNewRatingRecordDURTimeSplit(CDRDate, "TestModel5a", "TestModel5b", seconds);
    instance.performRating(ratingRecord);
    assertEquals(3, ratingRecord.getChargePacketCount());
    assertEquals(expResult, getRollUp(ratingRecord), 0.00001);
  }

  // intra-beat 2, non integer value
  ratingRecord = getNewRatingRecordDURTimeSplit(CDRDate, "TestModel5a", "TestModel5b", 2.654);
  instance.performRating(ratingRecord);
  assertEquals(3, ratingRecord.getChargePacketCount());
  assertEquals(expResult, getRollUp(ratingRecord), 0.00001);

  // intra-beat 2nd beat - try all integer values
  // matches the 70 s worked example in the javadoc above
  expResult = 2.65;
  for (int seconds = 61; seconds < 120; seconds++) {
    ratingRecord = getNewRatingRecordDURTimeSplit(CDRDate, "TestModel5a", "TestModel5b", seconds);
    instance.performRating(ratingRecord);
    assertEquals(3, ratingRecord.getChargePacketCount());
    assertEquals(expResult, getRollUp(ratingRecord), 0.00001);
  }

  // maximum value (according to price model)
  expResult = 2502.4;
  ratingRecord = getNewRatingRecordDURTimeSplit(CDRDate, "TestModel5a", "TestModel5b", 999999);
  instance.performRating(ratingRecord);
  assertEquals(3, ratingRecord.getChargePacketCount());
  assertEquals(expResult, getRollUp(ratingRecord), 0.00001);

  // run off the end of the rating
  ratingRecord = getNewRatingRecordDURTimeSplit(CDRDate, "TestModel5a", "TestModel5b", 1000000);
  instance.performRating(ratingRecord);
  assertEquals(3, ratingRecord.getChargePacketCount());
  assertEquals(expResult, getRollUp(ratingRecord), 0.00001);

  // run off the end of the rating some more
  ratingRecord = getNewRatingRecordDURTimeSplit(CDRDate, "TestModel5a", "TestModel5b", 1500000);
  instance.performRating(ratingRecord);
  assertEquals(3, ratingRecord.getChargePacketCount());
  assertEquals(expResult, getRollUp(ratingRecord), 0.00001);
}

/**
 * Test the performance of the main performRating method. Uses a complex price
 * model with a RUM expansion. We expect way more than 10,000 per second.
* * @throws java.lang.Exception */ @Test public void testPerformRatingTieredAsymmetricRUMExpansionPerfomance() throws Exception { TestRatingRecord ratingRecord; double expResult; System.out.println("testPerformRatingTieredAsymmetricRUMExpansionPerfomance"); ConversionUtils conv = ConversionUtils.getConversionUtilsObject(); conv.setInputDateFormat("yyyy-MM-dd hh:mm:ss"); long CDRDate = conv.convertInputDateToUTC("2010-01-23 00:00:00"); // Check that we get the right answer expResult = 2.65; ratingRecord = getNewRatingRecordDURTimeSplit(CDRDate, "TestModel5a", "TestModel5b", 78.4); instance.performRating(ratingRecord); assertEquals(3, ratingRecord.getChargePacketCount()); assertEquals(expResult, getRollUp(ratingRecord), 0.00001); long startMs = Calendar.getInstance().getTimeInMillis(); for (int i = 1; i < 10000; i++) { ratingRecord = getNewRatingRecordDURTimeSplit(CDRDate, "TestModel5a", "TestModel5b", 78.4); instance.performRating(ratingRecord); } long duration = Calendar.getInstance().getTimeInMillis() - startMs; System.out.println("10000 took " + duration + "mS"); assertTrue(duration < 1000); } /** * Test of the main performRating method, of class AbstractRUMRateCalc. Test * the beat rounding time splitting algorithm. This should apportion as much * of the RUM necessary to each packet to respect the beat rounding of that * model. * * For example, if a 62 second call has 1 second in off-peak, but a 60 second * beat, then 60 seconds should be charged in off peak, and the remaining 2 * seconds in peak. * * Without this splitting algorithm, we would charge 60 seconds in off-peak * (from the 1 second in off-peak), then 120 seconds in peak (61 seconds * rounded up). 
 *
 * With the example we have we expect the result to be:
 *
 * 1 minute at 2 per minute = 2, .5 minutes at 1 per minute = 0.5
 *
 * --> 1 second in peak pulls in a whole beat of 60 seconds into peak = 2
 * --> remaining duration in off-peak = 62 - 60 = 2
 * --> 2 seconds in off-peak rounded up to 30 seconds because of the model = 0.5
 *
 * @throws java.lang.Exception
 */
@Test
public void testPerformRatingTieredBeatRounding() throws Exception {
  TestRatingRecord ratingRecord;
  double expResult = 2.5;
  System.out.println("testPerformRatingTieredBeatRounding");
  ConversionUtils conv = ConversionUtils.getConversionUtilsObject();
  conv.setInputDateFormat("yyyy-MM-dd hh:mm:ss");
  long CDRDate = conv.convertInputDateToUTC("2010-01-23 00:00:00");

  // 1 s in the first time zone + 61 s in the second (62 s total)
  ratingRecord = getNewRatingRecordDURTimeSplitBeatRounding(CDRDate, "TestModel6a", "TestModel6b", 1, 61);
  instance.performRating(ratingRecord);
  assertEquals(2, ratingRecord.getChargePacketCount());
  assertEquals(expResult, getRollUp(ratingRecord), 0.00001);
}

/**
 * Test of the main performRating method, of class AbstractRUMRateCalc. Test
 * the time splitting algorithm. This should blindly apportion as much
 * of the RUM necessary to each packet, ignoring the beat rounding of that
 * model.
 *
 * For example, if a 62 second call has 1 second in off-peak, but a 60 second
 * beat, then 60 seconds should be charged in off peak, and the remaining 2
 * seconds in peak.
 *
 * Without this splitting algorithm, we would charge 60 seconds in off-peak
 * (from the 1 second in off-peak), then 120 seconds in peak (61 seconds
 * rounded up).
 *
 * With the example we have we expect the result to be:
 *
 * 1 minute at 2 per minute = 2, 1.5 minutes at 1 per minute = 1.5
 *
 * --> 1 second in peak gets rated as 60 seconds = 2
 * --> remaining duration in off-peak = 62 - 1 = 61
 * --> 61 seconds in off-peak rounded up to 90 seconds because of the model = 1.5
 *
 * @throws java.lang.Exception
 */
@Test
public void testPerformRatingTieredNoBeatRounding() throws Exception {
  TestRatingRecord ratingRecord;
  double expResult = 3.5;
  System.out.println("testPerformRatingTieredNoBeatRounding");
  ConversionUtils conv = ConversionUtils.getConversionUtilsObject();
  conv.setInputDateFormat("yyyy-MM-dd hh:mm:ss");
  long CDRDate = conv.convertInputDateToUTC("2010-01-23 00:00:00");

  // Same 1 s / 61 s split as the beat-rounding test, but without beat rounding
  ratingRecord = getNewRatingRecordDURTimeSplitNoBeatRounding(CDRDate, "TestModel6a", "TestModel6b", 1, 61);
  instance.performRating(ratingRecord);
  assertEquals(2, ratingRecord.getChargePacketCount());
  assertEquals(expResult, getRollUp(ratingRecord), 0.00001);
}

/**
 * Test the time splitting algorithm. This should blindly apportion as much
 * of the RUM necessary to each packet, ignoring the beat rounding of that
 * model.
 *
 * In this case, we are testing that the setup step is not triggered in the
 * second packet (it should have been triggered in the first packet)
 *
 * We expect:
 *
 * --> a setup step = 10
 * --> 1 second rated in peak at 60 second beat = 0.35
 * --> remaining duration in off-peak = 62 - 1 = 61
 * --> 61 seconds in off-peak rounded up to 120 seconds because of the model = 0.16875*2 = 0.3375
 *
 * --> 10.6875
 *
 * @throws java.lang.Exception
 */
@Test
public void testPerformRatingTieredNoBeatRoundingSetup() throws Exception {
  TestRatingRecord ratingRecord;
  double expResult = 10.6875;
  System.out.println("testPerformRatingTieredNoBeatRoundingSetup");
  ConversionUtils conv = ConversionUtils.getConversionUtilsObject();
  conv.setInputDateFormat("yyyy-MM-dd hh:mm:ss");
  long CDRDate = conv.convertInputDateToUTC("2010-01-23 00:00:00");

  // TestModel7a/7b carry a setup step (seeded in setUpClass)
  ratingRecord = getNewRatingRecordDURTimeSplitNoBeatRounding(CDRDate, "TestModel7a", "TestModel7b", 1, 61);
  instance.performRating(ratingRecord);
  assertEquals(2, ratingRecord.getChargePacketCount());
  assertEquals(expResult, getRollUp(ratingRecord), 0.00001);
}

/**
 * Roll up the charged values from each of the charge packets.
 *
 * @param ratingRecord The record to check
 * @return The rolled up rated amount
 */
private double getRollUp(TestRatingRecord ratingRecord) {
  double actualResult = 0;
  for (ChargePacket resCP : ratingRecord.getChargePackets()) {
    actualResult += resCP.chargedValue;
  }
  return actualResult;
}

/**
 * Concrete test double: the abstract module under test with the unused
 * record-handling hooks stubbed out as identity functions.
 */
public class AbstractRUMRateCalcImpl extends AbstractRUMRateCalc {

  /**
   * Override the unused event handling routines.
   *
   * @param r input record
   * @return return record
   * @throws ProcessingException
   */
  @Override
  public IRecord procValidRecord(IRecord r) throws ProcessingException {
    return r;
  }

  /**
   * Override the unused event handling routines.
* * @param r input record * @return return record * @throws ProcessingException */ @Override public IRecord procErrorRecord(IRecord r) throws ProcessingException { return r; } } /** * Method to get an instance of the implementation. Done this way to allow * tests to be executed individually. * * @throws InitializationException */ private void getInstance() { if (instance == null) { // Get an initialise the cache instance = new AbstractRUMRateCalcTest.AbstractRUMRateCalcImpl(); try { // Get the instance instance.init("DBTestPipe", "AbstractRUMRateCalcTest"); } catch (InitializationException ex) { org.junit.Assert.fail(); } } else { org.junit.Assert.fail("Instance already allocated"); } } /** * Method to release an instance of the implementation. */ private void releaseInstance() { instance = null; } /** * Create a rating record initialised with the information necessary for * performing a rating. * * @param CDRDate Date of the CDR * @param newPriceGroup The price group to use * @param durationValue The duration value to use * @return The record, ready to go */ private TestRatingRecord getNewRatingRecordDUR(long CDRDate, String newPriceGroup, double durationValue) { TestRatingRecord ratingRecord = new TestRatingRecord(); ratingRecord.utcEventDate = CDRDate; ChargePacket tmpCP = new ChargePacket(); TimePacket tmpTZ = new TimePacket(); tmpTZ.priceGroup = newPriceGroup; tmpCP.addTimeZone(tmpTZ); ratingRecord.addChargePacket(tmpCP); ratingRecord.setRUMValue("DUR", durationValue); return ratingRecord; } /** * Create a rating record initialised with the information necessary for * performing a rating. 
 *
 * @param CDRDate Date of the CDR
 * @param newPriceGroup The price group to use
 * @param durationValue The event count to rate (set on the "EVT" RUM)
 * @return The record, ready to go
 */
private TestRatingRecord getNewRatingRecordEVT(long CDRDate, String newPriceGroup, double durationValue) {
  TestRatingRecord ratingRecord = new TestRatingRecord();
  ratingRecord.utcEventDate = CDRDate;
  ChargePacket tmpCP = new ChargePacket();
  TimePacket tmpTZ = new TimePacket();
  tmpTZ.priceGroup = newPriceGroup;
  tmpCP.addTimeZone(tmpTZ);
  ratingRecord.addChargePacket(tmpCP);
  // Unlike getNewRatingRecordDUR this sets the "EVT" RUM
  ratingRecord.setRUMValue("EVT", durationValue);
  return ratingRecord;
}

/**
 * Create a rating record initialised with the information necessary for
 * performing a rating. This simulates a record that has undergone time
 * splitting: one charge packet carrying two time packets.
 *
 * @param CDRDate Date of the CDR
 * @param newPriceGroup1 The price group for the first time packet
 * @param newPriceGroup2 The price group for the second time packet
 * @param durationValue The duration value to use
 * @return The record, ready to go
 */
private TestRatingRecord getNewRatingRecordDURTimeSplit(long CDRDate, String newPriceGroup1, String newPriceGroup2, double durationValue) {
  TestRatingRecord ratingRecord = new TestRatingRecord();
  ratingRecord.utcEventDate = CDRDate;
  ChargePacket tmpCP = new ChargePacket();
  TimePacket tmpTZ1 = new TimePacket();
  tmpTZ1.priceGroup = newPriceGroup1;
  tmpCP.addTimeZone(tmpTZ1);
  TimePacket tmpTZ2 = new TimePacket();
  tmpTZ2.priceGroup = newPriceGroup2;
  tmpCP.addTimeZone(tmpTZ2);
  ratingRecord.addChargePacket(tmpCP);
  ratingRecord.setRUMValue("DUR", durationValue);
  return ratingRecord;
}

/**
 * Create a rating record initialised with the information necessary for
 * performing a rating. This simulates a record that has undergone time
 * splitting.
 *
 * The charge packet requests TIME_SPLITTING_CHECK_SPLITTING_BEAT_ROUNDING,
 * i.e. the split respects each model's beat rounding.
 *
 * @param CDRDate Date of the CDR
 * @param newPriceGroup1 The price group for the first time packet
 * @param newPriceGroup2 The price group for the second time packet
 * @param durationValue1 Seconds apportioned to the first time packet
 * @param durationValue2 Seconds apportioned to the second time packet
 * @return The record, ready to go
 */
private TestRatingRecord getNewRatingRecordDURTimeSplitBeatRounding(long CDRDate, String newPriceGroup1, String newPriceGroup2, int durationValue1, int durationValue2) {
  TestRatingRecord ratingRecord = new TestRatingRecord();
  ratingRecord.utcEventDate = CDRDate;
  ChargePacket tmpCP = new ChargePacket();
  tmpCP.timeSplitting = AbstractRUMTimeMatch.TIME_SPLITTING_CHECK_SPLITTING_BEAT_ROUNDING;
  TimePacket tmpTZ1 = new TimePacket();
  tmpTZ1.priceGroup = newPriceGroup1;
  tmpTZ1.duration = durationValue1;
  tmpTZ1.totalDuration = durationValue1 + durationValue2;
  tmpCP.addTimeZone(tmpTZ1);
  TimePacket tmpTZ2 = new TimePacket();
  tmpTZ2.priceGroup = newPriceGroup2;
  tmpTZ2.duration = durationValue2;
  tmpTZ2.totalDuration = durationValue1 + durationValue2;
  tmpCP.addTimeZone(tmpTZ2);
  ratingRecord.addChargePacket(tmpCP);
  ratingRecord.setRUMValue("DUR", durationValue1 + durationValue2);
  return ratingRecord;
}

/**
 * Create a rating record initialised with the information necessary for
 * performing a rating. This simulates a record that has undergone time
 * splitting.
 *
 * Identical to getNewRatingRecordDURTimeSplitBeatRounding except that the
 * charge packet requests TIME_SPLITTING_CHECK_SPLITTING (no beat rounding).
 *
 * @param CDRDate Date of the CDR
 * @param newPriceGroup1 The price group for the first time packet
 * @param newPriceGroup2 The price group for the second time packet
 * @param durationValue1 Seconds apportioned to the first time packet
 * @param durationValue2 Seconds apportioned to the second time packet
 * @return The record, ready to go
 */
private TestRatingRecord getNewRatingRecordDURTimeSplitNoBeatRounding(long CDRDate, String newPriceGroup1, String newPriceGroup2, int durationValue1, int durationValue2) {
  TestRatingRecord ratingRecord = new TestRatingRecord();
  ratingRecord.utcEventDate = CDRDate;
  ChargePacket tmpCP = new ChargePacket();
  tmpCP.timeSplitting = AbstractRUMTimeMatch.TIME_SPLITTING_CHECK_SPLITTING;
  TimePacket tmpTZ1 = new TimePacket();
  tmpTZ1.priceGroup = newPriceGroup1;
  tmpTZ1.duration = durationValue1;
  tmpTZ1.totalDuration = durationValue1 + durationValue2;
  tmpCP.addTimeZone(tmpTZ1);
  TimePacket tmpTZ2 = new TimePacket();
  tmpTZ2.priceGroup = newPriceGroup2;
  tmpTZ2.duration = durationValue2;
  tmpTZ2.totalDuration = durationValue1 + durationValue2;
  tmpCP.addTimeZone(tmpTZ2);
  ratingRecord.addChargePacket(tmpCP);
  ratingRecord.setRUMValue("DUR", durationValue1 + durationValue2);
  return ratingRecord;
}
}
package garden.delights.earthly.imageserver.randomizer; public class RectangleRandomizerUtil { @FunctionalInterface public static interface Converter<T extends Number> { T valueOf(Number d); } public static class Point <T extends Number> { final T x; final T y; final Converter<T> convert; public Point(final T x, final T y, Converter<T> convert) { this.x = x; this.y = y; this.convert = convert; } final T hypothenuse() { double hypot = Math.hypot(x.doubleValue(), y.doubleValue()); if (x instanceof Integer) { return convert.valueOf((int)hypot); } else if (x instanceof Float) { return convert.valueOf((float)hypot); } else if (x instanceof Double) { return convert.valueOf(hypot); } else if (x instanceof Byte) { return convert.valueOf((byte)hypot); } else if (x instanceof Short) { return convert.valueOf((short)hypot); } else if (x instanceof Long) { return convert.valueOf((long)hypot); } else { throw new UnsupportedOperationException(); } } @Override public String toString() { return "Point(" + this.x + "," + this.y + ")"; } } public static class Dimension <T extends Number> { final T w; final T h; final Converter<T> convert; public Dimension(final T w, final T h, Converter<T> convert) { this.w = w; this.h = h; this.convert = convert; } public Dimension<T> increment() { Number x; Number y; if (w instanceof Integer) { x = w.intValue() + 1; y = h.intValue() + 1; } else if (w instanceof Long) { x = w.longValue() + 1L; y = h.longValue() + 1L; } else if (w instanceof Double) { x = w.doubleValue() + 1.; y = h.doubleValue() + 1.; } else if (w instanceof Float) { x = w.floatValue() + 1f; y = h.floatValue() + 1f; } else if (w instanceof Byte) { x = w.byteValue() + 1; y = h.byteValue() + 1; } else if (w instanceof Short) { x = w.shortValue() + 1; y = h.shortValue() + 1; } else { throw new UnsupportedOperationException(); } return new Dimension<T>(convert.valueOf(x), convert.valueOf(y), convert); } public Dimension<T> incrementX() { Number x; if (w instanceof Integer) { x = 
w.intValue() + 1; } else if (w instanceof Long) { x = w.longValue() + 1L; } else if (w instanceof Double) { x = w.doubleValue() + 1.; } else if (w instanceof Float) { x = w.floatValue() + 1f; } else if (w instanceof Byte) { x = w.byteValue() + 1; } else if (w instanceof Short) { x = w.shortValue() + 1; } else { throw new UnsupportedOperationException(); } return new Dimension<T>(convert.valueOf(x), h, convert); } public Dimension<T> incrementY() { Number y; if (w instanceof Integer) { y = h.intValue() + 1; } else if (w instanceof Long) { y = h.longValue() + 1L; } else if (w instanceof Double) { y = h.doubleValue() + 1.; } else if (w instanceof Float) { y = h.floatValue() + 1f; } else if (w instanceof Byte) { y = h.byteValue() + 1; } else if (w instanceof Short) { y = h.shortValue() + 1; } else { throw new UnsupportedOperationException(); } return new Dimension<T>(w, convert.valueOf(y), convert); } public T getSize() { if (w instanceof Integer) { return convert.valueOf(w.intValue() * h.intValue()); } else if (w instanceof Float) { return convert.valueOf((float)Math.ceil(w.floatValue()) * (float)Math.ceil(h.floatValue())); } else if (w instanceof Double) { return convert.valueOf((double)Math.ceil(w.doubleValue()) * (double)Math.ceil(h.doubleValue())); } else if (w instanceof Byte) { return convert.valueOf(w.byteValue() * h.byteValue()); } else if (w instanceof Short) { return convert.valueOf(w.shortValue() * h.shortValue()); } else if (w instanceof Long) { return convert.valueOf(w.longValue() * h.longValue()); } else { throw new UnsupportedOperationException(); } } public Point<T> pointFromIndex(T index) { return pointFromIndex(index, this, null); } public Point<T> pointFromIndex(T index, Dimension<T>dim, Converter<T> convert) { if (convert == null) { convert = this.convert; } final Point<T> p; if (index instanceof Integer) { p = new Point<T>( convert.valueOf(index.intValue() % dim.w.intValue()), convert.valueOf((int)Math.floor(index.intValue() / dim.w.intValue())), 
convert); } else if (index instanceof Float) { p = new Point<T>( convert.valueOf(index.floatValue() % dim.w.floatValue()), convert.valueOf((float)Math.floor(index.floatValue() / dim.w.floatValue())), convert); } else if (index instanceof Double) { p = new Point<T>( convert.valueOf(index.doubleValue() % dim.w.doubleValue()), convert.valueOf((double)Math.floor(index.doubleValue() / dim.w.doubleValue())), convert); } else if (index instanceof Byte) { p = new Point<T>( convert.valueOf(index.byteValue() % dim.w.byteValue()), convert.valueOf((byte)Math.floor(index.byteValue() / dim.w.byteValue())), convert); } else if (index instanceof Short) { p = new Point<T>( convert.valueOf(index.shortValue() % dim.w.shortValue()), convert.valueOf((short)Math.floor(index.shortValue() / dim.w.shortValue())), convert); } else if (index instanceof Long) { p = new Point<T>( convert.valueOf(index.longValue() % dim.w.longValue()), convert.valueOf((long)Math.floor(index.longValue() / dim.w.longValue())), convert); } else { throw new UnsupportedOperationException(); } if (p.x.doubleValue() >= dim.w.doubleValue() || p.y.doubleValue() >= dim.h.doubleValue()) { throw new IllegalArgumentException(p.toString() + " not in "+dim); } return p; } public T indexFromPoint(Point<T> p) { return indexFromPoint(p, this); } public T indexFromPoint(Point<T> p, Dimension<T>dim) { if (p.x.doubleValue() >= dim.w.doubleValue() || p.y.doubleValue() >= dim.h.doubleValue()) { throw new IllegalArgumentException(p.toString() + " not in "+dim); } if (p.x instanceof Integer) { return convert.valueOf(p.y.intValue() * dim.w.intValue() + p.x.intValue()); } else if (p.x instanceof Float) { return convert.valueOf(Math.round(p.y.floatValue() * Math.round(dim.w.floatValue()) + p.x.floatValue())); } else if (p.x instanceof Double) { return convert.valueOf(p.y.doubleValue() * Math.round(dim.w.doubleValue()) + p.x.doubleValue()); } else if (p.x instanceof Byte) { return convert.valueOf(p.y.byteValue() * dim.w.byteValue() + 
p.x.byteValue()); } else if (p.x instanceof Short) { return convert.valueOf(p.y.shortValue() * dim.w.shortValue() + p.x.shortValue()); } else if (p.x instanceof Long) { return convert.valueOf(p.y.longValue() * dim.w.longValue() + p.x.longValue()); } else { throw new UnsupportedOperationException(); } } Dimension<T> clone(Dimension<Double>scale){ if (scale == null || (scale.w == 1. && scale.h == 1.)) { return new Dimension<T>(w, h, convert); } else { if (w instanceof Integer) { return new Dimension<T>( convert.valueOf( (int)(scale.w * w.intValue() )), convert.valueOf( (int)(scale.h * h.intValue() )), convert ); } else if (w instanceof Byte) { return new Dimension<T>( convert.valueOf( (byte)(scale.w * w.byteValue() )), convert.valueOf( (byte)(scale.h * h.byteValue() )), convert ); } else if (w instanceof Double) { return new Dimension<T>( convert.valueOf( (double)(scale.w * w.doubleValue() )), convert.valueOf( (double)(scale.h * h.doubleValue() )), convert ); } else if (w instanceof Float) { return new Dimension<T>( convert.valueOf( (float)(scale.w * w.floatValue() )), convert.valueOf( (float)(scale.h * h.floatValue() )), convert ); } else if (w instanceof Short) { return new Dimension<T>( convert.valueOf( (short)(scale.w * w.shortValue() )), convert.valueOf( (short)(scale.h * h.shortValue() )), convert ); } else if (w instanceof Long) { return new Dimension<T>( convert.valueOf( (long)(scale.w * w.longValue() )), convert.valueOf( (long)(scale.h * h.longValue() )), convert ); } else { throw new UnsupportedOperationException(); } } } @Override public String toString() { return "Dimension(" + this.w + "x" + this.h + ")"; } } public static final class Rectangle<T extends Number> { final Point<T> p; final Dimension<T> d; final public T x; final public T y; final public T w; final public T h; Rectangle(final Point<T> p, final Dimension<T>d) { this.p = p; this.d = d; this.x = p.x; this.y = p.y; this.w = d.w; this.h = d.h; } public Rectangle(final T x, final T y, final T w, 
final T h, final Converter<T> convert) { this.p = new Point<T>(x,y, convert); this.d = new Dimension<T>(w,h, convert); this.x = p.x; this.y = p.y; this.w = d.w; this.h = d.h; } } }
package com.rho.rhoelements.apd.transport;

import java.io.IOException;
import java.io.OutputStream;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.net.NetworkInfo;
import android.net.NetworkInfo.DetailedState;
import android.net.wifi.WifiManager;

import com.rho.rhoelements.Common;
import com.rho.rhoelements.ElementsCore;
import com.rho.rhoelements.LogEntry;
import com.rho.rhoelements.apd.ApdConfiguration;

/**
 * @author FPC843
 * Allows APD to communicate to WLan devices.
 *
 * Lifecycle: {@link #open()} connects a TCP socket to the printer address/port
 * from the {@link ApdConfiguration}, {@link #write(byte[])} streams a buffer in
 * MAX_BUFFER_SIZE chunks (opening first if needed, closing afterwards), and
 * {@link #destroy()} unregisters the Wi-Fi state receiver and closes the socket.
 */
public class ApdWlan extends ApdTransport
{
    private ConnectionStatus mConnStatus;
    private Socket mSocket = null;
    //private boolean mTimeoutFlag = false;
    //private static final int TIMEOUT = 15000; //Operation timeout
    //private Timer mOperationTimer = null;

    /**
     * Registers a receiver for Wi-Fi network state changes so a dropped WLAN
     * connection tears down the printer socket.
     *
     * @param apdConfiguration printer address/port configuration (passed to super)
     * @throws Exception propagated from the superclass constructor
     */
    public ApdWlan(ApdConfiguration apdConfiguration) throws Exception
    {
        super(apdConfiguration);
        mConnStatus = ConnectionStatus.IDLE;
        IntentFilter filter = new IntentFilter(WifiManager.NETWORK_STATE_CHANGED_ACTION);
        Common.mainActivity.registerReceiver(mReceiver, filter);
    }

    /**
     * Validates the configured address/port and opens the TCP connection.
     * Shows a wait dialog for the duration of the blocking connect.
     *
     * @see ApdTransport
     * @return OK on success, IOERROR if validation or the connect fails
     */
    @Override
    public ApdTransportError open()
    {
        ApdTransportError res = ApdTransportError.OK;
        mConnStatus = ConnectionStatus.IDLE;

        //Check that the ip address and the ip port are well formed
        Pattern ipAddressPattern = Pattern.compile(ApdConfiguration.IP_ADDRESS_DOTS_REGEX);
        Matcher ipAddressMatcher = ipAddressPattern.matcher(mApdConfiguration.getIpAddress());

        //Validate the ip port
        Pattern ipPortPattern = Pattern.compile(ApdConfiguration.PORT_REGEX);
        Matcher ipPortMatcher = ipPortPattern.matcher(String.valueOf(mApdConfiguration.getIpPort()));

        // BUG FIX: the original condition was
        //   if (ipAddressMatcher.find() == ipPortMatcher.find() == false)
        // which parses as ((a == b) == false), i.e. it only fired when exactly
        // one of the two checks failed; an invalid address AND an invalid port
        // (false == false -> true, == false -> false) slipped straight through.
        // Reject whenever either check fails.
        if (!ipAddressMatcher.find() || !ipPortMatcher.find())
        {
            Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR,
                    mApdConfiguration.getIpAddress() + ":" + mApdConfiguration.getIpPort()
                    + " is not a valid ip address"));
            return ApdTransportError.IOERROR;
        }

        // Only attempt a connect when not already connecting/connected.
        if (mConnStatus.ordinal() < ConnectionStatus.CONNECTING.ordinal())
        {
            try
            {
                mSocket = new Socket();
                mConnStatus = ConnectionStatus.CONNECTING;
                showDialog(ElementsCore.APD_WAIT_DIALOG);
                mSocket.connect(new InetSocketAddress(mApdConfiguration.getIpAddress(),
                        mApdConfiguration.getIpPort()));
                mConnStatus = ConnectionStatus.CONNECTED;
                Common.logger.add(new LogEntry(LogEntry.PB_LOG_INFO, "Printer connected succesfully"));
            }
            catch (IOException e)
            {
                res = ApdTransportError.IOERROR;
                connectionFailed();
                Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, "Error with connecting to the printer"));
            }
            finally
            {
                // Always dismiss the wait dialog, whether the connect worked or not.
                dismissDialog(ElementsCore.APD_WAIT_DIALOG);
            }
        }
        return res;
    }

    /**
     * Sends {@code out} to the printer in chunks of at most MAX_BUFFER_SIZE
     * bytes, optionally publishing progress to the UI, then closes the
     * connection (one-shot write semantics).
     *
     * @see ApdTransport
     * @param out the complete byte buffer to transmit
     * @return OK if every byte was written, IOERROR otherwise
     */
    @Override
    public ApdTransportError write(byte[] out)
    {
        OutputStream outStream = null;
        // Connect to the printer first
        ApdTransportError res = ApdTransportError.OK;
        if (mConnStatus == ConnectionStatus.IDLE)
            res = open();
        if (mConnStatus == ConnectionStatus.CONNECTED)
        {
            try
            {
                final int bufferLength = out.length;
                int bytesAvailable = out.length;
                int toSendSize = Math.min(bytesAvailable, MAX_BUFFER_SIZE);
                mOffset = 0;
                outStream = mSocket.getOutputStream();
                showDialog(ElementsCore.APD_PROGRESS_DIALOG);
                mConnStatus = ConnectionStatus.TRANSFERRING;
                while (bytesAvailable > 0)
                {
                    outStream.write(out, mOffset, toSendSize);
                    outStream.flush();
                    bytesAvailable -= toSendSize;
                    mOffset += toSendSize;
                    if (sProgressEnabled)
                    {
                        // Progress is reported as an integer percentage of the buffer sent.
                        Common.mainActivity.runOnUiThread(new Runnable() {
                            @Override
                            public void run()
                            {
                                int progress = mOffset * 100 / bufferLength;
                                Common.elementsCore.mProgressHandler.sendEmptyMessage(progress);
                            }
                        });
                        Thread.sleep(400); //Needed to get the message delivered to the handler in the ElementsActivity
                    }
                    toSendSize = Math.min(bytesAvailable, MAX_BUFFER_SIZE);
                }
            }
            catch (IOException e)
            {
                res = ApdTransportError.IOERROR;
                connectionLost();
                Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, "Error while sending data to the printer"));
            }
            catch (InterruptedException e)
            {
                // FIX: restore the interrupt flag instead of silently swallowing it,
                // so callers up the stack can still observe the interruption.
                Thread.currentThread().interrupt();
                res = ApdTransportError.IOERROR;
                Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, "Error while sending data to the printer"));
            }
            finally
            {
                if (outStream != null)
                {
                    try
                    {
                        outStream.close();
                    }
                    catch (IOException e)
                    {
                        e.printStackTrace();
                    }
                }
                // The transport is one-shot: tear the socket down after each write.
                close();
            }
        }
        else
        {
            res = ApdTransportError.IOERROR;
            Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, "Error while sending data to the printer"));
        }
        return res;
    }

    /**
     * Closes the socket (if any) and resets the state machine to IDLE.
     *
     * @see ApdTransport
     * @return OK, or IOERROR if closing the socket threw
     */
    @Override
    public ApdTransportError close()
    {
        ApdTransportError res = ApdTransportError.OK;
        if (mSocket != null)
        {
            try
            {
                mSocket.close();
            }
            catch (IOException e)
            {
                e.printStackTrace();
                res = ApdTransportError.IOERROR;
            }
        }
        mSocket = null;
        mConnStatus = ConnectionStatus.IDLE;
        return res;
    }

    /**
     * Unregisters the network-state receiver and closes the connection.
     * Safe to call even if the receiver was never registered.
     *
     * @see ApdTransport
     */
    @Override
    public void destroy()
    {
        Common.logger.add(new LogEntry(LogEntry.PB_LOG_DEBUG, null));
        try
        {
            Common.mainActivity.unregisterReceiver(mReceiver);
        }
        catch (IllegalArgumentException e)
        {
            Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, "Receiver was not registered"));
        }
        close();
    }

    /**
     * Indicate that the connection attempt failed and notify the UI Activity.
     */
    private void connectionFailed()
    {
        Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, "Connection to printer failed"));
        close();
    }

    /**
     * Indicate that the connection was lost and notify the UI Activity.
     */
    private void connectionLost()
    {
        Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, "Connection to printer lost"));
        close();
    }

    /** Tears the printer connection down when the device's Wi-Fi drops. */
    private BroadcastReceiver mReceiver = new BroadcastReceiver()
    {
        public void onReceive(Context context, Intent intent)
        {
            if (intent.getAction().equals(WifiManager.NETWORK_STATE_CHANGED_ACTION))
            {
                NetworkInfo info = (NetworkInfo) intent.getParcelableExtra(WifiManager.EXTRA_NETWORK_INFO);
                // FIX: the original logged "Connection lost" at ERROR level on
                // EVERY network state change and never null-checked the extra;
                // only log and react to an actual disconnect.
                if (info != null && info.getDetailedState() == DetailedState.DISCONNECTED)
                {
                    Common.logger.add(new LogEntry(LogEntry.PB_LOG_ERROR, "Connection lost"));
                    connectionLost();
                }
            }
        }
    };
}
/*! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.trans.steps.singlethreader; import java.util.ArrayList; import java.util.List; import java.util.Map; import org.pentaho.di.core.CheckResult; import org.pentaho.di.core.CheckResultInterface; import org.pentaho.di.core.Const; import org.pentaho.di.core.ObjectLocationSpecificationMethod; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleStepException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.repository.HasRepositoryInterface; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.di.repository.RepositoryDirectory; import org.pentaho.di.repository.RepositoryDirectoryInterface; import org.pentaho.di.repository.RepositoryImportLocation; import org.pentaho.di.repository.RepositoryObject; import 
org.pentaho.di.repository.RepositoryObjectType;
import org.pentaho.di.repository.StringObjectId;
import org.pentaho.di.resource.ResourceDefinition;
import org.pentaho.di.resource.ResourceEntry;
import org.pentaho.di.resource.ResourceEntry.ResourceType;
import org.pentaho.di.resource.ResourceNamingInterface;
import org.pentaho.di.resource.ResourceReference;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.TransMeta.TransformationType;
import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
import org.pentaho.metastore.api.IMetaStore;
import org.w3c.dom.Node;

/**
 * Meta-data for the Mapping step: contains name of the (sub-)transformation to execute.
 *
 * Holds the reference to a sub-transformation (by file name, by repository
 * name+directory, or by repository object id), the batching settings, the
 * inject/retrieve step names, and the parameter pass-through configuration.
 * Handles XML and repository (de)serialization of all of those.
 *
 * @since 22-nov-2005
 * @author Matt
 *
 */
public class SingleThreaderMeta extends BaseStepMeta implements StepMetaInterface, HasRepositoryInterface {
  private static Class<?> PKG = SingleThreaderMeta.class; // for i18n purposes, needed by Translator2!!

  // Reference to the sub-transformation; which of these is authoritative is
  // decided by specificationMethod (FILENAME / REPOSITORY_BY_NAME / _BY_REFERENCE).
  private String transName;
  private String fileName;
  private String directoryPath;
  private ObjectId transObjectId;
  private ObjectLocationSpecificationMethod specificationMethod;

  // Batching: number of rows per batch and/or maximum time per batch
  // (kept as Strings because they may contain variable expressions).
  private String batchSize;
  private String batchTime;

  // Names of the steps inside the sub-transformation used to feed rows in
  // and read rows back out.
  private String injectStep;
  private String retrieveStep;

  // Parameter pass-through from the parent transformation.
  private boolean passingAllParameters;
  private String[] parameters;
  private String[] parameterValues;

  private IMetaStore metaStore;

  public SingleThreaderMeta() {
    super(); // allocate BaseStepMeta
    setDefault();
  }

  /**
   * Populates this meta from a step XML node.
   *
   * @throws KettleXMLException wrapping any parse/lookup failure
   */
  public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException {
    try {
      String method = XMLHandler.getTagValue( stepnode, "specification_method" );
      specificationMethod = ObjectLocationSpecificationMethod.getSpecificationMethodByCode( method );
      String transId = XMLHandler.getTagValue( stepnode, "trans_object_id" );
      transObjectId = Const.isEmpty( transId ) ? null : new StringObjectId( transId );
      transName = XMLHandler.getTagValue( stepnode, "trans_name" );
      fileName = XMLHandler.getTagValue( stepnode, "filename" );
      directoryPath = XMLHandler.getTagValue( stepnode, "directory_path" );

      batchSize = XMLHandler.getTagValue( stepnode, "batch_size" );
      batchTime = XMLHandler.getTagValue( stepnode, "batch_time" );
      injectStep = XMLHandler.getTagValue( stepnode, "inject_step" );
      retrieveStep = XMLHandler.getTagValue( stepnode, "retrieve_step" );

      Node parametersNode = XMLHandler.getSubNode( stepnode, "parameters" );

      // Absent tag defaults to "pass all parameters" = true.
      String passAll = XMLHandler.getTagValue( parametersNode, "pass_all_parameters" );
      passingAllParameters = Const.isEmpty( passAll ) || "Y".equalsIgnoreCase( passAll );

      int nrParameters = XMLHandler.countNodes( parametersNode, "parameter" );
      allocate( nrParameters );

      for ( int i = 0; i < nrParameters; i++ ) {
        Node knode = XMLHandler.getSubNodeByNr( parametersNode, "parameter", i );
        parameters[i] = XMLHandler.getTagValue( knode, "name" );
        parameterValues[i] = XMLHandler.getTagValue( knode, "value" );
      }
    } catch ( Exception e ) {
      throw new KettleXMLException( BaseMessages.getString(
        PKG, "SingleThreaderMeta.Exception.ErrorLoadingTransformationStepFromXML" ), e );
    }
  }

  /** Resizes the parallel parameter name/value arrays (contents are discarded). */
  public void allocate( int nrParameters ) {
    parameters = new String[nrParameters];
    parameterValues = new String[nrParameters];
  }

  /** Deep-copies the parameter arrays on top of the shallow super.clone(). */
  public Object clone() {
    SingleThreaderMeta retval = (SingleThreaderMeta) super.clone();
    int nrParameters = parameters.length;
    retval.allocate( nrParameters );
    System.arraycopy( parameters, 0, retval.parameters, 0, nrParameters );
    System.arraycopy( parameterValues, 0, retval.parameterValues, 0, nrParameters );
    return retval;
  }

  /**
   * Serializes this meta to step XML.
   *
   * NOTE(review): this getter has a side effect — when a repository is attached
   * and the object id resolves, it refreshes transName/directoryPath fields
   * from the repository before writing them out.
   */
  public String getXML() {
    StringBuilder retval = new StringBuilder( 300 );

    retval.append( " " ).append(
      XMLHandler.addTagValue( "specification_method", specificationMethod == null ? null : specificationMethod
        .getCode() ) );
    retval.append( " " ).append(
      XMLHandler.addTagValue( "trans_object_id", transObjectId == null ? null : transObjectId.toString() ) );
    // Export a little bit of extra information regarding the reference since it doesn't really matter outside the same
    // repository.
    //
    if ( repository != null && transObjectId != null ) {
      try {
        RepositoryObject objectInformation =
          repository.getObjectInformation( transObjectId, RepositoryObjectType.TRANSFORMATION );
        if ( objectInformation != null ) {
          transName = objectInformation.getName();
          directoryPath = objectInformation.getRepositoryDirectory().getPath();
        }
      } catch ( KettleException e ) {
        // Ignore object reference problems. It simply means that the reference is no longer valid.
      }
    }
    retval.append( " " ).append( XMLHandler.addTagValue( "trans_name", transName ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "filename", fileName ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "directory_path", directoryPath ) );

    retval.append( " " ).append( XMLHandler.addTagValue( "batch_size", batchSize ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "batch_time", batchTime ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "inject_step", injectStep ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "retrieve_step", retrieveStep ) );

    if ( parameters != null ) {
      retval.append( " " ).append( XMLHandler.openTag( "parameters" ) );

      retval.append( " " ).append( XMLHandler.addTagValue( "pass_all_parameters", passingAllParameters ) );

      for ( int i = 0; i < parameters.length; i++ ) {
        // This is a better way of making the XML file than the arguments.
        retval.append( " " ).append( XMLHandler.openTag( "parameter" ) );

        retval.append( " " ).append( XMLHandler.addTagValue( "name", parameters[i] ) );
        retval.append( " " ).append( XMLHandler.addTagValue( "value", parameterValues[i] ) );

        retval.append( " " ).append( XMLHandler.closeTag( "parameter" ) );
      }
      retval.append( " " ).append( XMLHandler.closeTag( "parameters" ) );
    }
    return retval.toString();
  }

  /** Populates this meta from repository step attributes (mirror of saveRep). */
  public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException {
    String method = rep.getStepAttributeString( id_step, "specification_method" );
    specificationMethod = ObjectLocationSpecificationMethod.getSpecificationMethodByCode( method );
    String transId = rep.getStepAttributeString( id_step, "trans_object_id" );
    transObjectId = Const.isEmpty( transId ) ? null : new StringObjectId( transId );
    transName = rep.getStepAttributeString( id_step, "trans_name" );
    fileName = rep.getStepAttributeString( id_step, "filename" );
    directoryPath = rep.getStepAttributeString( id_step, "directory_path" );

    batchSize = rep.getStepAttributeString( id_step, "batch_size" );
    batchTime = rep.getStepAttributeString( id_step, "batch_time" );
    injectStep = rep.getStepAttributeString( id_step, "inject_step" );
    retrieveStep = rep.getStepAttributeString( id_step, "retrieve_step" );

    // The parameters...
    //
    int parameternr = rep.countNrStepAttributes( id_step, "parameter_name" );
    parameters = new String[parameternr];
    parameterValues = new String[parameternr];

    // Read all parameters ...
    for ( int a = 0; a < parameternr; a++ ) {
      parameters[a] = rep.getStepAttributeString( id_step, a, "parameter_name" );
      parameterValues[a] = rep.getStepAttributeString( id_step, a, "parameter_value" );
    }

    passingAllParameters = rep.getStepAttributeBoolean( id_step, 0, "pass_all_parameters", true );
  }

  /** Persists this meta as repository step attributes (mirror of readRep). */
  public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException {
    rep.saveStepAttribute( id_transformation, id_step, "specification_method", specificationMethod == null
      ? null : specificationMethod.getCode() );
    rep.saveStepAttribute( id_transformation, id_step, "trans_object_id", transObjectId == null
      ? null : transObjectId.toString() );
    rep.saveStepAttribute( id_transformation, id_step, "filename", fileName );
    rep.saveStepAttribute( id_transformation, id_step, "trans_name", transName );
    rep.saveStepAttribute( id_transformation, id_step, "directory_path", directoryPath );

    rep.saveStepAttribute( id_transformation, id_step, "batch_size", batchSize );
    rep.saveStepAttribute( id_transformation, id_step, "batch_time", batchTime );
    rep.saveStepAttribute( id_transformation, id_step, "inject_step", injectStep );
    rep.saveStepAttribute( id_transformation, id_step, "retrieve_step", retrieveStep );

    // The parameters...
    //
    // Save the parameters...
    if ( parameters != null ) {
      for ( int i = 0; i < parameters.length; i++ ) {
        rep.saveStepAttribute( id_transformation, id_step, i, "parameter_name", parameters[i] );
        rep.saveStepAttribute( id_transformation, id_step, i, "parameter_value", Const
          .NVL( parameterValues[i], "" ) );
      }
    }

    rep.saveStepAttribute( id_transformation, id_step, "pass_all_parameters", passingAllParameters );
  }

  /** Defaults: load by file name, batch of 100 rows, no time limit, pass all parameters. */
  public void setDefault() {
    specificationMethod = ObjectLocationSpecificationMethod.FILENAME;
    batchSize = "100";
    batchTime = "";

    passingAllParameters = true;

    parameters = new String[0];
    parameterValues = new String[0];
  }

  /**
   * Output row layout = the fields produced by the sub-transformation's
   * retrieve step (the incoming row layout is cleared).
   */
  public void getFields( RowMetaInterface row, String origin, RowMetaInterface[] info, StepMeta nextStep,
    VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
    // First load some interesting data...
    //
    // Then see which fields get added to the row.
    //
    TransMeta mappingTransMeta = null;
    try {
      mappingTransMeta = loadSingleThreadedTransMeta( this, repository, space );
    } catch ( KettleException e ) {
      throw new KettleStepException( BaseMessages.getString(
        PKG, "SingleThreaderMeta.Exception.UnableToLoadMappingTransformation" ), e );
    }

    row.clear();

    // Let's keep it simple!
    //
    if ( !Const.isEmpty( space.environmentSubstitute( retrieveStep ) ) ) {
      RowMetaInterface stepFields = mappingTransMeta.getStepFields( retrieveStep );
      row.addRowMeta( stepFields );
    }
  }

  /** Convenience overload: loads the sub-transformation without a metastore. */
  public static final synchronized TransMeta loadSingleThreadedTransMeta( SingleThreaderMeta mappingMeta,
    Repository rep, VariableSpace space ) throws KettleException {
    return loadSingleThreadedTransMeta( mappingMeta, rep, null, space );
  }

  /**
   * Loads the referenced sub-transformation according to the specification
   * method (file, repository name+directory, or repository object id) and
   * wires variables/repository/metastore into it.
   *
   * NOTE(review): if mappingMeta's specification method is none of the three
   * handled cases, mappingTransMeta stays null and copyVariablesFrom() below
   * will NPE; likewise REPOSITORY_BY_REFERENCE assumes rep != null. Also,
   * setFilename( mappingTransMeta.getFilename() ) is a self-assignment —
   * presumably a leftover; confirm before removing.
   *
   * @throws KettleException if the transformation cannot be located or loaded
   */
  public static final synchronized TransMeta loadSingleThreadedTransMeta( SingleThreaderMeta mappingMeta,
    Repository rep, IMetaStore metaStore, VariableSpace space ) throws KettleException {
    TransMeta mappingTransMeta = null;

    switch ( mappingMeta.getSpecificationMethod() ) {
      case FILENAME:
        String realFilename = space.environmentSubstitute( mappingMeta.getFileName() );
        try {
          // OK, load the meta-data from file...
          //
          // Don't set internal variables: they belong to the parent thread!
          //
          mappingTransMeta = new TransMeta( realFilename, false );
          mappingTransMeta.getLogChannel().logDetailed( "Loading Mapping from repository",
            "Mapping transformation was loaded from XML file [" + realFilename + "]" );
        } catch ( Exception e ) {
          throw new KettleException( BaseMessages.getString(
            PKG, "SingleThreaderMeta.Exception.UnableToLoadMapping" ), e );
        }
        break;

      case REPOSITORY_BY_NAME:
        String realTransname = space.environmentSubstitute( mappingMeta.getTransName() );
        String realDirectory = space.environmentSubstitute( mappingMeta.getDirectoryPath() );

        if ( !Const.isEmpty( realTransname ) && !Const.isEmpty( realDirectory ) && rep != null ) {
          RepositoryDirectoryInterface repdir = rep.findDirectory( realDirectory );
          if ( repdir != null ) {
            try {
              // reads the last revision in the repository...
              //
              mappingTransMeta = rep.loadTransformation( realTransname, repdir, null, true, null );
              mappingTransMeta.getLogChannel().logDetailed( "Loading Mapping from repository",
                "Mapping transformation [" + realTransname + "] was loaded from the repository" );
            } catch ( Exception e ) {
              throw new KettleException( "Unable to load transformation [" + realTransname + "]", e );
            }
          } else {
            throw new KettleException( BaseMessages.getString(
              PKG, "SingleThreaderMeta.Exception.UnableToLoadTransformation", realTransname )
              + realDirectory );
          }
        }
        break;

      case REPOSITORY_BY_REFERENCE:
        // Read the last revision by reference...
        mappingTransMeta = rep.loadTransformation( mappingMeta.getTransObjectId(), null );
        break;
      default:
        break;
    }

    // Pass some important information to the mapping transformation metadata:
    //
    mappingTransMeta.copyVariablesFrom( space );
    mappingTransMeta.setRepository( rep );
    mappingTransMeta.setMetaStore( metaStore );
    mappingTransMeta.setFilename( mappingTransMeta.getFilename() );

    return mappingTransMeta;
  }

  /** Standard step verification: warns when no fields come in, errors when no input hop exists. */
  public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
    RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
    Repository repository, IMetaStore metaStore ) {
    CheckResult cr;
    if ( prev == null || prev.size() == 0 ) {
      cr = new CheckResult( CheckResultInterface.TYPE_RESULT_WARNING, BaseMessages.getString(
        PKG, "SingleThreaderMeta.CheckResult.NotReceivingAnyFields" ), stepMeta );
      remarks.add( cr );
    } else {
      cr = new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(
        PKG, "SingleThreaderMeta.CheckResult.StepReceivingFields", prev.size() + "" ), stepMeta );
      remarks.add( cr );
    }

    // See if we have input streams leading to this step!
    if ( input.length > 0 ) {
      cr = new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(
        PKG, "SingleThreaderMeta.CheckResult.StepReceivingFieldsFromOtherSteps" ), stepMeta );
      remarks.add( cr );
    } else {
      cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(
        PKG, "SingleThreaderMeta.CheckResult.NoInputReceived" ), stepMeta );
      remarks.add( cr );
    }
  }

  public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta tr,
    Trans trans ) {
    return new SingleThreader( stepMeta, stepDataInterface, cnr, tr, trans );
  }

  public StepDataInterface getStepData() {
    return new SingleThreaderData();
  }

  /**
   * @return the directoryPath
   */
  public String getDirectoryPath() {
    return directoryPath;
  }

  /**
   * @param directoryPath
   *          the directoryPath to set
   */
  public void setDirectoryPath( String directoryPath ) {
    this.directoryPath = directoryPath;
  }

  /**
   * @return the fileName
   */
  public String getFileName() {
    return fileName;
  }

  /**
   * @param fileName
   *          the fileName to set
   */
  public void setFileName( String fileName ) {
    this.fileName = fileName;
  }

  /**
   * @return the transName
   */
  public String getTransName() {
    return transName;
  }

  /**
   * @param transName
   *          the transName to set
   */
  public void setTransName( String transName ) {
    this.transName = transName;
  }

  /**
   * Declares the referenced sub-transformation (by file or by name) as a
   * resource dependency of this step.
   */
  @Override
  public List<ResourceReference> getResourceDependencies( TransMeta transMeta, StepMeta stepInfo ) {
    List<ResourceReference> references = new ArrayList<ResourceReference>( 5 );
    String realFilename = transMeta.environmentSubstitute( fileName );
    String realTransname = transMeta.environmentSubstitute( transName );
    ResourceReference reference = new ResourceReference( stepInfo );
    references.add( reference );

    if ( !Const.isEmpty( realFilename ) ) {
      // Add the filename to the references, including a reference to this step
      // meta data.
      //
      reference.getEntries().add( new ResourceEntry( realFilename, ResourceType.ACTIONFILE ) );
    } else if ( !Const.isEmpty( realTransname ) ) {
      // Add the filename to the references, including a reference to this step
      // meta data.
      //
      reference.getEntries().add( new ResourceEntry( realTransname, ResourceType.ACTIONFILE ) );
      // NOTE(review): 'reference' was already added to 'references' above, so
      // this branch inserts the same ResourceReference twice — confirm intended.
      references.add( reference );
    }
    return references;
  }

  /**
   * Exports the referenced sub-transformation (recursively) and rewrites this
   * step's filename to a variable-relative path inside the export.
   *
   * NOTE(review): the catch block drops the original exception 'e' as cause —
   * the stack trace of the real failure is lost; confirm before "fixing" since
   * callers may rely on the message-only form.
   */
  @Override
  public String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
    ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ) throws KettleException {
    try {
      // Try to load the transformation from repository or file.
      // Modify this recursively too...
      //
      // NOTE: there is no need to clone this step because the caller is
      // responsible for this.
      //
      // First load the mapping metadata...
      //
      TransMeta mappingTransMeta = loadSingleThreadedTransMeta( this, repository, space );

      // Also go down into the mapping transformation and export the files
      // there. (mapping recursively down)
      //
      String proposedNewFilename =
        mappingTransMeta.exportResources(
          mappingTransMeta, definitions, resourceNamingInterface, repository, metaStore );

      // To get a relative path to it, we inject
      // ${Internal.Job.Filename.Directory}
      //
      String newFilename =
        "${" + Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_DIRECTORY + "}/" + proposedNewFilename;

      // Set the correct filename inside the XML.
      //
      mappingTransMeta.setFilename( newFilename );

      // exports always reside in the root directory, in case we want to turn
      // this into a file repository...
      //
      mappingTransMeta.setRepositoryDirectory( new RepositoryDirectory() );

      // change it in the job entry
      //
      fileName = newFilename;

      return proposedNewFilename;
    } catch ( Exception e ) {
      throw new KettleException( BaseMessages.getString(
        PKG, "SingleThreaderMeta.Exception.UnableToLoadTransformation", fileName ) );
    }
  }

  /**
   * @return the repository
   */
  public Repository getRepository() {
    return repository;
  }

  /**
   * @param repository
   *          the repository to set
   */
  public void setRepository( Repository repository ) {
    this.repository = repository;
  }

  /**
   * @return the transObjectId
   */
  public ObjectId getTransObjectId() {
    return transObjectId;
  }

  /**
   * @param transObjectId
   *          the transObjectId to set
   */
  public void setTransObjectId( ObjectId transObjectId ) {
    this.transObjectId = transObjectId;
  }

  /**
   * @return the specificationMethod
   */
  public ObjectLocationSpecificationMethod getSpecificationMethod() {
    return specificationMethod;
  }

  /**
   * @param specificationMethod
   *          the specificationMethod to set
   */
  public void setSpecificationMethod( ObjectLocationSpecificationMethod specificationMethod ) {
    this.specificationMethod = specificationMethod;
  }

  /** This step only works in a normal transformation (not single-threaded itself). */
  public TransformationType[] getSupportedTransformationTypes() {
    return new TransformationType[] { TransformationType.Normal, };
  }

  /**
   * @return the batchSize
   */
  public String getBatchSize() {
    return batchSize;
  }

  /**
   * @param batchSize
   *          the batchSize to set
   */
  public void setBatchSize( String batchSize ) {
    this.batchSize = batchSize;
  }

  /**
   * @return the injectStep
   */
  public String getInjectStep() {
    return injectStep;
  }

  /**
   * @param injectStep
   *          the injectStep to set
   */
  public void setInjectStep( String injectStep ) {
    this.injectStep = injectStep;
  }

  /**
   * @return the retrieveStep
   */
  public String getRetrieveStep() {
    return retrieveStep;
  }

  /**
   * @param retrieveStep
   *          the retrieveStep to set
   */
  public void setRetrieveStep( String retrieveStep ) {
    this.retrieveStep = retrieveStep;
  }

  /**
   * @return the passingAllParameters
   */
  public boolean isPassingAllParameters() {
    return passingAllParameters;
  }

  /**
   * @param passingAllParameters
   *          the passingAllParameters to set
   */
  public void setPassingAllParameters( boolean passingAllParameters ) {
    this.passingAllParameters = passingAllParameters;
  }

  /**
   * @return the parameters
   */
  public String[] getParameters() {
    return parameters;
  }

  /**
   * @param parameters
   *          the parameters to set
   */
  public void setParameters( String[] parameters ) {
    this.parameters = parameters;
  }

  /**
   * @return the parameterValues
   */
  public String[] getParameterValues() {
    return parameterValues;
  }

  /**
   * @param parameterValues
   *          the parameterValues to set
   */
  public void setParameterValues( String[] parameterValues ) {
    this.parameterValues = parameterValues;
  }

  /**
   * @return the batchTime
   */
  public String getBatchTime() {
    return batchTime;
  }

  /**
   * @param batchTime
   *          the batchTime to set
   */
  public void setBatchTime( String batchTime ) {
    this.batchTime = batchTime;
  }

  /** True only when the sub-transformation is referenced by repository object id. */
  @Override
  public boolean hasRepositoryReferences() {
    return specificationMethod == ObjectLocationSpecificationMethod.REPOSITORY_BY_REFERENCE;
  }

  /** Re-resolves the repository object id from the stored name + directory. */
  @Override
  public void lookupRepositoryReferences( Repository repository ) throws KettleException {
    // The correct reference is stored in the trans name and directory attributes...
    //
    RepositoryDirectoryInterface repositoryDirectoryInterface =
      RepositoryImportLocation.getRepositoryImportLocation().findDirectory( directoryPath );
    transObjectId = repository.getTransformationID( transName, repositoryDirectoryInterface );
  }

  /**
   * @return The objects referenced in the step, like a mapping, a transformation, a job, ...
   */
  public String[] getReferencedObjectDescriptions() {
    return new String[] { BaseMessages.getString( PKG, "SingleThreaderMeta.ReferencedObject.Description" ), };
  }

  /** True when any of the three reference styles (file / id / name+dir) is filled in. */
  private boolean isTransformationDefined() {
    return !Const.isEmpty( fileName )
      || transObjectId != null || ( !Const.isEmpty( this.directoryPath ) && !Const.isEmpty( transName ) );
  }

  public boolean[] isReferencedObjectEnabled() {
    return new boolean[] { isTransformationDefined(), };
  }

  /**
   * Load the referenced object (the sub-transformation), without a metastore.
   *
   * @param index
   *          the object index to load (unused; there is only one referenced object)
   * @param rep
   *          the repository
   * @param space
   *          the variable space to use
   * @return the referenced object once loaded
   * @throws KettleException
   * @deprecated use {@code loadReferencedObject(int, Repository, IMetaStore, VariableSpace)} instead
   */
  @Deprecated
  public Object loadReferencedObject( int index, Repository rep, VariableSpace space ) throws KettleException {
    return loadSingleThreadedTransMeta( this, rep, space );
  }

  /** Loads the referenced sub-transformation with the given metastore. */
  public Object loadReferencedObject( int index, Repository rep, IMetaStore metaStore, VariableSpace space ) throws KettleException {
    return loadSingleThreadedTransMeta( this, rep, metaStore, space );
  }

  @Override
  public boolean supportsErrorHandling() {
    return true;
  }

  @Override
  public boolean excludeFromCopyDistributeVerification() {
    return true;
  }

  public void setMetaStore( IMetaStore metaStore ) {
    this.metaStore = metaStore;
  }

  public IMetaStore getMetaStore() {
    return metaStore;
  }

}
package cc.mallet.fst; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.logging.Logger; import cc.mallet.optimize.Optimizable; import cc.mallet.types.FeatureSequence; import cc.mallet.types.FeatureVectorSequence; import cc.mallet.types.Instance; import cc.mallet.types.InstanceList; import cc.mallet.types.MatrixOps; import cc.mallet.util.MalletLogger; /** * Implements label likelihood gradient computations for batches of data, can be * easily parallelized. <p> * * The gradient computations are the same as that of * <tt>CRFOptimizableByLabelLikelihood</tt>. <p> * * *Note*: Expectations corresponding to each batch of data can be computed in * parallel. During gradient computation, the prior and the constraints are * incorporated into the expectations of the last batch (see * <tt>getBatchValue, getBatchValueGradient</tt>). * * *Note*: This implementation ignores instances with infinite weights (see * <tt>getExpectationValue</tt>). 
* @author Gaurav Chandalia
 */
public class CRFOptimizableByBatchLabelLikelihood implements Optimizable.ByCombiningBatchGradient, Serializable {
  private static Logger logger =
      MalletLogger.getLogger(CRFOptimizableByBatchLabelLikelihood.class.getName());

  static final double DEFAULT_GAUSSIAN_PRIOR_VARIANCE = 1.0;
  static final double DEFAULT_HYPERBOLIC_PRIOR_SLOPE = 0.2;
  static final double DEFAULT_HYPERBOLIC_PRIOR_SHARPNESS = 10.0;

  protected CRF crf;
  protected InstanceList trainingSet;
  // number of batches the training set is split into
  protected int numBatches;
  // batch specific expectations, refilled by getExpectationValue for each gradient pass
  protected List<CRF.Factors> expectations;
  // constraints gathered once over the whole training set (in the constructor)
  protected CRF.Factors constraints;
  // per-batch cached value and gradient, kept separate to avoid sharing across batches
  protected double[] cachedValue;
  protected List<double[]> cachedGradient;

  boolean usingHyperbolicPrior = false;
  double gaussianPriorVariance = DEFAULT_GAUSSIAN_PRIOR_VARIANCE;
  double hyperbolicPriorSlope = DEFAULT_HYPERBOLIC_PRIOR_SLOPE;
  double hyperbolicPriorSharpness = DEFAULT_HYPERBOLIC_PRIOR_SHARPNESS;

  /**
   * @param crf the model whose parameters are being optimized
   * @param ilist the training data
   * @param numBatches the number of batches the data will be split into by the caller
   */
  public CRFOptimizableByBatchLabelLikelihood(CRF crf, InstanceList ilist, int numBatches) {
    // set up
    this.crf = crf;
    this.trainingSet = ilist;
    this.numBatches = numBatches;
    cachedValue = new double[this.numBatches];
    cachedGradient = new ArrayList<double[]>(this.numBatches);
    expectations = new ArrayList<CRF.Factors>(this.numBatches);
    int numFactors = crf.parameters.getNumFactors();
    for (int i = 0; i < this.numBatches; ++i) {
      cachedGradient.add(new double[numFactors]);
      expectations.add(new CRF.Factors(crf.parameters));
    }
    constraints = new CRF.Factors(crf.parameters);
    gatherConstraints(ilist);
  }

  /**
   * Set the constraints by running forward-backward with the <i>output label
   * sequence provided</i>, thus restricting it to only those paths that agree with
   * the label sequence.
   */
  protected void gatherConstraints(InstanceList ilist) {
    logger.info("Gathering constraints...");
    assert (constraints.structureMatches(crf.parameters));
    constraints.zero();
    for (Instance instance : ilist) {
      FeatureVectorSequence input = (FeatureVectorSequence) instance.getData();
      FeatureSequence output = (FeatureSequence) instance.getTarget();
      double instanceWeight = ilist.getInstanceWeight(instance);
      // use the weighted incrementor only when the instance weight differs from 1.0
      Transducer.Incrementor incrementor =
          instanceWeight == 1.0 ? constraints.new Incrementor()
              : constraints.new WeightedIncrementor(instanceWeight);
      new SumLatticeDefault (this.crf, input, output, incrementor);
    }
    constraints.assertNotNaNOrInfinite();
  }

  /**
   * Computes log probability of a batch of training data, fill in corresponding
   * expectations as well.
   * Instances whose labeled, unlabeled or combined weight is infinite are counted,
   * logged, and excluded from the returned value.
   */
  protected double getExpectationValue(int batchIndex, int[] batchAssignments) {
    // Reset expectations to zero before we fill them again
    CRF.Factors batchExpectations = expectations.get(batchIndex);
    batchExpectations.zero();

    // count the number of instances that have infinite weight
    int numInfLabeledWeight = 0;
    int numInfUnlabeledWeight = 0;
    int numInfWeight = 0;

    double value = 0;
    double unlabeledWeight, labeledWeight, weight;
    // batchAssignments holds [start, end) indices into the training set (see the
    // length == 2 assertion in getBatchValue)
    for (int ii = batchAssignments[0]; ii < batchAssignments[1]; ii++) {
      Instance instance = trainingSet.get(ii);
      double instanceWeight = trainingSet.getInstanceWeight(instance);
      FeatureVectorSequence input = (FeatureVectorSequence) instance.getData();
      FeatureSequence output = (FeatureSequence) instance.getTarget();
      // clamped lattice: total weight of paths agreeing with the given labels
      labeledWeight = new SumLatticeDefault (this.crf, input, output, null).getTotalWeight();
      if (Double.isInfinite (labeledWeight)) {
        ++numInfLabeledWeight;
      }
      Transducer.Incrementor incrementor = instanceWeight == 1.0 ?
          batchExpectations.new Incrementor()
          : batchExpectations.new WeightedIncrementor (instanceWeight);
      // unclamped lattice: total weight over all paths; also accumulates expectations
      unlabeledWeight = new SumLatticeDefault (this.crf, input, null, incrementor).getTotalWeight();
      if (Double.isInfinite (unlabeledWeight)) {
        ++numInfUnlabeledWeight;
      }
      // weight is log(conditional probability correct label sequence)
      weight = labeledWeight - unlabeledWeight;
      if (Double.isInfinite(weight)) {
        ++numInfWeight;
      } else {
        // Weights are log probabilities, and we want to return a log probability
        value += weight * instanceWeight;
      }
    }
    batchExpectations.assertNotNaNOrInfinite();

    if (numInfLabeledWeight > 0 || numInfUnlabeledWeight > 0 || numInfWeight > 0) {
      logger.warning("Batch: " + batchIndex + ", Number of instances with:\n" +
          "\t -infinite labeled weight: " + numInfLabeledWeight + "\n" +
          "\t -infinite unlabeled weight: " + numInfUnlabeledWeight + "\n" +
          "\t -infinite weight: " + numInfWeight);
    }
    return value;
  }

  /**
   * Returns the log probability of a batch of training sequence labels and the prior over
   * parameters, if last batch then incorporate the prior on parameters as well.
*/
  public double getBatchValue(int batchIndex, int[] batchAssignments) {
    assert(batchIndex < this.numBatches) : "Incorrect batch index: " + batchIndex + ", range(0, " + this.numBatches + ")";
    assert(batchAssignments.length == 2 && batchAssignments[0] <= batchAssignments[1])
        : "Invalid batch assignments: " + Arrays.toString(batchAssignments);

    // Get the value of all the true labels for current batch, also filling in expectations
    double value = getExpectationValue(batchIndex, batchAssignments);

    // the prior is folded into the *last* batch only, so it is counted exactly once overall
    if (batchIndex == numBatches-1) {
      if (usingHyperbolicPrior) // Hyperbolic prior
        // (sic) "hyberbolicPrior" is the spelling of the CRF.Factors API method
        value += crf.parameters.hyberbolicPrior(hyperbolicPriorSlope, hyperbolicPriorSharpness);
      else // Gaussian prior
        value += crf.parameters.gaussianPrior(gaussianPriorVariance);
    }
    assert(!(Double.isNaN(value) || Double.isInfinite(value)))
        : "Label likelihood is NaN/Infinite, batchIndex: " + batchIndex + "batchAssignments: " + Arrays.toString(batchAssignments);

    // update cache
    cachedValue[batchIndex] = value;
    return value;
  }

  /**
   * Fills buffer with the (un-negated) gradient contribution of one batch; the final
   * sign flip happens in {@link #combineGradients}. Must be called after getBatchValue
   * for the same batch, which fills in the expectations this method reads.
   */
  public void getBatchValueGradient(double[] buffer, int batchIndex, int[] batchAssignments) {
    assert(batchIndex < this.numBatches) : "Incorrect batch index: " + batchIndex + ", range(0, " + this.numBatches + ")";
    assert(batchAssignments.length == 2 && batchAssignments[0] <= batchAssignments[1])
        : "Invalid batch assignments: " + Arrays.toString(batchAssignments);

    CRF.Factors batchExpectations = expectations.get(batchIndex);

    if (batchIndex == numBatches-1) {
      // crf parameters' check has to be done only once, infinite values are allowed
      crf.parameters.assertNotNaN();

      // factor the constraints and the prior into the expectations of last batch
      // Gradient = (constraints - expectations + prior) = -(expectations - constraints - prior)
      // The minus sign is factored in combineGradients method after all gradients are computed
      batchExpectations.plusEquals(constraints, -1.0);
      if (usingHyperbolicPrior)
        batchExpectations.plusEqualsHyperbolicPriorGradient(crf.parameters, -hyperbolicPriorSlope, hyperbolicPriorSharpness);
      else
        batchExpectations.plusEqualsGaussianPriorGradient(crf.parameters, -gaussianPriorVariance);
      batchExpectations.assertNotNaNOrInfinite();
    }

    double[] gradient = cachedGradient.get(batchIndex);
    // set the cached gradient
    batchExpectations.getParameters(gradient);
    System.arraycopy(gradient, 0, buffer, 0, gradient.length);
  }

  /**
   * Adds gradients from all batches. <p>
   * <b>Note:</b> assumes buffer is already initialized.
   */
  public void combineGradients(Collection<double[]> batchGradients, double[] buffer) {
    assert(buffer.length == crf.parameters.getNumFactors())
        : "Incorrect buffer length: " + buffer.length + ", expected: " + crf.parameters.getNumFactors();

    Arrays.fill(buffer, 0);
    for (double[] gradient : batchGradients) {
      MatrixOps.plusEquals(buffer, gradient);
    }
    // -(...) from getBatchValueGradient
    MatrixOps.timesEquals(buffer, -1.0);
  }

  public int getNumBatches() { return numBatches; }

  // prior configuration accessors
  public void setUseHyperbolicPrior (boolean f) { usingHyperbolicPrior = f; }
  public void setHyperbolicPriorSlope (double p) { hyperbolicPriorSlope = p; }
  public void setHyperbolicPriorSharpness (double p) { hyperbolicPriorSharpness = p; }
  public double getUseHyperbolicPriorSlope () { return hyperbolicPriorSlope; }
  public double getUseHyperbolicPriorSharpness () { return hyperbolicPriorSharpness; }
  public void setGaussianPriorVariance (double p) { gaussianPriorVariance = p; }
  public double getGaussianPriorVariance () { return gaussianPriorVariance; }

  // Optimizable interface: parameters are delegated to the CRF; setters notify the
  // CRF that cached weight structures must be refreshed.
  public int getNumParameters () {return crf.parameters.getNumFactors();}
  public void getParameters (double[] buffer) { crf.parameters.getParameters(buffer); }
  public double getParameter (int index) { return crf.parameters.getParameter(index); }
  public void setParameters (double [] buff) { crf.parameters.setParameters(buff); crf.weightsValueChanged(); }
  public void setParameter (int index, double value) { crf.parameters.setParameter(index, value); crf.weightsValueChanged(); }

  // serialization (declaration continues on the next line)
  private static final long
serialVersionUID = 1; private static final int CURRENT_SERIAL_VERSION = 0; private void writeObject (ObjectOutputStream out) throws IOException { out.writeInt (CURRENT_SERIAL_VERSION); out.writeObject(trainingSet); out.writeObject(crf); out.writeInt(numBatches); out.writeObject(cachedValue); for (double[] gradient : cachedGradient) out.writeObject(gradient); } private void readObject (ObjectInputStream in) throws IOException, ClassNotFoundException { in.readInt (); trainingSet = (InstanceList) in.readObject(); crf = (CRF)in.readObject(); numBatches = in.readInt(); cachedValue = (double[]) in.readObject(); cachedGradient = new ArrayList<double[]>(numBatches); for (int i = 0; i < numBatches; ++i) cachedGradient.set(i, (double[]) in.readObject()); } public static class Factory { public Optimizable.ByCombiningBatchGradient newCRFOptimizable (CRF crf, InstanceList trainingData, int numBatches) { return new CRFOptimizableByBatchLabelLikelihood (crf, trainingData, numBatches); } } }
package mil.dds.anet.test.resources; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.fail; import com.google.common.collect.ImmutableList; import com.graphql_java_generator.exception.GraphQLRequestExecutionException; import com.graphql_java_generator.exception.GraphQLRequestPreparationException; import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.text.Collator; import java.time.Instant; import java.time.ZonedDateTime; import java.time.temporal.ChronoUnit; import java.util.ArrayList; import java.util.Base64; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.UUID; import java.util.stream.Collectors; import javax.ws.rs.ForbiddenException; import mil.dds.anet.AnetObjectEngine; import mil.dds.anet.test.client.AnetBeanList_Organization; import mil.dds.anet.test.client.AnetBeanList_Person; import mil.dds.anet.test.client.AnetBeanList_Position; import mil.dds.anet.test.client.CustomSensitiveInformation; import mil.dds.anet.test.client.CustomSensitiveInformationInput; import mil.dds.anet.test.client.Organization; import mil.dds.anet.test.client.OrganizationSearchQueryInput; import mil.dds.anet.test.client.OrganizationType; import mil.dds.anet.test.client.Person; import mil.dds.anet.test.client.PersonInput; import mil.dds.anet.test.client.PersonPositionHistory; import mil.dds.anet.test.client.PersonPositionHistoryInput; import mil.dds.anet.test.client.PersonSearchQueryInput; import mil.dds.anet.test.client.PersonSearchSortBy; import mil.dds.anet.test.client.Position; import mil.dds.anet.test.client.PositionInput; import mil.dds.anet.test.client.PositionSearchQueryInput; import mil.dds.anet.test.client.PositionType; import mil.dds.anet.test.client.RecurseStrategy; import mil.dds.anet.test.client.Role; import mil.dds.anet.test.client.SortOrder; import mil.dds.anet.test.client.Status; import mil.dds.anet.test.client.util.MutationExecutor; import 
mil.dds.anet.test.client.util.QueryExecutor; import mil.dds.anet.test.utils.UtilsTest; import mil.dds.anet.utils.DaoUtils; import org.junit.jupiter.api.Test; public class PersonResourceTest extends AbstractResourceTest { private static final String BIRTHDAY_FIELD = "birthday"; private static final String POLITICAL_POSITION_FIELD = "politicalPosition"; private static final String _CUSTOM_SENSITIVE_INFORMATION_FIELDS = "customSensitiveInformation { uuid customFieldName customFieldValue" + " relatedObjectType relatedObjectUuid createdAt updatedAt }"; private static final String _POSITION_FIELDS = "uuid name code type status organization { uuid }"; private static final String _PERSON_FIELDS = "uuid name status role emailAddress phoneNumber rank biography country avatar code" + " gender endOfTourDate domainUsername openIdSubject pendingVerification createdAt updatedAt" + " customFields"; public static final String PERSON_FIELDS_ONLY_HISTORY = "{ uuid previousPositions { startTime endTime position { uuid } } }"; public static final String POSITION_FIELDS = String.format("{ %s person { %s } %s }", _POSITION_FIELDS, _PERSON_FIELDS, _CUSTOM_SENSITIVE_INFORMATION_FIELDS); public static final String FIELDS = String.format("{ %s position { %s } %s }", _PERSON_FIELDS, _POSITION_FIELDS, _CUSTOM_SENSITIVE_INFORMATION_FIELDS); // 200 x 200 avatar final File DEFAULT_AVATAR = new File(PersonResourceTest.class.getResource("/assets/default_avatar.png").getFile()); @Test public void testCreatePerson() throws IOException, GraphQLRequestExecutionException, GraphQLRequestPreparationException { final Person jack = getJackJackson(); Person retPerson = jackQueryExecutor.person(FIELDS, jack.getUuid()); assertThat(retPerson).isNotNull(); assertThat(retPerson.getUuid()).isEqualTo(jack.getUuid()); final PersonInput newPersonInput = PersonInput.builder().withName("testCreatePerson Person") .withRole(Role.ADVISOR).withStatus(Status.ACTIVE) // set HTML of biography 
.withBiography(UtilsTest.getCombinedHtmlTestCase().getInput()) // set JSON of customFields .withCustomFields(UtilsTest.getCombinedJsonTestCase().getInput()).withGender("Female") .withCountry("Canada").withCode("123456") .withEndOfTourDate( ZonedDateTime.of(2020, 4, 1, 0, 0, 0, 0, DaoUtils.getServerNativeZoneId()).toInstant()) .build(); final Person newPerson = adminMutationExecutor.createPerson(FIELDS, newPersonInput); assertThat(newPerson).isNotNull(); assertThat(newPerson.getUuid()).isNotNull(); assertThat(newPerson.getName()).isEqualTo("testCreatePerson Person"); // check that HTML of biography is sanitized after create assertThat(newPerson.getBiography()).isEqualTo(UtilsTest.getCombinedHtmlTestCase().getOutput()); // check that JSON of customFields is sanitized after create assertThat(newPerson.getCustomFields()) .isEqualTo(UtilsTest.getCombinedJsonTestCase().getOutput()); newPerson.setName("testCreatePerson updated name"); newPerson.setCountry("The Commonwealth of Canada"); newPerson.setCode("A123456"); // update avatar byte[] fileContent = Files.readAllBytes(DEFAULT_AVATAR.toPath()); String defaultAvatarData = Base64.getEncoder().encodeToString(fileContent); newPerson.setAvatar(defaultAvatarData); // update HTML of biography newPerson.setBiography(UtilsTest.getCombinedHtmlTestCase().getInput()); // update JSON of customFields newPerson.setCustomFields(UtilsTest.getCombinedJsonTestCase().getInput()); Integer nrUpdated = adminMutationExecutor.updatePerson("", getPersonInput(newPerson)); assertThat(nrUpdated).isEqualTo(1); retPerson = jackQueryExecutor.person(FIELDS, newPerson.getUuid()); assertThat(retPerson.getName()).isEqualTo(newPerson.getName()); assertThat(retPerson.getCode()).isEqualTo(newPerson.getCode()); assertThat(retPerson.getAvatar()).isNotNull(); // check that HTML of biography is sanitized after update assertThat(retPerson.getBiography()).isEqualTo(UtilsTest.getCombinedHtmlTestCase().getOutput()); // check that JSON of customFields is sanitized 
after update assertThat(retPerson.getCustomFields()) .isEqualTo(UtilsTest.getCombinedJsonTestCase().getOutput()); // Test creating a person with a position already set. final OrganizationSearchQueryInput query = OrganizationSearchQueryInput.builder() .withText("EF 6").withType(OrganizationType.ADVISOR_ORG).build(); final AnetBeanList_Organization orgs = jackQueryExecutor.organizationList(getListFields("{ uuid shortName }"), query); assertThat(orgs.getList().size()).isGreaterThan(0); Organization org = orgs.getList().stream() .filter(o -> o.getShortName().equalsIgnoreCase("EF 6")).findFirst().get(); final PositionInput newPosInput = PositionInput.builder().withType(PositionType.ADVISOR) .withName("Test Position").withOrganization(getOrganizationInput(org)) .withLocation(getLocationInput(getGeneralHospital())).withStatus(Status.ACTIVE).build(); final Position newPos = adminMutationExecutor.createPosition(POSITION_FIELDS, newPosInput); assertThat(newPos).isNotNull(); assertThat(newPos.getUuid()).isNotNull(); final PersonInput newPerson2Input = PersonInput.builder().withName("Namey McNameface") .withRole(Role.ADVISOR).withStatus(Status.ACTIVE).withDomainUsername("testcreateperson") .withPosition(getPositionInput(newPos)).build(); final Person newPerson2 = adminMutationExecutor.createPerson(FIELDS, newPerson2Input); assertThat(newPerson2).isNotNull(); assertThat(newPerson2.getUuid()).isNotNull(); assertThat(newPerson2.getPosition()).isNotNull(); assertThat(newPerson2.getPosition().getUuid()).isEqualTo(newPos.getUuid()); // Change this person w/ a new position, and ensure it gets changed. 
final PositionInput newPos2Input = PositionInput.builder().withType(PositionType.ADVISOR) .withName("A Second Test Position").withOrganization(getOrganizationInput(org)) .withLocation(getLocationInput(getGeneralHospital())).withStatus(Status.ACTIVE).build(); final Position newPos2 = adminMutationExecutor.createPosition(POSITION_FIELDS, newPos2Input); assertThat(newPos2).isNotNull(); assertThat(newPos2.getUuid()).isNotNull(); newPerson2.setName("Changey McChangeface"); newPerson2.setPosition(newPos2); // A person cannot change their own position final MutationExecutor newPerson2MutationExecutor = getMutationExecutor(newPerson2.getDomainUsername()); try { newPerson2MutationExecutor.updatePerson("", getPersonInput(newPerson2)); fail("Expected ForbiddenException"); } catch (ForbiddenException expectedException) { } nrUpdated = adminMutationExecutor.updatePerson("", getPersonInput(newPerson2)); assertThat(nrUpdated).isEqualTo(1); retPerson = adminQueryExecutor.person(FIELDS, newPerson2.getUuid()); assertThat(retPerson).isNotNull(); assertThat(retPerson.getName()).isEqualTo(newPerson2.getName()); assertThat(retPerson.getPosition()).isNotNull(); assertThat(retPerson.getPosition().getUuid()).isEqualTo(newPos2.getUuid()); // Now newPerson2 who is a super user, should NOT be able to edit newPerson // Because they are not in newPerson2's organization. try { newPerson2MutationExecutor.updatePerson("", getPersonInput(newPerson)); fail("Expected ForbiddenException"); } catch (ForbiddenException expectedException) { } // Add some scary HTML to newPerson2's profile and ensure it gets stripped out. newPerson2.setBiography( "<b>Hello world</b>. I like script tags! 
<script>window.alert('hello world')</script>"); nrUpdated = adminMutationExecutor.updatePerson("", getPersonInput(newPerson2)); assertThat(nrUpdated).isEqualTo(1); retPerson = adminQueryExecutor.person(FIELDS, newPerson2.getUuid()); assertThat(retPerson.getBiography()).contains("<b>Hello world</b>"); assertThat(retPerson.getBiography()).doesNotContain("<script>window.alert"); } @Test public void searchPerson() throws GraphQLRequestExecutionException, GraphQLRequestPreparationException { PersonSearchQueryInput query = PersonSearchQueryInput.builder().withText("bob").build(); AnetBeanList_Person searchResults = jackQueryExecutor.personList(getListFields(FIELDS), query); assertThat(searchResults.getTotalCount()).isGreaterThan(0); assertThat(searchResults.getList().stream().filter(p -> p.getName().equals("BOBTOWN, Bob")) .findFirst()).isNotEmpty(); final OrganizationSearchQueryInput queryOrgs = OrganizationSearchQueryInput.builder() .withText("EF 1").withType(OrganizationType.ADVISOR_ORG).build(); final AnetBeanList_Organization orgs = jackQueryExecutor.organizationList(getListFields("{ uuid shortName }"), queryOrgs); assertThat(orgs.getList().size()).isGreaterThan(0); Organization org = orgs.getList().stream() .filter(o -> o.getShortName().equalsIgnoreCase("EF 1.1")).findFirst().get(); query.setText(null); query.setOrgUuid(org.getUuid()); searchResults = jackQueryExecutor.personList(getListFields(FIELDS), query); assertThat(searchResults.getList()).isNotEmpty(); query.setOrgUuid(null); query.setStatus(Status.INACTIVE); searchResults = jackQueryExecutor.personList(getListFields(FIELDS), query); assertThat(searchResults.getList()).isNotEmpty(); assertThat( searchResults.getList().stream().filter(p -> p.getStatus() == Status.INACTIVE).count()) .isEqualTo(searchResults.getList().size()); // Search with children orgs org = orgs.getList().stream().filter(o -> o.getShortName().equalsIgnoreCase("EF 1")).findFirst() .get(); query.setStatus(null); 
query.setOrgUuid(org.getUuid()); // First don't include child orgs and then increase the scope and verify results increase. final AnetBeanList_Person parentOnlyResults = jackQueryExecutor.personList(getListFields(FIELDS), query); query.setOrgRecurseStrategy(RecurseStrategy.CHILDREN); searchResults = jackQueryExecutor.personList(getListFields(FIELDS), query); assertThat(searchResults.getList()).isNotEmpty(); final Set<String> srUuids = searchResults.getList().stream().map(p -> p.getUuid()).collect(Collectors.toSet()); final Set<String> poUuids = parentOnlyResults.getList().stream().map(p -> p.getUuid()).collect(Collectors.toSet()); assertThat(srUuids).containsAll(poUuids); query.setOrgRecurseStrategy(RecurseStrategy.CHILDREN); searchResults = jackQueryExecutor.personList(getListFields(FIELDS), query); assertThat(searchResults.getList()).isNotEmpty(); query.setOrgUuid(null); query.setText(null); query.setRole(Role.ADVISOR); searchResults = jackQueryExecutor.personList(getListFields(FIELDS), query); assertThat(searchResults.getList().size()).isGreaterThan(1); query.setRole(null); query.setText("e"); query.setSortBy(PersonSearchSortBy.NAME); query.setSortOrder(SortOrder.DESC); searchResults = jackQueryExecutor.personList(getListFields(FIELDS), query); final Collator collator = Collator.getInstance(); collator.setStrength(Collator.PRIMARY); String prevName = null; for (final Person p : searchResults.getList()) { if (prevName != null) { assertThat(collator.compare(p.getName(), prevName)).isLessThanOrEqualTo(0); } prevName = p.getName(); } // Search for a person with the name "A Dvisor" query = PersonSearchQueryInput.builder().withText("Dvisor").withRole(Role.ADVISOR).build(); searchResults = jackQueryExecutor.personList(getListFields(FIELDS), query); long matchCount = searchResults.getList().stream().filter(p -> p.getName().equals("DVISOR, A")).count(); assertThat(matchCount).isEqualTo(1); // Search for same person from an autocomplete box. 
query.setText("Dvisor*"); searchResults = jackQueryExecutor.personList(getListFields(FIELDS), query); matchCount = searchResults.getList().stream().filter(p -> p.getName().equals("DVISOR, A")).count(); assertThat(matchCount).isEqualTo(1); // Search by email Address query.setText("hunter+arthur@example.com"); searchResults = jackQueryExecutor.personList(getListFields(FIELDS), query); matchCount = searchResults.getList().stream() .filter(p -> p.getEmailAddress().equals("hunter+arthur@example.com")).count(); assertThat(matchCount).isEqualTo(1); // TODO: should we enforce that this query returns ONLY arthur? I think not since we're using // the plus addressing for testing.. // Search for persons with biography filled query = PersonSearchQueryInput.builder().withHasBiography(true).build(); searchResults = jackQueryExecutor.personList(getListFields(FIELDS), query); assertThat(searchResults.getList()).isNotEmpty(); // Search for persons with empty biography query = PersonSearchQueryInput.builder().withHasBiography(false).build(); searchResults = jackQueryExecutor.personList(getListFields(FIELDS), query); assertThat(searchResults.getList()).isNotEmpty(); } @Test public void testInactivatePerson() throws GraphQLRequestExecutionException, GraphQLRequestPreparationException { final OrganizationSearchQueryInput query = OrganizationSearchQueryInput.builder() .withText("EF 6").withType(OrganizationType.ADVISOR_ORG).build(); final AnetBeanList_Organization orgs = jackQueryExecutor.organizationList(getListFields("{ uuid shortName }"), query); assertThat(orgs.getList().size()).isGreaterThan(0); final Organization org = orgs.getList().stream() .filter(o -> o.getShortName().equalsIgnoreCase("EF 6")).findFirst().get(); assertThat(org.getUuid()).isNotNull(); final PositionInput newPosInput = PositionInput.builder().withType(PositionType.ADVISOR) .withName("Test Position").withOrganization(getOrganizationInput(org)) 
.withLocation(getLocationInput(getGeneralHospital())).withStatus(Status.ACTIVE).build(); final Position retPos = adminMutationExecutor.createPosition(POSITION_FIELDS, newPosInput); assertThat(retPos).isNotNull(); assertThat(retPos.getUuid()).isNotNull(); final PersonInput newPersonInput = PersonInput.builder().withName("Namey McNameface").withRole(Role.ADVISOR) .withStatus(Status.ACTIVE).withDomainUsername("namey_" + Instant.now().toEpochMilli()) .withPosition(getPositionInput(retPos)).build(); final Person retPerson = adminMutationExecutor.createPerson(FIELDS, newPersonInput); assertThat(retPerson).isNotNull(); assertThat(retPerson.getUuid()).isNotNull(); assertThat(retPerson.getPosition()).isNotNull(); retPerson.setStatus(Status.INACTIVE); final Integer nrUpdated = adminMutationExecutor.updatePerson("", getPersonInput(retPerson)); assertThat(nrUpdated).isEqualTo(1); final Person retPerson2 = adminQueryExecutor.person(FIELDS, retPerson.getUuid()); assertThat(retPerson2.getDomainUsername()).isNull(); assertThat(retPerson2.getOpenIdSubject()).isNull(); assertThat(retPerson2.getPosition()).isNull(); } @Test public void personCreateSuperUserPermissionTest() throws GraphQLRequestExecutionException, GraphQLRequestPreparationException { createPerson(getSuperUser()); } @Test public void personCreateRegularUserPermissionTest() throws GraphQLRequestExecutionException, GraphQLRequestPreparationException { createPerson(getRegularUser()); } private void createPerson(Person user) throws GraphQLRequestExecutionException, GraphQLRequestPreparationException { final QueryExecutor userQueryExecutor = getQueryExecutor(user.getDomainUsername()); final MutationExecutor userMutationExecutor = getMutationExecutor(user.getDomainUsername()); final Position position = user.getPosition(); final boolean isSuperUser = position.getType() == PositionType.SUPER_USER; final Organization organization = position.getOrganization(); // principal final PersonInput principalInput = 
PersonInput.builder().withName("Namey McNameface") .withRole(Role.PRINCIPAL).withStatus(Status.ACTIVE) .withDomainUsername("namey_" + Instant.now().toEpochMilli()).build(); try { final Person p = userMutationExecutor.createPerson(FIELDS, principalInput); if (isSuperUser) { assertThat(p).isNotNull(); assertThat(p.getUuid()).isNotNull(); } else { fail("Expected ForbiddenException"); } } catch (ForbiddenException expectedException) { if (isSuperUser) { fail("Unexpected ForbiddenException"); } } // advisor with no position final PersonInput advisorNoPositionInput = PersonInput.builder().withName("Namey McNameface") .withRole(Role.ADVISOR).withStatus(Status.ACTIVE) .withDomainUsername("namey_" + Instant.now().toEpochMilli()).build(); try { final Person anp = userMutationExecutor.createPerson(FIELDS, advisorNoPositionInput); if (isSuperUser) { assertThat(anp).isNotNull(); assertThat(anp.getUuid()).isNotNull(); } else { fail("Expected ForbiddenException"); } } catch (ForbiddenException expectedException) { if (isSuperUser) { fail("Unexpected ForbiddenException"); } } // advisor with position in own organization final PositionSearchQueryInput query = PositionSearchQueryInput.builder() .withOrganizationUuid(organization.getUuid()).withIsFilled(false).build(); final AnetBeanList_Position searchObjects = userQueryExecutor.positionList(getListFields(POSITION_FIELDS), query); assertThat(searchObjects).isNotNull(); assertThat(searchObjects.getList()).isNotEmpty(); final Position freePos = searchObjects.getList().get(0); final PersonInput advisorPositionInput = PersonInput.builder().withName("Namey McNameface").withRole(Role.ADVISOR) .withStatus(Status.ACTIVE).withDomainUsername("namey_" + Instant.now().toEpochMilli()) .withPosition(getPositionInput(freePos)).build(); try { final Person ap = userMutationExecutor.createPerson(FIELDS, advisorPositionInput); if (isSuperUser) { assertThat(ap).isNotNull(); assertThat(ap.getUuid()).isNotNull(); } else { fail("Expected 
ForbiddenException"); } } catch (ForbiddenException expectedException) { if (isSuperUser) { fail("Unexpected ForbiddenException"); } } // advisor with position in other organization final List<PositionType> positionTypes = new ArrayList<>(); positionTypes.add(PositionType.ADVISOR); final PositionSearchQueryInput query2 = PositionSearchQueryInput.builder().withType(positionTypes).withIsFilled(false).build(); final AnetBeanList_Position searchObjects2 = userQueryExecutor.positionList(getListFields(POSITION_FIELDS), query2); assertThat(searchObjects2).isNotNull(); assertThat(searchObjects2.getList()).isNotEmpty(); final Optional<Position> foundPos2 = searchObjects2.getList().stream() .filter(p -> !organization.getUuid().equals(p.getOrganization().getUuid())).findFirst(); assertThat(foundPos2.isPresent()).isTrue(); final Position freePos2 = foundPos2.get(); final PersonInput advisorPosition2Input = PersonInput.builder().withName("Namey McNameface").withRole(Role.ADVISOR) .withStatus(Status.ACTIVE).withDomainUsername("namey_" + Instant.now().toEpochMilli()) .withPosition(getPositionInput(freePos2)).build(); try { userMutationExecutor.createPerson(FIELDS, advisorPosition2Input); fail("Expected ForbiddenException"); } catch (ForbiddenException expectedException) { } } @Test public void testReadCustomSensitiveInformation() throws GraphQLRequestExecutionException, GraphQLRequestPreparationException { // Steve already has sensitive data final String steveUuid = getSteveSteveson().getUuid(); // Elizabeth can read all sensitive data of her counterpart Steve checkSensitiveInformation(steveUuid, "elizabeth", ImmutableList.of(BIRTHDAY_FIELD, POLITICAL_POSITION_FIELD)); // Jim has no access to Steve's sensitive data checkSensitiveInformation(steveUuid, "jim", ImmutableList.of()); } @Test public void testInsertCustomSensitiveInformation() throws GraphQLRequestExecutionException, GraphQLRequestPreparationException { // Christopf has no sensitive data yet final String christopfUuid = 
getChristopfTopferness().getUuid(); // Admin has access to everything checkSensitiveInformationEdit(christopfUuid, adminUser, ImmutableList.of(BIRTHDAY_FIELD, POLITICAL_POSITION_FIELD), true); // Henry has access to Christopf's birthday checkSensitiveInformationEdit(christopfUuid, "henry", ImmutableList.of(BIRTHDAY_FIELD), true); // Bob has access to Christopf's politicalPosition checkSensitiveInformationEdit(christopfUuid, "bob", ImmutableList.of(POLITICAL_POSITION_FIELD), true); } @Test public void testUpdatePersonHistory() throws Exception { final OrganizationSearchQueryInput query = OrganizationSearchQueryInput.builder() .withText("EF 6").withType(OrganizationType.ADVISOR_ORG).build(); final AnetBeanList_Organization orgs = jackQueryExecutor.organizationList(getListFields("{ uuid shortName }"), query); assertThat(orgs.getList().size()).isGreaterThan(0); final Organization org = orgs.getList().stream() .filter(o -> o.getShortName().equalsIgnoreCase("EF 6")).findFirst().get(); assertThat(org.getUuid()).isNotNull(); final PersonInput persInput = PersonInput.builder().withRole(Role.ADVISOR) .withName("Test person for edit history").build(); final Person person = adminMutationExecutor.createPerson(FIELDS, persInput); assertThat(person).isNotNull(); assertThat(person.getUuid()).isNotNull(); // Create a Position final PositionInput testInput1 = PositionInput.builder().withType(PositionType.ADVISOR) .withName("Test Position for person history edit 1") .withOrganization(getOrganizationInput(org)) .withLocation(getLocationInput(getGeneralHospital())).withStatus(Status.ACTIVE).build(); final Position createdPos1 = adminMutationExecutor.createPosition(POSITION_FIELDS, testInput1); assertThat(createdPos1).isNotNull(); assertThat(createdPos1.getUuid()).isNotNull(); assertThat(createdPos1.getName()).isEqualTo(testInput1.getName()); final PositionInput posInput1 = PositionInput.builder().withUuid(createdPos1.getUuid()).build(); final PositionInput testInput2 = 
PositionInput.builder().withType(PositionType.ADVISOR) .withName("Test Position for person history edit 2") .withOrganization(getOrganizationInput(org)) .withLocation(getLocationInput(getGeneralHospital())).withStatus(Status.ACTIVE).build(); final Position createdPos2 = adminMutationExecutor.createPosition(POSITION_FIELDS, testInput2); assertThat(createdPos2).isNotNull(); assertThat(createdPos2.getUuid()).isNotNull(); assertThat(createdPos2.getName()).isEqualTo(testInput2.getName()); final PositionInput posInput2 = PositionInput.builder().withUuid(createdPos2.getUuid()).build(); final PersonPositionHistoryInput hist1 = PersonPositionHistoryInput.builder() .withCreatedAt(Instant.now().minus(100, ChronoUnit.DAYS)) .withStartTime(Instant.now().minus(100, ChronoUnit.DAYS)) .withEndTime(Instant.now().minus(50, ChronoUnit.DAYS)).withPosition(posInput1).build(); final PersonPositionHistoryInput hist2 = PersonPositionHistoryInput.builder().withCreatedAt(Instant.now().minus(49, ChronoUnit.DAYS)) .withStartTime(Instant.now().minus(49, ChronoUnit.DAYS)).withEndTime(Instant.now()) .withPosition(posInput2).build(); final List<PersonPositionHistoryInput> historyList = new ArrayList<>(); historyList.add(hist1); historyList.add(hist2); final PersonInput personInput = getPersonInput(person); personInput.setPreviousPositions(historyList); adminMutationExecutor.updatePersonHistory("", personInput); final Person personUpdated = adminQueryExecutor.person(PERSON_FIELDS_ONLY_HISTORY, personInput.getUuid()); assertThat(personUpdated).isNotNull(); final List<PersonPositionHistory> previousPositions = personUpdated.getPreviousPositions(); assertThat(previousPositions).isNotNull(); assertThat(previousPositions.size() == 2); } @Test public void testUpdateCustomSensitiveInformation() throws GraphQLRequestExecutionException, GraphQLRequestPreparationException { // Steve already has sensitive data final String steveUuid = getSteveSteveson().getUuid(); // Admin has access to everything 
checkSensitiveInformationEdit(steveUuid, adminUser, ImmutableList.of(BIRTHDAY_FIELD, POLITICAL_POSITION_FIELD), false); // Henry has access to Steve's birthday checkSensitiveInformationEdit(steveUuid, "henry", ImmutableList.of(BIRTHDAY_FIELD), false); // Bob has access to Steve's politicalPosition checkSensitiveInformationEdit(steveUuid, "bob", ImmutableList.of(POLITICAL_POSITION_FIELD), false); } private Person checkSensitiveInformation(final String personUuid, final String user, // List should be in alphabetical order final ImmutableList<String> customSensitiveFields) throws GraphQLRequestExecutionException, GraphQLRequestPreparationException { final QueryExecutor queryExecutor = getQueryExecutor(user); final int size = customSensitiveFields.size(); final Person person = queryExecutor.person(FIELDS, personUuid); assertThat(person).isNotNull(); assertThat(person.getCustomSensitiveInformation()).hasSize(size); assertThat(person.getCustomSensitiveInformation()) .allMatch(csi -> customSensitiveFields.contains(csi.getCustomFieldName())); return person; } private void checkSensitiveInformationEdit(final String personUuid, final String user, // List should be in alphabetical order final ImmutableList<String> customSensitiveFields, final boolean doInsert) throws GraphQLRequestExecutionException, GraphQLRequestPreparationException { final Person person = checkSensitiveInformation(personUuid, user, doInsert ? 
ImmutableList.of() : customSensitiveFields); final QueryExecutor queryExecutor = getQueryExecutor(user); final MutationExecutor mutationExecutor = getMutationExecutor(user); final int size = customSensitiveFields.size(); final PersonInput personInput = getInput(person, PersonInput.class); if (doInsert) { final List<CustomSensitiveInformationInput> csiInput = customSensitiveFields.stream() .map(csf -> CustomSensitiveInformationInput.builder().withCustomFieldName(csf) .withCustomFieldValue(getCustomFieldValue(csf, UUID.randomUUID().toString())).build()) .collect(Collectors.toList()); personInput.setCustomSensitiveInformation(csiInput); } else { personInput.getCustomSensitiveInformation().stream() .forEach(csiInput -> csiInput.setCustomFieldValue( getCustomFieldValue(csiInput.getCustomFieldName(), UUID.randomUUID().toString()))); } final Integer nrUpdated = mutationExecutor.updatePerson("", personInput); assertThat(nrUpdated).isEqualTo(1); final Person personUpdated = queryExecutor.person(FIELDS, personInput.getUuid()); assertThat(personUpdated).isNotNull(); assertThat(personUpdated.getCustomSensitiveInformation()).hasSize(size); for (int i = 0; i < size; i++) { final CustomSensitiveInformationInput csiInput = personInput.getCustomSensitiveInformation().get(i); final CustomSensitiveInformation csiUpdated = personUpdated.getCustomSensitiveInformation().get(i); if (doInsert) { assertThat(csiUpdated.getUpdatedAt()).isNotNull(); } else { assertThat(csiUpdated.getUpdatedAt()).isAfter(csiInput.getUpdatedAt()); } assertThat(csiUpdated.getCustomFieldValue()).isEqualTo(csiInput.getCustomFieldValue()); } if (doInsert) { // Delete customSensitiveInformation again final int nrDeleted = AnetObjectEngine.getInstance().getCustomSensitiveInformationDao().deleteFor(personUuid); assertThat(nrDeleted).isEqualTo(size); } else { // Restore previous values final PersonInput personInputRestore = getInput(person, PersonInput.class); personInput.getCustomSensitiveInformation().stream() 
.forEach(csiInput -> csiInput.setCustomFieldValue( getCustomFieldValue(csiInput.getCustomFieldName(), UUID.randomUUID().toString()))); final Integer nrUpdatedRestore = mutationExecutor.updatePerson("", personInputRestore); assertThat(nrUpdatedRestore).isEqualTo(1); } } private String getCustomFieldValue(String fieldName, String value) { return String.format("{\"%1$s\":\"%2$s\"}", fieldName, value); } @Test public void testUnauthorizedCustomSensitiveInformation() throws GraphQLRequestExecutionException, GraphQLRequestPreparationException { // Try to do some updates that are not allowed final String steveUuid = getSteveSteveson().getUuid(); // Henry only has access to Steve's birthday checkUnauthorizedSensitiveInformation(steveUuid, "henry", ImmutableList.of(POLITICAL_POSITION_FIELD)); // Bob only has access to Steve's politicalPosition checkUnauthorizedSensitiveInformation(steveUuid, "bob", ImmutableList.of(BIRTHDAY_FIELD)); } private void checkUnauthorizedSensitiveInformation(final String personUuid, final String user, // List should be in alphabetical order final ImmutableList<String> customSensitiveFields) throws GraphQLRequestExecutionException, GraphQLRequestPreparationException { final QueryExecutor queryExecutor = getQueryExecutor(user); final MutationExecutor mutationExecutor = getMutationExecutor(user); final Person person = queryExecutor.person(FIELDS, personUuid); assertThat(person).isNotNull(); assertThat(person.getCustomSensitiveInformation()) .noneMatch(csi -> customSensitiveFields.contains(csi.getCustomFieldName())); final String customFieldValue = "__UPDATE_NOT_ALLOWED__"; final PersonInput personInput = getInput(person, PersonInput.class); final List<CustomSensitiveInformationInput> csiInput = customSensitiveFields.stream() .map(csf -> CustomSensitiveInformationInput.builder().withCustomFieldName(csf) .withCustomFieldValue(getCustomFieldValue(csf, customFieldValue)).build()) .collect(Collectors.toList()); 
personInput.setCustomSensitiveInformation(csiInput); final Instant beforeUpdate = Instant.now(); final Integer nrUpdated = mutationExecutor.updatePerson("", personInput); assertThat(nrUpdated).isEqualTo(1); final Person personUpdated = adminQueryExecutor.person(FIELDS, personInput.getUuid()); assertThat(personUpdated).isNotNull(); assertThat(personUpdated.getCustomSensitiveInformation()) .allMatch(csi -> !customFieldValue.equals(csi.getCustomFieldValue()) && beforeUpdate.isAfter(csi.getUpdatedAt())); } @Test public void testIllegalCustomSensitiveInformation() throws GraphQLRequestExecutionException, GraphQLRequestPreparationException { // Try to do some updates that are illegal final Person person = adminQueryExecutor.person(FIELDS, getSteveSteveson().getUuid()); assertThat(person).isNotNull(); assertThat(person.getCustomSensitiveInformation()).isNotEmpty(); // Test with non-existing UUID PersonInput personInput = getInput(person, PersonInput.class); personInput.getCustomSensitiveInformation().stream() .forEach(csiInput -> csiInput.setUuid(UUID.randomUUID().toString())); checkIllegalSensitiveInformation(person, personInput, personInput); // Test with wrong customFieldName personInput = getInput(person, PersonInput.class); personInput.getCustomSensitiveInformation().stream() .forEach(csiInput -> csiInput.setCustomFieldName( BIRTHDAY_FIELD.equals(csiInput.getCustomFieldName()) ? 
POLITICAL_POSITION_FIELD : BIRTHDAY_FIELD)); checkIllegalSensitiveInformation(person, personInput, personInput); // Test with wrong relatedObjectUuid personInput = getInput(person, PersonInput.class); final PersonInput otherPersonInput = getInput(getNickNicholson(), PersonInput.class); otherPersonInput.setCustomSensitiveInformation(personInput.getCustomSensitiveInformation()); checkIllegalSensitiveInformation(person, otherPersonInput, personInput); final Person otherPersonUpdated = adminQueryExecutor.person(FIELDS, otherPersonInput.getUuid()); assertThat(otherPersonUpdated).isNotNull(); assertThat(otherPersonUpdated.getCustomSensitiveInformation()).isEmpty(); // Test with wrong relatedObjectType personInput = getInput(person, PersonInput.class); final PositionInput positionInput = personInput.getPosition(); positionInput.setCustomSensitiveInformation(personInput.getCustomSensitiveInformation()); final Integer nrUpdated = adminMutationExecutor.updatePosition("", positionInput); assertThat(nrUpdated).isEqualTo(1); final Position positionUpdated = adminQueryExecutor.position(POSITION_FIELDS, positionInput.getUuid()); assertThat(positionUpdated).isNotNull(); assertThat(positionUpdated.getCustomSensitiveInformation()).isEmpty(); final Person personUpdated = adminQueryExecutor.person(FIELDS, personInput.getUuid()); assertThat(personUpdated).isNotNull(); assertCsi(personUpdated.getCustomSensitiveInformation(), person.getCustomSensitiveInformation()); } private void checkIllegalSensitiveInformation(final Person person, final PersonInput personToUpdate, final PersonInput personToCheck) throws GraphQLRequestExecutionException, GraphQLRequestPreparationException { final Integer nrUpdated = adminMutationExecutor.updatePerson("", personToUpdate); assertThat(nrUpdated).isEqualTo(1); final Person personUpdated = adminQueryExecutor.person(FIELDS, personToCheck.getUuid()); assertThat(personUpdated).isNotNull(); assertCsi(personUpdated.getCustomSensitiveInformation(), 
person.getCustomSensitiveInformation()); } private void assertCsi(final List<CustomSensitiveInformation> csiList1, List<CustomSensitiveInformation> csiList2) { assertThat(csiList1).hasSameSizeAs(csiList2); for (int i = 0; i < csiList1.size(); i++) { final CustomSensitiveInformation csi1 = csiList1.get(i); final CustomSensitiveInformation csi2 = csiList2.get(i); assertThat(csi1.getUuid()).isEqualTo(csi2.getUuid()); assertThat(csi1.getCustomFieldName()).isEqualTo(csi2.getCustomFieldName()); assertThat(csi1.getCustomFieldValue()).isEqualTo(csi2.getCustomFieldValue()); assertThat(csi1.getRelatedObjectType()).isEqualTo(csi2.getRelatedObjectType()); assertThat(csi1.getRelatedObjectUuid()).isEqualTo(csi2.getRelatedObjectUuid()); assertThat(csi1.getCreatedAt()).isEqualTo(csi2.getCreatedAt()); assertThat(csi1.getUpdatedAt()).isEqualTo(csi2.getUpdatedAt()); } } }
/*
 * Copyright 2013-2020 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.contract.verifier.builder;

import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Function;

import org.apache.commons.text.StringEscapeUtils;

import org.springframework.cloud.contract.spec.ContractTemplate;
import org.springframework.cloud.contract.spec.internal.BodyMatchers;
import org.springframework.cloud.contract.spec.internal.ExecutionProperty;
import org.springframework.cloud.contract.verifier.config.TestFramework;
import org.springframework.cloud.contract.verifier.file.SingleContractMetadata;
import org.springframework.cloud.contract.verifier.template.HandlebarsTemplateProcessor;
import org.springframework.cloud.contract.verifier.template.TemplateProcessor;
import org.springframework.cloud.contract.verifier.util.ContentType;
import org.springframework.cloud.contract.verifier.util.MapConverter;
import org.springframework.util.StringUtils;

import static org.springframework.cloud.contract.verifier.util.ContentType.DEFINED;
import static org.springframework.cloud.contract.verifier.util.ContentType.FORM;
import static org.springframework.cloud.contract.verifier.util.ContentType.JSON;
import static org.springframework.cloud.contract.verifier.util.ContentType.TEXT;

/**
 * A "then" (verification) block generator that emits JSON response-body assertions into
 * the generated test via the shared {@link BlockBuilder}.
 */
class GenericJsonBodyThen implements Then {

	private final BlockBuilder blockBuilder;

	private final GeneratedClassMetaData generatedClassMetaData;

	private final BodyParser bodyParser;

	private final BodyAssertionLineCreator bodyAssertionLineCreator;

	private final TemplateProcessor templateProcessor;

	private final ContractTemplate contractTemplate;

	private final ComparisonBuilder comparisonBuilder;

	GenericJsonBodyThen(BlockBuilder blockBuilder, GeneratedClassMetaData metaData,
			BodyParser bodyParser, ComparisonBuilder comparisonBuilder) {
		this.blockBuilder = blockBuilder;
		this.bodyParser = bodyParser;
		this.comparisonBuilder = comparisonBuilder;
		this.bodyAssertionLineCreator = new BodyAssertionLineCreator(blockBuilder, metaData,
				this.bodyParser.byteArrayString(), this.comparisonBuilder);
		this.generatedClassMetaData = metaData;
		// HandlebarsTemplateProcessor serves as both the TemplateProcessor and the
		// ContractTemplate implementation here.
		this.templateProcessor = new HandlebarsTemplateProcessor();
		this.contractTemplate = new HandlebarsTemplateProcessor();
	}

	@Override
	public MethodVisitor<Then> apply(SingleContractMetadata metadata) {
		BodyMatchers bodyMatchers = this.bodyParser.responseBodyMatchers(metadata);
		Object convertedResponseBody = this.bodyParser.convertResponseBody(metadata);
		ContentType contentType = metadata.getOutputTestContentType();
		if (TEXT != contentType && FORM != contentType && DEFINED != contentType) {
			// For an already-parsed JSON map, don't re-parse nested strings as JSON.
			boolean dontParseStrings = contentType == JSON && convertedResponseBody instanceof Map;
			Function parsingClosure = dontParseStrings ? Function.identity()
					: MapConverter.JSON_PARSING_FUNCTION;
			convertedResponseBody = MapConverter.getTestSideValues(convertedResponseBody,
					parsingClosure);
		}
		else {
			// Plain text / form / explicitly-defined bodies are compared as escaped strings.
			convertedResponseBody = StringEscapeUtils.escapeJava(convertedResponseBody.toString());
		}
		addJsonBodyVerification(metadata, convertedResponseBody, bodyMatchers);
		return this;
	}

	private void addJsonBodyVerification(SingleContractMetadata contractMetadata,
			Object responseBody, BodyMatchers bodyMatchers) {
		JsonBodyVerificationBuilder jsonBodyVerificationBuilder = new JsonBodyVerificationBuilder(
				this.generatedClassMetaData.configProperties.getAssertJsonSize(),
				this.templateProcessor, this.contractTemplate, contractMetadata.getContract(),
				Optional.of(this.blockBuilder.getLineEnding()), bodyParser::postProcessJsonPath);
		// TODO: Refactor spock from should comment out bdd blocks
		Object convertedResponseBody = jsonBodyVerificationBuilder.addJsonResponseBodyCheck(
				this.blockBuilder, responseBody, bodyMatchers, this.bodyParser.responseAsString(),
				this.generatedClassMetaData.configProperties
						.getTestFramework() != TestFramework.SPOCK);
		// A scalar result (not map/list/execution property) gets a simple string comparison.
		if (!(convertedResponseBody instanceof Map || convertedResponseBody instanceof List
				|| convertedResponseBody instanceof ExecutionProperty)) {
			simpleTextResponseBodyCheck(contractMetadata, convertedResponseBody);
		}
		// Walk the body tree to emit assertions for any embedded ExecutionProperty entries.
		processBodyElement("", "", convertedResponseBody);
	}

	// Recursive dispatch over the body structure; mergedProp accumulates the JSON path.
	private void processBodyElement(String oldProp, String property, Object value) {
		String propDiff = subtract(property, oldProp);
		String prop = wrappedWithBracketsForDottedProp(propDiff);
		String mergedProp = StringUtils.hasText(property) ? oldProp + "." + prop : "";
		if (value instanceof ExecutionProperty) {
			processBodyElement(mergedProp, (ExecutionProperty) value);
		}
		else if (value instanceof Map.Entry) {
			processBodyElement(mergedProp, (Map.Entry) value);
		}
		else if (value instanceof Map) {
			processBodyElement(mergedProp, (Map) value);
		}
		else if (value instanceof List) {
			processBodyElement(mergedProp, (List) value);
		}
	}

	// Emits a custom-execution assertion line with the JSON-path read substituted in.
	private void processBodyElement(String property, ExecutionProperty exec) {
		this.blockBuilder.addLineWithEnding(exec.insertValue(
				this.bodyParser.postProcessJsonPath("parsedJson.read(\"$" + property + "\")")));
	}

	private void processBodyElement(String property, Map.Entry entry) {
		processBodyElement(property, getMapKeyReferenceString(property, entry), entry.getValue());
	}

	private void processBodyElement(String property, Map map) {
		map.entrySet().forEach(o -> processBodyElement(property, (Map.Entry) o));
	}

	private void processBodyElement(String property, List list) {
		Iterator iterator = list.iterator();
		int index = -1;
		while (iterator.hasNext()) {
			Object listElement = iterator.next();
			index = index + 1;
			String prop = getPropertyInListString(property, index);
			processBodyElement(property, prop, listElement);
		}
	}

	// e.g. "foo" + 2 -> "foo[2]"
	private String getPropertyInListString(String property, Integer listIndex) {
		return property + "[" + listIndex + "]";
	}

	private String getMapKeyReferenceString(String property, Map.Entry entry) {
		return provideProperJsonPathNotation(property) + "." + entry.getKey();
	}

	// Strips a generated get(\"...\") accessor down to the bare key for JSON-path use.
	private String provideProperJsonPathNotation(String property) {
		return property.replaceAll("(get\\(\\\\\")(.*)(\\\\\"\\))", "$2");
	}

	// Keys containing dots must be bracket-quoted in JSON path: a.b -> ['a.b']
	private String wrappedWithBracketsForDottedProp(String key) {
		String remindingKey = trailingKey(key);
		if (remindingKey.contains(".")) {
			return "['" + remindingKey + "']";
		}
		return remindingKey;
	}

	// Drops a single leading dot, if present.
	private String trailingKey(String key) {
		if (key.startsWith(".")) {
			return key.substring(1);
		}
		return key;
	}

	// Removes the first occurrence of `text` from `self`; returns `self` unchanged if absent.
	private String subtract(String self, String text) {
		int index = self.indexOf(text);
		if (index == -1) {
			return self;
		}
		int end = index + text.length();
		if (self.length() > end) {
			return self.substring(0, index) + self.substring(end);
		}
		return self.substring(0, index);
	}

	private void simpleTextResponseBodyCheck(SingleContractMetadata metadata,
			Object convertedResponseBody) {
		this.blockBuilder.addLineWithEnding(
				getSimpleResponseBodyString(this.bodyParser.responseAsString()));
		this.bodyAssertionLineCreator.appendBodyAssertionLine(metadata, "", convertedResponseBody);
		this.blockBuilder.addEndingIfNotPresent();
	}

	private String getSimpleResponseBodyString(String responseString) {
		return "String responseBody = " + responseString + this.blockBuilder.getLineEnding();
	}

	@Override
	public boolean accept(SingleContractMetadata metadata) {
		// Applies to explicit JSON responses, or DEFINED content that evaluates to JSON.
		ContentType outputTestContentType = metadata.getOutputTestContentType();
		return JSON == outputTestContentType || mostLikelyJson(outputTestContentType, metadata);
	}

	private boolean mostLikelyJson(ContentType outputTestContentType,
			SingleContractMetadata metadata) {
		return DEFINED == outputTestContentType && metadata.evaluatesToJson();
	}

}
package org.openqa.selenium.server; /* * Copyright 2006 BEA, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ import org.openqa.selenium.net.Urls; import java.io.File; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.Condition; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import java.util.logging.Logger; /** * <p> * Manages sets of CommandQueues corresponding to windows and frames in a single browser session. * </p> * * @author nelsons */ public class FrameGroupCommandQueueSet { private static final Logger log = Logger.getLogger(FrameGroupCommandQueueSet.class.getName()); static private final Map<String, FrameGroupCommandQueueSet> queueSets = new ConcurrentHashMap<String, FrameGroupCommandQueueSet>(); static private Lock dataLock = new ReentrantLock(); // static private Condition resultArrivedOnAnyQueue = dataLock.newCondition(); /** * JavaScript expression telling where the frame is within the current window (i.e., "local" to * the current window). */ private String currentLocalFrameAddress; /** * the name of the user-level window in selenium's record-keeping. 
* <p/> * The initial browser window has a blank name. When a test calls waitForPopUp, that call's * argument is the window name as far as selenium is concerned. */ private String currentSeleniumWindowName; /** * combines currentSeleniumWindowName and currentLocalFrameAddress to form an address of a frame * which is unique across all windows */ private FrameAddress currentFrameAddress = null; private String currentUniqueId = null; private final Set<File> tempFilesForSession = Collections.synchronizedSet(new HashSet<File>()); private Map<String, CommandQueue> uniqueIdToCommandQueue = new ConcurrentHashMap<String, CommandQueue>(); private Map<String, Boolean> frameAddressToJustLoaded = new ConcurrentHashMap<String, Boolean>(); private int pageLoadTimeoutInMilliseconds = 30000; private AtomicInteger millisecondDelayBetweenOperations; /** * A unique string denoting a session with a browser. * <p/> * In most cases this session begins with the selenium server configuring and starting a browser * process, and ends with a selenium server killing that process. */ private final String sessionId; private final boolean proxyInjectionMode; /** * Queues which will not be used anymore, but which cannot be immediately destroyed because their * corresponding windows may still be listening. */ private Set<CommandQueue> orphanedQueues = new HashSet<CommandQueue>(); public static final String DEFAULT_LOCAL_FRAME_ADDRESS = "top"; /** * Each user-visible window group has a selenium window name. The name of the initial browser * window is "". Even if the page reloads, the JavaScript is able to determine that it is this * initial window because window.opener==null. Any window for whom window.opener!=null is a * "pop-up". */ public static final String DEFAULT_SELENIUM_WINDOW_NAME = ""; private int portDriversShouldContact; private RemoteControlConfiguration configuration; /** * The extension Javascript particular to this session. 
*/ private String extensionJs; public FrameGroupCommandQueueSet(String sessionId, int portDriversShouldContact, RemoteControlConfiguration configuration) { this.sessionId = sessionId; this.portDriversShouldContact = portDriversShouldContact; this.configuration = configuration; this.extensionJs = ""; proxyInjectionMode = configuration.getProxyInjectionModeArg(); /* * Initialize delay, using the static CommandQueue getSpeed in order to imitate previous * behavior, wherein that static field would control the speed for all sessions. The speed for a * frame group's queues will only be changed if they're changed via this class's setSpeed(). */ millisecondDelayBetweenOperations = new AtomicInteger(CommandQueue.getSpeed()); } private String selectWindow(String seleniumWindowName) { if (!proxyInjectionMode) { String result; try { result = doCommand("selectWindow", seleniumWindowName, ""); } catch (RemoteCommandException rce) { result = rce.getMessage(); } return result; } if (seleniumWindowName == null) { seleniumWindowName = DEFAULT_SELENIUM_WINDOW_NAME; } if (seleniumWindowName.startsWith("title=")) { return selectWindowByRemoteTitle(seleniumWindowName.substring(6)); } // TODO separate name and var into separate functions if (seleniumWindowName.startsWith("name=")) { seleniumWindowName = seleniumWindowName.substring(5); return selectWindowByNameOrVar(seleniumWindowName); } if (seleniumWindowName.startsWith("var=")) { seleniumWindowName = seleniumWindowName.substring(4); return selectWindowByNameOrVar(seleniumWindowName); } // no locator prefix; try the default strategies String match = findMatchingFrameAddress(uniqueIdToCommandQueue.keySet(), seleniumWindowName, DEFAULT_LOCAL_FRAME_ADDRESS); // If we didn't find a match, try finding the frame address by window title if (match == null) { return selectWindowByRemoteTitle(seleniumWindowName); } // we found a match setCurrentFrameAddress(match); return "OK"; } private String selectWindowByNameOrVar(String seleniumWindowName) 
{ String match = findMatchingFrameAddress(uniqueIdToCommandQueue.keySet(), seleniumWindowName, DEFAULT_LOCAL_FRAME_ADDRESS); if (match == null) { return "ERROR: could not find window " + seleniumWindowName; } setCurrentFrameAddress(match); return "OK"; } private String selectWindowByRemoteTitle(String title) { String match = null; boolean windowFound = false; for (String uniqueId : uniqueIdToCommandQueue.keySet()) { CommandQueue commandQueue = uniqueIdToCommandQueue.get(uniqueId); String windowName; try { windowName = getRemoteWindowTitle(commandQueue); } catch (WindowClosedException e) { // If the window is closed, then it can't be the window we're looking for continue; } if (windowName.equals(title)) { windowFound = true; match = uniqueId; break; } } // Return with an error if we didn't find the window if (!windowFound) { return "ERROR: could not find window " + title; } setCurrentFrameAddress(match); return "OK"; } public CommandQueue getCommandQueue() { return getCommandQueue(currentUniqueId); } /** * Retrieves a FrameGroupCommandQueueSet for the specified sessionId */ static public FrameGroupCommandQueueSet getQueueSet(String sessionId) { if (sessionId == null) { throw new NullPointerException( "sessionId should not be null; has this session been started yet?"); } FrameGroupCommandQueueSet queueSet = FrameGroupCommandQueueSet.queueSets.get(sessionId); if (queueSet == null) { throw new RuntimeException("sessionId " + sessionId + " doesn't exist; perhaps this session was already stopped?"); } return queueSet; } /** * Creates a FrameGroupCommandQueueSet for the specifed sessionId */ static public FrameGroupCommandQueueSet makeQueueSet(String sessionId, int portDriversShouldContact, RemoteControlConfiguration configuration) { synchronized (queueSets) { FrameGroupCommandQueueSet queueSet = FrameGroupCommandQueueSet.queueSets.get(sessionId); if (queueSet != null) { throw new RuntimeException("sessionId " + sessionId + " already exists"); } queueSet = new 
FrameGroupCommandQueueSet(sessionId, portDriversShouldContact, configuration); FrameGroupCommandQueueSet.queueSets.put(sessionId, queueSet); return queueSet; } } /** * Deletes the specified FrameGroupCommandQueueSet */ static public void clearQueueSet(String sessionId) { log.fine("clearing queue set"); FrameGroupCommandQueueSet queue = FrameGroupCommandQueueSet.queueSets.get(sessionId); if (null != queue) { queue.endOfLife(); FrameGroupCommandQueueSet.queueSets.remove(sessionId); } } public CommandQueue getCommandQueue(String uniqueId) { CommandQueue q = uniqueIdToCommandQueue.get(uniqueId); if (q == null) { log.fine("---------allocating new CommandQueue for " + uniqueId); q = new CommandQueue(sessionId, uniqueId, millisecondDelayBetweenOperations.get(), configuration); uniqueIdToCommandQueue.put(uniqueId, q); } else { log.fine("---------retrieving CommandQueue for " + uniqueId); } return uniqueIdToCommandQueue.get(uniqueId); } /** * Sets this frame group's speed, and updates all command queues to use this speed. * * @param i millisecond delay between queue operations */ protected void setSpeed(int i) { millisecondDelayBetweenOperations.set(i); for (CommandQueue queue : uniqueIdToCommandQueue.values()) { queue.setQueueDelay(i); } } /** * Returns the delay for this frame group's command queues * * @return millisecond delay between queue operations */ protected int getSpeed() { return millisecondDelayBetweenOperations.get(); } /** * Schedules the specified command to be retrieved by the next call to handle command result, and * returns the result of that command. * * @param command - the remote command verb * @param arg - the first remote argument (meaning depends on the verb) * @param value - the second remote argument * @return - the command result, defined by the remote JavaScript. "getX" style commands may * return data from the browser; other "doX" style commands may just return "OK" or an * error message. 
* @throws RemoteCommandException if a waitForLoad failed. */ public String doCommand(String command, String arg, String value) throws RemoteCommandException { if (proxyInjectionMode) { if ("selectFrame".equals(command)) { if ("".equals(arg)) { arg = "top"; } boolean newFrameFound = false; // DGF iterate in lexical order for testability Set<String> idSet = uniqueIdToCommandQueue.keySet(); String[] ids = idSet.toArray(new String[0]); Arrays.sort(ids); for (String uniqueId : ids) { CommandQueue frameQ = uniqueIdToCommandQueue.get(uniqueId); if (frameQ.isClosed()) { continue; } FrameAddress frameAddress = frameQ.getFrameAddress(); if (frameAddress.getWindowName().equals(currentSeleniumWindowName)) { if (queueMatchesFrameAddress(frameQ, currentLocalFrameAddress, arg)) { setCurrentFrameAddress(uniqueId); newFrameFound = true; break; } } } if (!newFrameFound) { return "ERROR: starting from frame " + currentFrameAddress + ", could not find frame " + arg; } return "OK"; } if ("selectWindow".equals(command)) { return selectWindow(arg); } if ("waitForPopUp".equals(command)) { String waitingForThisWindowName = arg; long timeoutInMilliseconds = Long.parseLong(value); String uniqueId; try { // Wait for the popup window to load, if it throws // an exception then we should simply return the // command result uniqueId = waitForLoad(waitingForThisWindowName, "top", (int) (timeoutInMilliseconds / 1000l)); // if (!result.equals("OK")) { // return result; // } } catch (RemoteCommandException ex) { return ex.getResult(); } // Return the result of selecting the frame address, not the window name setCurrentFrameAddress(uniqueId); return "OK"; } if ("waitForPageToLoad".equals(command)) { return waitForLoad(arg); } if ("waitForFrameToLoad".equals(command)) { String waitingForThisFrameName = arg; long timeoutInMilliseconds = Long.parseLong(value); String currentWindowName = getCommandQueue().getFrameAddress().getWindowName(); String result; try { result = waitForLoad(currentWindowName, 
waitingForThisFrameName, (int) (timeoutInMilliseconds / 1000l)); } catch (RemoteCommandException e) { return e.getMessage(); } setCurrentFrameAddress(result); return "OK"; } if ("setTimeout".equals(command)) { try { pageLoadTimeoutInMilliseconds = Integer.parseInt(arg); } catch (NumberFormatException e) { return "ERROR: setTimeout arg is not a number: " + arg; } return "OK"; } if ("getAllWindowNames".equals(command)) { return getAttributeFromAllWindows("name"); } if ("getAllWindowTitles".equals(command)) { return getAttributeFromAllWindows("document.title"); } if ("getAllWindowIds".equals(command)) { return getAttributeFromAllWindows("id"); } if ("getAttributeFromAllWindows".equals(command)) { return getAttributeFromAllWindows(arg); } // handle closed queue (the earlier commands don't care about closed queues) CommandQueue queue = getCommandQueue(); if (queue.isClosed()) { try { String uniqueId = waitForLoad(currentSeleniumWindowName, currentLocalFrameAddress, 1); setCurrentFrameAddress(uniqueId); } catch (RemoteCommandException e) { return WindowClosedException.WINDOW_CLOSED_ERROR; } } if ("open".equals(command)) { markWhetherJustLoaded(currentUniqueId, false); String t = getCommandQueue().doCommand(command, arg, value); if (!"OK".equals(t)) { return t; } return waitForLoad(pageLoadTimeoutInMilliseconds); } // strip off AndWait - in PI mode we handle this in the server rather than in core... if (command.endsWith("AndWait")) { markWhetherJustLoaded(currentUniqueId, false); command = command.substring(0, command.length() - "AndWait".length()); String t = getCommandQueue().doCommand(command, arg, value); if (!t.startsWith("OK")) { return t; } return waitForLoad(pageLoadTimeoutInMilliseconds); } } // if (proxyInjectionMode) markWhetherJustLoaded(currentUniqueId, false); return getCommandQueue().doCommand(command, arg, value); } /** * Generates a CSV string from the given string array. * * @param stringArray Array of strings to generate a CSV. 
*/ public String getStringArrayAccessorCSV(String[] stringArray) { StringBuffer sb = new StringBuffer(); for (int i = 0; i < stringArray.length; i++) { // Obey specs for String Array accessor responses String str = stringArray[i]; // If the string contains a slash make it appear as \\ in the protocol // 1 slash in Java/regex is \\\\ str = str.replaceAll("\\\\", "\\\\\\\\"); str = str.replaceAll(",", "\\\\,"); sb.append(str); if ((i + 1) < stringArray.length) { sb.append('\\'); sb.append(','); sb.append(" "); } } return sb.toString(); } /** * Get all window attributes from the server. Since the JS in the browser cannot possibly know * about all windows. */ private String getAttributeFromAllWindows(String attributeName) { // If we're not in PI mode, send the command back to the browser. if (!proxyInjectionMode) { String result; try { result = doCommand("getAttributeFromAllWindows", "", ""); } catch (RemoteCommandException rce) { result = rce.getMessage(); } return result; } Set<String> frameAddressSet = uniqueIdToCommandQueue.keySet(); List<String> windowTitles = new ArrayList<String>(); // Find all window names in the set of frame addresses for (String uniqueId : frameAddressSet) { CommandQueue q = uniqueIdToCommandQueue.get(uniqueId); String attribute; try { attribute = getRemoteString(q, "getEval", "window." + attributeName, ""); } catch (WindowClosedException e) { continue; } windowTitles.add(attribute); } String frameAddressCSV = getStringArrayAccessorCSV(windowTitles.toArray(new String[0])); return "OK," + frameAddressCSV; } /** * Get a window title in the given CommandQueue. * * @param queue CommandQueue to get the title from. * @return Returns the title if it is found. 
 * @throws WindowClosedException
 */
private String getRemoteWindowTitle(CommandQueue queue) throws WindowClosedException {
    return getRemoteString(queue, "getTitle", "", "");
}

// Runs a command on the given queue and strips the "OK," prefix from the
// result; translates the window-closed sentinel into an exception.
private String getRemoteString(CommandQueue queue, String command, String arg1, String arg2) throws WindowClosedException {
    String cmdResult = queue.doCommand(command, arg1, arg2);
    if (cmdResult == null) cmdResult = "";
    if (cmdResult.startsWith("OK,")) {
        // Parse out and remove the OK, from the command result
        cmdResult = cmdResult.substring(3);
        return cmdResult;
    }
    if (WindowClosedException.WINDOW_CLOSED_ERROR.equals(cmdResult)) {
        throw new WindowClosedException();
    }
    throw new RuntimeException("unexpected browser error from getTitle: " + cmdResult);
}

// Waits for the current window/frame to finish loading, selecting the frame
// that loaded. A timeout below one second is rounded up to one second.
public String waitForLoad(long timeoutInMilliseconds) throws RemoteCommandException {
    final String uniqueId;
    int timeoutInSeconds = (int) (timeoutInMilliseconds / 1000l);
    if (timeoutInSeconds == 0) {
        timeoutInSeconds = 1;
    }
    uniqueId = waitForLoad(currentSeleniumWindowName, currentLocalFrameAddress, timeoutInSeconds);
    setCurrentFrameAddress(uniqueId);
    // NOTE(review): setCurrentFrameAddress asserts non-null, so this check
    // appears unreachable; kept for defensiveness.
    if (uniqueId == null) {
        throw new RuntimeException("uniqueId is null in waitForLoad...this should not happen.");
    }
    return "OK";
}

private String waitForLoad(String timeoutInMilliseconds) throws RemoteCommandException {
    return waitForLoad(Long.parseLong(timeoutInMilliseconds));
}

// Polls once per second (under dataLock) for a just-loaded frame matching the
// given window name / local frame address, until the timeout expires.
private String waitForLoad(String waitingForThisWindowName, String waitingForThisLocalFrame, int timeoutInSeconds) throws RemoteCommandException {
    for (String matchingFrameAddress = null; timeoutInSeconds >= 0; timeoutInSeconds--) {
        dataLock.lock();
        try {
            log.fine("waiting for window '" + waitingForThisWindowName
                    + "' local frame '" + waitingForThisLocalFrame
                    + "' for " + timeoutInSeconds + " more secs");
            matchingFrameAddress = findMatchingFrameAddress(frameAddressToJustLoaded.keySet(),
                    waitingForThisWindowName, waitingForThisLocalFrame);
            if (null != matchingFrameAddress) {
                log.fine("wait is over: window '" + waitingForThisWindowName
                        + "' was seen at last (" + matchingFrameAddress + ")");
                /*
                 * Remove it from the list of matching frame addresses since it just loaded. Mark whether
                 * just loaded to aid debugging.
                 */
                markWhetherJustLoaded(matchingFrameAddress, false);
                return matchingFrameAddress;
            }
            // Sleep up to one second (or until a result arrives) before re-checking.
            waitUntilSignalOrNumSecondsPassed(resultArrivedOnAnyQueue, 1);
        } finally {
            dataLock.unlock();
        }
    }
    String result = "timed out waiting for window '" + waitingForThisWindowName + "' to appear";
    throw new RemoteCommandException(result, result);
}

/**
 * Waits on the condition, making sure to wait at least as many seconds as specified, unless the
 * condition is signaled first.
 *
 * @param condition the condition to await
 * @param numSeconds minimum number of seconds to wait when not signaled
 * @return true if the condition was signaled before the deadline
 */
protected static boolean waitUntilSignalOrNumSecondsPassed(Condition condition, int numSeconds) {
    boolean result = false;
    if (numSeconds > 0) {
        long now = System.currentTimeMillis();
        long deadline = now + (numSeconds * 1000);
        while (now < deadline) {
            try {
                log.fine("waiting for condition for " + (deadline - now) + " more ms");
                result = condition.await(deadline - now, TimeUnit.MILLISECONDS);
                log.fine("got condition? : " + result);
                // Either signaled or timed out: terminate the loop.
                now = deadline;
            } catch (InterruptedException ie) {
                // Interrupted: recompute and keep waiting out the remainder.
                now = System.currentTimeMillis();
            }
        }
    }
    return result;
}

// Sleeps for at least ms milliseconds, re-sleeping across interrupts.
protected static void sleepForAtLeast(long ms) {
    if (ms > 0) {
        long now = System.currentTimeMillis();
        long deadline = now + ms;
        while (now < deadline) {
            try {
                Thread.sleep(deadline - now);
                now = deadline; // terminates loop
            } catch (InterruptedException ie) {
                now = System.currentTimeMillis();
            }
        }
    }
}

// Returns the first uniqueId in the set whose frame matches the given
// window name / local frame address, or null if none matches.
private String findMatchingFrameAddress(Set<String> uniqueIds, String windowName, String localFrame) {
    for (String uniqueId : uniqueIds) {
        if (matchesFrameAddress(uniqueId, windowName, localFrame)) {
            return uniqueId;
        }
    }
    return null;
}

/**
 * Does uniqueId point at a window that matches 'windowName'/'localFrame'?
 *
 * @param uniqueId
 * @param windowName
 * @param localFrame
 * @return True if the frame addressed by uniqueId is addressable by window name 'windowName' and
 *         local frame address 'localFrame'.
 */
private boolean matchesFrameAddress(String uniqueId, String windowName, String localFrame) {
    // it's an odd selenium convention: "null" maps to the initial, main window:
    if (windowName == null || windowName.equals("null")) {
        windowName = DEFAULT_SELENIUM_WINDOW_NAME;
    }
    if (localFrame == null) {
        localFrame = "top";
    }
    CommandQueue queue = uniqueIdToCommandQueue.get(uniqueId);
    if (queue.isClosed()) {
        return false;
    }
    boolean windowJustLoaded = justLoaded(uniqueId);
    FrameAddress frameAddress = queue.getFrameAddress();
    // The local frame address must match exactly before any window-name logic.
    if (frameAddress == null || !frameAddress.getLocalFrameAddress().equals(localFrame)) {
        return false;
    }
    // DGF Windows that have just loaded may not know their true identity
    if (windowJustLoaded) {
        String title;
        try {
            title = getRemoteWindowTitle(queue);
        } catch (WindowClosedException e) {
            return false;
        }
        // Re-mark as just-loaded: the title query above cleared the flag as a
        // side effect of routing a command through the queue.
        markWhetherJustLoaded(uniqueId, true);
        if (title.equals(windowName)) {
            return true;
        }
    }
    String actualWindowName = frameAddress.getWindowName();
    if (windowName.equals(actualWindowName)) {
        return true;
    }
    if (windowName.equals("_blank") && actualWindowName.startsWith("selenium_blank")) {
        // DGF the API automatically changed target="_blank" to target="selenium_blank12345"
        return true;
    }
    // Last resort: the window may be reachable through a JS variable alias.
    return uniqueIdToCommandQueue.get(uniqueId).isWindowPointedToByJsVariable(windowName);
}

/**
 * <p>
 * Accepts a command reply, and retrieves the next command to run.
 * </p>
 *
 * @param commandResult - the reply from the previous command, or null
 * @param incomingFrameAddress - frame from which the reply came
 * @param uniqueId - identifier of the frame's command queue
 * @param justLoaded - true if the frame reports it has just finished loading
 * @param jsWindowNameVars - JS variable names known to point at this window, or null
 * @return - the next command to run
 */
public RemoteCommand handleCommandResult(String commandResult, FrameAddress incomingFrameAddress, String uniqueId, boolean justLoaded, List<?> jsWindowNameVars) {
    CommandQueue queue = getCommandQueue(uniqueId);
    queue.setFrameAddress(incomingFrameAddress);
    if (jsWindowNameVars != null) {
        for (Object jsWindowNameVar : jsWindowNameVars) {
            queue.addJsWindowNameVar((String) jsWindowNameVar);
        }
    }
    if (justLoaded) {
        // A fresh page load carries no meaningful command result.
        markWhetherJustLoaded(uniqueId, true);
        commandResult = null;
    }
    if (WindowClosedException.WINDOW_CLOSED_ERROR.equals(commandResult)) {
        // The window is gone: close its queue and end the test session.
        queue.declareClosed();
        return new DefaultRemoteCommand("testComplete", "", "");
    }
    return queue.handleCommandResult(commandResult);
}

/**
 * <p>
 * Empty queues, and thereby wake up any threads that are hanging around.
 */
public void endOfLife() {
    removeTemporaryFiles();
    for (CommandQueue frameQ : uniqueIdToCommandQueue.values()) {
        frameQ.endOfLife();
    }
}

// True if the frame identified by uniqueId has loaded since the flag was last cleared.
private boolean justLoaded(String uniqueId) {
    boolean result = false;
    if (null != uniqueId) {
        result = frameAddressToJustLoaded.containsKey(uniqueId);
    }
    return result;
}

// Sets/clears the just-loaded flag for a frame and wakes any waiters.
// No-op (and no lock taken) when the flag already has the requested value.
private void markWhetherJustLoaded(String frameAddress, boolean justLoaded) {
    boolean oldState = justLoaded(frameAddress);
    if (oldState != justLoaded) {
        dataLock.lock();
        try {
            if (justLoaded) {
                log.fine(frameAddress + " marked as just loaded");
                frameAddressToJustLoaded.put(frameAddress, true);
            } else {
                log.fine(frameAddress + " marked as NOT just loaded");
                frameAddressToJustLoaded.remove(frameAddress);
            }
            // Wake threads blocked in waitForLoad so they re-check the map.
            resultArrivedOnAnyQueue.signalAll();
        } finally {
            dataLock.unlock();
        }
    }
}

// Makes the given frame the current target of subsequent commands and clears
// its just-loaded flag.
private void setCurrentFrameAddress(String uniqueId) {
    assert uniqueId != null;
    FrameAddress frameAddress = uniqueIdToCommandQueue.get(uniqueId).getFrameAddress();
    this.currentUniqueId = uniqueId;
    this.currentFrameAddress = frameAddress;
    this.currentSeleniumWindowName = frameAddress.getWindowName();
    this.currentLocalFrameAddress = frameAddress.getLocalFrameAddress();
    markWhetherJustLoaded(uniqueId, false);
    log.fine("Current uniqueId set to " + uniqueId + ", frameAddress = " + frameAddress);
}

// Builds a FrameAddress, substituting the default window name when the
// browser-side core did not report one.
public static FrameAddress makeFrameAddress(String seleniumWindowName, String localFrameAddress) {
    if (seleniumWindowName == null) {
        // we are talking to a version of selenium core which isn't telling us the
        // seleniumWindowName. Set it to the default, which will be right most of
        // the time.
        seleniumWindowName = DEFAULT_SELENIUM_WINDOW_NAME;
    }
    return FrameAddress.make(seleniumWindowName, localFrameAddress);
}

// /**
// * TODO: someone should call this
// */
// public void garbageCollectOrphans() {
// /**
// * The list of orphaned queues was assembled in the browser session
// * preceding the current one.
At this point it is safe to get rid // * of them; their windows must have long since being destroyed. // */ // for (CommandQueue q : orphanedQueues) { // q.endOfLife(); // } // orphanedQueues.clear(); // } public void reset(String baseUrl) { log.fine("resetting frame group"); if (proxyInjectionMode) { // shut down all but the primary top level connection List<FrameAddress> newOrphans = new LinkedList<FrameAddress>(); for (String uniqueId : uniqueIdToCommandQueue.keySet()) { CommandQueue q = getCommandQueue(uniqueId); FrameAddress frameAddress = q.getFrameAddress(); if (frameAddress.getLocalFrameAddress().equals(DEFAULT_LOCAL_FRAME_ADDRESS) && frameAddress.getWindowName().equals(DEFAULT_SELENIUM_WINDOW_NAME)) { continue; } if (frameAddress.getLocalFrameAddress().equals(DEFAULT_LOCAL_FRAME_ADDRESS)) { log.fine("Trying to close " + frameAddress); try { q.doCommandWithoutWaitingForAResponse("close", "", ""); } catch (WindowClosedException e) { log.fine("Window was already closed"); } } orphanedQueues.add(q); newOrphans.add(frameAddress); } for (FrameAddress frameAddress : newOrphans) { uniqueIdToCommandQueue.remove(frameAddress); } } removeTemporaryFiles(); selectWindow(DEFAULT_SELENIUM_WINDOW_NAME); // String defaultUrl = "http://localhost:" StringBuilder openUrl = new StringBuilder(); if (proxyInjectionMode) { openUrl.append("http://localhost:"); openUrl.append(portDriversShouldContact); openUrl.append("/selenium-server/core/InjectedRemoteRunner.html"); } else { openUrl.append(Urls.toProtocolHostAndPort(baseUrl)); } try { doCommand("open", openUrl.toString(), ""); // will close out subframes } catch (RemoteCommandException rce) { log.fine("RemoteCommandException in reset: " + rce.getMessage()); } } protected void removeTemporaryFiles() { for (File file : tempFilesForSession) { boolean deleteSuccessful = file.delete(); if (!deleteSuccessful) { log.warning("temp file for session " + sessionId + " not deleted " + file.getAbsolutePath()); } } tempFilesForSession.clear(); 
} protected void addTemporaryFile(File tf) { tempFilesForSession.add(tf); } private boolean queueMatchesFrameAddress(CommandQueue queue, String localFrameAddress, String newFrameAddressExpression) { boolean result; try { result = doBooleanCommand(queue, "getWhetherThisFrameMatchFrameExpression", localFrameAddress, newFrameAddressExpression); } catch (WindowClosedException e) { return false; } return result; } private boolean doBooleanCommand(CommandQueue queue, String command, String arg1, String arg2) throws WindowClosedException { String booleanResult = queue.doCommand(command, arg1, arg2); boolean result; if ("OK,true".equals(booleanResult)) { result = true; } else if ("OK,false".equals(booleanResult)) { result = false; } else { if (WindowClosedException.WINDOW_CLOSED_ERROR.equals(booleanResult)) { throw new WindowClosedException(); } throw new RuntimeException("unexpected return " + booleanResult + " from boolean command " + command); } log.fine("doBooleancommand(" + command + "(" + arg1 + ", " + arg2 + ") -> " + result); return result; } public void setExtensionJs(String extensionJs) { this.extensionJs = extensionJs; } public String getExtensionJs() { return extensionJs; } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.utils; import java.io.*; import java.lang.reflect.Field; import java.math.BigInteger; import java.net.InetAddress; import java.net.NetworkInterface; import java.net.SocketException; import java.net.URL; import java.net.UnknownHostException; import java.nio.ByteBuffer; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.*; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.zip.Checksum; import com.google.common.base.Joiner; import com.google.common.collect.AbstractIterator; import org.apache.cassandra.io.util.FileUtils; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.cassandra.auth.IAuthenticator; import org.apache.cassandra.auth.IAuthorizer; import org.apache.cassandra.config.DatabaseDescriptor; import org.apache.cassandra.db.DecoratedKey; import org.apache.cassandra.dht.IPartitioner; import org.apache.cassandra.dht.Range; import org.apache.cassandra.dht.Token; import 
org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.io.IVersionedSerializer;
import org.apache.cassandra.io.util.DataOutputBuffer;
import org.apache.cassandra.io.util.IAllocator;
import org.apache.cassandra.net.AsyncOneResponse;
import org.apache.thrift.TBase;
import org.apache.thrift.TDeserializer;
import org.apache.thrift.TException;
import org.apache.thrift.TSerializer;
import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.map.ObjectMapper;

public class FBUtilities
{
    private static final Logger logger = LoggerFactory.getLogger(FBUtilities.class);

    // Shared Jackson mapper; ObjectMapper is thread-safe for read/write use.
    private static ObjectMapper jsonMapper = new ObjectMapper(new JsonFactory());

    public static final BigInteger TWO = new BigInteger("2");

    private static final String DEFAULT_TRIGGER_DIR = "triggers";

    private static final String OPERATING_SYSTEM = System.getProperty("os.name").toLowerCase();

    // Lazily resolved; volatile so the benign init race publishes safely.
    private static volatile InetAddress localInetAddress;
    private static volatile InetAddress broadcastInetAddress;

    /**
     * Number of processors to use, overridable via the
     * "cassandra.available_processors" system property.
     */
    public static int getAvailableProcessors()
    {
        if (System.getProperty("cassandra.available_processors") != null)
            return Integer.parseInt(System.getProperty("cassandra.available_processors"));
        else
            return Runtime.getRuntime().availableProcessors();
    }

    // Per-thread MD5 digest; get() resets it so callers always see a fresh digest.
    private static final ThreadLocal<MessageDigest> localMD5Digest = new ThreadLocal<MessageDigest>()
    {
        @Override
        protected MessageDigest initialValue()
        {
            return newMessageDigest("MD5");
        }

        @Override
        public MessageDigest get()
        {
            MessageDigest digest = super.get();
            digest.reset();
            return digest;
        }
    };

    private static final ThreadLocal<Random> localRandom = new ThreadLocal<Random>()
    {
        @Override
        protected Random initialValue()
        {
            return new Random();
        }
    };

    public static final int MAX_UNSIGNED_SHORT = 0xFFFF;

    public static MessageDigest threadLocalMD5Digest()
    {
        return localMD5Digest.get();
    }

    /** Creates a MessageDigest, rethrowing the checked exception as unchecked. */
    public static MessageDigest newMessageDigest(String algorithm)
    {
        try
        {
            return MessageDigest.getInstance(algorithm);
        }
        catch (NoSuchAlgorithmException nsae)
        {
            throw new RuntimeException("the requested digest algorithm (" + algorithm + ") is not available", nsae);
        }
    }

    public static Random threadLocalRandom()
    {
        return localRandom.get();
    }

    /**
     * Please use getBroadcastAddress instead. You need this only when you have to listen/connect.
     */
    public static InetAddress getLocalAddress()
    {
        if (localInetAddress == null)
            try
            {
                localInetAddress = DatabaseDescriptor.getListenAddress() == null
                                    ? InetAddress.getLocalHost()
                                    : DatabaseDescriptor.getListenAddress();
            }
            catch (UnknownHostException e)
            {
                throw new RuntimeException(e);
            }
        return localInetAddress;
    }

    /** Broadcast address if configured, otherwise falls back to the listen address. */
    public static InetAddress getBroadcastAddress()
    {
        if (broadcastInetAddress == null)
            broadcastInetAddress = DatabaseDescriptor.getBroadcastAddress() == null
                                 ? getLocalAddress()
                                 : DatabaseDescriptor.getBroadcastAddress();
        return broadcastInetAddress;
    }

    /** All addresses bound on all network interfaces of this machine. */
    public static Collection<InetAddress> getAllLocalAddresses()
    {
        Set<InetAddress> localAddresses = new HashSet<InetAddress>();
        try
        {
            Enumeration<NetworkInterface> nets = NetworkInterface.getNetworkInterfaces();
            if (nets != null)
            {
                while (nets.hasMoreElements())
                    localAddresses.addAll(Collections.list(nets.nextElement().getInetAddresses()));
            }
        }
        catch (SocketException e)
        {
            throw new AssertionError(e);
        }
        return localAddresses;
    }

    /**
     * Given two bit arrays represented as BigIntegers, containing the given
     * number of significant bits, calculate a midpoint.
     *
     * @param left The left point.
     * @param right The right point.
     * @param sigbits The number of bits in the points that are significant.
     * @return A midpoint that will compare bitwise halfway between the params, and
     * a boolean representing whether a non-zero lsbit remainder was generated.
 */
    public static Pair<BigInteger,Boolean> midpoint(BigInteger left, BigInteger right, int sigbits)
    {
        BigInteger midpoint;
        boolean remainder;
        if (left.compareTo(right) < 0)
        {
            BigInteger sum = left.add(right);
            remainder = sum.testBit(0);
            midpoint = sum.shiftRight(1);
        }
        else
        {
            BigInteger max = TWO.pow(sigbits);
            // wrapping case
            BigInteger distance = max.add(right).subtract(left);
            remainder = distance.testBit(0);
            midpoint = distance.shiftRight(1).add(left).mod(max);
        }
        return Pair.create(midpoint, remainder);
    }

    public static int compareUnsigned(byte[] bytes1, byte[] bytes2, int offset1, int offset2, int len1, int len2)
    {
        return FastByteComparisons.compareTo(bytes1, offset1, len1, bytes2, offset2, len2);
    }

    public static int compareUnsigned(byte[] bytes1, byte[] bytes2)
    {
        return compareUnsigned(bytes1, bytes2, 0, 0, bytes1.length, bytes2.length);
    }

    /**
     * @return The bitwise XOR of the inputs. The output will be the same length as the
     * longer input, but if either input is null, the output will be null.
     */
    public static byte[] xor(byte[] left, byte[] right)
    {
        if (left == null || right == null)
            return null;

        // Ensure 'left' is the shorter array so the loop bound is safe.
        if (left.length > right.length)
        {
            byte[] swap = left;
            left = right;
            right = swap;
        }

        // left.length is now <= right.length
        byte[] out = Arrays.copyOf(right, right.length);
        for (int i = 0; i < left.length; i++)
        {
            out[i] = (byte)((left[i] & 0xFF) ^ (right[i] & 0xFF));
        }
        return out;
    }

    /** MD5 hash of the buffer, interpreted as a non-negative BigInteger. */
    public static BigInteger hashToBigInteger(ByteBuffer data)
    {
        byte[] result = hash(data);
        BigInteger hash = new BigInteger(result);
        return hash.abs();
    }

    /** MD5 digest over the remaining bytes of each buffer; buffers are not consumed. */
    public static byte[] hash(ByteBuffer... data)
    {
        MessageDigest messageDigest = localMD5Digest.get();
        for (ByteBuffer block : data)
        {
            if (block.hasArray())
                messageDigest.update(block.array(), block.arrayOffset() + block.position(), block.remaining());
            else
                messageDigest.update(block.duplicate());
        }
        return messageDigest.digest();
    }

    /** Length-prefixed Thrift serialization. */
    @Deprecated
    public static void serialize(TSerializer serializer, TBase struct, DataOutput out) throws IOException
    {
        assert serializer != null;
        assert struct != null;
        assert out != null;
        byte[] bytes;
        try
        {
            bytes = serializer.serialize(struct);
        }
        catch (TException e)
        {
            throw new RuntimeException(e);
        }
        out.writeInt(bytes.length);
        out.write(bytes);
    }

    /** Counterpart of {@link #serialize}: reads a length-prefixed Thrift struct. */
    @Deprecated
    public static void deserialize(TDeserializer deserializer, TBase struct, DataInput in) throws IOException
    {
        assert deserializer != null;
        assert struct != null;
        assert in != null;
        byte[] bytes = new byte[in.readInt()];
        in.readFully(bytes);
        try
        {
            deserializer.deserialize(struct, bytes);
        }
        catch (TException ex)
        {
            throw new IOException(ex);
        }
    }

    /** Sorts keys in ring order, handling ranges that wrap around the token ring. */
    public static void sortSampledKeys(List<DecoratedKey> keys, Range<Token> range)
    {
        if (range.left.compareTo(range.right) >= 0)
        {
            // range wraps. have to be careful that we sort in the same order as the range to find the right midpoint.
            final Token right = range.right;
            Comparator<DecoratedKey> comparator = new Comparator<DecoratedKey>()
            {
                public int compare(DecoratedKey o1, DecoratedKey o2)
                {
                    if ((right.compareTo(o1.token) < 0 && right.compareTo(o2.token) < 0) ||
                        (right.compareTo(o1.token) > 0 && right.compareTo(o2.token) > 0))
                    {
                        // both tokens are on the same side of the wrap point
                        return o1.compareTo(o2);
                    }
                    return o2.compareTo(o1);
                }
            };
            Collections.sort(keys, comparator);
        }
        else
        {
            // unwrapped range (left < right). standard sort is all we need.
Collections.sort(keys); } } public static String resourceToFile(String filename) throws ConfigurationException { ClassLoader loader = FBUtilities.class.getClassLoader(); URL scpurl = loader.getResource(filename); if (scpurl == null) throw new ConfigurationException("unable to locate " + filename); return scpurl.getFile(); } public static File cassandraTriggerDir() { File triggerDir = null; if (System.getProperty("cassandra.triggers_dir") != null) { triggerDir = new File(System.getProperty("cassandra.triggers_dir")); } else { URL confDir = FBUtilities.class.getClassLoader().getResource(DEFAULT_TRIGGER_DIR); if (confDir != null) triggerDir = new File(confDir.getFile()); } if (triggerDir == null || !triggerDir.exists()) { logger.warn("Trigger directory doesn't exist, please create it and try again."); return null; } return triggerDir; } public static String getReleaseVersionString() { InputStream in = null; try { in = FBUtilities.class.getClassLoader().getResourceAsStream("org/apache/cassandra/config/version.properties"); if (in == null) { return "Unknown"; } Properties props = new Properties(); props.load(in); return props.getProperty("CassandraVersion"); } catch (Exception e) { logger.warn("Unable to load version.properties", e); return "debug version"; } finally { FileUtils.closeQuietly(in); } } public static long timestampMicros() { // we use microsecond resolution for compatibility with other client libraries, even though // we can't actually get microsecond precision. 
return System.currentTimeMillis() * 1000; } public static void waitOnFutures(Iterable<Future<?>> futures) { for (Future f : futures) waitOnFuture(f); } public static <T> T waitOnFuture(Future<T> future) { try { return future.get(); } catch (ExecutionException ee) { throw new RuntimeException(ee); } catch (InterruptedException ie) { throw new AssertionError(ie); } } public static void waitOnFutures(List<AsyncOneResponse> results, long ms) throws TimeoutException { for (AsyncOneResponse result : results) result.get(ms, TimeUnit.MILLISECONDS); } public static IPartitioner newPartitioner(String partitionerClassName) throws ConfigurationException { if (!partitionerClassName.contains(".")) partitionerClassName = "org.apache.cassandra.dht." + partitionerClassName; return FBUtilities.construct(partitionerClassName, "partitioner"); } public static IAllocator newOffHeapAllocator(String offheap_allocator) throws ConfigurationException { if (!offheap_allocator.contains(".")) offheap_allocator = "org.apache.cassandra.io.util." + offheap_allocator; return FBUtilities.construct(offheap_allocator, "off-heap allocator"); } public static IAuthorizer newAuthorizer(String className) throws ConfigurationException { if (!className.contains(".")) className = "org.apache.cassandra.auth." + className; return FBUtilities.construct(className, "authorizer"); } public static IAuthenticator newAuthenticator(String className) throws ConfigurationException { if (!className.contains(".")) className = "org.apache.cassandra.auth." + className; return FBUtilities.construct(className, "authenticator"); } /** * @return The Class for the given name. * @param classname Fully qualified classname. * @param readable Descriptive noun for the role the class plays. * @throws ConfigurationException If the class cannot be found. 
 */
    public static <T> Class<T> classForName(String classname, String readable) throws ConfigurationException
    {
        try
        {
            // Unchecked cast: the caller's T is not verifiable at runtime.
            return (Class<T>)Class.forName(classname);
        }
        catch (ClassNotFoundException e)
        {
            throw new ConfigurationException(String.format("Unable to find %s class '%s'", readable, classname));
        }
        catch (NoClassDefFoundError e)
        {
            // Thrown when the class exists but a class it depends on is missing.
            throw new ConfigurationException(String.format("Unable to find %s class '%s'", readable, classname));
        }
    }

    /**
     * Constructs an instance of the given class, which must have a no-arg or default constructor.
     * @param classname Fully qualified classname.
     * @param readable Descriptive noun for the role the class plays.
     * @throws ConfigurationException If the class cannot be found.
     */
    public static <T> T construct(String classname, String readable) throws ConfigurationException
    {
        Class<T> cls = FBUtilities.classForName(classname, readable);
        try
        {
            return cls.newInstance();
        }
        catch (IllegalAccessException e)
        {
            throw new ConfigurationException(String.format("Default constructor for %s class '%s' is inaccessible.", readable, classname));
        }
        catch (InstantiationException e)
        {
            throw new ConfigurationException(String.format("Cannot use abstract class '%s' as %s.", classname, readable));
        }
        catch (Exception e)
        {
            // Catch-all because Class.newInstance() "propagates any exception thrown by the nullary constructor, including a checked exception".
            if (e.getCause() instanceof ConfigurationException)
                throw (ConfigurationException)e.getCause();
            throw new ConfigurationException(String.format("Error instantiating %s class '%s'.", readable, classname), e);
        }
    }

    /** Wraps a single element in a sorted singleton set. */
    public static <T extends Comparable> SortedSet<T> singleton(T column)
    {
        return new TreeSet<T>(Arrays.asList(column));
    }

    /** Renders a map as "k1:v1, k2:v2, ...". */
    public static String toString(Map<?,?> map)
    {
        Joiner.MapJoiner joiner = Joiner.on(", ").withKeyValueSeparator(":");
        return joiner.join(map);
    }

    /**
     * Used to get access to protected/private field of the specified class
     * @param klass - name of the class
     * @param fieldName - name of the field
     * @return Field or null on error
     */
    public static Field getProtectedField(Class klass, String fieldName)
    {
        Field field;
        try
        {
            field = klass.getDeclaredField(fieldName);
            field.setAccessible(true);
        }
        catch (Exception e)
        {
            throw new AssertionError(e);
        }
        return field;
    }

    public static <T> CloseableIterator<T> closeableIterator(Iterator<T> iterator)
    {
        return new WrappedCloseableIterator<T>(iterator);
    }

    /** Parses a JSON object into a Map; IO problems surface as RuntimeException. */
    public static Map<String, String> fromJsonMap(String json)
    {
        try
        {
            return jsonMapper.readValue(json, Map.class);
        }
        catch (IOException e)
        {
            throw new RuntimeException(e);
        }
    }

    /** Parses a JSON array into a List; IO problems surface as RuntimeException. */
    public static List<String> fromJsonList(String json)
    {
        try
        {
            return jsonMapper.readValue(json, List.class);
        }
        catch (IOException e)
        {
            throw new RuntimeException(e);
        }
    }

    /** Serializes an object to its JSON representation. */
    public static String json(Object object)
    {
        try
        {
            return jsonMapper.writeValueAsString(object);
        }
        catch (IOException e)
        {
            throw new RuntimeException(e);
        }
    }

    /**
     * Starts and waits for the given @param pb to finish.
 * @throws java.io.IOException on non-zero exit code
     */
    public static void exec(ProcessBuilder pb) throws IOException
    {
        Process p = pb.start();
        try
        {
            int errCode = p.waitFor();
            if (errCode != 0)
            {
                // On failure, capture both stdout and stderr for the exception message.
                // NOTE(review): the readers are not closed explicitly; the process's
                // streams go away with the process object.
                BufferedReader in = new BufferedReader(new InputStreamReader(p.getInputStream()));
                BufferedReader err = new BufferedReader(new InputStreamReader(p.getErrorStream()));
                StringBuilder sb = new StringBuilder();
                String str;
                while ((str = in.readLine()) != null)
                    sb.append(str).append(System.getProperty("line.separator"));
                while ((str = err.readLine()) != null)
                    sb.append(str).append(System.getProperty("line.separator"));
                throw new IOException("Exception while executing the command: "+ StringUtils.join(pb.command(), " ") +
                                      ", command error Code: " + errCode +
                                      ", command output: "+ sb.toString());
            }
        }
        catch (InterruptedException e)
        {
            throw new AssertionError(e);
        }
    }

    /** Feeds the four bytes of v into the checksum, most significant byte first. */
    public static void updateChecksumInt(Checksum checksum, int v)
    {
        checksum.update((v >>> 24) & 0xFF);
        checksum.update((v >>> 16) & 0xFF);
        checksum.update((v >>> 8) & 0xFF);
        checksum.update((v >>> 0) & 0xFF);
    }

    // Adapts a plain Iterator to CloseableIterator with a no-op close().
    private static final class WrappedCloseableIterator<T>
        extends AbstractIterator<T> implements CloseableIterator<T>
    {
        private final Iterator<T> source;
        public WrappedCloseableIterator(Iterator<T> source)
        {
            this.source = source;
        }

        protected T computeNext()
        {
            if (!source.hasNext())
                return endOfData();
            return source.next();
        }

        public void close() {}
    }

    /** Serializes object with the versioned serializer into an exactly-sized byte array. */
    public static <T> byte[] serialize(T object, IVersionedSerializer<T> serializer, int version)
    {
        try
        {
            int size = (int) serializer.serializedSize(object, version);
            DataOutputBuffer buffer = new DataOutputBuffer(size);
            serializer.serialize(object, buffer, version);
            // The serializer must write exactly the size it predicted.
            assert buffer.getLength() == size && buffer.getData().length == size
                : String.format("Final buffer length %s to accommodate data size of %s (predicted %s) for %s",
                                buffer.getData().length, buffer.getLength(), size, object);
            return buffer.getData();
        }
        catch (IOException e)
        {
            // We're doing in-memory serialization...
            throw new AssertionError(e);
        }
    }

    /**
     * Copies at most limit bytes from one stream to the other.
     *
     * @return the number of bytes actually copied (less than limit at EOF)
     */
    public static long copy(InputStream from, OutputStream to, long limit) throws IOException
    {
        byte[] buffer = new byte[64]; // 64 byte buffer
        long copied = 0;
        int toCopy = buffer.length;
        while (true)
        {
            // Shrink the read size when fewer than a full buffer remains under the limit.
            if (limit < buffer.length + copied)
                toCopy = (int) (limit - copied);
            int sofar = from.read(buffer, 0, toCopy);
            if (sofar == -1)
                break;
            to.write(buffer, 0, sofar);
            copied += sofar;
            if (limit == copied)
                break;
        }
        return copied;
    }

    /** ~/.cassandra, created on demand; used for tool history files. */
    public static File getToolsOutputDirectory()
    {
        File historyDir = new File(System.getProperty("user.home"), ".cassandra");
        FileUtils.createDirectory(historyDir);
        return historyDir;
    }

    public static boolean isUnix()
    {
        return OPERATING_SYSTEM.contains("nix") || OPERATING_SYSTEM.contains("nux") || OPERATING_SYSTEM.contains("aix");
    }
}
package rgms.controller;

import javax.servlet.http.*;
import javax.servlet.annotation.*;
import java.util.logging.*;
import rgms.mvc.*;
import rgms.infrastructure.*;
import rgms.model.*;
import rgms.datacontext.*;
import java.util.*;
import java.io.*;

/**
 * Servlet to handle requests for accessing a user's account. Manages redirection
 * if no cookie exists for user's session, logging in, registering, editing and
 * updating profiles and logging out.
 *
 * @author Tyler Haigh - C3182929
 * @author Simon Hartcher - C3185790
 * @author Josh Crompton - C3165877
 *
 */
@WebServlet(urlPatterns = { "/account/*", "/account" })
@MultipartConfig
public class AccountController extends Controller {

    /** Name of the cookie that carries the logged-in user's numeric id. */
    private static final String USER_COOKIE = "userCookie";

    private static Logger logger = Logger.getLogger("rgms.AccountController");

    /**
     * Constructor for the account controller. Provides no functionality
     */
    public AccountController() { }

    /**
     * Returns the session user's cookie, or null if the request carries none.
     *
     * FIX: the original passed req.getCookies() — which is null when the request
     * has no cookies at all — straight into Arrays.asList, throwing an NPE.
     *
     * @param req The HTTP Request
     * @return the "userCookie" Cookie, or null if absent
     */
    public static Cookie getUserCookie(HttpServletRequest req) {
        return findCookie(req, USER_COOKIE);
    }

    /** Null-safe scan of the request's cookies for the given name. */
    private static Cookie findCookie(HttpServletRequest req, String name) {
        Cookie[] cookies = req.getCookies();
        if (cookies == null) return null;
        for (Cookie c : cookies) {
            if (c.getName().equals(name)) return c;
        }
        return null;
    }

    /**
     * True when the request is an HTTP GET.
     * FIX: the original compared Strings with == (req.getMethod() returns a
     * String); equals() is the correct, container-independent comparison.
     */
    private static boolean isGet(HttpServletRequest req) {
        return HttpMethod.Get.equals(req.getMethod());
    }

    /** True when the request is an HTTP POST. See {@link #isGet}. */
    private static boolean isPost(HttpServletRequest req) {
        return HttpMethod.Post.equals(req.getMethod());
    }

    /**
     * Redirects the user to the login page if no Cookie exists for the user's session
     *
     * FIX: a malformed (non-numeric) cookie value or a cookie referencing a
     * deleted user is now treated as "not logged in" instead of throwing a
     * NumberFormatException / building a Session around a null User.
     *
     * @param req The HTTP Request
     * @param res The HTTP Response
     * @return True if the user was redirected to the login page, false otherwise
     */
    public static boolean redirectIfNoCookie(HttpServletRequest req, HttpServletResponse res) {
        Session userSession = null;

        //Check if there is a cookie
        Cookie cookie = findCookie(req, USER_COOKIE);
        if (cookie != null) {
            try {
                //Create the User
                UserManager userMan = new UserManager();
                int userId = Integer.parseInt(cookie.getValue());
                User user = userMan.get(userId);

                //Create a session for the user (only if the user still exists)
                if (user != null) {
                    userSession = new Session(false, userId, user);
                }
            } catch (NumberFormatException e) {
                // Corrupt cookie value — fall through and force a fresh login.
                logger.log(Level.WARNING, "Invalid userCookie value", e);
            }
        }

        if (userSession != null) {
            HttpSession session = req.getSession();
            if (session.getAttribute("userSession") == null)
                session.setAttribute("userSession", userSession);
            return false;
        } else {
            redirectToLocal(req, res, "/account/login");
            return true;
        }
    }

    /**
     * Logs the user into RGMS using a HTTP Post, or displays the login page if a
     * HTTP Get method was used.
     *
     * @param req The HTTP Request
     * @param res The HTTP Response
     */
    public void loginAction(HttpServletRequest req, HttpServletResponse res) {
        Map<String, Object> viewData = new HashMap<String, Object>();
        viewData.put("title", "Login");

        //Check the Request method
        if (isGet(req)) {
            //Check if there is a registerSuccess session object
            HttpSession session = req.getSession();
            Object registerSuccess = session.getAttribute("registerSuccess");
            if (registerSuccess != null) {
                viewData.put("registerSuccess", true);
                session.removeAttribute("registerSuccess");
            }
            view(req, res, "/views/account/Login.jsp", viewData);
        } else if (isPost(req)) {
            //User is trying to log in
            String userName = req.getParameter("userName");
            String password = req.getParameter("password");

            //Create a userSession for the user
            Session userSession = AuthenticationManager.login(userName, password, false);

            if (userSession == null) {
                viewData.put("loginError", true);
                view(req, res, "/views/account/Login.jsp", viewData);
            } else if (!userSession.getUser().isActive()) {
                // Registered but not yet approved by a coordinator.
                viewData.put("inactiveUser", true);
                view(req, res, "/views/account/Login.jsp", viewData);
            } else {
                //Make a cookie for the user session
                Cookie loginCookie = new Cookie(USER_COOKIE, String.valueOf(userSession.getUser().getId()));
                loginCookie.setMaxAge(24 * 60 * 60); // 24 Hours
                loginCookie.setPath("/");
                res.addCookie(loginCookie);

                //Log the user in
                redirectToLocal(req, res, "/home/dashboard");
                return;
            }
        }
    }

    /**
     * Displays the Register page for a HTTP Get method, or creates an inactive
     * User in the database for a HTTP Post method
     *
     * @param req The HTTP Request
     * @param res The HTTP Response
     */
    public void registerAction(HttpServletRequest req, HttpServletResponse res) {
        Map<String, Object> viewData = new HashMap<String, Object>();
        viewData.put("title", "Register");

        if (isGet(req)) {
            view(req, res, "/views/account/Register.jsp", viewData);
        } else if (isPost(req)) {
            //Register the user
            User user = new User();
            user.setUserName(req.getParameter("userName"));
            user.setFirstName(req.getParameter("firstName"));
            user.setLastName(req.getParameter("lastName"));
            user.setStudentId(req.getParameter("studentId"));

            //Create the user in the database
            UserManager userManager = new UserManager();
            userManager.createUser(user, req.getParameter("password"));

            //Update the User from the database (to pick up the generated id)
            user = userManager.get(user.getUserName());

            //Try and authenticate the user
            Session userSession = AuthenticationManager.login(user.getUserName(),
                    req.getParameter("password"), false);

            if (userSession == null) {
                //Notify their attempt was invalid
                viewData.put("registerError", true);
                view(req, res, "/views/account/Register.jsp", viewData);
            } else {
                //Notify all coordinators to approve the user
                NotificationManager notificationManager = new NotificationManager();
                List<User> coordinators = userManager.getCoordinators();

                for (User coordinator : coordinators) {
                    Notification registerNotification = new Notification(coordinator.getId(), coordinator,
                            "New user " + user.getFullName() + " wants to join",
                            "/home/notifications?activate=" + user.getId());
                    notificationManager.createNotification(registerNotification);
                }

                //Redirect back to login page
                /*
                 * This is a hack since you can't set attributes when redirecting - Tyler
                 */
                HttpSession session = req.getSession();
                session.setAttribute("registerSuccess", true);
                redirectToLocal(req, res, "/account/login");
                return;
            }
        }
    }

    /**
     * Displays the profile of a given user
     *
     * - Requires a userId request parameter
     * - Requires a cookie for the session user
     *
     * @param req The HTTP Request
     * @param res The HTTP Response
     */
    public void profileAction(HttpServletRequest req, HttpServletResponse res) {
        //Ensure there is a cookie for the user
        if (redirectIfNoCookie(req, res)) return;

        Map<String, Object> viewData = new HashMap<String, Object>();
        viewData.put("title", "Profile");

        User profileUser = null;
        List<Group> profileUserGroups = null;

        try {
            //Initialise Manager connections
            UserManager um = new UserManager();
            GroupManager gm = new GroupManager();

            //If finding user by ID
            if (req.getParameter("userId") != null) {
                int userId = Integer.parseInt(req.getParameter("userId"));
                profileUser = um.get(userId);
                if (profileUser != null) {
                    // Consistency: use the class logger rather than the root logger.
                    logger.info("Showing profile for user: " + profileUser.getFullName());
                    profileUserGroups = gm.getAllGroups(profileUser.getId());
                }
            }
            //Finding user by username
            else if (req.getParameter("userName") != null) {
                String userName = req.getParameter("userName");
                profileUser = um.get(userName);
            }

            if (profileUser == null) {
                httpNotFound(req, res);
                return;
            }
        } catch (Exception e) {
            logger.log(Level.SEVERE, "An error occurred when getting profile user", e);
        }

        //View the page
        viewData.put("profileUser", profileUser);
        viewData.put("profileUserGroups", profileUserGroups);
        view(req, res, "/views/account/Profile.jsp", viewData);
    }

    /**
     * Displays the edit profile page for the given user.
     *
     * - Requires a userId request parameter
     * - Requires a cookie for the session user
     *
     * @param req The HTTP Request
     * @param res The HTTP Response
     */
    public void editprofileAction(HttpServletRequest req, HttpServletResponse res) {
        //Ensure there is a cookie for the session user
        if (redirectIfNoCookie(req, res)) return;

        Map<String, Object> viewData = new HashMap<String, Object>();
        viewData.put("title", "Edit Profile");

        if (isGet(req)) {
            //get userid
            int userId = Integer.parseInt(req.getParameter("userId"));

            //get user
            UserManager um = new UserManager();
            User profileUser = um.get(userId);
            viewData.put("profileUser", profileUser);

            view(req, res, "/views/account/EditProfile.jsp", viewData);
        } else {
            httpNotFound(req, res);
            return;
        }
    }

    /**
     * Displays the Edit Profile page for a HTTP Get method, and updates the user's
     * details for a HTTP Post method
     *
     * - Requires a cookie for the session user
     * - Requires userName, firstName, lastName request parameters for POST
     * - Requires avatar request part for POST
     *
     * @param req The HTTP Request
     * @param res The HTTP Response
     */
    public void updateAction(HttpServletRequest req, HttpServletResponse res) {
        //Ensure there is a cookie for the session user
        if (redirectIfNoCookie(req, res)) return;

        Map<String, Object> viewData = new HashMap<String, Object>();
        viewData.put("title", "Update Profile");

        // NOTE(review): a missing/non-numeric userId parameter will throw here,
        // as in the original; presumably the edit form always supplies it.
        int userId = Integer.parseInt(req.getParameter("userId"));
        logger.info("Updating profile of: " + userId);

        if (isGet(req)) {
            view(req, res, "/views/account/EditProfile.jsp", viewData);
        } else if (isPost(req)) {
            //Get the request parameters
            User user = new User();
            user.setId(userId);
            user.setUserName(req.getParameter("userName"));
            user.setFirstName(req.getParameter("firstName"));
            user.setLastName(req.getParameter("lastName"));
            user.setDescription(req.getParameter("description"));

            //Process user avatar image
            try {
                Part avatar = req.getPart("avatar");
                if (avatar.getSize() > 0) {
                    //Save the image
                    String imageRef = saveProfileImage(avatar);
                    user.setImageReference(imageRef);
                    logger.info("Avatar uploaded for " + userId);
                } else {
                    // No new upload — keep the existing reference from the form.
                    user.setImageReference(req.getParameter("imageReference"));
                }
            } catch (Exception e) {
                logger.log(Level.SEVERE, "Error loading file", e);
            }

            //Update the user
            UserManager userManager = new UserManager();
            userManager.updateUser(user);

            redirectToLocal(req, res, "/account/profile?userId=" + userId);
            return;
        }
    }

    /**
     * Saves a user's profile image to the server hard disk
     *
     * @param profileImage The request part for the image
     * @return A UUID string that references the image on disk, or null on failure
     */
    private String saveProfileImage(Part profileImage) {
        try {
            //get random uuid
            String id = UUID.randomUUID().toString();

            //save to disk
            String savePath = getServletContext().getRealPath("/Uploads/images") + "/" + id;
            profileImage.write(savePath);
            return id;
        } catch (Exception e) {
            logger.log(Level.SEVERE, "Error saving profile image", e);
            return null;
        }
    }

    /**
     * Logs the session out by removing their HTTP Session and Cookie
     *
     * @param req The HTTP Request
     * @param res The HTTP Response
     */
    public void logoutAction(HttpServletRequest req, HttpServletResponse res) {
        Map<String, Object> viewData = new HashMap<String, Object>();
        viewData.put("title", "Login");

        logger.info("Logging out");

        //Find user cookie (null-safe)
        Cookie loginCookie = findCookie(req, USER_COOKIE);

        //Remove Cookie by expiring it immediately
        if (loginCookie != null) {
            loginCookie.setMaxAge(0);
            loginCookie.setPath("/");
            res.addCookie(loginCookie);
        }

        //Remove Session
        HttpSession session = req.getSession();
        session.removeAttribute("userSession");

        redirectToLocal(req, res, "/account/login");
        return;
    }
}
package org.apache.cassandra.utils;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.NavigableSet;
import java.util.Random;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListenableFutureTask;
import org.junit.Assert;
import org.junit.Test;

import com.yammer.metrics.Metrics;
import com.yammer.metrics.core.Timer;
import com.yammer.metrics.core.TimerContext;
import com.yammer.metrics.stats.Snapshot;
import edu.stanford.ppl.concurrent.SnapTreeMap;
import org.apache.cassandra.concurrent.NamedThreadFactory;
import org.apache.cassandra.utils.btree.BTree;
import org.apache.cassandra.utils.btree.BTreeSet;

// TODO : should probably lower fan-factor for tests to make them more intensive
/**
 * Long-running randomized stress/correctness test for {@link BTree}: mirrors every
 * mutation into a canonical sorted structure (TreeSet / SnapTreeMap) and compares
 * iteration order, sizes, and every head/tail/sub-slice between the two, while
 * recording per-operation timings for btree vs snaptree.
 *
 * Discrepancies are printed to stdout rather than failing assertions (except
 * testOversizedMiddleInsert, which asserts well-formedness).
 */
public class LongBTreeTest
{
    // Timers comparing BTree update cost against the SnapTreeMap baseline.
    private static final Timer BTREE_TIMER = Metrics.newTimer(BTree.class, "BTREE", TimeUnit.NANOSECONDS, TimeUnit.NANOSECONDS);
    private static final Timer TREE_TIMER = Metrics.newTimer(BTree.class, "TREE", TimeUnit.NANOSECONDS, TimeUnit.NANOSECONDS);
    // MODIFY builds/updates trees; COMPARE runs the slice equality checks.
    private static final ExecutorService MODIFY = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors(), new NamedThreadFactory("MODIFY"));
    private static final ExecutorService COMPARE = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors(), new NamedThreadFactory("COMPARE"));

    static
    {
        // Small fan-factor deepens the trees so structural edge cases are hit sooner.
        System.setProperty("cassandra.btree.fanfactor", "4");
    }

    @Test
    public void testOversizedMiddleInsert()
    {
        // Insert a huge run between two sentinel extremes, then verify shape and contents.
        TreeSet<Integer> canon = new TreeSet<>();
        for (int i = 0 ; i < 10000000 ; i++)
            canon.add(i);
        Object[] btree = BTree.build(Arrays.asList(Integer.MIN_VALUE, Integer.MAX_VALUE), ICMP, true, null);
        btree = BTree.update(btree, ICMP, canon, true);
        canon.add(Integer.MIN_VALUE);
        canon.add(Integer.MAX_VALUE);
        Assert.assertTrue(BTree.isWellFormed(btree, ICMP));
        testEqual("Oversize", BTree.<Integer>slice(btree, true), canon.iterator());
    }

    @Test
    public void testIndividualInsertsSmallOverlappingRange() throws ExecutionException, InterruptedException
    {
        testInsertions(100000000, 50, 1, 1, true);
    }

    @Test
    public void testBatchesSmallOverlappingRange() throws ExecutionException, InterruptedException
    {
        testInsertions(100000000, 50, 1, 5, true);
    }

    @Test
    public void testIndividualInsertsMediumSparseRange() throws ExecutionException, InterruptedException
    {
        testInsertions(10000000, 500, 10, 1, true);
    }

    @Test
    public void testBatchesMediumSparseRange() throws ExecutionException, InterruptedException
    {
        testInsertions(10000000, 500, 10, 10, true);
    }

    @Test
    public void testLargeBatchesLargeRange() throws ExecutionException, InterruptedException
    {
        testInsertions(10000000, 5000, 3, 100, true);
    }

    @Test
    public void testSlicingSmallRandomTrees() throws ExecutionException, InterruptedException
    {
        // quickEquality=false: exhaustively checks every slice, so totals are kept small.
        testInsertions(10000, 50, 10, 10, false);
    }

    /**
     * Drives many independent randomized build-and-compare runs on the MODIFY pool.
     *
     * @param totalCount            total operations across all tests
     * @param perTestCount          operations per independent test tree
     * @param testKeyRatio          key-range multiplier (higher = sparser keys)
     * @param modificationBatchSize average mutations applied per update call
     * @param quickEquality         true = iterate-and-compare only; false = check every slice
     */
    private static void testInsertions(int totalCount, int perTestCount, int testKeyRatio, int modificationBatchSize, boolean quickEquality) throws ExecutionException, InterruptedException
    {
        int batchesPerTest = perTestCount / modificationBatchSize;
        int maximumRunLength = 100;
        int testKeyRange = perTestCount * testKeyRatio;
        int tests = totalCount / perTestCount;
        System.out.println(String.format("Performing %d tests of %d operations, with %.2f max size/key-range ratio in batches of ~%d ops",
                                         tests, perTestCount, 1 / (float) testKeyRatio, modificationBatchSize));

        // if we're not doing quick-equality, we can spam with garbage for all the checks we perform, so we'll split the work into smaller chunks
        int chunkSize = quickEquality ? tests : (int) (100000 / Math.pow(perTestCount, 2));
        for (int chunk = 0 ; chunk < tests ; chunk += chunkSize)
        {
            final List<ListenableFutureTask<List<ListenableFuture<?>>>> outer = new ArrayList<>();
            for (int i = 0 ; i < chunkSize ; i++)
            {
                outer.add(doOneTestInsertions(testKeyRange, maximumRunLength, modificationBatchSize, batchesPerTest, quickEquality));
            }

            // Drain each build task, collecting the comparison futures it spawned.
            final List<ListenableFuture<?>> inner = new ArrayList<>();
            int complete = 0;
            int reportInterval = totalCount / 100;
            int lastReportAt = 0;
            for (ListenableFutureTask<List<ListenableFuture<?>>> f : outer)
            {
                inner.addAll(f.get());
                complete += perTestCount;
                if (complete - lastReportAt >= reportInterval)
                {
                    System.out.println(String.format("Completed %d of %d operations", (chunk * perTestCount) + complete, totalCount));
                    lastReportAt = complete;
                }
            }
            // Wait for all comparisons in this chunk before starting the next.
            Futures.allAsList(inner).get();
        }

        // Report timing percentiles for both structures.
        Snapshot snap = BTREE_TIMER.getSnapshot();
        System.out.println(String.format("btree   : %.2fns, %.2fns, %.2fns", snap.getMedian(), snap.get95thPercentile(), snap.get999thPercentile()));
        snap = TREE_TIMER.getSnapshot();
        System.out.println(String.format("snaptree: %.2fns, %.2fns, %.2fns", snap.getMedian(), snap.get95thPercentile(), snap.get999thPercentile()));
        System.out.println("Done");
    }

    /**
     * Submits one randomized test to the MODIFY pool: repeatedly generates random
     * runs of consecutive keys, applies them to both a SnapTreeMap clone and the
     * BTree, times both, and either compares immediately (quickEquality) or queues
     * full slice comparisons.
     */
    private static ListenableFutureTask<List<ListenableFuture<?>>> doOneTestInsertions(final int upperBound, final int maxRunLength, final int averageModsPerIteration, final int iterations, final boolean quickEquality)
    {
        ListenableFutureTask<List<ListenableFuture<?>>> f = ListenableFutureTask.create(new Callable<List<ListenableFuture<?>>>()
        {
            @Override
            public List<ListenableFuture<?>> call()
            {
                final List<ListenableFuture<?>> r = new ArrayList<>();
                SnapTreeMap<Integer, Integer> canon = new SnapTreeMap<>();
                Object[] btree = BTree.empty();
                final TreeMap<Integer, Integer> buffer = new TreeMap<>();
                final Random rnd = new Random();
                for (int i = 0 ; i < iterations ; i++)
                {
                    buffer.clear();
                    // Randomize batch size around averageModsPerIteration.
                    int mods = (averageModsPerIteration >> 1) + 1 + rnd.nextInt(averageModsPerIteration);
                    while (mods > 0)
                    {
                        // Emit a run of up to maxRunLength consecutive keys starting at v.
                        int v = rnd.nextInt(upperBound);
                        int rc = Math.max(0, Math.min(mods, maxRunLength) - 1);
                        int c = 1 + (rc <= 0 ? 0 : rnd.nextInt(rc));
                        for (int j = 0 ; j < c ; j++)
                        {
                            buffer.put(v, v);
                            v++;
                        }
                        mods -= c;
                    }
                    // Time the canonical structure (clone preserves snapshot semantics)...
                    TimerContext ctxt;
                    ctxt = TREE_TIMER.time();
                    canon = canon.clone();
                    canon.putAll(buffer);
                    ctxt.stop();
                    // ...then the BTree applying the same batch.
                    ctxt = BTREE_TIMER.time();
                    btree = BTree.update(btree, ICMP, buffer.keySet(), true, null);
                    ctxt.stop();
                    if (quickEquality)
                        testEqual("", BTree.<Integer>slice(btree, true), canon.keySet().iterator());
                    else
                        r.addAll(testAllSlices("RND", btree, canon.keySet()));
                    if (!BTree.isWellFormed(btree))
                        System.out.println("ERROR: Not well formed");
                }
                return r;
            }
        });
        MODIFY.execute(f);
        return f;
    }

    @Test
    public void testSlicingAllSmallTrees() throws ExecutionException, InterruptedException
    {
        Object[] cur = BTree.empty();
        TreeSet<Integer> canon = new TreeSet<>();
        // we set FAN_FACTOR to 4, so 128 items is four levels deep, three fully populated
        for (int i = 0 ; i < 128 ; i++)
        {
            String id = String.format("[0..%d)", canon.size());
            System.out.println("Testing " + id);
            Futures.allAsList(testAllSlices(id, cur, canon)).get();
            cur = BTree.update(cur, ICMP, Arrays.asList(i), true, null);
            canon.add(i);
        }
    }

    // Natural-order Integer comparator shared by all trees in this test.
    static final Comparator<Integer> ICMP = new Comparator<Integer>()
    {
        @Override
        public int compare(Integer o1, Integer o2)
        {
            return Integer.compare(o1, o2);
        }
    };

    /** Queues ascending and descending slice comparisons; returns their futures. */
    private static List<ListenableFuture<?>> testAllSlices(String id, Object[] btree, NavigableSet<Integer> canon)
    {
        List<ListenableFuture<?>> waitFor = new ArrayList<>();
        testAllSlices(id + " ASC", new BTreeSet<>(btree, ICMP), canon, true, waitFor);
        testAllSlices(id + " DSC", new BTreeSet<>(btree, ICMP).descendingSet(), canon.descendingSet(), false, waitFor);
        return waitFor;
    }

    /**
     * Compares the whole set plus every head/tail/sub slice (all four bound
     * inclusivity combinations) against the canonical NavigableSet.
     * NOTE(review): two of the format strings below reuse "(..%d]" for both
     * inclusive and exclusive tailSet labels — cosmetic only, the sets compared
     * are correct.
     */
    private static void testAllSlices(String id, NavigableSet<Integer> btree, NavigableSet<Integer> canon, boolean ascending, List<ListenableFuture<?>> results)
    {
        testOneSlice(id, btree, canon, results);
        for (Integer lb : range(canon.size(), Integer.MIN_VALUE, ascending))
        {
            // test head/tail sets
            testOneSlice(String.format("%s->[%d..)", id, lb), btree.headSet(lb, true), canon.headSet(lb, true), results);
            testOneSlice(String.format("%s->(%d..)", id, lb), btree.headSet(lb, false), canon.headSet(lb, false), results);
            testOneSlice(String.format("%s->(..%d]", id, lb), btree.tailSet(lb, true), canon.tailSet(lb, true), results);
            testOneSlice(String.format("%s->(..%d]", id, lb), btree.tailSet(lb, false), canon.tailSet(lb, false), results);
            for (Integer ub : range(canon.size(), lb, ascending))
            {
                // test subsets
                testOneSlice(String.format("%s->[%d..%d]", id, lb, ub), btree.subSet(lb, true, ub, true), canon.subSet(lb, true, ub, true), results);
                testOneSlice(String.format("%s->(%d..%d]", id, lb, ub), btree.subSet(lb, false, ub, true), canon.subSet(lb, false, ub, true), results);
                testOneSlice(String.format("%s->[%d..%d)", id, lb, ub), btree.subSet(lb, true, ub, false), canon.subSet(lb, true, ub, false), results);
                testOneSlice(String.format("%s->(%d..%d)", id, lb, ub), btree.subSet(lb, false, ub, false), canon.subSet(lb, false, ub, false), results);
            }
        }
    }

    /**
     * Queues one comparison task on the COMPARE pool: size plus forward,
     * descending, and doubly-descending iteration of the two sets.
     */
    private static void testOneSlice(final String id, final NavigableSet<Integer> test, final NavigableSet<Integer> canon, List<ListenableFuture<?>> results)
    {
        ListenableFutureTask<?> f = ListenableFutureTask.create(new Runnable()
        {
            @Override
            public void run()
            {
                test(id + " Count", test.size(), canon.size());
                testEqual(id, test.iterator(), canon.iterator());
                testEqual(id + "->DSCI", test.descendingIterator(), canon.descendingIterator());
                testEqual(id + "->DSCS", test.descendingSet().iterator(), canon.descendingSet().iterator());
                testEqual(id + "->DSCS->DSCI", test.descendingSet().descendingIterator(), canon.descendingSet().descendingIterator());
            }
        }, null);
        results.add(f);
        COMPARE.execute(f);
    }

    // Prints (does not assert) when an observed int differs from the expected one.
    private static void test(String id, int test, int expect)
    {
        if (test != expect)
        {
            System.out.println(String.format("%s: Expected %d, Got %d", id, expect, test));
        }
    }

    // Prints (does not assert) any element-wise or length mismatch between the two iterations.
    private static <V> void testEqual(String id, Iterator<V> btree, Iterator<V> canon)
    {
        while (btree.hasNext() && canon.hasNext())
        {
            Object i = btree.next();
            Object j = canon.next();
            if (!i.equals(j))
                System.out.println(String.format("%s: Expected %d, Got %d", id, j, i));
        }
        while (btree.hasNext())
            System.out.println(String.format("%s: Expected <Nil>, Got %d", id, btree.next()));
        while (canon.hasNext())
            System.out.println(String.format("%s: Expected %d, Got Nil", id, canon.next()));
    }

    // should only be called on sets that range from 0->N or N->0
    // Yields candidate slice bounds: ascending from `from` (or -1 when from is the
    // MIN_VALUE sentinel) up to size, or descending symmetrically down to -1.
    // The iterator is stateful and single-use per Iterable instance.
    private static final Iterable<Integer> range(final int size, final int from, final boolean ascending)
    {
        return new Iterable<Integer>()
        {
            int cur;
            int delta;
            int end;
            {
                if (ascending)
                {
                    end = size + 1;
                    cur = from == Integer.MIN_VALUE ? -1 : from;
                    delta = 1;
                }
                else
                {
                    end = -2;
                    cur = from == Integer.MIN_VALUE ? size : from;
                    delta = -1;
                }
            }

            @Override
            public Iterator<Integer> iterator()
            {
                return new Iterator<Integer>()
                {
                    @Override
                    public boolean hasNext()
                    {
                        return cur != end;
                    }

                    @Override
                    public Integer next()
                    {
                        Integer r = cur;
                        cur += delta;
                        return r;
                    }

                    @Override
                    public void remove()
                    {
                        throw new UnsupportedOperationException();
                    }
                };
            }
        };
    }
}
/**
 * Copyright (c) 2000-present Liferay, Inc. All rights reserved.
 *
 * This library is free software; you can redistribute it and/or modify it under
 * the terms of the GNU Lesser General Public License as published by the Free
 * Software Foundation; either version 2.1 of the License, or (at your option)
 * any later version.
 *
 * This library is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
 * FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
 * details.
 */

package org.oep.core.dossiermgt.model;

import com.liferay.portal.kernel.bean.AutoEscape;
import com.liferay.portal.kernel.exception.SystemException;
import com.liferay.portal.model.BaseModel;
import com.liferay.portal.model.CacheModel;
import com.liferay.portal.model.StagedModel;
import com.liferay.portal.service.ServiceContext;
import com.liferay.portlet.expando.model.ExpandoBridge;

import java.io.Serializable;

import java.util.Date;

/**
 * The base model interface for the DocFile service. Represents a row in the &quot;oep_dossiermgt_docfile&quot; database table, with each column mapped to a property of this class.
 *
 * <p>
 * This interface and its corresponding implementation {@link org.oep.core.dossiermgt.model.impl.DocFileModelImpl} exist only as a container for the default property accessors generated by ServiceBuilder. Helper methods and all application logic should be put in {@link org.oep.core.dossiermgt.model.impl.DocFileImpl}.
 * </p>
 *
 * @author trungdk
 * @see DocFile
 * @see org.oep.core.dossiermgt.model.impl.DocFileImpl
 * @see org.oep.core.dossiermgt.model.impl.DocFileModelImpl
 * @generated
 */
// NOTE(review): ServiceBuilder-generated (@generated above) — regenerate from the
// service.xml definition rather than hand-editing; manual changes will be overwritten.
public interface DocFileModel extends BaseModel<DocFile>, StagedModel {
	/*
	 * NOTE FOR DEVELOPERS:
	 *
	 * Never modify or reference this interface directly. All methods that expect a doc file model instance should use the {@link DocFile} interface instead.
	 */

	/**
	 * Returns the primary key of this doc file.
	 *
	 * @return the primary key of this doc file
	 */
	public long getPrimaryKey();

	/**
	 * Sets the primary key of this doc file.
	 *
	 * @param primaryKey the primary key of this doc file
	 */
	public void setPrimaryKey(long primaryKey);

	/**
	 * Returns the uuid of this doc file.
	 *
	 * @return the uuid of this doc file
	 */
	@AutoEscape
	@Override
	public String getUuid();

	/**
	 * Sets the uuid of this doc file.
	 *
	 * @param uuid the uuid of this doc file
	 */
	@Override
	public void setUuid(String uuid);

	/**
	 * Returns the doc file ID of this doc file.
	 *
	 * @return the doc file ID of this doc file
	 */
	public long getDocFileId();

	/**
	 * Sets the doc file ID of this doc file.
	 *
	 * @param docFileId the doc file ID of this doc file
	 */
	public void setDocFileId(long docFileId);

	/**
	 * Returns the user ID of this doc file.
	 *
	 * @return the user ID of this doc file
	 */
	public long getUserId();

	/**
	 * Sets the user ID of this doc file.
	 *
	 * @param userId the user ID of this doc file
	 */
	public void setUserId(long userId);

	/**
	 * Returns the user uuid of this doc file.
	 *
	 * @return the user uuid of this doc file
	 * @throws SystemException if a system exception occurred
	 */
	public String getUserUuid() throws SystemException;

	/**
	 * Sets the user uuid of this doc file.
	 *
	 * @param userUuid the user uuid of this doc file
	 */
	public void setUserUuid(String userUuid);

	/**
	 * Returns the group ID of this doc file.
	 *
	 * @return the group ID of this doc file
	 */
	public long getGroupId();

	/**
	 * Sets the group ID of this doc file.
	 *
	 * @param groupId the group ID of this doc file
	 */
	public void setGroupId(long groupId);

	/**
	 * Returns the company ID of this doc file.
	 *
	 * @return the company ID of this doc file
	 */
	@Override
	public long getCompanyId();

	/**
	 * Sets the company ID of this doc file.
	 *
	 * @param companyId the company ID of this doc file
	 */
	@Override
	public void setCompanyId(long companyId);

	/**
	 * Returns the create date of this doc file.
	 *
	 * @return the create date of this doc file
	 */
	@Override
	public Date getCreateDate();

	/**
	 * Sets the create date of this doc file.
	 *
	 * @param createDate the create date of this doc file
	 */
	@Override
	public void setCreateDate(Date createDate);

	/**
	 * Returns the modified date of this doc file.
	 *
	 * @return the modified date of this doc file
	 */
	@Override
	public Date getModifiedDate();

	/**
	 * Sets the modified date of this doc file.
	 *
	 * @param modifiedDate the modified date of this doc file
	 */
	@Override
	public void setModifiedDate(Date modifiedDate);

	/**
	 * Returns the dossier ID of this doc file.
	 *
	 * @return the dossier ID of this doc file
	 */
	public long getDossierId();

	/**
	 * Sets the dossier ID of this doc file.
	 *
	 * @param dossierId the dossier ID of this doc file
	 */
	public void setDossierId(long dossierId);

	/**
	 * Returns the dossier doc ID of this doc file.
	 *
	 * @return the dossier doc ID of this doc file
	 */
	public long getDossierDocId();

	/**
	 * Sets the dossier doc ID of this doc file.
	 *
	 * @param dossierDocId the dossier doc ID of this doc file
	 */
	public void setDossierDocId(long dossierDocId);

	/**
	 * Returns the doc template ID of this doc file.
	 *
	 * @return the doc template ID of this doc file
	 */
	public long getDocTemplateId();

	/**
	 * Sets the doc template ID of this doc file.
	 *
	 * @param docTemplateId the doc template ID of this doc file
	 */
	public void setDocTemplateId(long docTemplateId);

	/**
	 * Returns the doc file version ID of this doc file.
	 *
	 * @return the doc file version ID of this doc file
	 */
	public long getDocFileVersionId();

	/**
	 * Sets the doc file version ID of this doc file.
	 *
	 * @param docFileVersionId the doc file version ID of this doc file
	 */
	public void setDocFileVersionId(long docFileVersionId);

	/**
	 * Returns the doc file name of this doc file.
	 *
	 * @return the doc file name of this doc file
	 */
	@AutoEscape
	public String getDocFileName();

	/**
	 * Sets the doc file name of this doc file.
	 *
	 * @param docFileName the doc file name of this doc file
	 */
	public void setDocFileName(String docFileName);

	/**
	 * Returns the doc file type of this doc file.
	 *
	 * @return the doc file type of this doc file
	 */
	// NOTE(review): "type" is stored as a long here — presumably a foreign key to a
	// type table rather than an enum code; confirm against the service definition.
	public long getDocFileType();

	/**
	 * Sets the doc file type of this doc file.
	 *
	 * @param docFileType the doc file type of this doc file
	 */
	public void setDocFileType(long docFileType);

	/**
	 * Returns the verify status of this doc file.
	 *
	 * @return the verify status of this doc file
	 */
	// NOTE(review): the meaning of individual status codes is not visible here;
	// see the constants/usages in the dossiermgt service layer.
	public int getVerifyStatus();

	/**
	 * Sets the verify status of this doc file.
	 *
	 * @param verifyStatus the verify status of this doc file
	 */
	public void setVerifyStatus(int verifyStatus);

	/**
	 * Returns the note of this doc file.
	 *
	 * @return the note of this doc file
	 */
	@AutoEscape
	public String getNote();

	/**
	 * Sets the note of this doc file.
	 *
	 * @param note the note of this doc file
	 */
	public void setNote(String note);

	/**
	 * Returns the approve by of this doc file.
	 *
	 * @return the approve by of this doc file
	 */
	@AutoEscape
	public String getApproveBy();

	/**
	 * Sets the approve by of this doc file.
	 *
	 * @param approveBy the approve by of this doc file
	 */
	public void setApproveBy(String approveBy);

	/**
	 * Returns the approve date of this doc file.
	 *
	 * @return the approve date of this doc file
	 */
	public Date getApproveDate();

	/**
	 * Sets the approve date of this doc file.
	 *
	 * @param approveDate the approve date of this doc file
	 */
	public void setApproveDate(Date approveDate);

	/**
	 * Returns the premier of this doc file.
	 *
	 * @return the premier of this doc file
	 */
	public int getPremier();

	/**
	 * Sets the premier of this doc file.
	 *
	 * @param premier the premier of this doc file
	 */
	public void setPremier(int premier);

	@Override
	public boolean isNew();

	@Override
	public void setNew(boolean n);

	@Override
	public boolean isCachedModel();

	@Override
	public void setCachedModel(boolean cachedModel);

	@Override
	public boolean isEscapedModel();

	@Override
	public Serializable getPrimaryKeyObj();

	@Override
	public void setPrimaryKeyObj(Serializable primaryKeyObj);

	@Override
	public ExpandoBridge getExpandoBridge();

	@Override
	public void setExpandoBridgeAttributes(BaseModel<?> baseModel);

	@Override
	public void setExpandoBridgeAttributes(ExpandoBridge expandoBridge);

	@Override
	public void setExpandoBridgeAttributes(ServiceContext serviceContext);

	@Override
	public Object clone();

	@Override
	public int compareTo(DocFile docFile);

	@Override
	public int hashCode();

	@Override
	public CacheModel<DocFile> toCacheModel();

	@Override
	public DocFile toEscapedModel();

	@Override
	public DocFile toUnescapedModel();

	@Override
	public String toString();

	@Override
	public String toXmlString();
}
/*
 * Copyright 2015 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.drools.mvel.integrationtests;

import java.util.Collection;
import java.util.List;

import org.drools.core.WorkingMemory;
import org.drools.core.audit.WorkingMemoryFileLogger;
import org.drools.core.audit.WorkingMemoryInMemoryLogger;
import org.drools.core.audit.event.ActivationLogEvent;
import org.drools.core.audit.event.LogEvent;
import org.drools.core.event.ProcessNodeLeftEventImpl;
import org.drools.mvel.compiler.Cheese;
import org.drools.mvel.compiler.Message;
import org.drools.testcoverage.common.util.KieBaseTestConfiguration;
import org.drools.testcoverage.common.util.KieBaseUtil;
import org.drools.testcoverage.common.util.TestParametersUtil;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.kie.api.KieBase;
import org.kie.api.definition.process.Node;
import org.kie.api.definition.process.Process;
import org.kie.api.runtime.KieSession;
import org.kie.api.runtime.process.NodeInstance;
import org.kie.api.runtime.process.NodeInstanceContainer;
import org.kie.api.runtime.process.ProcessInstance;
import org.kie.api.runtime.process.WorkflowProcessInstance;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/**
 * Tests for {@link WorkingMemoryFileLogger} and {@link WorkingMemoryInMemoryLogger},
 * parameterized over the cloud {@link KieBaseTestConfiguration}s supplied by
 * {@link TestParametersUtil}.
 */
@RunWith(Parameterized.class)
public class WorkingMemoryLoggerTest {

    private final KieBaseTestConfiguration kieBaseTestConfiguration;

    public WorkingMemoryLoggerTest(final KieBaseTestConfiguration kieBaseTestConfiguration) {
        this.kieBaseTestConfiguration = kieBaseTestConfiguration;
    }

    @Parameterized.Parameters(name = "KieBase type={0}")
    public static Collection<Object[]> getParameters() {
        return TestParametersUtil.getKieBaseCloudConfigurations(true);
    }

    /**
     * Repeatedly creates sessions with an attached file logger and disposes them.
     * The logger local is intentionally only constructed: attaching it is the point of
     * the test — presumably a memory-leak regression check (TODO confirm original BZ).
     */
    @Test
    public void testOutOfMemory() throws Exception {
        KieBase kbase = KieBaseUtil.getKieBaseFromClasspathResources(getClass(), kieBaseTestConfiguration, "empty.drl");

        for (int i = 0; i < 10000; i++) {
            KieSession session = kbase.newKieSession();
            // Constructing the logger attaches it to the session; it is never written out.
            final WorkingMemoryFileLogger logger = new WorkingMemoryFileLogger((WorkingMemory) session);
            session.fireAllRules();
            session.dispose();
        }
    }

    /**
     * Verifies that activation log events record every bound variable of a matched rule.
     */
    @Test
    public void testLogAllBoundVariables() throws Exception {
        // BZ-1271909
        final String drl = "import " + Message.class.getCanonicalName() + "\n"
                + "rule \"Hello World\" no-loop\n"
                + "    when\n"
                + "        $messageInstance : Message( $myMessage : message )\n"
                + "    then\n"
                + "        update($messageInstance);\n"
                + "end\n";

        KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("test", kieBaseTestConfiguration, drl);
        KieSession ksession = kbase.newKieSession();
        try {
            final WorkingMemoryInMemoryLogger logger = new WorkingMemoryInMemoryLogger((WorkingMemory) ksession);

            final Message message = new Message();
            message.setMessage("Hello World");
            ksession.insert(message);
            ksession.fireAllRules();

            for (final LogEvent logEvent : logger.getLogEvents()) {
                if (logEvent instanceof ActivationLogEvent) {
                    // Both the pattern binding and the field binding must appear in the declarations.
                    assertTrue(((ActivationLogEvent) logEvent).getDeclarations().contains("$messageInstance"));
                    assertTrue(((ActivationLogEvent) logEvent).getDeclarations().contains("$myMessage"));
                }
            }
        } finally {
            // Release session resources even if an assertion fails.
            ksession.dispose();
        }
    }

    /**
     * Simple fixture bean used by {@link #testRetraction()}; matched by field access
     * through its getters from the DRL.
     */
    public static class AnyType {

        private Integer typeId = 1;
        private String typeName = "test";

        public String getTypeName() {
            return typeName;
        }

        public Integer getTypeId() {
            // Unboxes then re-boxes; kept as-is so a null typeId still fails fast with an NPE.
            return typeId.intValue();
        }

        public void setTypeId(final Integer id) {
            typeId = id;
        }

        public AnyType() {
            typeId = 1;
            typeName = "test";
        }

        public AnyType(final Integer id, final String type) {
            typeId = id;
            typeName = type;
        }
    }

    /**
     * Verifies that a fact may be deleted and then mutated in the same consequence
     * while an in-memory logger is attached (RHBRMS-2641 regression).
     */
    @Test
    public void testRetraction() throws Exception {
        // RHBRMS-2641
        final String drl =
                "import " + AnyType.class.getCanonicalName() + ";\n"
                        + "rule \"retract\" when\n"
                        + "    $any : AnyType( $typeId :typeId, typeName in (\"Standard\", \"Extended\") )\n"
                        + "    $any_c1 : AnyType( typeId == $typeId, typeName not in (\"Standard\", \"Extended\") ) \r\n"
                        + "then\n"
                        + "    delete($any);\n"
                        + "    $any.setTypeId(null);\n"
                        + "end";

        KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("test", kieBaseTestConfiguration, drl);
        KieSession ksession = kbase.newKieSession();
        try {
            final WorkingMemoryInMemoryLogger logger = new WorkingMemoryInMemoryLogger((WorkingMemory) ksession);
            ksession.insert(new AnyType(1, "Standard"));
            ksession.insert(new AnyType(1, "Extended"));
            ksession.insert(new AnyType(1, "test"));
            // Each of the two "Standard"/"Extended" facts pairs with the "test" fact once.
            assertEquals(2, ksession.fireAllRules());
        } finally {
            ksession.dispose();
        }
    }

    /**
     * Firing before and after inserting facts must not break logging even when the
     * rule network has unbalanced branches (see test_Logger.drl).
     */
    @Test
    public void testWorkingMemoryLoggerWithUnbalancedBranches() throws Exception {
        KieBase kbase = KieBaseUtil.getKieBaseFromClasspathResources(getClass(), kieBaseTestConfiguration,
                "test_Logger.drl");
        KieSession wm = kbase.newKieSession();

        try {
            wm.fireAllRules();

            wm.insert(new Cheese("a", 10));
            wm.insert(new Cheese("b", 11));

            wm.fireAllRules();
        } catch (final Exception e) {
            e.printStackTrace();
            fail("No exception should be raised ");
        } finally {
            wm.dispose();
        }
    }

    /**
     * Verifies that a process-node-left event is rendered as an
     * "AFTER PROCESS NODE EXITED" log entry.
     */
    @Test
    public void testLogEvents() throws Exception {
        KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("test", kieBaseTestConfiguration);
        KieSession ksession = kbase.newKieSession();
        try {
            final WorkingMemoryInMemoryLogger logger = new WorkingMemoryInMemoryLogger((WorkingMemory) ksession);
            logger.afterNodeLeft(new ProcessNodeLeftEventImpl(new EmtpyNodeInstance(), ksession));
            List<LogEvent> logEvents = logger.getLogEvents();
            // JUnit convention: expected value first, then actual.
            assertEquals(1, logEvents.size());
            assertTrue(logEvents.get(0).toString().startsWith("AFTER PROCESS NODE EXITED"));
        } finally {
            ksession.dispose();
        }
    }

    /**
     * Minimal {@link NodeInstance} stub (name kept as-is, typo included, to preserve the
     * public interface) used to synthesize process events for {@link #testLogEvents()}.
     */
    static public class EmtpyNodeInstance implements NodeInstance {

        @Override
        public long getId() {
            return 0;
        }

        @Override
        public long getNodeId() {
            return 0;
        }

        @Override
        public Node getNode() {
            return null;
        }

        @Override
        public String getNodeName() {
            return "empty.node";
        }

        @Override
        public WorkflowProcessInstance getProcessInstance() {
            return new EmtpyWorkflowProcessInstance();
        }

        @Override
        public NodeInstanceContainer getNodeInstanceContainer() {
            return null;
        }

        @Override
        public Object getVariable(String variableName) {
            return null;
        }

        @Override
        public void setVariable(String variableName, Object value) {
        }
    }

    /**
     * Minimal {@link WorkflowProcessInstance} stub backing {@link EmtpyNodeInstance};
     * reports itself as active and otherwise returns placeholder values.
     */
    static class EmtpyWorkflowProcessInstance implements WorkflowProcessInstance {

        @Override
        public String getProcessId() {
            return "emtpy.process";
        }

        @Override
        public Process getProcess() {
            return null;
        }

        @Override
        public long getId() {
            return 1;
        }

        @Override
        public String getProcessName() {
            return null;
        }

        @Override
        public int getState() {
            return ProcessInstance.STATE_ACTIVE;
        }

        @Override
        public long getParentProcessInstanceId() {
            return -1;
        }

        @Override
        public void signalEvent(String type, Object event) {
        }

        @Override
        public String[] getEventTypes() {
            return null;
        }

        @Override
        public Collection<NodeInstance> getNodeInstances() {
            return null;
        }

        @Override
        public NodeInstance getNodeInstance(long nodeInstanceId) {
            return null;
        }

        @Override
        public Object getVariable(String name) {
            return null;
        }

        @Override
        public void setVariable(String name, Object value) {
        }
    }
}
/*
 * Copyright 2020 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.optaplanner.core.impl.heuristic.selector.value;

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

import org.optaplanner.core.api.domain.valuerange.ValueRangeProvider;
import org.optaplanner.core.config.heuristic.selector.common.SelectionCacheType;
import org.optaplanner.core.config.heuristic.selector.common.SelectionOrder;
import org.optaplanner.core.config.heuristic.selector.common.decorator.SelectionSorterOrder;
import org.optaplanner.core.config.heuristic.selector.common.nearby.NearbySelectionConfig;
import org.optaplanner.core.config.heuristic.selector.value.ValueSelectorConfig;
import org.optaplanner.core.config.util.ConfigUtils;
import org.optaplanner.core.impl.domain.entity.descriptor.EntityDescriptor;
import org.optaplanner.core.impl.domain.solution.descriptor.SolutionDescriptor;
import org.optaplanner.core.impl.domain.valuerange.descriptor.EntityIndependentValueRangeDescriptor;
import org.optaplanner.core.impl.domain.valuerange.descriptor.ValueRangeDescriptor;
import org.optaplanner.core.impl.domain.variable.descriptor.GenuineVariableDescriptor;
import org.optaplanner.core.impl.heuristic.HeuristicConfigPolicy;
import org.optaplanner.core.impl.heuristic.selector.AbstractSelectorFactory;
import org.optaplanner.core.impl.heuristic.selector.common.decorator.ComparatorSelectionSorter;
import org.optaplanner.core.impl.heuristic.selector.common.decorator.SelectionFilter;
import org.optaplanner.core.impl.heuristic.selector.common.decorator.SelectionProbabilityWeightFactory;
import org.optaplanner.core.impl.heuristic.selector.common.decorator.SelectionSorter;
import org.optaplanner.core.impl.heuristic.selector.common.decorator.SelectionSorterWeightFactory;
import org.optaplanner.core.impl.heuristic.selector.common.decorator.WeightFactorySelectionSorter;
import org.optaplanner.core.impl.heuristic.selector.common.nearby.NearbyDistanceMeter;
import org.optaplanner.core.impl.heuristic.selector.common.nearby.NearbyRandom;
import org.optaplanner.core.impl.heuristic.selector.common.nearby.NearbyRandomFactory;
import org.optaplanner.core.impl.heuristic.selector.entity.EntitySelector;
import org.optaplanner.core.impl.heuristic.selector.entity.EntitySelectorFactory;
import org.optaplanner.core.impl.heuristic.selector.value.decorator.CachingValueSelector;
import org.optaplanner.core.impl.heuristic.selector.value.decorator.DowncastingValueSelector;
import org.optaplanner.core.impl.heuristic.selector.value.decorator.EntityDependentSortingValueSelector;
import org.optaplanner.core.impl.heuristic.selector.value.decorator.FilteringValueSelector;
import org.optaplanner.core.impl.heuristic.selector.value.decorator.InitializedValueSelector;
import org.optaplanner.core.impl.heuristic.selector.value.decorator.ProbabilityValueSelector;
import org.optaplanner.core.impl.heuristic.selector.value.decorator.ReinitializeVariableValueSelector;
import org.optaplanner.core.impl.heuristic.selector.value.decorator.SelectedCountLimitValueSelector;
import org.optaplanner.core.impl.heuristic.selector.value.decorator.ShufflingValueSelector;
import org.optaplanner.core.impl.heuristic.selector.value.decorator.SortingValueSelector;
import org.optaplanner.core.impl.heuristic.selector.value.mimic.MimicRecordingValueSelector;
import org.optaplanner.core.impl.heuristic.selector.value.mimic.MimicReplayingValueSelector;
import org.optaplanner.core.impl.heuristic.selector.value.mimic.ValueMimicRecorder;
import org.optaplanner.core.impl.heuristic.selector.value.nearby.NearEntityNearbyValueSelector;

/**
 * Builds a {@link ValueSelector} from a {@link ValueSelectorConfig} by constructing a base
 * selector and then layering decorator selectors around it in a fixed order (nearby selection,
 * filtering, initialized-chained-value filtering, sorting, probability, shuffling, caching,
 * selected-count limiting, mimic recording, reinitialize-variable filtering, downcasting).
 * The decorator order in {@code buildValueSelector} is significant; do not reorder it.
 */
public class ValueSelectorFactory<Solution_>
        extends AbstractSelectorFactory<Solution_, ValueSelectorConfig> {

    /** Static factory mirroring the single-argument constructor. */
    public static <Solution_> ValueSelectorFactory<Solution_> create(ValueSelectorConfig valueSelectorConfig) {
        return new ValueSelectorFactory<>(valueSelectorConfig);
    }

    public ValueSelectorFactory(ValueSelectorConfig valueSelectorConfig) {
        super(valueSelectorConfig);
    }

    /**
     * Resolves the planning variable descriptor for this selector config: by explicit
     * variableName if configured, otherwise from the referenced mimic recorder, otherwise null.
     * Applies downcasting to the entity descriptor first, so the variable is looked up on the
     * downcast entity class when one is configured.
     */
    public GenuineVariableDescriptor<Solution_> extractVariableDescriptor(HeuristicConfigPolicy<Solution_> configPolicy,
            EntityDescriptor<Solution_> entityDescriptor) {
        entityDescriptor = downcastEntityDescriptor(configPolicy, entityDescriptor);
        if (config.getVariableName() != null) {
            GenuineVariableDescriptor<Solution_> variableDescriptor =
                    entityDescriptor.getGenuineVariableDescriptor(config.getVariableName());
            if (variableDescriptor == null) {
                throw new IllegalArgumentException("The selectorConfig (" + config
                        + ") has a variableName (" + config.getVariableName()
                        + ") which is not a valid planning variable on entityClass ("
                        + entityDescriptor.getEntityClass() + ").\n"
                        + entityDescriptor.buildInvalidVariableNameExceptionMessage(config.getVariableName()));
            }
            return variableDescriptor;
        } else if (config.getMimicSelectorRef() != null) {
            return configPolicy.getValueMimicRecorder(config.getMimicSelectorRef()).getVariableDescriptor();
        } else {
            // No variable name and no mimic ref: the caller must deduce the descriptor itself.
            return null;
        }
    }

    /**
     *
     * @param configPolicy never null
     * @param entityDescriptor never null
     * @param minimumCacheType never null, If caching is used (different from {@link SelectionCacheType#JUST_IN_TIME}),
     *        then it should be at least this {@link SelectionCacheType} because an ancestor already uses such caching
     *        and less would be pointless.
     * @param inheritedSelectionOrder never null
     * @return never null
     */
    public ValueSelector<Solution_> buildValueSelector(HeuristicConfigPolicy<Solution_> configPolicy,
            EntityDescriptor<Solution_> entityDescriptor,
            SelectionCacheType minimumCacheType, SelectionOrder inheritedSelectionOrder) {
        // Delegates with the policy-wide default for reinitialize-variable filtering.
        return buildValueSelector(configPolicy, entityDescriptor, minimumCacheType, inheritedSelectionOrder,
                configPolicy.isReinitializeVariableFilterEnabled());
    }

    /**
     * Full assembly pipeline. A mimicSelectorRef short-circuits to a replaying selector
     * (plus optional reinitialize filtering and downcasting); otherwise the base selector
     * is built and each decorator is applied in order.
     */
    public ValueSelector<Solution_> buildValueSelector(HeuristicConfigPolicy<Solution_> configPolicy,
            EntityDescriptor<Solution_> entityDescriptor,
            SelectionCacheType minimumCacheType, SelectionOrder inheritedSelectionOrder,
            boolean applyReinitializeVariableFiltering) {
        if (config.getMimicSelectorRef() != null) {
            ValueSelector<Solution_> valueSelector = buildMimicReplaying(configPolicy);
            if (applyReinitializeVariableFiltering) {
                valueSelector = new ReinitializeVariableValueSelector<>(valueSelector);
            }
            valueSelector = applyDowncasting(valueSelector);
            return valueSelector;
        }
        entityDescriptor = downcastEntityDescriptor(configPolicy, entityDescriptor);
        GenuineVariableDescriptor<Solution_> variableDescriptor = config.getVariableName() == null
                ? deduceVariableDescriptor(entityDescriptor)
                : deduceVariableDescriptor(entityDescriptor, config.getVariableName());
        SelectionCacheType resolvedCacheType = SelectionCacheType.resolve(config.getCacheType(), minimumCacheType);
        SelectionOrder resolvedSelectionOrder = SelectionOrder.resolve(config.getSelectionOrder(),
                inheritedSelectionOrder);

        // Fail fast on inconsistent configuration before building anything.
        if (config.getNearbySelectionConfig() != null) {
            config.getNearbySelectionConfig().validateNearby(resolvedCacheType, resolvedSelectionOrder);
        }
        validateCacheTypeVersusSelectionOrder(resolvedCacheType, resolvedSelectionOrder);
        validateSorting(resolvedSelectionOrder);
        validateProbability(resolvedSelectionOrder);
        validateSelectedLimit(minimumCacheType);

        // baseValueSelector and lower should be SelectionOrder.ORIGINAL if they are going to get cached completely
        ValueSelector<Solution_> valueSelector = buildBaseValueSelector(variableDescriptor,
                SelectionCacheType.max(minimumCacheType, resolvedCacheType),
                determineBaseRandomSelection(variableDescriptor, resolvedCacheType, resolvedSelectionOrder));

        if (config.getNearbySelectionConfig() != null) {
            // TODO Static filtering (such as movableEntitySelectionFilter) should affect nearbySelection too
            valueSelector = applyNearbySelection(configPolicy, config.getNearbySelectionConfig(), minimumCacheType,
                    resolvedSelectionOrder, valueSelector);
        }
        valueSelector = applyFiltering(valueSelector);
        valueSelector = applyInitializedChainedValueFilter(configPolicy, variableDescriptor, valueSelector);
        valueSelector = applySorting(resolvedCacheType, resolvedSelectionOrder, valueSelector);
        valueSelector = applyProbability(resolvedCacheType, resolvedSelectionOrder, valueSelector);
        valueSelector = applyShuffling(resolvedCacheType, resolvedSelectionOrder, valueSelector);
        valueSelector = applyCaching(resolvedCacheType, resolvedSelectionOrder, valueSelector);
        valueSelector = applySelectedLimit(valueSelector);
        valueSelector = applyMimicRecording(configPolicy, valueSelector);
        if (applyReinitializeVariableFiltering) {
            valueSelector = new ReinitializeVariableValueSelector<>(valueSelector);
        }
        valueSelector = applyDowncasting(valueSelector);
        return valueSelector;
    }

    /**
     * Builds a replaying selector for a mimicSelectorRef. A mimic reference must be the
     * config's only property: every other property is validated to be null, because the
     * referenced (recording) selector already defines the selection behavior.
     */
    protected ValueSelector<Solution_> buildMimicReplaying(HeuristicConfigPolicy<Solution_> configPolicy) {
        if (config.getId() != null
                || config.getVariableName() != null
                || config.getCacheType() != null
                || config.getSelectionOrder() != null
                || config.getNearbySelectionConfig() != null
                || config.getFilterClass() != null
                || config.getSorterManner() != null
                || config.getSorterComparatorClass() != null
                || config.getSorterWeightFactoryClass() != null
                || config.getSorterOrder() != null
                || config.getSorterClass() != null
                || config.getProbabilityWeightFactoryClass() != null
                || config.getSelectedCountLimit() != null) {
            throw new IllegalArgumentException("The valueSelectorConfig (" + config
                    + ") with mimicSelectorRef (" + config.getMimicSelectorRef()
                    + ") has another property that is not null.");
        }
        ValueMimicRecorder<Solution_> valueMimicRecorder =
                configPolicy.getValueMimicRecorder(config.getMimicSelectorRef());
        if (valueMimicRecorder == null) {
            throw new IllegalArgumentException("The valueSelectorConfig (" + config
                    + ") has a mimicSelectorRef (" + config.getMimicSelectorRef()
                    + ") for which no valueSelector with that id exists (in its solver phase).");
        }
        return new MimicReplayingValueSelector<>(valueMimicRecorder);
    }

    /**
     * When downcastEntityClass is configured, swaps the given entity descriptor for the
     * descriptor of that subclass (validating the subclass relationship and that the class
     * is a known planning entity); otherwise returns the descriptor unchanged.
     */
    protected EntityDescriptor<Solution_> downcastEntityDescriptor(HeuristicConfigPolicy<Solution_> configPolicy,
            EntityDescriptor<Solution_> entityDescriptor) {
        if (config.getDowncastEntityClass() != null) {
            Class<?> parentEntityClass = entityDescriptor.getEntityClass();
            if (!parentEntityClass.isAssignableFrom(config.getDowncastEntityClass())) {
                throw new IllegalStateException("The downcastEntityClass (" + config.getDowncastEntityClass()
                        + ") is not a subclass of the parentEntityClass (" + parentEntityClass
                        + ") configured by the " + EntitySelector.class.getSimpleName() + ".");
            }
            SolutionDescriptor<Solution_> solutionDescriptor = configPolicy.getSolutionDescriptor();
            entityDescriptor = solutionDescriptor.getEntityDescriptorStrict(config.getDowncastEntityClass());
            if (entityDescriptor == null) {
                throw new IllegalArgumentException("The selectorConfig (" + config
                        + ") has an downcastEntityClass (" + config.getDowncastEntityClass()
                        + ") that is not a known planning entity.\n"
                        + "Check your solver configuration. If that class ("
                        + config.getDowncastEntityClass().getSimpleName()
                        + ") is not in the entityClassSet (" + solutionDescriptor.getEntityClassSet()
                        + "), check your Solution implementation's annotated methods too.");
            }
        }
        return entityDescriptor;
    }

    /**
     * Decides whether the base selector should itself select randomly. For RANDOM order the
     * base stays random only when no caching layer will be added above it (otherwise the
     * caching decorator provides the randomness); all other orders use ORIGINAL at the base.
     */
    protected boolean determineBaseRandomSelection(GenuineVariableDescriptor<Solution_> variableDescriptor,
            SelectionCacheType resolvedCacheType, SelectionOrder resolvedSelectionOrder) {
        switch (resolvedSelectionOrder) {
            case ORIGINAL:
                return false;
            case SORTED:
            case SHUFFLED:
            case PROBABILISTIC:
                // baseValueSelector and lower should be ORIGINAL if they are going to get cached completely
                return false;
            case RANDOM:
                // Predict if caching will occur
                return resolvedCacheType.isNotCached()
                        || (isBaseInherentlyCached(variableDescriptor) && !hasFiltering(variableDescriptor));
            default:
                throw new IllegalStateException("The selectionOrder (" + resolvedSelectionOrder
                        + ") is not implemented.");
        }
    }

    /** An entity-independent value range is already effectively cached at the base. */
    protected boolean isBaseInherentlyCached(GenuineVariableDescriptor<Solution_> variableDescriptor) {
        return variableDescriptor.isValueRangeEntityIndependent();
    }

    /**
     * Builds the innermost selector from the variable's value range: a solution-property
     * selector when the range is entity independent, otherwise an entity-property selector.
     * SOLVER-level caching is rejected outright (see linked issue).
     */
    private ValueSelector<Solution_> buildBaseValueSelector(GenuineVariableDescriptor<Solution_> variableDescriptor,
            SelectionCacheType minimumCacheType, boolean randomSelection) {
        ValueRangeDescriptor<Solution_> valueRangeDescriptor = variableDescriptor.getValueRangeDescriptor();
        // TODO minimumCacheType SOLVER is only a problem if the valueRange includes entities or custom weird cloning
        if (minimumCacheType == SelectionCacheType.SOLVER) {
            // TODO Solver cached entities are not compatible with DroolsScoreCalculator and IncrementalScoreDirector
            // because between phases the entities get cloned and the KieSession/Maps contains those clones afterwards
            // https://issues.redhat.com/browse/PLANNER-54
            throw new IllegalArgumentException("The minimumCacheType (" + minimumCacheType
                    + ") is not yet supported. Please use " + SelectionCacheType.PHASE + " instead.");
        }
        if (valueRangeDescriptor.isEntityIndependent()) {
            return new FromSolutionPropertyValueSelector<>(
                    (EntityIndependentValueRangeDescriptor<Solution_>) valueRangeDescriptor, minimumCacheType,
                    randomSelection);
        } else {
            // TODO Do not allow PHASE cache on FromEntityPropertyValueSelector, except if the moveSelector is PHASE cached too.
            return new FromEntityPropertyValueSelector<>(valueRangeDescriptor, randomSelection);
        }
    }

    /** True when either a configured filterClass or the variable's chained-trailing-value filter applies. */
    private boolean hasFiltering(GenuineVariableDescriptor<Solution_> variableDescriptor) {
        return config.getFilterClass() != null
                || variableDescriptor.hasMovableChainedTrailingValueFilter();
    }

    /** Wraps the selector in a FilteringValueSelector when any filter applies (see hasFiltering). */
    protected ValueSelector<Solution_> applyFiltering(ValueSelector<Solution_> valueSelector) {
        GenuineVariableDescriptor<Solution_> variableDescriptor = valueSelector.getVariableDescriptor();
        if (hasFiltering(variableDescriptor)) {
            List<SelectionFilter<Solution_, Object>> filterList =
                    new ArrayList<>(config.getFilterClass() == null ? 1 : 2);
            if (config.getFilterClass() != null) {
                filterList.add(ConfigUtils.newInstance(config, "filterClass", config.getFilterClass()));
            }
            // Filter out pinned entities
            if (variableDescriptor.hasMovableChainedTrailingValueFilter()) {
                filterList.add(variableDescriptor.getMovableChainedTrailingValueFilter());
            }
            valueSelector = FilteringValueSelector.create(valueSelector, filterList);
        }
        return valueSelector;
    }

    /** For chained variables, optionally filters to initialized values when the policy enables it. */
    protected ValueSelector<Solution_> applyInitializedChainedValueFilter(HeuristicConfigPolicy<Solution_> configPolicy,
            GenuineVariableDescriptor<Solution_> variableDescriptor,
            ValueSelector<Solution_> valueSelector) {
        if (configPolicy.isInitializedChainedValueFilterEnabled() && variableDescriptor.isChained()) {
            valueSelector = InitializedValueSelector.create(valueSelector);
        }
        return valueSelector;
    }

    /**
     * Rejects every invalid sorter-property combination: any sorter property with a
     * non-SORTED order, and each mutually exclusive pair among sorterManner,
     * sorterComparatorClass, sorterWeightFactoryClass and sorterClass (plus sorterOrder
     * restrictions for manner/class).
     */
    protected void validateSorting(SelectionOrder resolvedSelectionOrder) {
        if ((config.getSorterManner() != null || config.getSorterComparatorClass() != null
                || config.getSorterWeightFactoryClass() != null
                || config.getSorterOrder() != null || config.getSorterClass() != null)
                && resolvedSelectionOrder != SelectionOrder.SORTED) {
            throw new IllegalArgumentException("The valueSelectorConfig (" + config
                    + ") with sorterManner (" + config.getSorterManner()
                    + ") and sorterComparatorClass (" + config.getSorterComparatorClass()
                    + ") and sorterWeightFactoryClass (" + config.getSorterWeightFactoryClass()
                    + ") and sorterOrder (" + config.getSorterOrder()
                    + ") and sorterClass (" + config.getSorterClass()
                    + ") has a resolvedSelectionOrder (" + resolvedSelectionOrder
                    + ") that is not " + SelectionOrder.SORTED + ".");
        }
        if (config.getSorterManner() != null && config.getSorterComparatorClass() != null) {
            throw new IllegalArgumentException("The valueSelectorConfig (" + config
                    + ") has both a sorterManner (" + config.getSorterManner()
                    + ") and a sorterComparatorClass (" + config.getSorterComparatorClass() + ").");
        }
        if (config.getSorterManner() != null && config.getSorterWeightFactoryClass() != null) {
            throw new IllegalArgumentException("The valueSelectorConfig (" + config
                    + ") has both a sorterManner (" + config.getSorterManner()
                    + ") and a sorterWeightFactoryClass (" + config.getSorterWeightFactoryClass() + ").");
        }
        if (config.getSorterManner() != null && config.getSorterClass() != null) {
            throw new IllegalArgumentException("The valueSelectorConfig (" + config
                    + ") has both a sorterManner (" + config.getSorterManner()
                    + ") and a sorterClass (" + config.getSorterClass() + ").");
        }
        if (config.getSorterManner() != null && config.getSorterOrder() != null) {
            throw new IllegalArgumentException("The valueSelectorConfig (" + config
                    + ") with sorterManner (" + config.getSorterManner()
                    + ") has a non-null sorterOrder (" + config.getSorterOrder() + ").");
        }
        if (config.getSorterComparatorClass() != null && config.getSorterWeightFactoryClass() != null) {
            throw new IllegalArgumentException("The valueSelectorConfig (" + config
                    + ") has both a sorterComparatorClass (" + config.getSorterComparatorClass()
                    + ") and a sorterWeightFactoryClass (" + config.getSorterWeightFactoryClass() + ").");
        }
        if (config.getSorterComparatorClass() != null && config.getSorterClass() != null) {
            throw new IllegalArgumentException("The valueSelectorConfig (" + config
                    + ") has both a sorterComparatorClass (" + config.getSorterComparatorClass()
                    + ") and a sorterClass (" + config.getSorterClass() + ").");
        }
        if (config.getSorterWeightFactoryClass() != null && config.getSorterClass() != null) {
            throw new IllegalArgumentException("The valueSelectorConfig (" + config
                    + ") has both a sorterWeightFactoryClass (" + config.getSorterWeightFactoryClass()
                    + ") and a sorterClass (" + config.getSorterClass() + ").");
        }
        if (config.getSorterClass() != null && config.getSorterOrder() != null) {
            throw new IllegalArgumentException("The valueSelectorConfig (" + config
                    + ") with sorterClass (" + config.getSorterClass()
                    + ") has a non-null sorterOrder (" + config.getSorterOrder() + ").");
        }
    }

    /**
     * For SORTED order, resolves the sorter (manner, comparator, weight factory or sorter class)
     * and wraps the selector. Entity-dependent value ranges with STEP caching use
     * EntityDependentSortingValueSelector; otherwise an EntityIndependentValueSelector is required.
     */
    protected ValueSelector<Solution_> applySorting(SelectionCacheType resolvedCacheType,
            SelectionOrder resolvedSelectionOrder, ValueSelector<Solution_> valueSelector) {
        if (resolvedSelectionOrder == SelectionOrder.SORTED) {
            SelectionSorter<Solution_, Object> sorter;
            if (config.getSorterManner() != null) {
                GenuineVariableDescriptor<Solution_> variableDescriptor = valueSelector.getVariableDescriptor();
                if (!ValueSelectorConfig.hasSorter(config.getSorterManner(), variableDescriptor)) {
                    // This manner does not apply to this variable: leave the selector unsorted.
                    return valueSelector;
                }
                sorter = ValueSelectorConfig.determineSorter(config.getSorterManner(), variableDescriptor);
            } else if (config.getSorterComparatorClass() != null) {
                Comparator<Object> sorterComparator = ConfigUtils.newInstance(config,
                        "sorterComparatorClass", config.getSorterComparatorClass());
                sorter = new ComparatorSelectionSorter<>(sorterComparator,
                        SelectionSorterOrder.resolve(config.getSorterOrder()));
            } else if (config.getSorterWeightFactoryClass() != null) {
                SelectionSorterWeightFactory<Solution_, Object> sorterWeightFactory = ConfigUtils.newInstance(config,
                        "sorterWeightFactoryClass", config.getSorterWeightFactoryClass());
                sorter = new WeightFactorySelectionSorter<>(sorterWeightFactory,
                        SelectionSorterOrder.resolve(config.getSorterOrder()));
            } else if (config.getSorterClass() != null) {
                sorter = ConfigUtils.newInstance(config, "sorterClass", config.getSorterClass());
            } else {
                throw new IllegalArgumentException("The valueSelectorConfig (" + config
                        + ") with resolvedSelectionOrder (" + resolvedSelectionOrder
                        + ") needs a sorterManner (" + config.getSorterManner()
                        + ") or a sorterComparatorClass (" + config.getSorterComparatorClass()
                        + ") or a sorterWeightFactoryClass (" + config.getSorterWeightFactoryClass()
                        + ") or a sorterClass (" + config.getSorterClass() + ").");
            }
            if (!valueSelector.getVariableDescriptor().isValueRangeEntityIndependent()
                    && resolvedCacheType == SelectionCacheType.STEP) {
                valueSelector = new EntityDependentSortingValueSelector<>(valueSelector,
                        resolvedCacheType, sorter);
            } else {
                if (!(valueSelector instanceof EntityIndependentValueSelector)) {
                    throw new IllegalArgumentException("The valueSelectorConfig (" + config
                            + ") with resolvedCacheType (" + resolvedCacheType
                            + ") and resolvedSelectionOrder (" + resolvedSelectionOrder
                            + ") needs to be based on an EntityIndependentValueSelector (" + valueSelector + ")."
                            + " Check your @" + ValueRangeProvider.class.getSimpleName() + " annotations.");
                }
                valueSelector = new SortingValueSelector<>((EntityIndependentValueSelector<Solution_>) valueSelector,
                        resolvedCacheType, sorter);
            }
        }
        return valueSelector;
    }

    /** A probabilityWeightFactoryClass is only allowed with PROBABILISTIC order. */
    protected void validateProbability(SelectionOrder resolvedSelectionOrder) {
        if (config.getProbabilityWeightFactoryClass() != null
                && resolvedSelectionOrder != SelectionOrder.PROBABILISTIC) {
            throw new IllegalArgumentException("The valueSelectorConfig (" + config
                    + ") with probabilityWeightFactoryClass (" + config.getProbabilityWeightFactoryClass()
                    + ") has a resolvedSelectionOrder (" + resolvedSelectionOrder
                    + ") that is not " + SelectionOrder.PROBABILISTIC + ".");
        }
    }

    /**
     * For PROBABILISTIC order, wraps the selector in a ProbabilityValueSelector using the
     * configured weight factory; requires an EntityIndependentValueSelector underneath.
     */
    protected ValueSelector<Solution_> applyProbability(SelectionCacheType resolvedCacheType,
            SelectionOrder resolvedSelectionOrder, ValueSelector<Solution_> valueSelector) {
        if (resolvedSelectionOrder == SelectionOrder.PROBABILISTIC) {
            if (config.getProbabilityWeightFactoryClass() == null) {
                throw new IllegalArgumentException("The valueSelectorConfig (" + config
                        + ") with resolvedSelectionOrder (" + resolvedSelectionOrder
                        + ") needs a probabilityWeightFactoryClass ("
                        + config.getProbabilityWeightFactoryClass() + ").");
            }
            SelectionProbabilityWeightFactory<Solution_, Object> probabilityWeightFactory =
                    ConfigUtils.newInstance(config, "probabilityWeightFactoryClass",
                            config.getProbabilityWeightFactoryClass());
            if (!(valueSelector instanceof EntityIndependentValueSelector)) {
                throw new IllegalArgumentException("The valueSelectorConfig (" + config
                        + ") with resolvedCacheType (" + resolvedCacheType
                        + ") and resolvedSelectionOrder (" + resolvedSelectionOrder
                        + ") needs to be based on an EntityIndependentValueSelector (" + valueSelector + ")."
                        + " Check your @" + ValueRangeProvider.class.getSimpleName() + " annotations.");
            }
            valueSelector = new ProbabilityValueSelector<>((EntityIndependentValueSelector<Solution_>) valueSelector,
                    resolvedCacheType, probabilityWeightFactory);
        }
        return valueSelector;
    }

    /**
     * For SHUFFLED order, wraps the selector in a ShufflingValueSelector; requires an
     * EntityIndependentValueSelector underneath.
     */
    private ValueSelector<Solution_> applyShuffling(SelectionCacheType resolvedCacheType,
            SelectionOrder resolvedSelectionOrder, ValueSelector<Solution_> valueSelector) {
        if (resolvedSelectionOrder == SelectionOrder.SHUFFLED) {
            if (!(valueSelector instanceof EntityIndependentValueSelector)) {
                throw new IllegalArgumentException("The valueSelectorConfig (" + config
                        + ") with resolvedCacheType (" + resolvedCacheType
                        + ") and resolvedSelectionOrder (" + resolvedSelectionOrder
                        + ") needs to be based on an EntityIndependentValueSelector (" + valueSelector + ")."
                        + " Check your @" + ValueRangeProvider.class.getSimpleName() + " annotations.");
            }
            valueSelector = new ShufflingValueSelector<>((EntityIndependentValueSelector<Solution_>) valueSelector,
                    resolvedCacheType);
        }
        return valueSelector;
    }

    /**
     * Adds a CachingValueSelector when the resolved cache type is cached and strictly
     * stronger than what the selector already provides; requires an
     * EntityIndependentValueSelector underneath.
     */
    private ValueSelector<Solution_> applyCaching(SelectionCacheType resolvedCacheType,
            SelectionOrder resolvedSelectionOrder, ValueSelector<Solution_> valueSelector) {
        if (resolvedCacheType.isCached() && resolvedCacheType.compareTo(valueSelector.getCacheType()) > 0) {
            if (!(valueSelector instanceof EntityIndependentValueSelector)) {
                throw new IllegalArgumentException("The valueSelectorConfig (" + config
                        + ") with resolvedCacheType (" + resolvedCacheType
                        + ") and resolvedSelectionOrder (" + resolvedSelectionOrder
                        + ") needs to be based on an EntityIndependentValueSelector (" + valueSelector + ")."
                        + " Check your @" + ValueRangeProvider.class.getSimpleName() + " annotations.");
            }
            valueSelector = new CachingValueSelector<>((EntityIndependentValueSelector<Solution_>) valueSelector,
                    resolvedCacheType, resolvedSelectionOrder.toRandomSelectionBoolean());
        }
        return valueSelector;
    }

    /** A selectedCountLimit is incompatible with any cache stronger than JUST_IN_TIME. */
    private void validateSelectedLimit(SelectionCacheType minimumCacheType) {
        if (config.getSelectedCountLimit() != null
                && minimumCacheType.compareTo(SelectionCacheType.JUST_IN_TIME) > 0) {
            throw new IllegalArgumentException("The valueSelectorConfig (" + config
                    + ") with selectedCountLimit (" + config.getSelectedCountLimit()
                    + ") has a minimumCacheType (" + minimumCacheType
                    + ") that is higher than " + SelectionCacheType.JUST_IN_TIME + ".");
        }
    }

    /** Wraps the selector in a SelectedCountLimitValueSelector when a limit is configured. */
    private ValueSelector<Solution_> applySelectedLimit(ValueSelector<Solution_> valueSelector) {
        if (config.getSelectedCountLimit() != null) {
            valueSelector = new SelectedCountLimitValueSelector<>(valueSelector, config.getSelectedCountLimit());
        }
        return valueSelector;
    }

    /**
     * Wraps the selector in a NearEntityNearbyValueSelector built from an origin entity
     * selector, a distance meter and a nearby random. Note: the NearbyRandom is built from
     * config.getNearbySelectionConfig() rather than the nearbySelectionConfig parameter;
     * at the only call site these are the same object.
     */
    private ValueSelector<Solution_> applyNearbySelection(HeuristicConfigPolicy<Solution_> configPolicy,
            NearbySelectionConfig nearbySelectionConfig, SelectionCacheType minimumCacheType,
            SelectionOrder resolvedSelectionOrder, ValueSelector<Solution_> valueSelector) {
        boolean randomSelection = resolvedSelectionOrder.toRandomSelectionBoolean();
        EntitySelectorFactory<Solution_> entitySelectorFactory =
                EntitySelectorFactory.create(nearbySelectionConfig.getOriginEntitySelectorConfig());
        EntitySelector<Solution_> originEntitySelector =
                entitySelectorFactory.buildEntitySelector(configPolicy, minimumCacheType, resolvedSelectionOrder);
        NearbyDistanceMeter<?, ?> nearbyDistanceMeter =
                (NearbyDistanceMeter<?, ?>) ConfigUtils.newInstance(nearbySelectionConfig,
                        "nearbyDistanceMeterClass", nearbySelectionConfig.getNearbyDistanceMeterClass());
        // TODO Check nearbyDistanceMeterClass.getGenericInterfaces() to confirm generic type S is an entityClass
        NearbyRandom nearbyRandom =
                NearbyRandomFactory.create(config.getNearbySelectionConfig()).buildNearbyRandom(randomSelection);
        return new NearEntityNearbyValueSelector<>(valueSelector, originEntitySelector,
                nearbyDistanceMeter, nearbyRandom, randomSelection);
    }

    /**
     * When the config has an id, registers a MimicRecordingValueSelector under that id on the
     * policy so sibling configs can replay it; requires a non-empty id and an
     * EntityIndependentValueSelector underneath.
     */
    private ValueSelector<Solution_> applyMimicRecording(HeuristicConfigPolicy<Solution_> configPolicy,
            ValueSelector<Solution_> valueSelector) {
        if (config.getId() != null) {
            if (config.getId().isEmpty()) {
                throw new IllegalArgumentException("The valueSelectorConfig (" + config
                        + ") has an empty id (" + config.getId() + ").");
            }
            if (!(valueSelector instanceof EntityIndependentValueSelector)) {
                throw new IllegalArgumentException("The valueSelectorConfig (" + config
                        + ") with id (" + config.getId()
                        + ") needs to be based on an EntityIndependentValueSelector (" + valueSelector + ")."
                        + " Check your @" + ValueRangeProvider.class.getSimpleName() + " annotations.");
            }
            MimicRecordingValueSelector<Solution_> mimicRecordingValueSelector =
                    new MimicRecordingValueSelector<>((EntityIndependentValueSelector<Solution_>) valueSelector);
            configPolicy.addValueMimicRecorder(config.getId(), mimicRecordingValueSelector);
            valueSelector = mimicRecordingValueSelector;
        }
        return valueSelector;
    }

    /** Outermost decorator: restricts selection to the configured downcast entity class. */
    private ValueSelector<Solution_> applyDowncasting(ValueSelector<Solution_> valueSelector) {
        if (config.getDowncastEntityClass() != null) {
            valueSelector = new DowncastingValueSelector<>(valueSelector, config.getDowncastEntityClass());
        }
        return valueSelector;
    }
}
/**
 * Copyright 2012-2018 Kyrill Zotkin
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.enterprisedomain.ecp;

import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import org.eclipse.core.resources.IResource;
import org.eclipse.core.resources.IResourceChangeEvent;
import org.eclipse.core.resources.IResourceChangeListener;
import org.eclipse.core.resources.IResourceDelta;
import org.eclipse.core.resources.IResourceDeltaVisitor;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.FileLocator;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.emf.codegen.util.CodeGenUtil;
import org.eclipse.emf.common.command.BasicCommandStack;
import org.eclipse.emf.common.command.Command;
import org.eclipse.emf.common.notify.Adapter;
import org.eclipse.emf.common.notify.AdapterFactory;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.Notifier;
import org.eclipse.emf.common.util.ECollections;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.common.util.URI;
import org.eclipse.emf.ecore.EAnnotation;
import org.eclipse.emf.ecore.EAttribute;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EClassifier;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EOperation;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.emf.ecore.EReference;
import org.eclipse.emf.ecore.EcoreFactory;
import org.eclipse.emf.ecore.EcorePackage;
import org.eclipse.emf.ecore.resource.Resource;
import org.eclipse.emf.ecore.resource.ResourceSet;
import org.eclipse.emf.ecore.util.EContentAdapter;
import org.eclipse.emf.ecore.util.EcoreUtil;
import org.eclipse.emf.ecp.core.ECPProject;
import org.eclipse.emf.ecp.core.ECPRepository;
import org.eclipse.emf.ecp.core.util.ECPContainer;
import org.eclipse.emf.ecp.core.util.ECPModelContextAdapter;
import org.eclipse.emf.ecp.core.util.ECPUtil;
import org.eclipse.emf.ecp.core.util.observer.ECPProjectContentChangedObserver;
import org.eclipse.emf.ecp.core.util.observer.ECPRepositoriesChangedObserver;
import org.eclipse.emf.ecp.spi.core.DefaultProvider;
import org.eclipse.emf.ecp.spi.core.InternalProject;
import org.eclipse.emf.ecp.spi.core.InternalProvider;
import org.eclipse.emf.ecp.spi.core.InternalRepository;
import org.eclipse.emf.ecp.spi.core.ProviderChangeListener;
import org.eclipse.emf.ecp.spi.core.util.InternalChildrenList;
import org.eclipse.emf.edit.command.ChangeCommand;
import org.eclipse.emf.edit.domain.AdapterFactoryEditingDomain;
import org.eclipse.emf.edit.domain.EditingDomain;
import org.eclipse.emf.edit.provider.ComposedAdapterFactory;
import org.enterprisedomain.classmaker.ClassMakerService;
import org.enterprisedomain.classmaker.Contribution;
import org.enterprisedomain.classmaker.Project;
import org.enterprisedomain.classmaker.ResourceAdapter;
import org.enterprisedomain.classmaker.SelectRevealHandler;
import org.enterprisedomain.classmaker.Stage;
import org.enterprisedomain.classmaker.core.ClassMakerPlugin;
import org.enterprisedomain.classmaker.impl.CompletionListenerImpl;
import org.enterprisedomain.classmaker.impl.ResourceChangeListenerImpl;
import org.enterprisedomain.classmaker.util.ClassMakerAdapterFactory;
import org.enterprisedomain.classmaker.util.ResourceUtils;

/**
 * EMF ECP provider that exposes the EnterpriseDomain ClassMaker workspace as ECP
 * projects. It mirrors project lifecycle events (INIT/CREATE/DISPOSE/REMOVE) onto
 * ClassMaker {@link Project}s, keeps ECP viewers refreshed via EMF content adapters,
 * and reacts to Eclipse workspace resource changes through an
 * {@link IResourceChangeListener}.
 *
 * NOTE(review): this class is not thread-safe (see {@link #isThreadSafe()}), and the
 * static maps below are shared across all provider instances — presumably intentional
 * since ECP instantiates a single provider; verify before adding a second instance.
 */
public class EnterpriseDomainProvider extends DefaultProvider {

	/** Adapter factory combining ClassMaker item providers with the generic EMF reflective ones. */
	public static final AdapterFactory ENTERPRISE_DOMAIN_ADAPTER_FACTORY = new ComposedAdapterFactory(
			new AdapterFactory[] { new ClassMakerAdapterFactory(), InternalProvider.EMF_ADAPTER_FACTORY });

	/** Provider id under which this provider is registered with ECP. */
	public static final String NAME = "org.enterprisedomain.ecp.provider";

	/** ECP project property key: "true" when the project is a ClassMaker contribution. */
	public static final String PROP_CONTRIBUTION = "isContribution";

	// Maps EPackage nsURI -> names of its EClasses that should be visible in ECP.
	// Static: shared by all provider instances.
	private static Map<String, HashSet<String>> visiblePackagesToClasses = new HashMap<String, HashSet<String>>();

	private final IResourceChangeListener resourceChangeListener = new EnterpriseDomainResourceListener();

	// Re-entrancy guard for initProject(): project -> currently initializing.
	private static Map<InternalProject, Boolean> initing = new HashMap<InternalProject, Boolean>();

	/**
	 * Content adapter installed on the ClassMaker workspace ResourceSet. On any
	 * containment ADD it saves the affected resource (unless it belongs to a
	 * Contribution) and notifies the owning ECP project so viewers refresh.
	 */
	private Adapter adapter = new EContentAdapter() {

		@SuppressWarnings("unchecked")
		@Override
		public void notifyChanged(Notification notification) {
			super.notifyChanged(notification);
			// Resolve the Resource affected by this notification, if any.
			Resource resource = null;
			if (notification.getEventType() == Notification.ADD
					&& notification.getFeatureID(Resource.class) == Resource.RESOURCE__CONTENTS
					&& notification.getNotifier() instanceof Resource) {
				// A root object was added directly to a resource.
				resource = (Resource) notification.getNotifier();
			} else if (notification.getEventType() == Notification.ADD
					&& notification.getNotifier() instanceof EObject) {
				final Object feature = notification.getFeature();
				if (feature instanceof EReference) {
					final EReference eReference = (EReference) feature;
					if (eReference.isContainment()) {
						// An object was added into a containment reference; track its resource.
						resource = ((EObject) notification.getNewValue()).eResource();
					}
				}
			}
			if (resource == null) {
				return;
			}
			if (Activator.getClassMaker() == null)
				return;
			Project domainProject = Activator.getClassMaker().getWorkspace().getProject(resource);
			if (domainProject != null && !(domainProject instanceof Contribution)) {
				// Auto-save plain (non-contribution) projects on content addition.
				try {
					resource.save(Collections.emptyMap());
				} catch (final IOException ex) {
					Activator.log(ex);
				}
			}
			InternalProject project = (InternalProject) getModelContext(resource);
			if (project != null) {
				propagatePackagesVisibility();
				project.notifyObjectsChanged((Collection<Object>) (Collection<?>) Arrays.asList(project), true);
			}
		}
	};

	// Command stack of the most recently created editing domain; used by isDirty().
	private BasicCommandStack commandStack;

	/**
	 * Registers this provider under {@link #NAME}, hooks the content adapter into the
	 * ClassMaker workspace (when available) and subscribes to workspace resource changes.
	 */
	public EnterpriseDomainProvider() {
		super(NAME);
		if (Activator.getClassMaker() != null) {
			Activator.getClassMaker().getWorkspace().getResourceSet().eAdapters().add(adapter);
		}
		ResourcesPlugin.getWorkspace().addResourceChangeListener(resourceChangeListener);
	}

	/** This provider must be accessed from a single (UI) thread. */
	@Override
	public boolean isThreadSafe() {
		// TODO Auto-generated method stub
		return false;
	}

	/** Projects can exist without a backing ECP repository. */
	@Override
	public boolean hasCreateProjectWithoutRepositorySupport() {
		return true;
	}

	/** Repositories cannot be created through this provider. */
	@Override
	public boolean hasCreateRepositorySupport() {
		return false;
	}

	/**
	 * Returns the children of the ClassMaker project with the same name as the ECP project.
	 */
	@Override
	public EList<? extends Object> getElements(InternalProject project) {
		Project domainProject = (Project) Activator.getClassMaker().getWorkspace().getProject(project.getName());
		return domainProject.getChildren();
	}

	/**
	 * Deep-copies all children of the source ClassMaker project into the target one.
	 */
	@Override
	public void cloneProject(InternalProject projectToClone, InternalProject targetProject) {
		Project domainProjectToClone = Activator.getClassMaker().getWorkspace().getProject(projectToClone.getName());
		Project domainTargetProject = Activator.getClassMaker().getWorkspace().getProject(targetProject.getName());
		domainTargetProject.getChildren().addAll(EcoreUtil.copyAll(domainProjectToClone.getChildren()));
	}

	/**
	 * Dispatches ECP project lifecycle events to the protected hook methods below.
	 */
	@Override
	public void handleLifecycle(ECPContainer context, LifecycleEvent event) {
		super.handleLifecycle(context, event);
		if (context instanceof InternalProject) {
			final InternalProject project = (InternalProject) context;
			switch (event) {
			case INIT:
				initProject(project);
				break;
			case CREATE:
				createProject(project);
				break;
			case DISPOSE:
				disposeProject(project);
				break;
			case REMOVE:
				removeProject(project);
				break;
			default:
				break;
			}
		}
	}

	/**
	 * Initializes an ECP project: opens/loads the matching ClassMaker project, or —
	 * when none exists yet — creates either a contribution (seeded with a dummy
	 * model containing one EClass and one delegated EOperation) or a plain project,
	 * depending on the {@link #PROP_CONTRIBUTION} project property. Also registers
	 * a change listener that republishes EObject deletions to the ECP observer bus.
	 * Guarded against re-entrant calls via the static {@code initing} map.
	 */
	protected void initProject(final InternalProject project) {
		if (initing.containsKey(project) && initing.get(project))
			return;
		initing.put(project, true);
		final EditingDomain editingDomain = project.getEditingDomain();
		editingDomain.getResourceSet().eAdapters().add(new EnterpriseDomainProjectObserver(project, this));
		final ClassMakerService classMaker = Activator.getClassMaker();
		if (classMaker != null) {
			Project domainProject = classMaker.getWorkspace().getProject(project.getName());
			if (domainProject != null) {
				// Existing domain project: open, expose its generated EPackage, then load.
				try {
					IProgressMonitor monitor = null;
					if (getUIProvider() != null)
						monitor = getUIProvider().getAdapter(project, IProgressMonitor.class);
					else
						monitor = ClassMakerPlugin.getProgressMonitor();
					domainProject.open(monitor);
				} catch (CoreException e) {
					Activator.log(e);
				}
				if (domainProject.getRevision().getState().getDomainModel().getGenerated() instanceof EPackage)
					addVisiblePackage(project,
							(EPackage) domainProject.getRevision().getState().getDomainModel().getGenerated());
				domainProject.initialize(false);
				try {
					domainProject.load(false, true);
				} catch (CoreException e) {
					ClassMakerPlugin.getInstance().getLog().log(e.getStatus());
				}
			} else {
				// No domain project yet: create one.
				boolean contribution = false;
				if (project.getProperties().getKeys().contains(PROP_CONTRIBUTION))
					contribution = Boolean.valueOf(project.getProperties().getValue(PROP_CONTRIBUTION));
				IProgressMonitor monitor = null;
				if (getUIProvider() == null)
					monitor = ClassMakerPlugin.getProgressMonitor();
				else
					monitor = getUIProvider().getAdapter(project, IProgressMonitor.class);
				if (contribution) {
					// Seed a contribution with a minimal model: package "name" containing
					// EClass "TheObject" with a String "value" attribute and an int
					// "perform()" operation implemented via an invocation delegate.
					EcoreFactory ecoreFactory = EcoreFactory.eINSTANCE;
					EPackage model = ecoreFactory.createEPackage();
					model.setName(project.getName());
					model.setNsPrefix(CodeGenUtil.capName(project.getName().replaceAll(" ", "").toLowerCase()));
					model.setNsURI("http://" + project.getName().replaceAll(" ", "") + "/1.0");
					EClass dummyEClass = ecoreFactory.createEClass();
					dummyEClass.setName("TheObject");
					EAttribute dummyEAttribute = ecoreFactory.createEAttribute();
					dummyEAttribute.setName("value");
					dummyEAttribute.setEType(EcorePackage.Literals.ESTRING);
					dummyEClass.getEStructuralFeatures().add(dummyEAttribute);
					EOperation op = ecoreFactory.createEOperation();
					op.setName("perform");
					op.setEType(EcorePackage.Literals.EINT);
					EAnnotation an = ecoreFactory.createEAnnotation();
					an.setSource("http://www.eclipse.org/emf/2002/GenModel");
					an.getDetails().put("body", "return 7;");
					op.getEAnnotations().add(an);
					EAnnotation invocation = ecoreFactory.createEAnnotation();
					invocation.setSource(ClassMakerService.INVOCATION_DELEGATE_URI);
					op.getEAnnotations().add(invocation);
					dummyEClass.getEOperations().add(op);
					model.getEClassifiers().add(dummyEClass);
					EAnnotation invocationDelegate = ecoreFactory.createEAnnotation();
					invocationDelegate.setSource(EcorePackage.eNS_URI);
					invocationDelegate.getDetails().put("invocationDelegates",
							ClassMakerService.INVOCATION_DELEGATE_URI);
					model.getEAnnotations().add(invocationDelegate);
					try {
						domainProject = classMaker.getWorkspace().createContribution(model, monitor);
						domainProject.getState().setEdit(true);
						domainProject.getState().setEditor(true);
						domainProject.addCompletionListener(new ProviderCompletionListener(project));
					} catch (CoreException e) {
						Activator.log(e);
					}
					addVisiblePackage(project, EcorePackage.eINSTANCE);
				} else
					try {
						domainProject = classMaker.getWorkspace().createProject(project.getName(), monitor);
						domainProject.initialize(true);
						propagatePackagesVisibility();
					} catch (CoreException e) {
						Activator.log(e);
					}
			}
			classMaker.getWorkspace().getResourceSet().eAdapters()
					.add(new AdapterFactoryEditingDomain.EditingDomainProvider(project.getEditingDomain()));
		}
		// Republish deletions to the ECP observer bus so content views update.
		registerChangeListener(new ProviderChangeListener() {

			// Remembers, per object about to be deleted, which ECP project owned it.
			private Map<EObject, ECPProject> projects = new HashMap<EObject, ECPProject>();

			@Override
			public void preDelete(EObject objectToBeDeleted) {
				ECPContainer project = getModelContext(objectToBeDeleted);
				if (project != null && ECPProject.class.isInstance(project)) {
					projects.put(objectToBeDeleted, (ECPProject) project);
				}
			}

			@Override
			public void postDelete(EObject objectToBeDeleted) {
				if (!projects.containsKey(objectToBeDeleted))
					return;
				final Object[] eObjects = new Object[] { objectToBeDeleted };
				ECPUtil.getECPObserverBus().notify(ECPProjectContentChangedObserver.class)
						.objectsChanged(projects.get(objectToBeDeleted), (Collection<Object>) Arrays.asList(eObjects));
				projects.remove(objectToBeDeleted);
			}

			@Override
			public void notify(Notification notification) {
			}

			@Override
			public boolean canDelete(EObject objectToBeDeleted) {
				return true;
			}
		});
		initing.put(project, false);
	}

	/**
	 * Handles project CREATE: opens the project and installs a project observer on
	 * both the project's editing-domain ResourceSet and the ClassMaker workspace.
	 */
	protected void createProject(final InternalProject project) {
		project.open();
		final EditingDomain editingDomain = project.getEditingDomain();
		Adapter observer = new EnterpriseDomainProjectObserver(project, this);
		editingDomain.getResourceSet().eAdapters().add(observer);
		ClassMakerService classMaker = Activator.getClassMaker();
		if (classMaker != null)
			classMaker.getWorkspace().getResourceSet().eAdapters().add(observer);
	}

	/**
	 * Resolves the ECP container (project) owning the given element, walking up from
	 * domain Projects, lists, resource adapters, Resources and EObjects.
	 *
	 * @return the owning {@link ECPContainer}, or null if none can be determined
	 */
	@Override
	public ECPContainer getModelContext(Object element) {
		if (element instanceof ECPContainer) {
			return (ECPContainer) element;
		}
		// if (element instanceof ECPModelContextProvider) {
		// ECPContainer container = ((ECPModelContextProvider) element).getModelContext(element);
		// if (container.equals(element))
		// return null;
		// return container;
		// }
		if (element instanceof Project) {
			// Match by name, excluding the case where the ECP project's contents ARE this project.
			for (InternalProject project : getOpenProjects()) {
				if (project.getName().equals(((Project) element).getName())
						&& !project.getContents().equals(((Project) element).getChildren()))
					return project;
			}
		}
		if (element instanceof EList<?>)
			return getModelContext(((EList<?>) element).get(0));
		if (element instanceof ResourceAdapter)
			return getModelContext(((ResourceAdapter) element).getProject());
		if (element instanceof Resource) {
			Collection<InternalProject> projects = null;
			try {
				projects = getOpenProjects();
			} catch (RuntimeException e) {
				return null;
			}
			for (InternalProject project : projects) {
				Project domainProject = Activator.getClassMaker().getWorkspace().getProject((Resource) element);
				if (domainProject != null && domainProject.getProjectName().equals(project.getName()))
					return project;
			}
		}
		if (element instanceof EObject) {
			if (((EObject) element).eContainer() instanceof ECPContainer) {
				return getModelContext(((EObject) element).eContainer().eResource());
			}
			return getModelContext(((EObject) element).eResource());
		}
		return null;
	}

	/**
	 * Handles project DISPOSE: stops tracking modification on the first child
	 * resource (if any) and closes the matching ClassMaker project.
	 */
	protected void disposeProject(InternalProject project) {
		initing.remove(project);
		ClassMakerService service = Activator.getClassMaker();
		if (service != null) {
			Project domainProject = service.getWorkspace().getProject(project.getName());
			if (domainProject != null)
				try {
					IProgressMonitor monitor = null;
					if (getUIProvider() == null)
						monitor = ClassMakerPlugin.getProgressMonitor();
					else
						monitor = getUIProvider().getAdapter(project, IProgressMonitor.class);
					Object object = domainProject.getChildren().get(0);
					if (object instanceof Resource)
						((Resource) object).setTrackingModification(false);
					domainProject.close(monitor);
				} catch (CoreException e) {
					Activator.log(e);
				}
		}
	}

	/**
	 * Handles project REMOVE: deletes the matching ClassMaker project from disk.
	 */
	protected void removeProject(InternalProject project) {
		initing.remove(project);
		Project domainProject = (Project) Activator.getClassMaker().getWorkspace().getProject(project.getName());
		try {
			domainProject.delete(getUIProvider().getAdapter(project, IProgressMonitor.class));
		} catch (CoreException e) {
			e.printStackTrace();
		}
	}

	/**
	 * Returns the given packages minus every package visible in any open project.
	 */
	@Override
	public Set<EPackage> getUnsupportedEPackages(Collection<EPackage> packages, InternalRepository repository) {
		Set<EPackage> results = new HashSet<EPackage>();
		results.addAll(packages);
		for (InternalProject project : getOpenProjects())
			results.removeAll(getVisiblePackages(project));
		return results;
	}

	/**
	 * Resolves every registered visible package nsURI against the project's package
	 * registry. NOTE(review): unresolved URIs yield null entries in the result set.
	 */
	public Set<EPackage> getVisiblePackages(InternalProject project) {
		Set<EPackage> results = new HashSet<EPackage>();
		for (String packageNsURI : visiblePackagesToClasses.keySet()) {
			EPackage ePackage = project.getEditingDomain().getResourceSet().getPackageRegistry()
					.getEPackage(packageNsURI);
			results.add(ePackage);
		}
		return results;
	}

	/**
	 * An EObject belongs to this provider when the ClassMaker workspace reports it
	 * in the LOADED stage; other objects fall back to the default check.
	 */
	@Override
	public boolean contains(InternalProject project, Object object) {
		if (object instanceof EObject)
			return Activator.getClassMaker().getWorkspace().contains((EObject) object)
					.getValue() == Stage.LOADED_VALUE;
		return super.contains(project, object);
	}

	/**
	 * Supplies tree children for the ECP model explorer: repository -> workspace
	 * projects, project -> domain project children, resource -> contents,
	 * EObject -> eContents.
	 */
	@Override
	public void fillChildren(ECPContainer context, Object parent, InternalChildrenList childrenList) {
		if (parent instanceof ECPRepository) {
			childrenList.addChildren(Activator.getClassMaker().getWorkspace().getProjects());
		} else if (parent instanceof ECPProject) {
			final ECPProject project = (ECPProject) parent;
			final Project domainProject = Activator.getClassMaker().getWorkspace().getProject(project.getName());
			if (domainProject != null && !domainProject.getChildren().isEmpty())
				childrenList.addChildren(domainProject.getChildren());
		} else if (parent instanceof Resource) {
			Resource resource = (Resource) parent;
			childrenList.addChildren(resource.getContents());
		} else if (parent instanceof EObject) {
			final EObject eObject = (EObject) parent;
			childrenList.addChildren(eObject.eContents());
		} else {
			super.fillChildren(context, parent, childrenList);
		}
	}

	/**
	 * Tries the provider-specific adapter factory first, then the default one.
	 */
	@Override
	public <T> T getAdapter(Object adaptable, Class<T> adapterType) {
		final T adapter = EnterpriseDomainProviderAdapterFactory.adapt(adaptable, adapterType);
		if (adapter != null)
			return adapter;
		return super.getAdapter(adaptable, adapterType);
	}

	/**
	 * Creates an editing domain over the shared ClassMaker workspace ResourceSet and
	 * remembers its command stack (used by {@link #isDirty(InternalProject)}).
	 */
	@Override
	public EditingDomain createEditingDomain(final InternalProject project) {
		commandStack = (BasicCommandStack) createCommandStack(project);
		final EditingDomain editingDomain = new AdapterFactoryEditingDomain(ENTERPRISE_DOMAIN_ADAPTER_FACTORY,
				commandStack, Activator.getClassMaker().getWorkspace().getResourceSet());
		editingDomain.getResourceSet().eAdapters().add(new ECPModelContextAdapter(project));
		return editingDomain;
	}

	/**
	 * Completion listener attached to newly created contributions: wires the select &
	 * reveal handler, re-notifies the ECP project on resource changes, exposes the
	 * generated EPackage and broadcasts a repository change.
	 */
	public class ProviderCompletionListener extends CompletionListenerImpl {

		private InternalProject project;

		public ProviderCompletionListener(InternalProject project) {
			this.project = project;
		}

		@SuppressWarnings("unchecked")
		@Override
		public void completed(final Project result) throws Exception {
			result.setSelectRevealHandler(getUIProvider().getAdapter(project, SelectRevealHandler.class));
			result.addResourceChangeListener(new ResourceChangeListenerImpl() {

				@Override
				public void changed(Notification notification) throws Exception {
					project.notifyObjectsChanged(((Collection<Object>) (Collection<?>) Arrays.asList(project)), true);
				}
			});
			if (((Contribution) result).getDomainModel().getGenerated() instanceof EPackage)
				addVisiblePackage(project, (EPackage) ((Contribution) result).getDomainModel().getGenerated());
			project.notifyObjectsChanged((Collection<Object>) (Collection<?>) Arrays.asList(project), true);
			Collection<ECPRepository> repositories = (Collection<ECPRepository>) (Collection<?>) Arrays
					.asList(project.getRepository());
			ECPUtil.getECPObserverBus().notify(ECPRepositoriesChangedObserver.class).repositoriesChanged(repositories,
					repositories);
		}
	}

	/** The model root of an ECP project is the ClassMaker project of the same name. */
	@Override
	public Notifier getRoot(InternalProject project) {
		return Activator.getClassMaker().getWorkspace().getProject(project.getName());
	}

	/**
	 * Registers the EClasses of the given package as visible and propagates the
	 * visibility settings to all open projects. A null package only triggers
	 * propagation.
	 */
	public void addVisiblePackage(InternalProject project, EPackage ePackage) {
		HashSet<String> eClasses = null;
		if (ePackage != null && visiblePackagesToClasses.containsKey(ePackage.getNsURI())) {
			// Rebuild the class list for an already-known package.
			eClasses = visiblePackagesToClasses.get(ePackage.getNsURI());
			eClasses.clear();
		} else
			eClasses = new HashSet<String>();
		if (ePackage != null) {
			for (EClassifier eClass : ePackage.getEClassifiers())
				if (eClass instanceof EClass)
					eClasses.add(eClass.getName());
			visiblePackagesToClasses.put(ePackage.getNsURI(), eClasses);
		}
		propagatePackagesVisibility();
	}

	/**
	 * Pushes the registered visible packages/classes onto every open project,
	 * resolving each nsURI through that project's package registry.
	 */
	private void propagatePackagesVisibility() {
		for (InternalProject project : getOpenProjects())
			for (String ePackageNsURI : visiblePackagesToClasses.keySet()) {
				EPackage eP = project.getEditingDomain().getResourceSet().getPackageRegistry()
						.getEPackage(ePackageNsURI);
				Set<EPackage> ePs = new HashSet<EPackage>();
				ePs.addAll(project.getVisiblePackages());
				ePs.add(eP);
				project.setVisiblePackages(ePs);
				for (Map.Entry<String, HashSet<String>> eClassNames : visiblePackagesToClasses.entrySet())
					if (eClassNames.getKey().equals(ePackageNsURI))
						for (String eClassName : eClassNames.getValue()) {
							Set<EClass> eCls = new HashSet<EClass>();
							eCls.addAll(project.getVisibleEClasses());
							eCls.add((EClass) eP.getEClassifier(eClassName));
							project.setVisibleEClasses(eCls);
						}
			}
	}

	/**
	 * Workspace listener: collects changed/removed EMF resources from the delta and
	 * hands them to the UI provider's {@code IResourceHandler}.
	 *
	 * NOTE(review): projectName is derived from {@code changedResources} only — if a
	 * delta contains removals but no changes, {@code iterator().next()} throws
	 * NoSuchElementException; confirm whether that delta shape can occur here.
	 */
	private class EnterpriseDomainResourceListener implements IResourceChangeListener {

		@Override
		public void resourceChanged(IResourceChangeEvent event) {
			final Collection<Resource> changedResources = new ArrayList<Resource>();
			final Collection<Resource> removedResources = new ArrayList<Resource>();
			final IResourceDelta delta = event.getDelta();
			if (delta == null) {
				return;
			}
			try {
				delta.accept(new EnterpriseDomainEditorResourceDeltaVisitor(removedResources, changedResources));
			} catch (final CoreException ex) {
				Activator.log(ex);
			}
			if (changedResources.isEmpty() && removedResources.isEmpty()) {
				return;
			}
			String projectName = ResourceUtils.parseProjectName(changedResources.iterator().next().getURI());
			ECPProject project = ECPUtil.getECPProjectManager().getProject(projectName);
			if (getUIProvider() != null)
				((IResourceHandler) getUIProvider().getAdapter(this, IResourceHandler.class))
						.handleResourceChange(project, changedResources, removedResources);
		}
	}

	/**
	 * Delta visitor that maps changed/removed workspace files back to EMF Resources
	 * in the ClassMaker workspace ResourceSet, trying a platform-resource URI first
	 * and a resolved file URI as fallback.
	 */
	private final class EnterpriseDomainEditorResourceDeltaVisitor implements IResourceDeltaVisitor {

		private final Collection<Resource> removedResources;
		private final Collection<Resource> changedResources;

		EnterpriseDomainEditorResourceDeltaVisitor(Collection<Resource> removedResources,
				Collection<Resource> changedResources) {
			this.removedResources = removedResources;
			this.changedResources = changedResources;
		}

		@Override
		public boolean visit(final IResourceDelta delta) {
			if (delta.getResource().getType() == IResource.FILE
					&& (delta.getKind() == IResourceDelta.REMOVED || delta.getKind() == IResourceDelta.CHANGED)) {
				final ResourceSet resourceSet = Activator.getClassMaker().getWorkspace().getResourceSet();
				if (resourceSet == null) {
					return false;
				}
				Resource resource = null;
				final URI uri = URI.createPlatformResourceURI(delta.getFullPath().toString(), true);
				resource = resourceSet.getResource(uri, false);
				if (resource == null) {
					// Fall back to a resolved file URI (e.g. for linked resources).
					try {
						final URL fileURL = FileLocator.resolve(new URL(uri.toString()));
						resource = resourceSet.getResource(URI.createFileURI(fileURL.getPath()), false);
					} catch (final IOException ex) {
						return false;
					}
				}
				if (resource != null) {
					if (delta.getKind() == IResourceDelta.REMOVED) {
						removedResources.add(resource);
					} else {
						changedResources.add(resource);
					}
				}
				// Files have no children; stop descending.
				return false;
			}
			return true;
		}
	}

	/**
	 * Content adapter that forwards EMF notifications to provider change listeners
	 * and tells the ECP project which objects changed (structurally on containment
	 * additions, otherwise as a plain update).
	 */
	public class EnterpriseDomainProjectObserver extends EContentAdapter {

		private final InternalProject project;
		private final EnterpriseDomainProvider provider;

		public EnterpriseDomainProjectObserver(InternalProject project, EnterpriseDomainProvider provider) {
			this.project = project;
			this.provider = provider;
		}

		@SuppressWarnings("unchecked")
		@Override
		public void notifyChanged(Notification notification) {
			super.notifyChanged(notification);
			if (notification.getNotifier() instanceof EObject) {
				provider.notifyProviderChangeListeners(notification);
				final EObject eObject = (EObject) notification.getNotifier();
				final Object[] eObjects = new Object[] { eObject };
				project.notifyObjectsChanged((Collection<Object>) Arrays.asList(eObjects), false);
				final Object feature = notification.getFeature();
				// NOTE(review): the EAttribute and EOperation branches are identical to
				// each other and the instanceof EObject guard makes the structural
				// notification unreachable for plain attribute values — verify intent.
				if (feature instanceof EReference) {
					final EReference eReference = (EReference) feature;
					if (eReference.isContainment() && notification.getNewValue() instanceof EObject) {
						project.notifyObjectsChanged(Collections.singleton(notification.getNewValue()), true);
					}
				} else if (feature instanceof EAttribute) {
					final EAttribute eAttribute = (EAttribute) feature;
					if (notification.getNewValue() instanceof EObject) {
						project.notifyObjectsChanged(Collections.singleton(notification.getNewValue()), true);
					}
				} else if (feature instanceof EOperation) {
					final EOperation eOperation = (EOperation) feature;
					if (notification.getNewValue() instanceof EObject) {
						project.notifyObjectsChanged(Collections.singleton(notification.getNewValue()), true);
					}
				}
				return;
			}
			if (notification.getNotifier() instanceof Resource) {
				project.notifyObjectsChanged(
						(Collection<Object>) (Collection<?>) Collections.singleton(notification.getNotifier()), true);
			}
			// Diagnostician.INSTANCE
			// .validate(ClassMakerPlugin.getClassMaker().getWorkspace().getProject(project.getName()));
			// if (notification.getNotifier() instanceof Notifier) {
			// provider.notifyProviderChangeListeners(notification);
			//
			// if (notification.getNewValue() instanceof EObject) {
			// project.notifyObjectsChanged(Collections.singleton(notification.getNewValue()),
			// true);
			// }
			// if ((notification.getEventType() == ((Notification.REMOVE |
			// Notification.REMOVE_MANY)
			// & notification.getEventType())) && notification.getOldValue()
			// instanceof EObject) {
			// Collection<Object> oldObjects = null;
			// if (notification.getOldValue() instanceof Collection<?>)
			// oldObjects = (Collection<Object>) notification.getOldValue();
			// else
			// oldObjects = Collections.singleton(notification.getOldValue());
			// ECPUtil.getECPObserverBus().notify(ECPProjectContentChangedObserver.class).objectsChanged(project,
			// oldObjects);
			// }
			// }
		}
	}

	/**
	 * A project is dirty when the cached command stack needs saving, or — if no
	 * editing domain was created through this provider — when the ClassMaker
	 * project reports itself dirty.
	 */
	@Override
	public boolean isDirty(InternalProject project) {
		if (commandStack != null)
			return commandStack.isSaveNeeded();
		Project domainProject = Activator.getClassMaker().getWorkspace().getProject(project.getName());
		return domainProject.isDirty();
	}

	/**
	 * Saves the ClassMaker project's state resource and marks the command stack saved.
	 */
	@Override
	public void doSave(InternalProject project) {
		// IProgressMonitor monitor = null;
		// try {
		// monitor = getUIProvider().getAdapter(project, IProgressMonitor.class);
		// final Semaphore saved = new Semaphore(0);
		// CompletionListener saveListener = new CompletionListenerImpl() {
		//
		// @Override
		// public void completed(Project result) {
		// saved.release();
		// }
		//
		// };
		ClassMakerService classMaker = Activator.getClassMaker();
		Project domainProject = classMaker.getWorkspace().getProject(project.getName());
		// Blueprint blueprint = classMaker.createBlueprint();
		// blueprint.getCompletionListeners().add(saveListener);
		domainProject.getState().saveResource();
		// blueprint.setDynamicModel(
		// ((Contribution)
		// domainProject).getContribution().getDomainModel().getDynamic());
		// classMaker.make(blueprint, monitor);
		// try {
		// saved.acquire();
		// } catch (InterruptedException e) {
		// e.printStackTrace();
		// if (monitor != null)
		// monitor.setCanceled(true);
		// }
		// domainProject.removeCompletionListener(saveListener);
		((BasicCommandStack) project.getEditingDomain().getCommandStack()).saveIsDone();
		// } catch (CoreException e) {
		// e.printStackTrace();
		// if (monitor != null)
		// monitor.setCanceled(true);
		// }
		super.doSave(project);
	}

	/**
	 * Deletes either the whole ClassMaker project (when {@code objects} is empty) or
	 * just the given objects, executed as an undoable ChangeCommand on the project's
	 * command stack.
	 */
	@Override
	public void doDelete(final InternalProject project, final Collection<Object> objects) {
		final Command changeCommand = new ChangeCommand(project.getEditingDomain().getResourceSet()) {

			@Override
			protected void doExecute() {
				Project domainProject = Activator.getClassMaker().getWorkspace().getProject(project.getName());
				if (objects.isEmpty())
					try {
						domainProject.delete(getUIProvider().getAdapter(project, IProgressMonitor.class));
					} catch (CoreException e) {
						Activator.log(e);
					}
				else
					domainProject.delete(ECollections.asEList(objects));
			}
		};
		if (changeCommand.canExecute()) {
			project.getEditingDomain().getCommandStack().execute(changeCommand);
			return;
		}
	}
}
// Copyright (c) 2004, 2009 Per M.A. Bothner. // This is free software; for terms and warranty disclaimer see ./COPYING. package gnu.kawa.xml; import gnu.xml.*; import gnu.lists.Sequence; /* #ifdef use:org.w3c.dom.Node */ import org.w3c.dom.*; /* #endif */ public class KDocument extends KNode /* #ifdef use:org.w3c.dom.Node */ implements org.w3c.dom.Document /* #endif */ { public KDocument (NodeTree seq, int ipos) { super(seq, ipos); } public String getNodeName() { return "#document"; } /* #ifdef use:org.w3c.dom.Node */ public DOMImplementation getImplementation () { throw new UnsupportedOperationException("getImplementation not implemented"); } public DocumentType getDoctype () { return null; } public Node getParentNode() { return null; } /* #endif */ public KElement getDocumentElement () { int child = ((NodeTree) sequence).posFirstChild(ipos); for (;;) { if (child == -1) return null; if (sequence.getNextKind(child) != Sequence.COMMENT_VALUE) break; child = sequence.nextPos(child); } return (KElement) make((NodeTree) sequence, child); } /* #ifdef use:org.w3c.dom.Node */ public short getNodeType () { return Node.DOCUMENT_NODE; } /* #endif */ public String getNodeValue() { return null; } public String getTextContent () { return null; } protected void getTextContent (StringBuffer sbuf) { // Do nothing. } /* #ifdef use:org.w3c.dom.Node */ /** Not implemented. */ public Element createElement (String tagName) { throw new UnsupportedOperationException("createElement not implemented"); } /** Not implemented. */ public DocumentFragment createDocumentFragment () { throw new UnsupportedOperationException("createDocumentFragment not implemented"); } /** Not implemented. */ public Text createTextNode (String data) { throw new UnsupportedOperationException("createTextNode not implemented"); } /** Not implemented. */ public Comment createComment (String data) { throw new UnsupportedOperationException("createComment not implemented"); } /** Not implemented. 
*/ public CDATASection createCDATASection (String data) { throw new UnsupportedOperationException("createCDATASection not implemented"); } /** Not implemented. */ public ProcessingInstruction createProcessingInstruction (String target, String data) { throw new UnsupportedOperationException("createProcessingInstruction not implemented"); } /** Not implemented. */ public Attr createAttribute (String name) { throw new UnsupportedOperationException("createAttribute not implemented"); } /** Not implemented. */ public EntityReference createEntityReference (String name) { throw new UnsupportedOperationException("createEntityReference implemented"); } /** Not implemented. */ public Node importNode (Node importedNode, boolean deep) { throw new UnsupportedOperationException("importNode not implemented"); } /** Not implemented. */ public Element createElementNS (String namespaceURI, String qualifiedName) { throw new UnsupportedOperationException("createElementNS not implemented"); } /** Not implemented. */ public Attr createAttributeNS (String namespaceURI, String qualifiedName) { throw new UnsupportedOperationException("createAttributeNS not implemented"); } /** Not implemented yet. 
*/ public NodeList getElementsByTagNameNS(String namespaceURI, String localName) { throw new UnsupportedOperationException("getElementsByTagNameNS not implemented yet"); } public Element getElementById (String elementId) { return null; } /* #endif */ public boolean hasAttributes () { return false; } public String getInputEncoding () { return null; } public String getXmlEncoding () { return null; } public boolean getXmlStandalone () { return false; } public void setXmlStandalone (boolean xmlStandalone) { } public String getXmlVersion () { return "1.1"; } public void setXmlVersion (String xmlVersion) { } public boolean getStrictErrorChecking () { return false; } public void setStrictErrorChecking(boolean strictErrorChecking) { } public String getDocumentURI () { return null; } public void setDocumentURI (String documentURI) { } /* #ifdef use:org.w3c.dom.Node */ public Node renameNode (Node n, String namespaceURI, String qualifiedname) throws DOMException { throw new DOMException(DOMException.NOT_SUPPORTED_ERR, "renameNode not implemented"); } public Node adoptNode (Node source) throws DOMException { throw new DOMException(DOMException.NOT_SUPPORTED_ERR, "adoptNode not implemented"); } /* #endif */ public void normalizeDocument () { } /* #ifdef JAXP-1.3 */ public DOMConfiguration getDomConfig () { throw new DOMException(DOMException.NOT_SUPPORTED_ERR, "getDomConfig not implemented"); } /* #endif JAXP-1.3 */ }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dubbo.metadata.annotation.processing.util; import org.apache.dubbo.metadata.annotation.processing.AbstractAnnotationProcessingTest; import org.apache.dubbo.metadata.annotation.processing.model.Color; import org.apache.dubbo.metadata.annotation.processing.model.Model; import org.apache.dubbo.metadata.tools.TestServiceImpl; import org.junit.jupiter.api.Test; import javax.lang.model.element.Element; import javax.lang.model.element.TypeElement; import javax.lang.model.element.VariableElement; import javax.lang.model.type.TypeMirror; import java.lang.reflect.Type; import java.math.BigDecimal; import java.math.BigInteger; import java.util.List; import java.util.Set; import java.util.concurrent.TimeUnit; import static javax.lang.model.element.Modifier.FINAL; import static javax.lang.model.element.Modifier.PRIVATE; import static javax.lang.model.element.Modifier.PUBLIC; import static javax.lang.model.element.Modifier.STATIC; import static org.apache.dubbo.metadata.annotation.processing.util.FieldUtils.findField; import static org.apache.dubbo.metadata.annotation.processing.util.FieldUtils.getAllDeclaredFields; import static 
org.apache.dubbo.metadata.annotation.processing.util.FieldUtils.getAllNonStaticFields; import static org.apache.dubbo.metadata.annotation.processing.util.FieldUtils.getDeclaredField; import static org.apache.dubbo.metadata.annotation.processing.util.FieldUtils.getDeclaredFields; import static org.apache.dubbo.metadata.annotation.processing.util.FieldUtils.getNonStaticFields; import static org.apache.dubbo.metadata.annotation.processing.util.FieldUtils.isEnumMemberField; import static org.apache.dubbo.metadata.annotation.processing.util.FieldUtils.isField; import static org.apache.dubbo.metadata.annotation.processing.util.FieldUtils.isNonStaticField; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; /** * {@link FieldUtils} Test * * @since 2.7.6 */ public class FieldUtilsTest extends AbstractAnnotationProcessingTest { private TypeElement testType; @Override protected void addCompiledClasses(Set<Class<?>> classesToBeCompiled) { } @Override protected void beforeEach() { testType = getType(TestServiceImpl.class); } @Test public void testGetDeclaredFields() { TypeElement type = getType(Model.class); List<VariableElement> fields = getDeclaredFields(type); assertModelFields(fields); fields = getDeclaredFields(type.asType()); assertModelFields(fields); assertTrue(getDeclaredFields((Element) null).isEmpty()); assertTrue(getDeclaredFields((TypeMirror) null).isEmpty()); fields = getDeclaredFields(type, f -> "f".equals(f.getSimpleName().toString())); assertEquals(1, fields.size()); assertEquals("f", fields.get(0).getSimpleName().toString()); } @Test public void testGetAllDeclaredFields() { TypeElement type = getType(Model.class); List<VariableElement> fields = getAllDeclaredFields(type); assertModelAllFields(fields); assertTrue(getAllDeclaredFields((Element) null).isEmpty()); 
assertTrue(getAllDeclaredFields((TypeMirror) null).isEmpty()); fields = getAllDeclaredFields(type, f -> "f".equals(f.getSimpleName().toString())); assertEquals(1, fields.size()); assertEquals("f", fields.get(0).getSimpleName().toString()); } @Test public void testGetDeclaredField() { TypeElement type = getType(Model.class); testGetDeclaredField(type, "f", float.class); testGetDeclaredField(type, "d", double.class); testGetDeclaredField(type, "tu", TimeUnit.class); testGetDeclaredField(type, "str", String.class); testGetDeclaredField(type, "bi", BigInteger.class); testGetDeclaredField(type, "bd", BigDecimal.class); assertNull(getDeclaredField(type, "b")); assertNull(getDeclaredField(type, "s")); assertNull(getDeclaredField(type, "i")); assertNull(getDeclaredField(type, "l")); assertNull(getDeclaredField(type, "z")); assertNull(getDeclaredField((Element) null, "z")); assertNull(getDeclaredField((TypeMirror) null, "z")); } @Test public void testFindField() { TypeElement type = getType(Model.class); testFindField(type, "f", float.class); testFindField(type, "d", double.class); testFindField(type, "tu", TimeUnit.class); testFindField(type, "str", String.class); testFindField(type, "bi", BigInteger.class); testFindField(type, "bd", BigDecimal.class); testFindField(type, "b", byte.class); testFindField(type, "s", short.class); testFindField(type, "i", int.class); testFindField(type, "l", long.class); testFindField(type, "z", boolean.class); assertNull(findField((Element) null, "f")); assertNull(findField((Element) null, null)); assertNull(findField((TypeMirror) null, "f")); assertNull(findField((TypeMirror) null, null)); assertNull(findField(type, null)); assertNull(findField(type.asType(), null)); } @Test public void testIsEnumField() { TypeElement type = getType(Color.class); VariableElement field = findField(type, "RED"); assertTrue(isEnumMemberField(field)); field = findField(type, "YELLOW"); assertTrue(isEnumMemberField(field)); field = findField(type, "BLUE"); 
assertTrue(isEnumMemberField(field)); type = getType(Model.class); field = findField(type, "f"); assertFalse(isEnumMemberField(field)); assertFalse(isEnumMemberField(null)); } @Test public void testIsNonStaticField() { TypeElement type = getType(Model.class); assertTrue(isNonStaticField(findField(type, "f"))); type = getType(Color.class); assertFalse(isNonStaticField(findField(type, "BLUE"))); } @Test public void testIsField() { TypeElement type = getType(Model.class); assertTrue(isField(findField(type, "f"))); assertTrue(isField(findField(type, "f"), PRIVATE)); type = getType(Color.class); assertTrue(isField(findField(type, "BLUE"), PUBLIC, STATIC, FINAL)); assertFalse(isField(null)); assertFalse(isField(null, PUBLIC, STATIC, FINAL)); } @Test public void testGetNonStaticFields() { TypeElement type = getType(Model.class); List<VariableElement> fields = getNonStaticFields(type); assertModelFields(fields); fields = getNonStaticFields(type.asType()); assertModelFields(fields); assertTrue(getAllNonStaticFields((Element) null).isEmpty()); assertTrue(getAllNonStaticFields((TypeMirror) null).isEmpty()); } @Test public void testGetAllNonStaticFields() { TypeElement type = getType(Model.class); List<VariableElement> fields = getAllNonStaticFields(type); assertModelAllFields(fields); fields = getAllNonStaticFields(type.asType()); assertModelAllFields(fields); assertTrue(getAllNonStaticFields((Element) null).isEmpty()); assertTrue(getAllNonStaticFields((TypeMirror) null).isEmpty()); } private void assertModelFields(List<VariableElement> fields) { assertEquals(6, fields.size()); assertEquals("d", fields.get(1).getSimpleName().toString()); assertEquals("tu", fields.get(2).getSimpleName().toString()); assertEquals("str", fields.get(3).getSimpleName().toString()); assertEquals("bi", fields.get(4).getSimpleName().toString()); assertEquals("bd", fields.get(5).getSimpleName().toString()); } private void assertModelAllFields(List<VariableElement> fields) { assertEquals(11, 
fields.size()); assertEquals("f", fields.get(0).getSimpleName().toString()); assertEquals("d", fields.get(1).getSimpleName().toString()); assertEquals("tu", fields.get(2).getSimpleName().toString()); assertEquals("str", fields.get(3).getSimpleName().toString()); assertEquals("bi", fields.get(4).getSimpleName().toString()); assertEquals("bd", fields.get(5).getSimpleName().toString()); assertEquals("b", fields.get(6).getSimpleName().toString()); assertEquals("s", fields.get(7).getSimpleName().toString()); assertEquals("i", fields.get(8).getSimpleName().toString()); assertEquals("l", fields.get(9).getSimpleName().toString()); assertEquals("z", fields.get(10).getSimpleName().toString()); } private void testGetDeclaredField(TypeElement type, String fieldName, Type fieldType) { VariableElement field = getDeclaredField(type, fieldName); assertField(field, fieldName, fieldType); } private void testFindField(TypeElement type, String fieldName, Type fieldType) { VariableElement field = findField(type, fieldName); assertField(field, fieldName, fieldType); } private void assertField(VariableElement field, String fieldName, Type fieldType) { assertEquals(fieldName, field.getSimpleName().toString()); assertEquals(fieldType.getTypeName(), field.asType().toString()); } }
package io.dropwizard.servlets.tasks; import com.codahale.metrics.Meter; import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.Timer; import com.codahale.metrics.annotation.ExceptionMetered; import com.codahale.metrics.annotation.Metered; import com.codahale.metrics.annotation.Timed; import io.dropwizard.util.CharStreams; import io.dropwizard.util.Strings; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.io.InputStreamReader; import java.io.PrintWriter; import java.lang.reflect.Method; import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.Collection; import java.util.Enumeration; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import static com.codahale.metrics.MetricRegistry.name; import static java.util.Objects.requireNonNull; /** * A servlet which provides access to administrative {@link Task}s. It only responds to {@code POST} * requests, since most {@link Task}s aren't side-effect free, and passes along the query string * parameters of the request to the task as a multimap. * * @see Task */ public class TaskServlet extends HttpServlet { private static final long serialVersionUID = 7404713218661358124L; private static final Logger LOGGER = LoggerFactory.getLogger(TaskServlet.class); private static final String DEFAULT_CONTENT_TYPE = "text/plain;charset=UTF-8"; private final ConcurrentMap<String, Task> tasks; private final ConcurrentMap<Task, TaskExecutor> taskExecutors; private final MetricRegistry metricRegistry; /** * Creates a new TaskServlet. 
*/ public TaskServlet(MetricRegistry metricRegistry) { this.metricRegistry = metricRegistry; this.tasks = new ConcurrentHashMap<>(); this.taskExecutors = new ConcurrentHashMap<>(); } public void add(Task task) { tasks.put('/' + task.getName(), task); TaskExecutor taskExecutor = new TaskExecutor(task); try { final Method executeMethod = task.getClass().getMethod("execute", Map.class, PrintWriter.class); if (executeMethod.isAnnotationPresent(Timed.class)) { final Timed annotation = executeMethod.getAnnotation(Timed.class); final String name = chooseName(annotation.name(), annotation.absolute(), task); taskExecutor = new TimedTask(taskExecutor, metricRegistry.timer(name)); } if (executeMethod.isAnnotationPresent(Metered.class)) { final Metered annotation = executeMethod.getAnnotation(Metered.class); final String name = chooseName(annotation.name(), annotation.absolute(), task); taskExecutor = new MeteredTask(taskExecutor, metricRegistry.meter(name)); } if (executeMethod.isAnnotationPresent(ExceptionMetered.class)) { final ExceptionMetered annotation = executeMethod.getAnnotation(ExceptionMetered.class); final String name = chooseName(annotation.name(), annotation.absolute(), task, ExceptionMetered.DEFAULT_NAME_SUFFIX); taskExecutor = new ExceptionMeteredTask(taskExecutor, metricRegistry.meter(name), annotation.cause()); } } catch (NoSuchMethodException ignored) { } taskExecutors.put(task, taskExecutor); } @Override protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { if (Strings.isNullOrEmpty(req.getPathInfo())) { try (final PrintWriter output = resp.getWriter()) { resp.setContentType(DEFAULT_CONTENT_TYPE); getTasks().stream() .map(Task::getName) .sorted() .forEach(output::println); } } else if (tasks.containsKey(req.getPathInfo())) { resp.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED); } else { resp.sendError(HttpServletResponse.SC_NOT_FOUND); } } @Override protected void doPost(HttpServletRequest req, 
HttpServletResponse resp) throws ServletException, IOException { final String pathInfo = req.getPathInfo(); final Task task = pathInfo != null ? tasks.get(pathInfo) : null; if (task != null) { resp.setContentType(task.getResponseContentType().orElse(DEFAULT_CONTENT_TYPE)); final PrintWriter output = resp.getWriter(); try { final TaskExecutor taskExecutor = taskExecutors.get(task); requireNonNull(taskExecutor, "taskExecutor").executeTask(getParams(req), getBody(req), output); } catch (Exception e) { LOGGER.error("Error running {}", task.getName(), e); resp.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); output.println(); output.println(e.getMessage()); e.printStackTrace(output); } finally { output.close(); } } else { resp.sendError(HttpServletResponse.SC_NOT_FOUND); } } private static Map<String, List<String>> getParams(HttpServletRequest req) { final Map<String, List<String>> results = new HashMap<>(); final Enumeration<String> names = req.getParameterNames(); while (names.hasMoreElements()) { final String name = names.nextElement(); final List<String> values = Arrays.asList(req.getParameterValues(name)); results.put(name, values); } return results; } private String getBody(HttpServletRequest req) throws IOException { return CharStreams.toString(new InputStreamReader(req.getInputStream(), StandardCharsets.UTF_8)); } public Collection<Task> getTasks() { return tasks.values(); } private String chooseName(String explicitName, boolean absolute, Task task, String... 
suffixes) { if (explicitName != null && !explicitName.isEmpty()) { if (absolute) { return explicitName; } return name(task.getClass(), explicitName); } return name(task.getClass(), suffixes); } private static class TaskExecutor { private final Task task; private TaskExecutor(Task task) { this.task = task; } public void executeTask(Map<String, List<String>> params, String body, PrintWriter output) throws Exception { if (task instanceof PostBodyTask) { PostBodyTask postBodyTask = (PostBodyTask) task; postBodyTask.execute(params, body, output); } else { task.execute(params, output); } } } private static class TimedTask extends TaskExecutor { private TaskExecutor underlying; private final Timer timer; private TimedTask(TaskExecutor underlying, Timer timer) { super(underlying.task); this.underlying = underlying; this.timer = timer; } @Override public void executeTask(Map<String, List<String>> params, String body, PrintWriter output) throws Exception { final Timer.Context context = timer.time(); try { underlying.executeTask(params, body, output); } finally { context.stop(); } } } private static class MeteredTask extends TaskExecutor { private TaskExecutor underlying; private final Meter meter; private MeteredTask(TaskExecutor underlying, Meter meter) { super(underlying.task); this.meter = meter; this.underlying = underlying; } @Override public void executeTask(Map<String, List<String>> params, String body, PrintWriter output) throws Exception { meter.mark(); underlying.executeTask(params, body, output); } } private static class ExceptionMeteredTask extends TaskExecutor { private TaskExecutor underlying; private final Meter exceptionMeter; private final Class<?> exceptionClass; private ExceptionMeteredTask(TaskExecutor underlying, Meter exceptionMeter, Class<? 
extends Throwable> exceptionClass) { super(underlying.task); this.underlying = underlying; this.exceptionMeter = exceptionMeter; this.exceptionClass = exceptionClass; } private boolean isReallyAssignableFrom(Exception e) { return exceptionClass.isAssignableFrom(e.getClass()) || (e.getCause() != null && exceptionClass.isAssignableFrom(e.getCause().getClass())); } @Override public void executeTask(Map<String, List<String>> params, String body, PrintWriter output) throws Exception { try { underlying.executeTask(params, body, output); } catch (Exception e) { if (exceptionMeter != null && isReallyAssignableFrom(e)) { exceptionMeter.mark(); } else { throw e; } } } } }
package com.wesabe.api.accounts.resources.tests; import static org.fest.assertions.Assertions.*; import static org.mockito.Mockito.*; import static org.junit.Assert.*; import java.util.Currency; import java.util.Locale; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.Response.Status; import org.junit.Before; import org.junit.Test; import org.junit.experimental.runners.Enclosed; import org.junit.runner.RunWith; import org.mockito.Mockito; import com.wesabe.api.accounts.dao.AccountDAO; import com.wesabe.api.accounts.entities.Account; import com.wesabe.api.accounts.entities.AccountStatus; import com.wesabe.api.accounts.entities.InvestmentAccount; import com.wesabe.api.accounts.params.BooleanParam; import com.wesabe.api.accounts.params.CurrencyParam; import com.wesabe.api.accounts.params.IntegerParam; import com.wesabe.api.accounts.presenters.AccountPresenter; import com.wesabe.api.accounts.presenters.InvestmentAccountPresenter; import com.wesabe.api.accounts.resources.AccountResource; import com.wesabe.api.util.auth.WesabeUser; @RunWith(Enclosed.class) public class AccountResourceTest { private static class Setup { protected AccountResource accountResource; protected WesabeUser user; protected AccountDAO accountDAO; protected AccountPresenter accountPresenter; protected InvestmentAccountPresenter investmentAccountPresenter; protected Account account; protected InvestmentAccount investmentAccount; @Before public void setup() { accountDAO = mock(AccountDAO.class); accountPresenter = mock(AccountPresenter.class); investmentAccountPresenter = mock(InvestmentAccountPresenter.class); accountResource = new AccountResource(accountDAO, accountPresenter, investmentAccountPresenter); user = mock(WesabeUser.class); account = mock(Account.class); investmentAccount = mock(InvestmentAccount.class); expectAccount(account); } protected void expectAccount(Account account) { when(accountDAO.findAccount(Mockito.anyString(), eq(1))).thenReturn(account); } } public 
static class Requesting_A_Non_Existent_Account_Id extends Setup { @Before public void setup() { super.setup(); expectAccount(null); } @Test public void itThrowsA404() { try { accountResource.show(user, Locale.ENGLISH, new IntegerParam("1")); fail("Expected 404 to be thrown, but nothing was thrown"); } catch (WebApplicationException e) { assertThat(e.getResponse().getStatus()).isEqualTo(404); } } } public static class Requesting_A_Non_Investment_Account extends Setup { @Before public void setup() { super.setup(); } @Test public void itPresentsTheAccount() { accountResource.show(user, Locale.ENGLISH, new IntegerParam("1")); verify(accountPresenter).present(account, Locale.ENGLISH); } } public static class Requesting_An_Investment_Account extends Setup { @Before public void setup() { super.setup(); expectAccount(investmentAccount); } @Test public void itPresentsTheAccount() { accountResource.show(user, Locale.ENGLISH, new IntegerParam("1")); verify(investmentAccountPresenter).present(investmentAccount, Locale.ENGLISH); } } public static class Updating_An_Account_Name extends Setup { @Before public void setup() { super.setup(); } @Test public void itUpdatesTheAccount() { accountResource.update(user, Locale.ENGLISH, new IntegerParam("1"), "New Name", null, null); verify(account).setName("New Name"); verify(accountDAO).update(account); } } public static class Updating_An_Account_Currency extends Setup { @Before public void setup() { super.setup(); } @Test public void itUpdatesTheCurrency() { accountResource.update(user, Locale.ENGLISH, new IntegerParam("1"), null, new CurrencyParam("USD"), null); verify(account).setCurrency(Currency.getInstance("USD")); verify(accountDAO).update(account); } } public static class Archiving_An_Active_Account extends Setup { @Before public void setup() { super.setup(); when(account.isArchived()).thenReturn(false); when(account.isActive()).thenReturn(true); } @Test public void itArchivesTheAccount() { accountResource.update(user, 
Locale.ENGLISH, new IntegerParam("1"), null, null, new BooleanParam("true")); verify(account).setStatus(AccountStatus.ARCHIVED); verify(accountDAO).update(account); } } public static class Archiving_An_Archived_Account extends Setup { @Before public void setup() { super.setup(); when(account.isArchived()).thenReturn(true); when(account.isActive()).thenReturn(false); } @Test public void itDoesNothing() { accountResource.update(user, Locale.ENGLISH, new IntegerParam("1"), null, null, new BooleanParam("true")); verify(account, never()).setStatus(AccountStatus.ARCHIVED); verify(accountDAO, never()).update(account); } } public static class Unarchiving_An_Archived_Account extends Setup { @Before public void setup() { super.setup(); when(account.isArchived()).thenReturn(true); when(account.isActive()).thenReturn(false); } @Test public void itActivatesTheAccount() { accountResource.update(user, Locale.ENGLISH, new IntegerParam("1"), null, null, new BooleanParam("false")); verify(account).setStatus(AccountStatus.ACTIVE); verify(accountDAO).update(account); } } public static class Unarchiving_An_Active_Account extends Setup { @Before public void setup() { super.setup(); when(account.isArchived()).thenReturn(false); when(account.isActive()).thenReturn(true); } @Test public void itDoesNothing() { accountResource.update(user, Locale.ENGLISH, new IntegerParam("1"), null, null, new BooleanParam("false")); verify(account, never()).setStatus(AccountStatus.ARCHIVED); verify(accountDAO, never()).update(account); } } public static class Unarchiving_A_Non_Active_Non_Archived_Account extends Setup { @Before public void setup() { super.setup(); when(account.isArchived()).thenReturn(false); when(account.isActive()).thenReturn(false); } @Test public void itRespondsWithBadRequest() { try { accountResource.update(user, Locale.ENGLISH, new IntegerParam("1"), null, null, new BooleanParam("false")); fail("Expected Bad Request exception, got none"); } catch (WebApplicationException ex) { 
assertThat(ex.getResponse().getStatus()).isEqualTo(Status.BAD_REQUEST.getStatusCode()); } } } public static class Archiving_A_Non_Active_Non_Archived_Account extends Setup { @Before public void setup() { super.setup(); when(account.isArchived()).thenReturn(false); when(account.isActive()).thenReturn(false); } @Test public void itRespondsWithBadRequest() { try { accountResource.update(user, Locale.ENGLISH, new IntegerParam("1"), null, null, new BooleanParam("true")); fail("Expected Bad Request exception, got none"); } catch (WebApplicationException ex) { assertThat(ex.getResponse().getStatus()).isEqualTo(Status.BAD_REQUEST.getStatusCode()); } } } }
package net.minecraft.entity; import com.google.common.collect.Sets; import java.util.Collection; import java.util.List; import java.util.Set; import net.minecraft.block.Block; import net.minecraft.entity.ai.attributes.IAttributeInstance; import net.minecraft.entity.ai.attributes.ServersideAttributeMap; import net.minecraft.entity.item.EntityArmorStand; import net.minecraft.entity.item.EntityBoat; import net.minecraft.entity.item.EntityEnderCrystal; import net.minecraft.entity.item.EntityEnderEye; import net.minecraft.entity.item.EntityEnderPearl; import net.minecraft.entity.item.EntityExpBottle; import net.minecraft.entity.item.EntityFallingBlock; import net.minecraft.entity.item.EntityFireworkRocket; import net.minecraft.entity.item.EntityItem; import net.minecraft.entity.item.EntityItemFrame; import net.minecraft.entity.item.EntityMinecart; import net.minecraft.entity.item.EntityPainting; import net.minecraft.entity.item.EntityTNTPrimed; import net.minecraft.entity.item.EntityXPOrb; import net.minecraft.entity.passive.IAnimals; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.entity.player.EntityPlayerMP; import net.minecraft.entity.projectile.EntityArrow; import net.minecraft.entity.projectile.EntityEgg; import net.minecraft.entity.projectile.EntityFireball; import net.minecraft.entity.projectile.EntityFishHook; import net.minecraft.entity.projectile.EntityPotion; import net.minecraft.entity.projectile.EntitySmallFireball; import net.minecraft.entity.projectile.EntitySnowball; import net.minecraft.entity.projectile.EntityWitherSkull; import net.minecraft.init.Items; import net.minecraft.item.ItemMap; import net.minecraft.item.ItemStack; import net.minecraft.nbt.NBTTagCompound; import net.minecraft.network.Packet; import net.minecraft.network.play.server.S04PacketEntityEquipment; import net.minecraft.network.play.server.S0APacketUseBed; import net.minecraft.network.play.server.S0CPacketSpawnPlayer; import 
net.minecraft.network.play.server.S0EPacketSpawnObject;
import net.minecraft.network.play.server.S0FPacketSpawnMob;
import net.minecraft.network.play.server.S10PacketSpawnPainting;
import net.minecraft.network.play.server.S11PacketSpawnExperienceOrb;
import net.minecraft.network.play.server.S12PacketEntityVelocity;
import net.minecraft.network.play.server.S14PacketEntity;
import net.minecraft.network.play.server.S18PacketEntityTeleport;
import net.minecraft.network.play.server.S19PacketEntityHeadLook;
import net.minecraft.network.play.server.S1BPacketEntityAttach;
import net.minecraft.network.play.server.S1CPacketEntityMetadata;
import net.minecraft.network.play.server.S1DPacketEntityEffect;
import net.minecraft.network.play.server.S20PacketEntityProperties;
import net.minecraft.network.play.server.S49PacketUpdateEntityNBT;
import net.minecraft.potion.PotionEffect;
import net.minecraft.util.BlockPos;
import net.minecraft.util.MathHelper;
import net.minecraft.world.storage.MapData;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

/**
 * Server-side bookkeeping for one tracked entity: remembers the last state
 * that was broadcast to watching clients and, each tick, sends the minimal
 * set of packets (relative move / look / teleport / velocity / metadata)
 * needed to keep those clients in sync.
 *
 * Positions are encoded as fixed-point 1/32-block units (world coord * 32);
 * rotations as 1/256 of a full turn (degrees * 256 / 360).
 */
public class EntityTrackerEntry
{
    private static final Logger logger = LogManager.getLogger();

    /** The entity that this EntityTrackerEntry tracks. */
    public Entity trackedEntity;

    /** Horizontal range (blocks) within which players receive updates for this entity. */
    public int trackingDistanceThreshold;

    /** check for sync when ticks % updateFrequency==0 */
    public int updateFrequency;

    /** The encoded entity X position. */
    public int encodedPosX;

    /** The encoded entity Y position. */
    public int encodedPosY;

    /** The encoded entity Z position. */
    public int encodedPosZ;

    /** The encoded entity yaw rotation. */
    public int encodedRotationYaw;

    /** The encoded entity pitch rotation. */
    public int encodedRotationPitch;

    /** Last head-yaw value (encoded) that was broadcast via S19PacketEntityHeadLook. */
    public int lastHeadMotion;

    /** Last broadcast X velocity. */
    public double lastTrackedEntityMotionX;

    /** Last broadcast Y velocity. */
    public double lastTrackedEntityMotionY;

    /** Last broadcast Z velocity (name kept from decompiler output). */
    public double motionZ;

    /** Ticks this entry has been updated; drives the % updateFrequency / % 60 / % 10 cadences below. */
    public int updateCounter;

    private double lastTrackedEntityPosX;
    private double lastTrackedEntityPosY;
    private double lastTrackedEntityPosZ;

    /** True once the tracking-player set has been computed at least once. */
    private boolean firstUpdateDone;
    private boolean sendVelocityUpdates;

    /**
     * every 400 ticks a full teleport packet is sent, rather than just a "move me +x" command, so that position
     * remains fully synced.
     */
    private int ticksSinceLastForcedTeleport;

    /** Last-seen ridden entity; used to detect mount/dismount (decompiler name). */
    private Entity field_85178_v;

    /** Whether the last broadcast state had the entity riding another entity. */
    private boolean ridingEntity;

    /** onGround flag as last broadcast to clients. */
    private boolean onGround;

    /** Set when the tracking-player set was recomputed this tick. */
    public boolean playerEntitiesUpdated;

    /** Players currently receiving updates for this entity. */
    public Set<EntityPlayerMP> trackingPlayers = Sets.<EntityPlayerMP>newHashSet();

    public EntityTrackerEntry(Entity trackedEntityIn, int trackingDistanceThresholdIn, int updateFrequencyIn, boolean sendVelocityUpdatesIn)
    {
        this.trackedEntity = trackedEntityIn;
        this.trackingDistanceThreshold = trackingDistanceThresholdIn;
        this.updateFrequency = updateFrequencyIn;
        this.sendVelocityUpdates = sendVelocityUpdatesIn;
        // Seed the encoded state from the entity's current position/rotation
        // so the first tick only sends deltas.
        this.encodedPosX = MathHelper.floor_double(trackedEntityIn.posX * 32.0D);
        this.encodedPosY = MathHelper.floor_double(trackedEntityIn.posY * 32.0D);
        this.encodedPosZ = MathHelper.floor_double(trackedEntityIn.posZ * 32.0D);
        this.encodedRotationYaw = MathHelper.floor_float(trackedEntityIn.rotationYaw * 256.0F / 360.0F);
        this.encodedRotationPitch = MathHelper.floor_float(trackedEntityIn.rotationPitch * 256.0F / 360.0F);
        this.lastHeadMotion = MathHelper.floor_float(trackedEntityIn.getRotationYawHead() * 256.0F / 360.0F);
        this.onGround = trackedEntityIn.onGround;
    }

    // Equality is by tracked entity id only, so one entry per entity can live in a Set.
    public boolean equals(Object p_equals_1_)
    {
        return p_equals_1_ instanceof EntityTrackerEntry ? ((EntityTrackerEntry)p_equals_1_).trackedEntity.getEntityId() == this.trackedEntity.getEntityId() : false;
    }

    public int hashCode()
    {
        return this.trackedEntity.getEntityId();
    }

    /**
     * Per-tick update: refreshes which players track this entity, then broadcasts
     * movement/rotation/velocity/metadata deltas to them.
     *
     * @param p_73122_1_ all players in the world (candidates for tracking)
     */
    public void updatePlayerList(List<EntityPlayer> p_73122_1_)
    {
        this.playerEntitiesUpdated = false;

        // Recompute the tracking set on the first update, or whenever the entity
        // has moved more than 4 blocks (16 = 4^2 squared distance) since the last scan.
        if (!this.firstUpdateDone || this.trackedEntity.getDistanceSq(this.lastTrackedEntityPosX, this.lastTrackedEntityPosY, this.lastTrackedEntityPosZ) > 16.0D)
        {
            this.lastTrackedEntityPosX = this.trackedEntity.posX;
            this.lastTrackedEntityPosY = this.trackedEntity.posY;
            this.lastTrackedEntityPosZ = this.trackedEntity.posZ;
            this.firstUpdateDone = true;
            this.playerEntitiesUpdated = true;
            this.updatePlayerEntities(p_73122_1_);
        }

        // Mount changed, or re-assert the attachment every 60 ticks while mounted.
        if (this.field_85178_v != this.trackedEntity.ridingEntity || this.trackedEntity.ridingEntity != null && this.updateCounter % 60 == 0)
        {
            this.field_85178_v = this.trackedEntity.ridingEntity;
            this.sendPacketToTrackedPlayers(new S1BPacketEntityAttach(0, this.trackedEntity, this.trackedEntity.ridingEntity));
        }

        // Item frames holding maps push map data every 10 ticks. Note: this
        // iterates the full player list passed in, not just trackingPlayers.
        if (this.trackedEntity instanceof EntityItemFrame && this.updateCounter % 10 == 0)
        {
            EntityItemFrame entityitemframe = (EntityItemFrame)this.trackedEntity;
            ItemStack itemstack = entityitemframe.getDisplayedItem();

            if (itemstack != null && itemstack.getItem() instanceof ItemMap)
            {
                MapData mapdata = Items.filled_map.getMapData(itemstack, this.trackedEntity.worldObj);

                for (EntityPlayer entityplayer : p_73122_1_)
                {
                    EntityPlayerMP entityplayermp = (EntityPlayerMP)entityplayer;
                    mapdata.updateVisiblePlayers(entityplayermp, itemstack);
                    Packet packet = Items.filled_map.createMapDataPacket(itemstack, this.trackedEntity.worldObj, entityplayermp);

                    if (packet != null)
                    {
                        entityplayermp.playerNetServerHandler.sendPacket(packet);
                    }
                }
            }

            this.sendMetadataToAllAssociatedPlayers();
        }

        // Movement sync runs on the configured cadence, or immediately when the
        // entity is airborne or its DataWatcher has pending changes.
        if (this.updateCounter % this.updateFrequency == 0 || this.trackedEntity.isAirBorne || this.trackedEntity.getDataWatcher().hasObjectChanged())
        {
            if (this.trackedEntity.ridingEntity == null)
            {
                ++this.ticksSinceLastForcedTeleport;
                // Current state in encoded units.
                int k = MathHelper.floor_double(this.trackedEntity.posX * 32.0D);
                int j1 = MathHelper.floor_double(this.trackedEntity.posY * 32.0D);
                int k1 = MathHelper.floor_double(this.trackedEntity.posZ * 32.0D);
                int l1 = MathHelper.floor_float(this.trackedEntity.rotationYaw * 256.0F / 360.0F);
                int i2 = MathHelper.floor_float(this.trackedEntity.rotationPitch * 256.0F / 360.0F);
                // Deltas against the last broadcast state.
                int j2 = k - this.encodedPosX;
                int k2 = j1 - this.encodedPosY;
                int i = k1 - this.encodedPosZ;
                Packet packet1 = null;
                // Send a move when any axis moved >= 4 encoded units (1/8 block),
                // or unconditionally every 60 ticks as a keep-alive.
                boolean flag = Math.abs(j2) >= 4 || Math.abs(k2) >= 4 || Math.abs(i) >= 4 || this.updateCounter % 60 == 0;
                boolean flag1 = Math.abs(l1 - this.encodedRotationYaw) >= 4 || Math.abs(i2 - this.encodedRotationPitch) >= 4;

                // Arrows are synced even on the very first tick (updateCounter == 0).
                if (this.updateCounter > 0 || this.trackedEntity instanceof EntityArrow)
                {
                    // Relative moves only work while every delta fits a signed byte
                    // and no forced teleport (every 400 ticks) is due.
                    if (j2 >= -128 && j2 < 128 && k2 >= -128 && k2 < 128 && i >= -128 && i < 128 && this.ticksSinceLastForcedTeleport <= 400 && !this.ridingEntity && this.onGround == this.trackedEntity.onGround)
                    {
                        if ((!flag || !flag1) && !(this.trackedEntity instanceof EntityArrow))
                        {
                            if (flag)
                            {
                                packet1 = new S14PacketEntity.S15PacketEntityRelMove(this.trackedEntity, this.trackedEntity.getEntityId(), (byte)j2, (byte)k2, (byte)i, this.trackedEntity.onGround);
                            }
                            else if (flag1)
                            {
                                packet1 = new S14PacketEntity.S16PacketEntityLook(this.trackedEntity.getEntityId(), (byte)l1, (byte)i2, this.trackedEntity.onGround);
                            }
                        }
                        else
                        {
                            // Both position and rotation changed (or entity is an arrow):
                            // combine into a single look-and-move packet.
                            packet1 = new S14PacketEntity.S17PacketEntityLookMove(this.trackedEntity.getEntityId(), (byte)j2, (byte)k2, (byte)i, (byte)l1, (byte)i2, this.trackedEntity.onGround);
                        }
                    }
                    else
                    {
                        // Delta too large / teleport due: resync with an absolute teleport.
                        this.onGround = this.trackedEntity.onGround;
                        this.ticksSinceLastForcedTeleport = 0;
                        packet1 = new S18PacketEntityTeleport(this.trackedEntity.getEntityId(), k, j1, k1, (byte)l1, (byte)i2, this.trackedEntity.onGround);
                    }
                }

                if (this.sendVelocityUpdates)
                {
                    double d0 = this.trackedEntity.motionX - this.lastTrackedEntityMotionX;
                    double d1 = this.trackedEntity.motionY - this.lastTrackedEntityMotionY;
                    double d2 = this.trackedEntity.motionZ - this.motionZ;
                    double d3 = 0.02D;
                    double d4 = d0 * d0 + d1 * d1 + d2 * d2;

                    // Broadcast velocity when it changed noticeably, or when the
                    // entity just came to a complete stop.
                    if (d4 > d3 * d3 || d4 > 0.0D && this.trackedEntity.motionX == 0.0D && this.trackedEntity.motionY == 0.0D && this.trackedEntity.motionZ == 0.0D)
                    {
                        this.lastTrackedEntityMotionX = this.trackedEntity.motionX;
                        this.lastTrackedEntityMotionY = this.trackedEntity.motionY;
                        this.motionZ = this.trackedEntity.motionZ;
                        this.sendPacketToTrackedPlayers(new S12PacketEntityVelocity(this.trackedEntity.getEntityId(), this.lastTrackedEntityMotionX, this.lastTrackedEntityMotionY, this.motionZ));
                    }
                }

                if (packet1 != null)
                {
                    this.sendPacketToTrackedPlayers(packet1);
                }

                this.sendMetadataToAllAssociatedPlayers();

                // Commit the newly broadcast state.
                if (flag)
                {
                    this.encodedPosX = k;
                    this.encodedPosY = j1;
                    this.encodedPosZ = k1;
                }

                if (flag1)
                {
                    this.encodedRotationYaw = l1;
                    this.encodedRotationPitch = i2;
                }

                this.ridingEntity = false;
            }
            else
            {
                // While riding, only rotation is synced; position follows the mount.
                int j = MathHelper.floor_float(this.trackedEntity.rotationYaw * 256.0F / 360.0F);
                int i1 = MathHelper.floor_float(this.trackedEntity.rotationPitch * 256.0F / 360.0F);
                boolean flag2 = Math.abs(j - this.encodedRotationYaw) >= 4 || Math.abs(i1 - this.encodedRotationPitch) >= 4;

                if (flag2)
                {
                    this.sendPacketToTrackedPlayers(new S14PacketEntity.S16PacketEntityLook(this.trackedEntity.getEntityId(), (byte)j, (byte)i1, this.trackedEntity.onGround));
                    this.encodedRotationYaw = j;
                    this.encodedRotationPitch = i1;
                }

                this.encodedPosX = MathHelper.floor_double(this.trackedEntity.posX * 32.0D);
                this.encodedPosY = MathHelper.floor_double(this.trackedEntity.posY * 32.0D);
                this.encodedPosZ = MathHelper.floor_double(this.trackedEntity.posZ * 32.0D);
                this.sendMetadataToAllAssociatedPlayers();
                this.ridingEntity = true;
            }

            // Head yaw is synced separately from body yaw, same 4-step threshold.
            int l = MathHelper.floor_float(this.trackedEntity.getRotationYawHead() * 256.0F / 360.0F);

            if (Math.abs(l - this.lastHeadMotion) >= 4)
            {
                this.sendPacketToTrackedPlayers(new S19PacketEntityHeadLook(this.trackedEntity, (byte)l));
                this.lastHeadMotion = l;
            }

            this.trackedEntity.isAirBorne = false;
        }

        ++this.updateCounter;

        // Explicit velocity change (e.g. knockback) is pushed immediately,
        // including to the tracked player itself.
        if (this.trackedEntity.velocityChanged)
        {
            this.func_151261_b(new S12PacketEntityVelocity(this.trackedEntity));
            this.trackedEntity.velocityChanged = false;
        }
    }

    /**
     * Sends the entity metadata (DataWatcher) and attributes to all players tracking this entity, including the entity
     * itself if a player.
     */
    private void sendMetadataToAllAssociatedPlayers()
    {
        DataWatcher datawatcher = this.trackedEntity.getDataWatcher();

        if (datawatcher.hasObjectChanged())
        {
            this.func_151261_b(new S1CPacketEntityMetadata(this.trackedEntity.getEntityId(), datawatcher, false));
        }

        if (this.trackedEntity instanceof EntityLivingBase)
        {
            ServersideAttributeMap serversideattributemap = (ServersideAttributeMap)((EntityLivingBase)this.trackedEntity).getAttributeMap();
            Set<IAttributeInstance> set = serversideattributemap.getAttributeInstanceSet();

            if (!set.isEmpty())
            {
                this.func_151261_b(new S20PacketEntityProperties(this.trackedEntity.getEntityId(), set));
            }

            // The attribute-instance set is a dirty list; clear after sending.
            set.clear();
        }
    }

    /**
     * Send the given packet to all players tracking this entity.
     */
    public void sendPacketToTrackedPlayers(Packet packetIn)
    {
        for (EntityPlayerMP entityplayermp : this.trackingPlayers)
        {
            entityplayermp.playerNetServerHandler.sendPacket(packetIn);
        }
    }

    /**
     * Like {@link #sendPacketToTrackedPlayers} but also delivers the packet to the
     * tracked entity itself when it is a player (decompiler name).
     */
    public void func_151261_b(Packet packetIn)
    {
        this.sendPacketToTrackedPlayers(packetIn);

        if (this.trackedEntity instanceof EntityPlayerMP)
        {
            ((EntityPlayerMP)this.trackedEntity).playerNetServerHandler.sendPacket(packetIn);
        }
    }

    /** Tells every tracking player to destroy this entity client-side. */
    public void sendDestroyEntityPacketToTrackedPlayers()
    {
        for (EntityPlayerMP entityplayermp : this.trackingPlayers)
        {
            entityplayermp.removeEntity(this.trackedEntity);
        }
    }

    /** Stops sending updates to the given player and destroys the entity on their client. */
    public void removeFromTrackedPlayers(EntityPlayerMP playerMP)
    {
        if (this.trackingPlayers.contains(playerMP))
        {
            playerMP.removeEntity(this.trackedEntity);
            this.trackingPlayers.remove(playerMP);
        }
    }

    /**
     * Adds or removes the given player from this entity's tracking set based on range
     * and chunk visibility. On first sight, sends the full spawn state: spawn packet,
     * metadata, NBT, attributes, velocity, mount/leash attachments, equipment,
     * bed state and potion effects.
     */
    public void updatePlayerEntity(EntityPlayerMP playerMP)
    {
        if (playerMP != this.trackedEntity)
        {
            if (this.func_180233_c(playerMP))
            {
                if (!this.trackingPlayers.contains(playerMP) && (this.isPlayerWatchingThisChunk(playerMP) || this.trackedEntity.forceSpawn))
                {
                    this.trackingPlayers.add(playerMP);
                    Packet packet = this.func_151260_c();
                    playerMP.playerNetServerHandler.sendPacket(packet);

                    if (!this.trackedEntity.getDataWatcher().getIsBlank())
                    {
                        playerMP.playerNetServerHandler.sendPacket(new S1CPacketEntityMetadata(this.trackedEntity.getEntityId(), this.trackedEntity.getDataWatcher(), true));
                    }

                    NBTTagCompound nbttagcompound = this.trackedEntity.getNBTTagCompound();

                    if (nbttagcompound != null)
                    {
                        playerMP.playerNetServerHandler.sendPacket(new S49PacketUpdateEntityNBT(this.trackedEntity.getEntityId(), nbttagcompound));
                    }

                    if (this.trackedEntity instanceof EntityLivingBase)
                    {
                        ServersideAttributeMap serversideattributemap = (ServersideAttributeMap)((EntityLivingBase)this.trackedEntity).getAttributeMap();
                        Collection<IAttributeInstance> collection = serversideattributemap.getWatchedAttributes();

                        if (!collection.isEmpty())
                        {
                            playerMP.playerNetServerHandler.sendPacket(new S20PacketEntityProperties(this.trackedEntity.getEntityId(), collection));
                        }
                    }

                    this.lastTrackedEntityMotionX = this.trackedEntity.motionX;
                    this.lastTrackedEntityMotionY = this.trackedEntity.motionY;
                    this.motionZ = this.trackedEntity.motionZ;

                    // Mob spawn packets already carry velocity, so skip the extra packet.
                    if (this.sendVelocityUpdates && !(packet instanceof S0FPacketSpawnMob))
                    {
                        playerMP.playerNetServerHandler.sendPacket(new S12PacketEntityVelocity(this.trackedEntity.getEntityId(), this.trackedEntity.motionX, this.trackedEntity.motionY, this.trackedEntity.motionZ));
                    }

                    if (this.trackedEntity.ridingEntity != null)
                    {
                        playerMP.playerNetServerHandler.sendPacket(new S1BPacketEntityAttach(0, this.trackedEntity, this.trackedEntity.ridingEntity));
                    }

                    if (this.trackedEntity instanceof EntityLiving && ((EntityLiving)this.trackedEntity).getLeashedToEntity() != null)
                    {
                        playerMP.playerNetServerHandler.sendPacket(new S1BPacketEntityAttach(1, this.trackedEntity, ((EntityLiving)this.trackedEntity).getLeashedToEntity()));
                    }

                    if (this.trackedEntity instanceof EntityLivingBase)
                    {
                        // 5 equipment slots: held item + 4 armor pieces.
                        for (int i = 0; i < 5; ++i)
                        {
                            ItemStack itemstack = ((EntityLivingBase)this.trackedEntity).getEquipmentInSlot(i);

                            if (itemstack != null)
                            {
                                playerMP.playerNetServerHandler.sendPacket(new S04PacketEntityEquipment(this.trackedEntity.getEntityId(), i, itemstack));
                            }
                        }
                    }

                    if (this.trackedEntity instanceof EntityPlayer)
                    {
                        EntityPlayer entityplayer = (EntityPlayer)this.trackedEntity;

                        if (entityplayer.isPlayerSleeping())
                        {
                            playerMP.playerNetServerHandler.sendPacket(new S0APacketUseBed(entityplayer, new BlockPos(this.trackedEntity)));
                        }
                    }

                    if (this.trackedEntity instanceof EntityLivingBase)
                    {
                        EntityLivingBase entitylivingbase = (EntityLivingBase)this.trackedEntity;

                        for (PotionEffect potioneffect : entitylivingbase.getActivePotionEffects())
                        {
                            playerMP.playerNetServerHandler.sendPacket(new S1DPacketEntityEffect(this.trackedEntity.getEntityId(), potioneffect));
                        }
                    }
                }
            }
            else if (this.trackingPlayers.contains(playerMP))
            {
                this.trackingPlayers.remove(playerMP);
                playerMP.removeEntity(this.trackedEntity);
            }
        }
    }

    /**
     * Returns whether the player is within tracking range (square, X/Z only) of the
     * last broadcast position and is allowed to spectate this entity (decompiler name).
     */
    public boolean func_180233_c(EntityPlayerMP playerMP)
    {
        // encodedPos / 32 converts fixed-point back to block coordinates.
        double d0 = playerMP.posX - (double)(this.encodedPosX / 32);
        double d1 = playerMP.posZ - (double)(this.encodedPosZ / 32);
        return d0 >= (double)(-this.trackingDistanceThreshold) && d0 <= (double)this.trackingDistanceThreshold && d1 >= (double)(-this.trackingDistanceThreshold) && d1 <= (double)this.trackingDistanceThreshold && this.trackedEntity.isSpectatedByPlayer(playerMP);
    }

    private boolean isPlayerWatchingThisChunk(EntityPlayerMP playerMP)
    {
        return playerMP.getServerForPlayer().getPlayerManager().isPlayerWatchingChunk(playerMP, this.trackedEntity.chunkCoordX, this.trackedEntity.chunkCoordZ);
    }

    /** Runs {@link #updatePlayerEntity} for every player in the list. */
    public void updatePlayerEntities(List<EntityPlayer> p_73125_1_)
    {
        for (int i = 0; i < p_73125_1_.size(); ++i)
        {
            this.updatePlayerEntity((EntityPlayerMP)p_73125_1_.get(i));
        }
    }

    /**
     * Builds the spawn packet for the tracked entity, dispatching on its concrete
     * type; the integer constants are the protocol's object-type ids (decompiler name).
     *
     * @throws IllegalArgumentException for entity types with no known spawn packet
     */
    private Packet func_151260_c()
    {
        if (this.trackedEntity.isDead)
        {
            logger.warn("Fetching addPacket for removed entity");
        }

        if (this.trackedEntity instanceof EntityItem)
        {
            return new S0EPacketSpawnObject(this.trackedEntity, 2, 1);
        }
        else if (this.trackedEntity instanceof EntityPlayerMP)
        {
            return new S0CPacketSpawnPlayer((EntityPlayer)this.trackedEntity);
        }
        else if (this.trackedEntity instanceof EntityMinecart)
        {
            EntityMinecart entityminecart = (EntityMinecart)this.trackedEntity;
            return new S0EPacketSpawnObject(this.trackedEntity, 10, entityminecart.getMinecartType().getNetworkID());
        }
        else if (this.trackedEntity instanceof EntityBoat)
        {
            return new S0EPacketSpawnObject(this.trackedEntity, 1);
        }
        else if (this.trackedEntity instanceof IAnimals)
        {
            this.lastHeadMotion = MathHelper.floor_float(this.trackedEntity.getRotationYawHead() * 256.0F / 360.0F);
            return new S0FPacketSpawnMob((EntityLivingBase)this.trackedEntity);
        }
        else if (this.trackedEntity instanceof EntityFishHook)
        {
            // Extra data carries the owner's entity id so the client can draw the line.
            Entity entity1 = ((EntityFishHook)this.trackedEntity).angler;
            return new S0EPacketSpawnObject(this.trackedEntity, 90, entity1 != null ? entity1.getEntityId() : this.trackedEntity.getEntityId());
        }
        else if (this.trackedEntity instanceof EntityArrow)
        {
            Entity entity = ((EntityArrow)this.trackedEntity).shootingEntity;
            return new S0EPacketSpawnObject(this.trackedEntity, 60, entity != null ? entity.getEntityId() : this.trackedEntity.getEntityId());
        }
        else if (this.trackedEntity instanceof EntitySnowball)
        {
            return new S0EPacketSpawnObject(this.trackedEntity, 61);
        }
        else if (this.trackedEntity instanceof EntityPotion)
        {
            return new S0EPacketSpawnObject(this.trackedEntity, 73, ((EntityPotion)this.trackedEntity).getPotionDamage());
        }
        else if (this.trackedEntity instanceof EntityExpBottle)
        {
            return new S0EPacketSpawnObject(this.trackedEntity, 75);
        }
        else if (this.trackedEntity instanceof EntityEnderPearl)
        {
            return new S0EPacketSpawnObject(this.trackedEntity, 65);
        }
        else if (this.trackedEntity instanceof EntityEnderEye)
        {
            return new S0EPacketSpawnObject(this.trackedEntity, 72);
        }
        else if (this.trackedEntity instanceof EntityFireworkRocket)
        {
            return new S0EPacketSpawnObject(this.trackedEntity, 76);
        }
        else if (this.trackedEntity instanceof EntityFireball)
        {
            EntityFireball entityfireball = (EntityFireball)this.trackedEntity;
            S0EPacketSpawnObject s0epacketspawnobject2 = null;
            int i = 63;

            if (this.trackedEntity instanceof EntitySmallFireball)
            {
                i = 64;
            }
            else if (this.trackedEntity instanceof EntityWitherSkull)
            {
                i = 66;
            }

            if (entityfireball.shootingEntity != null)
            {
                s0epacketspawnobject2 = new S0EPacketSpawnObject(this.trackedEntity, i, ((EntityFireball)this.trackedEntity).shootingEntity.getEntityId());
            }
            else
            {
                s0epacketspawnobject2 = new S0EPacketSpawnObject(this.trackedEntity, i, 0);
            }

            // Fireball acceleration is transmitted scaled by 8000.
            s0epacketspawnobject2.setSpeedX((int)(entityfireball.accelerationX * 8000.0D));
            s0epacketspawnobject2.setSpeedY((int)(entityfireball.accelerationY * 8000.0D));
            s0epacketspawnobject2.setSpeedZ((int)(entityfireball.accelerationZ * 8000.0D));
            return s0epacketspawnobject2;
        }
        else if (this.trackedEntity instanceof EntityEgg)
        {
            return new S0EPacketSpawnObject(this.trackedEntity, 62);
        }
        else if (this.trackedEntity instanceof EntityTNTPrimed)
        {
            return new S0EPacketSpawnObject(this.trackedEntity, 50);
        }
        else if (this.trackedEntity instanceof EntityEnderCrystal)
        {
            return new S0EPacketSpawnObject(this.trackedEntity, 51);
        }
        else if (this.trackedEntity instanceof EntityFallingBlock)
        {
            EntityFallingBlock entityfallingblock = (EntityFallingBlock)this.trackedEntity;
            return new S0EPacketSpawnObject(this.trackedEntity, 70, Block.getStateId(entityfallingblock.getBlock()));
        }
        else if (this.trackedEntity instanceof EntityArmorStand)
        {
            return new S0EPacketSpawnObject(this.trackedEntity, 78);
        }
        else if (this.trackedEntity instanceof EntityPainting)
        {
            return new S10PacketSpawnPainting((EntityPainting)this.trackedEntity);
        }
        else if (this.trackedEntity instanceof EntityItemFrame)
        {
            EntityItemFrame entityitemframe = (EntityItemFrame)this.trackedEntity;
            S0EPacketSpawnObject s0epacketspawnobject1 = new S0EPacketSpawnObject(this.trackedEntity, 71, entityitemframe.facingDirection.getHorizontalIndex());
            BlockPos blockpos1 = entityitemframe.getHangingPosition();
            // Hanging entities snap to their block position (encoded * 32).
            s0epacketspawnobject1.setX(MathHelper.floor_float((float)(blockpos1.getX() * 32)));
            s0epacketspawnobject1.setY(MathHelper.floor_float((float)(blockpos1.getY() * 32)));
            s0epacketspawnobject1.setZ(MathHelper.floor_float((float)(blockpos1.getZ() * 32)));
            return s0epacketspawnobject1;
        }
        else if (this.trackedEntity instanceof EntityLeashKnot)
        {
            EntityLeashKnot entityleashknot = (EntityLeashKnot)this.trackedEntity;
            S0EPacketSpawnObject s0epacketspawnobject = new S0EPacketSpawnObject(this.trackedEntity, 77);
            BlockPos blockpos = entityleashknot.getHangingPosition();
            s0epacketspawnobject.setX(MathHelper.floor_float((float)(blockpos.getX() * 32)));
            s0epacketspawnobject.setY(MathHelper.floor_float((float)(blockpos.getY() * 32)));
            s0epacketspawnobject.setZ(MathHelper.floor_float((float)(blockpos.getZ() * 32)));
            return s0epacketspawnobject;
        }
        else if (this.trackedEntity instanceof EntityXPOrb)
        {
            return new S11PacketSpawnExperienceOrb((EntityXPOrb)this.trackedEntity);
        }
        else
        {
            throw new IllegalArgumentException("Don\'t know how to add " + this.trackedEntity.getClass() + "!");
        }
    }

    /**
     * Remove a tracked player from our list and tell the tracked player to destroy us from their world.
     */
    public void removeTrackedPlayerSymmetric(EntityPlayerMP playerMP)
    {
        if (this.trackingPlayers.contains(playerMP))
        {
            this.trackingPlayers.remove(playerMP);
            playerMP.removeEntity(this.trackedEntity);
        }
    }
}
/**
 * Copyright (C) 2014 The SciGraph authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.scigraph.internal;

import static com.google.common.collect.Lists.newArrayList;
import static com.google.common.collect.Sets.newHashSet;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.empty;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;

import org.hamcrest.collection.IsIterableContainingInAnyOrder;
import org.hamcrest.collection.IsIterableWithSize;
import org.junit.Before;
import org.junit.Test;
import org.neo4j.graphdb.Label;
import org.neo4j.graphdb.Node;
import org.neo4j.graphdb.PropertyContainer;
import org.neo4j.graphdb.Relationship;
import org.neo4j.graphdb.RelationshipType;
import org.prefixcommons.CurieUtil;

import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.Graph;
import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.blueprints.impls.tg.TinkerGraph;

import io.scigraph.frames.CommonProperties;

/**
 * Unit tests for TinkerGraphUtil: translation of Neo4j nodes/relationships
 * (mocked) into a Blueprints TinkerGraph, property copying, graph merging,
 * typed property access and property projection.
 */
public class TinkerGraphUtilTest {

  // Shared fixtures, re-created for every test in setup().
  TinkerGraph graph;
  Node node, otherNode;
  Relationship relationship;
  CurieUtil curieUtil;

  // Builds a Mockito Node stub with the given id and no properties or labels.
  Node mockNode(long id) {
    Node node = mock(Node.class);
    when(node.getId()).thenReturn(id);
    when(node.getPropertyKeys()).thenReturn(Collections.<String>emptySet());
    when(node.getLabels()).thenReturn(Collections.<Label>emptySet());
    return node;
  }

  // Builds a property-less FOO relationship stub from start to end.
  // NOTE(review): method name has a typo ("Realtionship") — renaming would be
  // a separate refactor since it is referenced from setup().
  Relationship mockRealtionship(Node start, Node end) {
    Relationship r = mock(Relationship.class);
    when(r.getPropertyKeys()).thenReturn(Collections.<String>emptySet());
    when(r.getType()).thenReturn(RelationshipType.withName("FOO"));
    when(r.getStartNode()).thenReturn(start);
    when(r.getEndNode()).thenReturn(end);
    return r;
  }

  @Before
  public void setup() {
    node = mockNode(0L);
    otherNode = mockNode(1L);
    graph = new TinkerGraph();
    relationship = mockRealtionship(node, otherNode);
    // Single-entry prefix map: CURIE prefix "B" <-> IRI base "http://x.org/B_".
    Map<String,String> iri2curie = new HashMap<>();
    iri2curie.put("B", "http://x.org/B_");
    curieUtil = new CurieUtil(iri2curie);
  }

  // Neo4j node ids become string vertex ids.
  @Test
  public void idsAreTranslated() {
    TinkerGraphUtil tgu = new TinkerGraphUtil(graph, curieUtil);
    Vertex v = tgu.addNode(node);
    assertThat(v.getId(), is((Object)"0"));
  }

  // Adding the same node twice must return the same vertex.
  @Test
  public void addNodeIsIdempotent() {
    TinkerGraphUtil tgu = new TinkerGraphUtil(graph, curieUtil);
    Vertex v1 = tgu.addNode(node);
    Vertex v2 = tgu.addNode(node);
    assertThat(v1, is(v2));
  }

  // A node-relationship-node path yields two vertices and one edge.
  @Test
  public void pathsAreTranslated() {
    Iterable<PropertyContainer> path = newArrayList(node, relationship, otherNode);
    TinkerGraphUtil tgu = new TinkerGraphUtil(graph, curieUtil);
    tgu.addPath(path);
    assertThat(graph.getVertices(), is(IsIterableWithSize.<Vertex>iterableWithSize(2)));
    assertThat(graph.getEdges(), is(IsIterableWithSize.<Edge>iterableWithSize(1)));
  }

  // Node properties are carried over onto the vertex.
  @Test
  public void propertiesAreTranslated() {
    when(node.getPropertyKeys()).thenReturn(newHashSet("foo", "baz"));
    when(node.getProperty("foo")).thenReturn("bar");
    when(node.getProperty("baz")).thenReturn(true);
    TinkerGraphUtil tgu = new TinkerGraphUtil(curieUtil);
    Vertex v = tgu.addNode(node);
    assertThat(v.getProperty("foo"), is((Object)"bar"));
    assertThat(v.getProperty("baz"), is((Object)true));
  }

  @Test
  public void properties_areCopied() {
    Vertex v1 = graph.addVertex(1L);
    v1.setProperty("foo", "bar");
    Vertex v2 = graph.addVertex(2L);
    TinkerGraphUtil.copyProperties(v1, v2);
    assertThat((String)v2.getProperty("foo"), is("bar"));
  }

  @SuppressWarnings("unchecked")
  @Test
  public void arrayProperties_areCopied() {
    Vertex v1 = graph.addVertex(1L);
    v1.setProperty("foo", new String[] {"bar", "baz"});
    Vertex v2 = graph.addVertex(2L);
    TinkerGraphUtil.copyProperties(v1, v2);
    // Array-valued source properties arrive as lists on the target.
    assertThat((List<String>)v2.getProperty("foo"), contains("bar", "baz"));
  }

  // Primitive arrays on nodes become boxed lists on vertices.
  @Test
  public void arrayProperties_areMappedToLists() {
    when(node.getPropertyKeys()).thenReturn(newHashSet("foo", "bar"));
    when(node.getProperty("foo")).thenReturn(new String[]{"elt1", "elt2"});
    when(node.getProperty("bar")).thenReturn(new int[]{1,2});
    TinkerGraphUtil tgu = new TinkerGraphUtil(curieUtil);
    Vertex v = tgu.addNode(node);
    assertThat(v.getProperty("foo"), is((Object)newArrayList("elt1", "elt2")));
    assertThat(v.getProperty("bar"), is((Object)newArrayList(1, 2)));
  }

  // Neo4j labels surface as the vertex's "types" property.
  @SuppressWarnings("unchecked")
  @Test
  public void labelsAreTranslated() {
    Label label = Label.label("label");
    when(node.getLabels()).thenReturn(newHashSet(label));
    TinkerGraphUtil tgu = new TinkerGraphUtil(curieUtil);
    Vertex v = tgu.addNode(node);
    assertThat((Iterable<String>)v.getProperty("types"), IsIterableContainingInAnyOrder.containsInAnyOrder("label"));
  }

  @Test
  public void relationshipsAreTranslated() {
    TinkerGraphUtil tgu = new TinkerGraphUtil(curieUtil);
    Vertex u = tgu.addNode(node);
    Vertex v = tgu.addNode(otherNode);
    Relationship relationship = mock(Relationship.class);
    when(relationship.getEndNode()).thenReturn(node);
    when(relationship.getStartNode()).thenReturn(otherNode);
    when(relationship.getType()).thenReturn(RelationshipType.withName("foo"));
    when(relationship.getPropertyKeys()).thenReturn(newHashSet("bar"));
    when(relationship.getProperty("bar")).thenReturn("baz");
    Edge edge = tgu.addEdge(relationship);
    // end node -> IN vertex, start node -> OUT vertex.
    assertThat(edge.getVertex(Direction.IN), is(u));
    assertThat(edge.getVertex(Direction.OUT), is(v));
    assertThat(edge.getLabel(), is("foo"));
    assertThat((String)edge.getProperty("bar"), is("baz"));
    // Adding the same relationship again must be idempotent.
    Edge edge2 = tgu.addEdge(relationship);
    assertThat(edge, is(edge2));
  }

  @Test
  public void graphsAreMerged() {
    TinkerGraph graph1 = new TinkerGraph();
    Vertex g1v1 = graph1.addVertex(0);
    Vertex g1v2 = graph1.addVertex(1);
    Edge g1e1 = graph1.addEdge(0, g1v1, g1v2, "test");
    TinkerGraph graph2 = new TinkerGraph();
    Vertex g2v1 = graph2.addVertex(1);
    Vertex g2v2 = graph2.addVertex(2);
    // NOTE(review): g2e1 is added to graph1, not graph2 — looks like a typo;
    // the assertions below happen to pass either way. Confirm intent.
    Edge g2e1 = graph1.addEdge(1, g2v1, g2v2, "test2");
    TinkerGraphUtil tgu = new TinkerGraphUtil(graph1, curieUtil);
    Graph graph = tgu.combineGraphs(graph2);
    // Vertex id 1 exists in both graphs, so only three distinct vertices remain.
    assertThat(graph.getVertices(), containsInAnyOrder(g1v1, g1v2, g2v2));
    assertThat(graph.getEdges(), containsInAnyOrder(g1e1, g2e1));
  }

  // getProperty returns Optional.empty() for missing keys, a value otherwise.
  @Test
  public void primitivePropertiesAreReturned() {
    TinkerGraph graph = new TinkerGraph();
    Vertex v = graph.addVertex(1);
    assertThat(TinkerGraphUtil.getProperty(v, "foo", String.class), is(Optional.<String>empty()));
    v.setProperty("foo", "bar");
    assertThat(TinkerGraphUtil.getProperty(v, "foo", String.class), is(Optional.of("bar")));
  }

  // getProperties flattens scalar, collection and array values into a collection.
  @Test
  public void collectionsAreReturned() {
    TinkerGraph graph = new TinkerGraph();
    Vertex v = graph.addVertex(1);
    assertThat(TinkerGraphUtil.getProperties(v, "foo", String.class), is(empty()));
    v.setProperty("foo", "bar");
    assertThat(TinkerGraphUtil.getProperties(v, "foo", String.class), contains("bar"));
    v.setProperty("foo", newHashSet("bar", "baz"));
    assertThat(TinkerGraphUtil.getProperties(v, "foo", String.class), containsInAnyOrder("bar", "baz"));
    v.setProperty("foo", new String[] {"bar", "baz"});
    assertThat(TinkerGraphUtil.getProperties(v, "foo", String.class), containsInAnyOrder("bar", "baz"));
  }

  // project() keeps only the named properties plus the IRI key.
  @Test
  public void propertiesProject() {
    TinkerGraph graph = new TinkerGraph();
    Vertex v = graph.addVertex(1);
    v.setProperty(CommonProperties.IRI, "http://x.org/a");
    v.setProperty("foo", "fizz");
    v.setProperty("bar", "baz");
    TinkerGraphUtil tgu = new TinkerGraphUtil(graph, curieUtil);
    tgu.project(newHashSet("foo"));
    assertThat(v.getPropertyKeys(), containsInAnyOrder("foo", CommonProperties.IRI));
  }

  // The "*" wildcard keeps every property.
  @Test
  public void allPropertiesProject() {
    TinkerGraph graph = new TinkerGraph();
    Vertex v = graph.addVertex(1);
    v.setProperty(CommonProperties.IRI, "http://x.org/a");
    v.setProperty("foo", "fizz");
    v.setProperty("bar", "baz");
    TinkerGraphUtil tgu = new TinkerGraphUtil(graph, curieUtil);
    tgu.project(newHashSet("*"));
    assertThat(v.getPropertyKeys(), containsInAnyOrder("foo", "bar", CommonProperties.IRI));
  }
}
package org.andengine.util;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

import android.content.Context;
import android.os.Environment;

/**
 * Static helpers for reading, copying, listing and deleting files on Android
 * internal and external storage. All methods throw {@link IllegalStateException}
 * when external storage is required but not mounted in the needed mode.
 *
 * (c) 2010 Nicolas Gramlich
 * (c) 2011 Zynga Inc.
 *
 * @author Nicolas Gramlich
 * @since 13:53:33 - 20.06.2010
 */
public final class FileUtils {
	// ===========================================================
	// Constants
	// ===========================================================

	// ===========================================================
	// Fields
	// ===========================================================

	// ===========================================================
	// Constructors
	// ===========================================================

	/* Utility class — not instantiable. */
	private FileUtils() {

	}

	// ===========================================================
	// Getter & Setter
	// ===========================================================

	// ===========================================================
	// Methods for/from SuperClass/Interfaces
	// ===========================================================

	// ===========================================================
	// Methods
	// ===========================================================

	/**
	 * Reads the given file fully and returns its contents split into lines.
	 */
	public static String[] readLines(final File pFile) throws IOException {
		return StreamUtils.readLines(new FileInputStream(pFile));
	}

	/**
	 * Copies a raw resource to {@code pFilename} on external storage.
	 */
	public static void copyToExternalStorage(final Context pContext, final int pSourceResourceID, final String pFilename) throws FileNotFoundException {
		FileUtils.copyToExternalStorage(pContext, pContext.getResources().openRawResource(pSourceResourceID), pFilename);
	}

	/**
	 * Copies a raw resource to {@code pFilename} in the app's internal files directory.
	 */
	public static void copyToInternalStorage(final Context pContext, final int pSourceResourceID, final String pFilename) throws FileNotFoundException {
		FileUtils.copyToInternalStorage(pContext, pContext.getResources().openRawResource(pSourceResourceID), pFilename);
	}

	/**
	 * Copies an asset to {@code pFilename} on external storage.
	 */
	public static void copyToExternalStorage(final Context pContext, final String pSourceAssetPath, final String pFilename) throws IOException {
		FileUtils.copyToExternalStorage(pContext, pContext.getAssets().open(pSourceAssetPath), pFilename);
	}

	/**
	 * Copies an asset to {@code pFilename} in the app's internal files directory.
	 */
	public static void copyToInternalStorage(final Context pContext, final String pSourceAssetPath, final String pFilename) throws IOException {
		FileUtils.copyToInternalStorage(pContext, pContext.getAssets().open(pSourceAssetPath), pFilename);
	}

	/**
	 * Drains {@code pInputStream} into a file in the app's internal files
	 * directory; the stream is closed by {@link StreamUtils#copyAndClose}.
	 */
	private static void copyToInternalStorage(final Context pContext, final InputStream pInputStream, final String pFilename) throws FileNotFoundException {
		StreamUtils.copyAndClose(pInputStream, new FileOutputStream(new File(pContext.getFilesDir(), pFilename)));
	}

	/**
	 * Drains {@code pInputStream} to {@code pFilePath} under the external storage root.
	 *
	 * @throws IllegalStateException if external storage is not writeable.
	 */
	public static void copyToExternalStorage(final InputStream pInputStream, final String pFilePath) throws FileNotFoundException {
		if (FileUtils.isExternalStorageWriteable()) {
			final String absoluteFilePath = FileUtils.getAbsolutePathOnExternalStorage(pFilePath);
			StreamUtils.copyAndClose(pInputStream, new FileOutputStream(absoluteFilePath));
		} else {
			throw new IllegalStateException("External Storage is not writeable.");
		}
	}

	/**
	 * Drains {@code pInputStream} to {@code pFilePath} in the app-specific
	 * external storage directory.
	 *
	 * @throws IllegalStateException if external storage is not writeable.
	 */
	public static void copyToExternalStorage(final Context pContext, final InputStream pInputStream, final String pFilePath) throws FileNotFoundException {
		if (FileUtils.isExternalStorageWriteable()) {
			final String absoluteFilePath = FileUtils.getAbsolutePathOnExternalStorage(pContext, pFilePath);
			StreamUtils.copyAndClose(pInputStream, new FileOutputStream(absoluteFilePath));
		} else {
			throw new IllegalStateException("External Storage is not writeable.");
		}
	}

	/**
	 * @return whether a regular file exists at {@code pFilePath} under the external storage root.
	 * @throws IllegalStateException if external storage is not readable.
	 */
	public static boolean isFileExistingOnExternalStorage(final String pFilePath) {
		if (FileUtils.isExternalStorageReadable()) {
			final String absoluteFilePath = FileUtils.getAbsolutePathOnExternalStorage(pFilePath);
			final File file = new File(absoluteFilePath);
			return file.exists() && file.isFile();
		} else {
			throw new IllegalStateException("External Storage is not readable.");
		}
	}

	/**
	 * @return whether a regular file exists at {@code pFilePath} in the app-specific
	 *         external storage directory.
	 * @throws IllegalStateException if external storage is not readable.
	 */
	public static boolean isFileExistingOnExternalStorage(final Context pContext, final String pFilePath) {
		if (FileUtils.isExternalStorageReadable()) {
			final String absoluteFilePath = FileUtils.getAbsolutePathOnExternalStorage(pContext, pFilePath);
			final File file = new File(absoluteFilePath);
			return file.exists() && file.isFile();
		} else {
			throw new IllegalStateException("External Storage is not readable.");
		}
	}

	/**
	 * @return whether a directory exists at {@code pDirectory} in the app-specific
	 *         external storage directory.
	 * @throws IllegalStateException if external storage is not readable.
	 */
	public static boolean isDirectoryExistingOnExternalStorage(final Context pContext, final String pDirectory) {
		if (FileUtils.isExternalStorageReadable()) {
			final String absoluteFilePath = FileUtils.getAbsolutePathOnExternalStorage(pContext, pDirectory);
			final File file = new File(absoluteFilePath);
			return file.exists() && file.isDirectory();
		} else {
			throw new IllegalStateException("External Storage is not readable.");
		}
	}

	/**
	 * Creates {@code pDirectory} (and missing parents) in the app-specific external
	 * storage directory if it doesn't already exist.
	 *
	 * @return <code>true</code> if the directory exists or was created.
	 * @throws IllegalStateException if external storage is not writeable.
	 */
	public static boolean ensureDirectoriesExistOnExternalStorage(final Context pContext, final String pDirectory) {
		if (FileUtils.isDirectoryExistingOnExternalStorage(pContext, pDirectory)) {
			return true;
		}

		if (FileUtils.isExternalStorageWriteable()) {
			final String absoluteDirectoryPath = FileUtils.getAbsolutePathOnExternalStorage(pContext, pDirectory);
			return new File(absoluteDirectoryPath).mkdirs();
		} else {
			throw new IllegalStateException("External Storage is not writeable.");
		}
	}

	/**
	 * Opens {@code pFilePath} under the external storage root for reading.
	 */
	public static InputStream openOnExternalStorage(final String pFilePath) throws FileNotFoundException {
		final String absoluteFilePath = FileUtils.getAbsolutePathOnExternalStorage(pFilePath);
		return new FileInputStream(absoluteFilePath);
	}

	/**
	 * Opens {@code pFilePath} in the app-specific external storage directory for reading.
	 */
	public static InputStream openOnExternalStorage(final Context pContext, final String pFilePath) throws FileNotFoundException {
		final String absoluteFilePath = FileUtils.getAbsolutePathOnExternalStorage(pContext, pFilePath);
		return new FileInputStream(absoluteFilePath);
	}

	/**
	 * Lists the entries of a directory in the app-specific external storage directory.
	 * May return <code>null</code> if the path is not a readable directory (File.list contract).
	 */
	public static String[] getDirectoryListOnExternalStorage(final Context pContext, final String pFilePath) throws FileNotFoundException {
		final String absoluteFilePath = FileUtils.getAbsolutePathOnExternalStorage(pContext, pFilePath);
		return new File(absoluteFilePath).list();
	}

	/**
	 * Lists the entries of a directory in the app-specific external storage directory,
	 * filtered by {@code pFilenameFilter}.
	 * May return <code>null</code> if the path is not a readable directory (File.list contract).
	 */
	public static String[] getDirectoryListOnExternalStorage(final Context pContext, final String pFilePath, final FilenameFilter pFilenameFilter) throws FileNotFoundException {
		final String absoluteFilePath = FileUtils.getAbsolutePathOnExternalStorage(pContext, pFilePath);
		return new File(absoluteFilePath).list(pFilenameFilter);
	}

	/**
	 * @return the internal files directory path with {@code pFilePath} appended.
	 *         NOTE(review): no '/' separator is inserted — callers apparently must
	 *         pass a path starting with '/'; confirm before changing.
	 */
	public static String getAbsolutePathOnInternalStorage(final Context pContext, final String pFilePath) {
		return pContext.getFilesDir().getAbsolutePath() + pFilePath;
	}

	/**
	 * @return {@code pFilePath} resolved against the external storage root.
	 */
	public static String getAbsolutePathOnExternalStorage(final String pFilePath) {
		return Environment.getExternalStorageDirectory() + "/" + pFilePath;
	}

	/**
	 * @return {@code pFilePath} resolved against the app-specific external storage
	 *         directory ({@code Android/data/<package>/files/}).
	 */
	public static String getAbsolutePathOnExternalStorage(final Context pContext, final String pFilePath) {
		return Environment.getExternalStorageDirectory() + "/Android/data/" + pContext.getApplicationInfo().packageName + "/files/" + pFilePath;
	}

	/**
	 * @return <code>true</code> when external storage is mounted read/write.
	 */
	public static boolean isExternalStorageWriteable() {
		return Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED);
	}

	/**
	 * @return <code>true</code> when external storage is mounted (read/write or read-only).
	 */
	public static boolean isExternalStorageReadable() {
		final String state = Environment.getExternalStorageState();
		return state.equals(Environment.MEDIA_MOUNTED) || state.equals(Environment.MEDIA_MOUNTED_READ_ONLY);
	}

	/**
	 * Copies {@code pSourceFile} to {@code pDestinationFile}, closing both streams
	 * even when the copy fails.
	 */
	public static void copyFile(final File pSourceFile, final File pDestinationFile) throws IOException {
		InputStream in = null;
		OutputStream out = null;
		try {
			in = new FileInputStream(pSourceFile);
			out = new FileOutputStream(pDestinationFile);
			StreamUtils.copy(in, out);
		} finally {
			StreamUtils.close(in);
			StreamUtils.close(out);
		}
	}

	/**
	 * Recursively deletes all files and sub-directories under <code>pFileOrDirectory</code>.
	 *
	 * @param pFileOrDirectory
	 * @return <code>true</code>, if all deletions were successful. <code>false</code>, if a deletion fails (the recursion is stopped then).
	 */
	public static boolean delete(final File pFileOrDirectory) {
		if (pFileOrDirectory.isDirectory()) {
			final String[] children = pFileOrDirectory.list();
			/* File.list() returns null on an I/O error or when the path stopped
			 * being a readable directory between the isDirectory() check and this
			 * call — report failure instead of crashing with an NPE. */
			if (children == null) {
				return false;
			}
			final int childCount = children.length;
			for (int i = 0; i < childCount; i++) {
				final boolean success = FileUtils.delete(new File(pFileOrDirectory, children[i]));
				if (!success) {
					return false;
				}
			}
		}

		/* The directory is now empty so delete it. */
		return pFileOrDirectory.delete();
	}

	// ===========================================================
	// Inner and Anonymous Classes
	// ===========================================================
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.mapreduce.v2.app.webapp;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.StringReader;
import java.util.Map;

import javax.ws.rs.core.MediaType;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.MockAppContext;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.junit.Before;
import org.junit.Test;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;

import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.servlet.GuiceServletContextListener;
import com.google.inject.servlet.ServletModule;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.ClientResponse.Status;
import com.sun.jersey.api.client.UniformInterfaceException;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
import com.sun.jersey.test.framework.JerseyTest;
import com.sun.jersey.test.framework.WebAppDescriptor;

/**
 * Test the app master web service Rest API for getting tasks, a specific task,
 * and task counters.
 *
 * /ws/v1/mapreduce/jobs/{jobid}/tasks
 * /ws/v1/mapreduce/jobs/{jobid}/tasks/{taskid}
 * /ws/v1/mapreduce/jobs/{jobid}/tasks/{taskid}/counters
 */
public class TestAMWebServicesTasks extends JerseyTest {

  private static Configuration conf = new Configuration();
  private static AppContext appContext;

  private Injector injector = Guice.createInjector(new ServletModule() {
    @Override
    protected void configureServlets() {
      // Mock context: 1 app attempt, 1 job, 2 tasks (one map, one reduce),
      // 1 attempt per task.
      appContext = new MockAppContext(0, 1, 2, 1);
      bind(JAXBContextResolver.class);
      bind(AMWebServices.class);
      bind(GenericExceptionHandler.class);
      bind(AppContext.class).toInstance(appContext);
      bind(Configuration.class).toInstance(conf);
      serve("/*").with(GuiceContainer.class);
    }
  });

  public class GuiceServletConfig extends GuiceServletContextListener {
    @Override
    protected Injector getInjector() {
      return injector;
    }
  }

  @Before
  @Override
  public void setUp() throws Exception {
    super.setUp();
  }

  public TestAMWebServicesTasks() {
    super(new WebAppDescriptor.Builder(
        "org.apache.hadoop.mapreduce.v2.app.webapp")
        .contextListenerClass(GuiceServletConfig.class)
        .filterClass(com.google.inject.servlet.GuiceFilter.class)
        .contextPath("jersey-guice-filter").servletPath("/").build());
  }

  /** Parses an XML response body into a DOM document. */
  private static Document parseXml(String xml) throws Exception {
    DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
    DocumentBuilder db = dbf.newDocumentBuilder();
    InputSource is = new InputSource();
    is.setCharacterStream(new StringReader(xml));
    return db.parse(is);
  }

  /** Verifies a JSON response carrying the complete task list of a job. */
  private void verifyTaskListResponse(ClientResponse response, Job job)
      throws JSONException {
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject tasks = json.getJSONObject("tasks");
    JSONArray arr = tasks.getJSONArray("task");
    assertEquals("incorrect number of elements", 2, arr.length());
    verifyAMTask(arr, job, null);
  }

  /** Verifies a JSON response carrying a single task object. */
  private void verifySingleTaskResponse(ClientResponse response, Task task)
      throws JSONException {
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject info = json.getJSONObject("task");
    verifyAMSingleTask(info, task);
  }

  /** Verifies a JSON response carrying the counters of a single task. */
  private void verifyTaskCountersResponse(ClientResponse response, Task task)
      throws JSONException {
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject info = json.getJSONObject("jobTaskCounters");
    verifyAMJobTaskCounters(info, task);
  }

  /**
   * Verifies the RemoteException JSON payload produced for an invalid
   * request (shared by all negative tests below).
   */
  private void verifyRemoteException(UniformInterfaceException ue,
      Status expectedStatus, String expectedMessage, String expectedType,
      String expectedClassname) throws JSONException {
    ClientResponse response = ue.getResponse();
    assertEquals(expectedStatus, response.getClientResponseStatus());
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject msg = response.getEntity(JSONObject.class);
    JSONObject exception = msg.getJSONObject("RemoteException");
    assertEquals("incorrect number of elements", 3, exception.length());
    WebServicesTestUtils.checkStringMatch("exception message", expectedMessage,
        exception.getString("message"));
    WebServicesTestUtils.checkStringMatch("exception type", expectedType,
        exception.getString("exception"));
    WebServicesTestUtils.checkStringMatch("exception classname",
        expectedClassname, exception.getString("javaClassName"));
  }

  @Test
  public void testTasks() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      ClientResponse response = r.path("ws").path("v1").path("mapreduce")
          .path("jobs").path(jobId).path("tasks")
          .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
      verifyTaskListResponse(response, jobsMap.get(id));
    }
  }

  @Test
  public void testTasksDefault() throws JSONException, Exception {
    // No Accept header: JSON must be the default media type.
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      ClientResponse response = r.path("ws").path("v1").path("mapreduce")
          .path("jobs").path(jobId).path("tasks").get(ClientResponse.class);
      verifyTaskListResponse(response, jobsMap.get(id));
    }
  }

  @Test
  public void testTasksSlash() throws JSONException, Exception {
    // A trailing slash on the collection URI must be accepted.
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      ClientResponse response = r.path("ws").path("v1").path("mapreduce")
          .path("jobs").path(jobId).path("tasks/")
          .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
      verifyTaskListResponse(response, jobsMap.get(id));
    }
  }

  @Test
  public void testTasksXML() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      ClientResponse response = r.path("ws").path("v1").path("mapreduce")
          .path("jobs").path(jobId).path("tasks")
          .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
      String xml = response.getEntity(String.class);
      Document dom = parseXml(xml);
      NodeList tasks = dom.getElementsByTagName("tasks");
      assertEquals("incorrect number of elements", 1, tasks.getLength());
      NodeList task = dom.getElementsByTagName("task");
      verifyAMTaskXML(task, jobsMap.get(id));
    }
  }

  @Test
  public void testTasksQueryMap() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      String type = "m";
      ClientResponse response = r.path("ws").path("v1").path("mapreduce")
          .path("jobs").path(jobId).path("tasks").queryParam("type", type)
          .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject tasks = json.getJSONObject("tasks");
      JSONArray arr = tasks.getJSONArray("task");
      // Only the single map task matches the filter.
      assertEquals("incorrect number of elements", 1, arr.length());
      verifyAMTask(arr, jobsMap.get(id), type);
    }
  }

  @Test
  public void testTasksQueryReduce() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      String type = "r";
      ClientResponse response = r.path("ws").path("v1").path("mapreduce")
          .path("jobs").path(jobId).path("tasks").queryParam("type", type)
          .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject tasks = json.getJSONObject("tasks");
      JSONArray arr = tasks.getJSONArray("task");
      // Only the single reduce task matches the filter.
      assertEquals("incorrect number of elements", 1, arr.length());
      verifyAMTask(arr, jobsMap.get(id), type);
    }
  }

  @Test
  public void testTasksQueryInvalid() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      // tasktype must be exactly either "m" or "r"
      String tasktype = "reduce";
      try {
        r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
            .path("tasks").queryParam("type", tasktype)
            .accept(MediaType.APPLICATION_JSON).get(JSONObject.class);
        fail("should have thrown exception on invalid uri");
      } catch (UniformInterfaceException ue) {
        verifyRemoteException(ue, Status.BAD_REQUEST,
            "java.lang.Exception: tasktype must be either m or r",
            "BadRequestException",
            "org.apache.hadoop.yarn.webapp.BadRequestException");
      }
    }
  }

  @Test
  public void testTaskId() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {
        String tid = MRApps.toString(task.getID());
        ClientResponse response = r.path("ws").path("v1").path("mapreduce")
            .path("jobs").path(jobId).path("tasks").path(tid)
            .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
        verifySingleTaskResponse(response, task);
      }
    }
  }

  @Test
  public void testTaskIdSlash() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {
        String tid = MRApps.toString(task.getID());
        ClientResponse response = r.path("ws").path("v1").path("mapreduce")
            .path("jobs").path(jobId).path("tasks").path(tid + "/")
            .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
        verifySingleTaskResponse(response, task);
      }
    }
  }

  @Test
  public void testTaskIdDefault() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {
        String tid = MRApps.toString(task.getID());
        ClientResponse response = r.path("ws").path("v1").path("mapreduce")
            .path("jobs").path(jobId).path("tasks").path(tid)
            .get(ClientResponse.class);
        verifySingleTaskResponse(response, task);
      }
    }
  }

  @Test
  public void testTaskIdBogus() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      String tid = "bogustaskid";
      try {
        r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
            .path("tasks").path(tid).get(JSONObject.class);
        fail("should have thrown exception on invalid uri");
      } catch (UniformInterfaceException ue) {
        verifyRemoteException(ue, Status.NOT_FOUND,
            "java.lang.Exception: TaskId string : "
                + "bogustaskid is not properly formed",
            "NotFoundException",
            "org.apache.hadoop.yarn.webapp.NotFoundException");
      }
    }
  }

  @Test
  public void testTaskIdNonExist() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      // Well-formed task id that does not belong to this job.
      String tid = "task_0_0000_m_000000";
      try {
        r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
            .path("tasks").path(tid).get(JSONObject.class);
        fail("should have thrown exception on invalid uri");
      } catch (UniformInterfaceException ue) {
        verifyRemoteException(ue, Status.NOT_FOUND,
            "java.lang.Exception: task not found with id task_0_0000_m_000000",
            "NotFoundException",
            "org.apache.hadoop.yarn.webapp.NotFoundException");
      }
    }
  }

  @Test
  public void testTaskIdInvalid() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      // "d" is not a valid task type identifier.
      String tid = "task_0_0000_d_000000";
      try {
        r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
            .path("tasks").path(tid).get(JSONObject.class);
        fail("should have thrown exception on invalid uri");
      } catch (UniformInterfaceException ue) {
        verifyRemoteException(ue, Status.NOT_FOUND,
            "java.lang.Exception: Bad TaskType identifier. TaskId string : "
                + "task_0_0000_d_000000 is not properly formed.",
            "NotFoundException",
            "org.apache.hadoop.yarn.webapp.NotFoundException");
      }
    }
  }

  @Test
  public void testTaskIdInvalid2() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      // Missing job-id component.
      String tid = "task_0_m_000000";
      try {
        r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
            .path("tasks").path(tid).get(JSONObject.class);
        fail("should have thrown exception on invalid uri");
      } catch (UniformInterfaceException ue) {
        verifyRemoteException(ue, Status.NOT_FOUND,
            "java.lang.Exception: TaskId string : "
                + "task_0_m_000000 is not properly formed",
            "NotFoundException",
            "org.apache.hadoop.yarn.webapp.NotFoundException");
      }
    }
  }

  @Test
  public void testTaskIdInvalid3() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      // Missing task-number component.
      String tid = "task_0_0000_m";
      try {
        r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
            .path("tasks").path(tid).get(JSONObject.class);
        fail("should have thrown exception on invalid uri");
      } catch (UniformInterfaceException ue) {
        verifyRemoteException(ue, Status.NOT_FOUND,
            "java.lang.Exception: TaskId string : "
                + "task_0_0000_m is not properly formed",
            "NotFoundException",
            "org.apache.hadoop.yarn.webapp.NotFoundException");
      }
    }
  }

  @Test
  public void testTaskIdXML() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {
        String tid = MRApps.toString(task.getID());
        ClientResponse response = r.path("ws").path("v1").path("mapreduce")
            .path("jobs").path(jobId).path("tasks").path(tid)
            .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
        assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
        String xml = response.getEntity(String.class);
        Document dom = parseXml(xml);
        NodeList nodes = dom.getElementsByTagName("task");
        for (int i = 0; i < nodes.getLength(); i++) {
          Element element = (Element) nodes.item(i);
          verifyAMSingleTaskXML(element, task);
        }
      }
    }
  }

  /** Verifies all fields of one task JSON object against the model task. */
  public void verifyAMSingleTask(JSONObject info, Task task)
      throws JSONException {
    assertEquals("incorrect number of elements", 9, info.length());
    verifyTaskGeneric(task, info.getString("id"), info.getString("state"),
        info.getString("type"), info.getString("successfulAttempt"),
        info.getLong("startTime"), info.getLong("finishTime"),
        info.getLong("elapsedTime"), (float) info.getDouble("progress"),
        info.getString("status"));
  }

  /**
   * Verifies that every task of {@code job} (optionally restricted to the
   * given type filter) appears in the JSON array and is correct.
   *
   * FIX(review): the original guarded both the search loop and the
   * assertTrue behind {@code type != null && ...}, so with a null type
   * (the unfiltered task-list tests) nothing was ever verified. Tasks not
   * matching a non-null filter are now skipped; all others are verified.
   */
  public void verifyAMTask(JSONArray arr, Job job, String type)
      throws JSONException {
    for (Task task : job.getTasks().values()) {
      TaskId id = task.getID();
      String tid = MRApps.toString(id);
      if (type != null && task.getType() != MRApps.taskType(type)) {
        // A type filter was requested and this task has a different type,
        // so it is expected to be absent from the response.
        continue;
      }
      boolean found = false;
      for (int i = 0; i < arr.length(); i++) {
        JSONObject info = arr.getJSONObject(i);
        // Exact comparison; the original used String.matches (regex) here.
        if (tid.equals(info.getString("id"))) {
          found = true;
          verifyAMSingleTask(info, task);
        }
      }
      assertTrue("task with id: " + tid + " not in web service output", found);
    }
  }

  /** Field-by-field comparison of reported task values against the model. */
  public void verifyTaskGeneric(Task task, String id, String state,
      String type, String successfulAttempt, long startTime, long finishTime,
      long elapsedTime, float progress, String status) {

    TaskId taskid = task.getID();
    String tid = MRApps.toString(taskid);
    TaskReport report = task.getReport();

    WebServicesTestUtils.checkStringMatch("id", tid, id);
    WebServicesTestUtils.checkStringMatch("type", task.getType().toString(),
        type);
    WebServicesTestUtils.checkStringMatch("state", report.getTaskState()
        .toString(), state);
    // not easily checked without duplicating logic, just make sure its here
    assertNotNull("successfulAttempt null", successfulAttempt);
    assertEquals("startTime wrong", report.getStartTime(), startTime);
    assertEquals("finishTime wrong", report.getFinishTime(), finishTime);
    assertEquals("elapsedTime wrong", finishTime - startTime, elapsedTime);
    assertEquals("progress wrong", report.getProgress() * 100, progress, 1e-3f);
    assertEquals("status wrong", report.getStatus(), status);
  }

  /** Verifies all fields of one task XML element against the model task. */
  public void verifyAMSingleTaskXML(Element element, Task task) {
    verifyTaskGeneric(task, WebServicesTestUtils.getXmlString(element, "id"),
        WebServicesTestUtils.getXmlString(element, "state"),
        WebServicesTestUtils.getXmlString(element, "type"),
        WebServicesTestUtils.getXmlString(element, "successfulAttempt"),
        WebServicesTestUtils.getXmlLong(element, "startTime"),
        WebServicesTestUtils.getXmlLong(element, "finishTime"),
        WebServicesTestUtils.getXmlLong(element, "elapsedTime"),
        WebServicesTestUtils.getXmlFloat(element, "progress"),
        WebServicesTestUtils.getXmlString(element, "status"));
  }

  /** Verifies that every task of {@code job} appears in the XML node list. */
  public void verifyAMTaskXML(NodeList nodes, Job job) {
    assertEquals("incorrect number of elements", 2, nodes.getLength());
    for (Task task : job.getTasks().values()) {
      TaskId id = task.getID();
      String tid = MRApps.toString(id);
      boolean found = false;
      for (int i = 0; i < nodes.getLength(); i++) {
        Element element = (Element) nodes.item(i);
        if (tid.equals(WebServicesTestUtils.getXmlString(element, "id"))) {
          found = true;
          verifyAMSingleTaskXML(element, task);
        }
      }
      assertTrue("task with id: " + tid + " not in web service output", found);
    }
  }

  @Test
  public void testTaskIdCounters() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {
        String tid = MRApps.toString(task.getID());
        ClientResponse response = r.path("ws").path("v1").path("mapreduce")
            .path("jobs").path(jobId).path("tasks").path(tid).path("counters")
            .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
        verifyTaskCountersResponse(response, task);
      }
    }
  }

  @Test
  public void testTaskIdCountersSlash() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {
        String tid = MRApps.toString(task.getID());
        ClientResponse response = r.path("ws").path("v1").path("mapreduce")
            .path("jobs").path(jobId).path("tasks").path(tid).path("counters/")
            .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
        verifyTaskCountersResponse(response, task);
      }
    }
  }

  @Test
  public void testTaskIdCountersDefault() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {
        String tid = MRApps.toString(task.getID());
        ClientResponse response = r.path("ws").path("v1").path("mapreduce")
            .path("jobs").path(jobId).path("tasks").path(tid).path("counters")
            .get(ClientResponse.class);
        verifyTaskCountersResponse(response, task);
      }
    }
  }

  @Test
  public void testJobTaskCountersXML() throws Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {
        String tid = MRApps.toString(task.getID());
        ClientResponse response = r.path("ws").path("v1").path("mapreduce")
            .path("jobs").path(jobId).path("tasks").path(tid).path("counters")
            .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
        assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
        String xml = response.getEntity(String.class);
        Document dom = parseXml(xml);
        NodeList info = dom.getElementsByTagName("jobTaskCounters");
        verifyAMTaskCountersXML(info, task);
      }
    }
  }

  /** Sanity-checks the JSON counters of a task (fields present and sane). */
  public void verifyAMJobTaskCounters(JSONObject info, Task task)
      throws JSONException {
    assertEquals("incorrect number of elements", 2, info.length());
    WebServicesTestUtils.checkStringMatch("id", MRApps.toString(task.getID()),
        info.getString("id"));
    // just do simple verification of fields - not data is correct
    // in the fields
    JSONArray counterGroups = info.getJSONArray("taskCounterGroup");
    for (int i = 0; i < counterGroups.length(); i++) {
      JSONObject counterGroup = counterGroups.getJSONObject(i);
      String name = counterGroup.getString("counterGroupName");
      assertTrue("name not set", (name != null && !name.isEmpty()));
      JSONArray counters = counterGroup.getJSONArray("counter");
      for (int j = 0; j < counters.length(); j++) {
        JSONObject counter = counters.getJSONObject(j);
        String counterName = counter.getString("name");
        assertTrue("name not set",
            (counterName != null && !counterName.isEmpty()));
        long value = counter.getLong("value");
        assertTrue("value >= 0", value >= 0);
      }
    }
  }

  /** Sanity-checks the XML counters of a task (fields present and sane). */
  public void verifyAMTaskCountersXML(NodeList nodes, Task task) {
    for (int i = 0; i < nodes.getLength(); i++) {
      Element element = (Element) nodes.item(i);
      WebServicesTestUtils.checkStringMatch("id",
          MRApps.toString(task.getID()),
          WebServicesTestUtils.getXmlString(element, "id"));
      // just do simple verification of fields - not data is correct
      // in the fields
      NodeList groups = element.getElementsByTagName("taskCounterGroup");
      for (int j = 0; j < groups.getLength(); j++) {
        Element counters = (Element) groups.item(j);
        assertNotNull("should have counters in the web service info", counters);
        String name = WebServicesTestUtils.getXmlString(counters,
            "counterGroupName");
        assertTrue("name not set", (name != null && !name.isEmpty()));
        NodeList counterArr = counters.getElementsByTagName("counter");
        for (int z = 0; z < counterArr.getLength(); z++) {
          Element counter = (Element) counterArr.item(z);
          String counterName = WebServicesTestUtils.getXmlString(counter,
              "name");
          assertTrue("counter name not set",
              (counterName != null && !counterName.isEmpty()));
          long value = WebServicesTestUtils.getXmlLong(counter, "value");
          assertTrue("value not >= 0", value >= 0);
        }
      }
    }
  }
}
/* * Copyright 2019 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.spanner.connection; import static com.google.cloud.spanner.connection.AbstractConnectionImplTest.DDL; import static com.google.cloud.spanner.connection.AbstractConnectionImplTest.SELECT; import static com.google.cloud.spanner.connection.AbstractConnectionImplTest.UPDATE; import static com.google.cloud.spanner.connection.AbstractConnectionImplTest.expectSpannerException; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.mockito.Mockito.any; import static org.mockito.Mockito.anyList; import static org.mockito.Mockito.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import com.google.api.core.ApiFuture; import com.google.api.core.ApiFutures; import com.google.api.gax.longrunning.OperationFuture; import com.google.cloud.NoCredentials; import com.google.cloud.Timestamp; import com.google.cloud.spanner.CommitResponse; import com.google.cloud.spanner.CommitStats; import 
com.google.cloud.spanner.DatabaseClient; import com.google.cloud.spanner.ErrorCode; import com.google.cloud.spanner.ForwardingResultSet; import com.google.cloud.spanner.Options; import com.google.cloud.spanner.Options.QueryOption; import com.google.cloud.spanner.ReadContext.QueryAnalyzeMode; import com.google.cloud.spanner.ReadOnlyTransaction; import com.google.cloud.spanner.ResultSet; import com.google.cloud.spanner.Spanner; import com.google.cloud.spanner.SpannerException; import com.google.cloud.spanner.SpannerExceptionFactory; import com.google.cloud.spanner.Statement; import com.google.cloud.spanner.TimestampBound; import com.google.cloud.spanner.TimestampBound.Mode; import com.google.cloud.spanner.TransactionContext; import com.google.cloud.spanner.TransactionManager; import com.google.cloud.spanner.TransactionRunner; import com.google.cloud.spanner.Type; import com.google.cloud.spanner.connection.ConnectionImpl.UnitOfWorkType; import com.google.cloud.spanner.connection.ConnectionStatementExecutorImpl.StatementTimeoutGetter; import com.google.cloud.spanner.connection.ReadOnlyStalenessUtil.GetExactStaleness; import com.google.cloud.spanner.connection.StatementParser.ParsedStatement; import com.google.cloud.spanner.connection.StatementResult.ResultType; import com.google.cloud.spanner.connection.UnitOfWork.UnitOfWorkState; import com.google.spanner.admin.database.v1.UpdateDatabaseDdlMetadata; import com.google.spanner.v1.ExecuteSqlRequest.QueryOptions; import com.google.spanner.v1.ResultSetStats; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @RunWith(JUnit4.class) public class ConnectionImplTest { public static final String URI = 
"cloudspanner:/projects/test-project-123/instances/test-instance/databases/test-database"; static class SimpleTransactionManager implements TransactionManager { private TransactionState state; private CommitResponse commitResponse; private TransactionContext txContext; private final boolean returnCommitStats; private SimpleTransactionManager(TransactionContext txContext, boolean returnCommitStats) { this.txContext = txContext; this.returnCommitStats = returnCommitStats; } @Override public TransactionContext begin() { state = TransactionState.STARTED; return txContext; } @Override public void commit() { Timestamp commitTimestamp = Timestamp.now(); commitResponse = mock(CommitResponse.class); when(commitResponse.getCommitTimestamp()).thenReturn(commitTimestamp); if (returnCommitStats) { CommitStats stats = mock(CommitStats.class); when(commitResponse.hasCommitStats()).thenReturn(true); when(stats.getMutationCount()).thenReturn(5L); when(commitResponse.getCommitStats()).thenReturn(stats); } state = TransactionState.COMMITTED; } @Override public void rollback() { state = TransactionState.ROLLED_BACK; } @Override public TransactionContext resetForRetry() { return txContext; } @Override public Timestamp getCommitTimestamp() { return commitResponse == null ? 
null : commitResponse.getCommitTimestamp(); } @Override public CommitResponse getCommitResponse() { return commitResponse; } @Override public TransactionState getState() { return state; } @Override public void close() { if (state != TransactionState.COMMITTED) { state = TransactionState.ROLLED_BACK; } } } private static class SimpleResultSet extends ForwardingResultSet { private boolean nextCalled = false; private boolean onValidRow = false; private boolean hasNextReturnedFalse = false; SimpleResultSet(ResultSet delegate) { super(delegate); } @Override public boolean next() { nextCalled = true; onValidRow = super.next(); hasNextReturnedFalse = !onValidRow; return onValidRow; } boolean isNextCalled() { return nextCalled; } @Override public ResultSetStats getStats() { if (hasNextReturnedFalse) { return super.getStats(); } return null; } @Override public long getLong(int columnIndex) { if (onValidRow) { return super.getLong(columnIndex); } throw SpannerExceptionFactory.newSpannerException( ErrorCode.FAILED_PRECONDITION, "ResultSet is not positioned on a valid row"); } } private static ResultSet createSelect1MockResultSet() { ResultSet mockResultSet = mock(ResultSet.class); when(mockResultSet.next()).thenReturn(true, false); when(mockResultSet.getLong(0)).thenReturn(1L); when(mockResultSet.getLong("TEST")).thenReturn(1L); when(mockResultSet.getColumnType(0)).thenReturn(Type.int64()); when(mockResultSet.getColumnType("TEST")).thenReturn(Type.int64()); return mockResultSet; } private static DdlClient createDefaultMockDdlClient() { try { DdlClient ddlClient = mock(DdlClient.class); @SuppressWarnings("unchecked") final OperationFuture<Void, UpdateDatabaseDdlMetadata> operation = mock(OperationFuture.class); when(operation.get()).thenReturn(null); UpdateDatabaseDdlMetadata metadata = UpdateDatabaseDdlMetadata.getDefaultInstance(); ApiFuture<UpdateDatabaseDdlMetadata> futureMetadata = ApiFutures.immediateFuture(metadata); 
when(operation.getMetadata()).thenReturn(futureMetadata);
      // Single-statement executeDdl delegates to the real (default) method, which wraps the
      // statement in a list; the list variant returns the pre-completed operation above.
      when(ddlClient.executeDdl(anyString())).thenCallRealMethod();
      when(ddlClient.executeDdl(anyList())).thenReturn(operation);
      return ddlClient;
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Creates a {@link ConnectionImpl} for the given options that is backed entirely by mocks: the
   * {@link Spanner} instance, {@link SpannerPool}, {@link DdlClient} and {@link DatabaseClient}
   * are all Mockito mocks wired to return plausible results for the statements used by these
   * tests (the shared SELECT, UPDATE and DDL statements from AbstractConnectionImplTest).
   */
  public static ConnectionImpl createConnection(final ConnectionOptions options) {
    Spanner spanner = mock(Spanner.class);
    SpannerPool spannerPool = mock(SpannerPool.class);
    when(spannerPool.getSpanner(any(ConnectionOptions.class), any(ConnectionImpl.class)))
        .thenReturn(spanner);
    DdlClient ddlClient = createDefaultMockDdlClient();
    DatabaseClient dbClient = mock(DatabaseClient.class);
    ReadOnlyTransaction singleUseReadOnlyTx = mock(ReadOnlyTransaction.class);

    // A SELECT 1 result set that also reports (default) query stats, used for analyzeQuery.
    ResultSet mockResultSetWithStats = createSelect1MockResultSet();
    when(mockResultSetWithStats.getStats()).thenReturn(ResultSetStats.getDefaultInstance());

    final SimpleResultSet select1ResultSet = new SimpleResultSet(createSelect1MockResultSet());
    final SimpleResultSet select1ResultSetWithStats = new SimpleResultSet(mockResultSetWithStats);
    when(singleUseReadOnlyTx.executeQuery(Statement.of(SELECT)))
        .thenAnswer(
            invocation -> {
              // A SimpleResultSet is one-shot: once next() has been called on the shared
              // instance, hand out a fresh one for subsequent queries.
              if (select1ResultSet.nextCalled) {
                // create a new mock
                return new SimpleResultSet(createSelect1MockResultSet());
              }
              return select1ResultSet;
            });
    when(singleUseReadOnlyTx.analyzeQuery(Statement.of(SELECT), QueryAnalyzeMode.PLAN))
        .thenReturn(select1ResultSetWithStats);
    when(singleUseReadOnlyTx.analyzeQuery(Statement.of(SELECT), QueryAnalyzeMode.PROFILE))
        .thenReturn(select1ResultSetWithStats);
    when(singleUseReadOnlyTx.getReadTimestamp())
        .then(
            invocation -> {
              // A read timestamp is only available after some query has returned data,
              // mirroring real read-only transaction semantics.
              if (select1ResultSet.isNextCalled() || select1ResultSetWithStats.isNextCalled()) {
                return Timestamp.now();
              }
              throw SpannerExceptionFactory.newSpannerException(
                  ErrorCode.FAILED_PRECONDITION, "No query has returned with any data yet");
            });
    when(dbClient.singleUseReadOnlyTransaction(any(TimestampBound.class)))
        .thenReturn(singleUseReadOnlyTx);

    // Read/write transactions started through transactionManager() get a mocked context that
    // supports the shared SELECT/UPDATE statements, managed by SimpleTransactionManager.
    when(dbClient.transactionManager(any()))
        .thenAnswer(
            invocation -> {
              TransactionContext txContext = mock(TransactionContext.class);
              when(txContext.executeQuery(Statement.of(SELECT)))
                  .thenAnswer(
                      ignored -> {
                        if (select1ResultSet.nextCalled) {
                          // create a new mock
                          return new SimpleResultSet(createSelect1MockResultSet());
                        }
                        return select1ResultSet;
                      });
              when(txContext.analyzeQuery(Statement.of(SELECT), QueryAnalyzeMode.PLAN))
                  .thenReturn(select1ResultSetWithStats);
              when(txContext.analyzeQuery(Statement.of(SELECT), QueryAnalyzeMode.PROFILE))
                  .thenReturn(select1ResultSetWithStats);
              when(txContext.executeUpdate(Statement.of(UPDATE))).thenReturn(1L);
              return new SimpleTransactionManager(txContext, options.isReturnCommitStats());
            });
    when(dbClient.readOnlyTransaction(any(TimestampBound.class)))
        .thenAnswer(
            invocation -> {
              ReadOnlyTransaction tx = mock(ReadOnlyTransaction.class);
              when(tx.executeQuery(Statement.of(SELECT)))
                  .thenAnswer(
                      ignored -> {
                        if (select1ResultSet.nextCalled) {
                          // create a new mock
                          return new SimpleResultSet(createSelect1MockResultSet());
                        }
                        return select1ResultSet;
                      });
              when(tx.analyzeQuery(Statement.of(SELECT), QueryAnalyzeMode.PLAN))
                  .thenReturn(select1ResultSetWithStats);
              when(tx.analyzeQuery(Statement.of(SELECT), QueryAnalyzeMode.PROFILE))
                  .thenReturn(select1ResultSetWithStats);
              when(tx.getReadTimestamp())
                  .then(
                      ignored -> {
                        if (select1ResultSet.isNextCalled()
                            || select1ResultSetWithStats.isNextCalled()) {
                          return Timestamp.now();
                        }
                        throw SpannerExceptionFactory.newSpannerException(
                            ErrorCode.FAILED_PRECONDITION,
                            "No query has returned with any data yet");
                      });
              return tx;
            });
    when(dbClient.readWriteTransaction())
        .thenAnswer(
            new Answer<TransactionRunner>() {
              @Override
              public TransactionRunner answer(InvocationOnMock invocation) {
                // Minimal TransactionRunner fake: runs the callable against a mocked context
                // and records a fixed commit response (timestamp 1s/1ns).
                return new TransactionRunner() {
                  private CommitResponse commitResponse;

                  @Override
                  public <T> T run(TransactionCallable<T> callable) {
                    commitResponse = new CommitResponse(Timestamp.ofTimeSecondsAndNanos(1, 1));
                    TransactionContext transaction = mock(TransactionContext.class);
                    when(transaction.executeUpdate(Statement.of(UPDATE))).thenReturn(1L);
                    try {
                      return callable.run(transaction);
                    } catch (Exception e) {
                      throw SpannerExceptionFactory.newSpannerException(e);
                    }
                  }

                  @Override
                  public Timestamp getCommitTimestamp() {
                    return commitResponse == null ? null : commitResponse.getCommitTimestamp();
                  }

                  @Override
                  public CommitResponse getCommitResponse() {
                    return commitResponse;
                  }

                  @Override
                  public TransactionRunner allowNestedTransaction() {
                    return this;
                  }
                };
              }
            });
    return new ConnectionImpl(options, spannerPool, ddlClient, dbClient);
  }

  /** SET AUTOCOMMIT = TRUE on a connection opened with autocommit=false enables autocommit. */
  @Test
  public void testExecuteSetAutocommitOn() {
    try (ConnectionImpl subject =
        createConnection(
            ConnectionOptions.newBuilder()
                .setCredentials(NoCredentials.getInstance())
                .setUri(URI + ";autocommit=false")
                .build())) {
      assertThat(subject.isAutocommit(), is(false));

      StatementResult res = subject.execute(Statement.of("set autocommit = true"));
      assertThat(res.getResultType(), is(equalTo(ResultType.NO_RESULT)));
      assertThat(subject.isAutocommit(), is(true));
    }
  }

  /** SET AUTOCOMMIT = FALSE on a default (autocommit) connection disables autocommit. */
  @Test
  public void testExecuteSetAutocommitOff() {
    try (ConnectionImpl subject =
        createConnection(
            ConnectionOptions.newBuilder()
                .setCredentials(NoCredentials.getInstance())
                .setUri(URI)
                .build())) {
      assertThat(subject.isAutocommit(), is(true));

      StatementResult res = subject.execute(Statement.of("set autocommit = false"));
      assertThat(res.getResultType(), is(equalTo(ResultType.NO_RESULT)));
      assertThat(subject.isAutocommit(), is(false));
    }
  }

  /** SHOW VARIABLE AUTOCOMMIT reflects the current autocommit setting as a result set. */
  @Test
  public void testExecuteGetAutocommit() {
    try (ConnectionImpl subject =
        createConnection(
            ConnectionOptions.newBuilder()
                .setCredentials(NoCredentials.getInstance())
                .setUri(URI)
                .build())) {
      // assert that autocommit is true (default)
      assertThat(subject.isAutocommit(), is(true));
      StatementResult res = subject.execute(Statement.of("show variable autocommit"));
      assertThat(res.getResultType(), is(equalTo(ResultType.RESULT_SET)));
      assertThat(res.getResultSet().next(), is(true));
assertThat(res.getResultSet().getBoolean("AUTOCOMMIT"), is(true));

      // set autocommit to false and assert that autocommit is false
      subject.execute(Statement.of("set autocommit = false"));
      assertThat(subject.isAutocommit(), is(false));
      res = subject.execute(Statement.of("show variable autocommit"));
      assertThat(res.getResultType(), is(equalTo(ResultType.RESULT_SET)));
      assertThat(res.getResultSet().next(), is(true));
      assertThat(res.getResultSet().getBoolean("AUTOCOMMIT"), is(false));
    }
  }

  /** SET READONLY = TRUE on a default (read/write) connection switches it to read-only. */
  @Test
  public void testExecuteSetReadOnlyOn() {
    try (ConnectionImpl subject =
        createConnection(
            ConnectionOptions.newBuilder()
                .setCredentials(NoCredentials.getInstance())
                .setUri(URI)
                .build())) {
      assertThat(subject.isReadOnly(), is(false));

      StatementResult res = subject.execute(Statement.of("set readonly = true"));
      assertThat(res.getResultType(), is(equalTo(ResultType.NO_RESULT)));
      assertThat(subject.isReadOnly(), is(true));
    }
  }

  /** SET READONLY = FALSE on a connection opened with readonly=true switches it to read/write. */
  @Test
  public void testExecuteSetReadOnlyOff() {
    try (ConnectionImpl subject =
        createConnection(
            ConnectionOptions.newBuilder()
                .setCredentials(NoCredentials.getInstance())
                .setUri(URI + ";readonly=true")
                .build())) {
      assertThat(subject.isReadOnly(), is(true));

      StatementResult res = subject.execute(Statement.of("set readonly = false"));
      assertThat(res.getResultType(), is(equalTo(ResultType.NO_RESULT)));
      assertThat(subject.isReadOnly(), is(false));
    }
  }

  /** SHOW VARIABLE READONLY reflects the current read-only setting as a result set. */
  @Test
  public void testExecuteGetReadOnly() {
    try (ConnectionImpl subject =
        createConnection(
            ConnectionOptions.newBuilder()
                .setCredentials(NoCredentials.getInstance())
                .setUri(URI)
                .build())) {
      // assert that read only is false (default)
      assertThat(subject.isReadOnly(), is(false));
      StatementResult res = subject.execute(Statement.of("show variable readonly"));
      assertThat(res.getResultType(), is(equalTo(ResultType.RESULT_SET)));
      assertThat(res.getResultSet().next(), is(true));
      assertThat(res.getResultSet().getBoolean("READONLY"), is(false));

      // set read only to true and assert that read only is true
subject.execute(Statement.of("set readonly = true")); assertThat(subject.isReadOnly(), is(true)); res = subject.execute(Statement.of("show variable readonly")); assertThat(res.getResultType(), is(equalTo(ResultType.RESULT_SET))); assertThat(res.getResultSet().next(), is(true)); assertThat(res.getResultSet().getBoolean("READONLY"), is(true)); } } @Test public void testExecuteSetAutocommitDmlMode() { try (ConnectionImpl subject = createConnection( ConnectionOptions.newBuilder() .setCredentials(NoCredentials.getInstance()) .setUri(URI) .build())) { assertThat(subject.isAutocommit(), is(true)); assertThat(subject.getAutocommitDmlMode(), is(equalTo(AutocommitDmlMode.TRANSACTIONAL))); StatementResult res = subject.execute(Statement.of("set autocommit_dml_mode='PARTITIONED_NON_ATOMIC'")); assertThat(res.getResultType(), is(equalTo(ResultType.NO_RESULT))); assertThat( subject.getAutocommitDmlMode(), is(equalTo(AutocommitDmlMode.PARTITIONED_NON_ATOMIC))); res = subject.execute(Statement.of("set autocommit_dml_mode='TRANSACTIONAL'")); assertThat(res.getResultType(), is(equalTo(ResultType.NO_RESULT))); assertThat(subject.getAutocommitDmlMode(), is(equalTo(AutocommitDmlMode.TRANSACTIONAL))); } } @Test public void testExecuteSetAutocommitDmlModeInvalidValue() { try (ConnectionImpl subject = createConnection( ConnectionOptions.newBuilder() .setCredentials(NoCredentials.getInstance()) .setUri(URI) .build())) { assertThat(subject.isAutocommit(), is(true)); assertThat(subject.getAutocommitDmlMode(), is(equalTo(AutocommitDmlMode.TRANSACTIONAL))); ErrorCode expected = null; try { subject.execute(Statement.of("set autocommit_dml_mode='NON_EXISTENT_VALUE'")); } catch (SpannerException e) { expected = e.getErrorCode(); } assertThat(expected, is(equalTo(ErrorCode.INVALID_ARGUMENT))); } } @Test public void testExecuteGetAutocommitDmlMode() { try (ConnectionImpl subject = createConnection( ConnectionOptions.newBuilder() .setCredentials(NoCredentials.getInstance()) .setUri(URI) .build())) { 
assertThat(subject.isAutocommit(), is(true)); assertThat(subject.getAutocommitDmlMode(), is(equalTo(AutocommitDmlMode.TRANSACTIONAL))); StatementResult res = subject.execute(Statement.of("show variable autocommit_dml_mode")); assertThat(res.getResultType(), is(equalTo(ResultType.RESULT_SET))); assertThat(res.getResultSet().next(), is(true)); assertThat( res.getResultSet().getString("AUTOCOMMIT_DML_MODE"), is(equalTo(AutocommitDmlMode.TRANSACTIONAL.toString()))); subject.execute(Statement.of("set autocommit_dml_mode='PARTITIONED_NON_ATOMIC'")); res = subject.execute(Statement.of("show variable autocommit_dml_mode")); assertThat(res.getResultType(), is(equalTo(ResultType.RESULT_SET))); assertThat(res.getResultSet().next(), is(true)); assertThat( res.getResultSet().getString("AUTOCOMMIT_DML_MODE"), is(equalTo(AutocommitDmlMode.PARTITIONED_NON_ATOMIC.toString()))); } } @Test public void testExecuteSetOptimizerVersion() { try (ConnectionImpl subject = createConnection( ConnectionOptions.newBuilder() .setCredentials(NoCredentials.getInstance()) .setUri(URI) .build())) { assertThat(subject.getOptimizerVersion(), is(equalTo(""))); StatementResult res = subject.execute(Statement.of("set optimizer_version='1'")); assertThat(res.getResultType(), is(equalTo(ResultType.NO_RESULT))); assertThat(subject.getOptimizerVersion(), is(equalTo("1"))); res = subject.execute(Statement.of("set optimizer_version='1000'")); assertThat(res.getResultType(), is(equalTo(ResultType.NO_RESULT))); assertThat(subject.getOptimizerVersion(), is(equalTo("1000"))); res = subject.execute(Statement.of("set optimizer_version='latest'")); assertThat(res.getResultType(), is(equalTo(ResultType.NO_RESULT))); assertThat(subject.getOptimizerVersion(), is(equalTo("latest"))); res = subject.execute(Statement.of("set optimizer_version=''")); assertThat(res.getResultType(), is(equalTo(ResultType.NO_RESULT))); assertThat(subject.getOptimizerVersion(), is(equalTo(""))); } } @Test public void 
testExecuteSetOptimizerVersionInvalidValue() { try (ConnectionImpl subject = createConnection( ConnectionOptions.newBuilder() .setCredentials(NoCredentials.getInstance()) .setUri(URI) .build())) { assertThat(subject.getOptimizerVersion(), is(equalTo(""))); try { subject.execute(Statement.of("set optimizer_version='NOT_A_VERSION'")); fail("Missing expected exception"); } catch (SpannerException e) { assertThat(e.getErrorCode(), is(equalTo(ErrorCode.INVALID_ARGUMENT))); } } } @Test public void testExecuteGetOptimizerVersion() { try (ConnectionImpl subject = createConnection( ConnectionOptions.newBuilder() .setCredentials(NoCredentials.getInstance()) .setUri(URI) .build())) { assertThat(subject.getOptimizerVersion(), is(equalTo(""))); StatementResult res = subject.execute(Statement.of("show variable optimizer_version")); assertThat(res.getResultType(), is(equalTo(ResultType.RESULT_SET))); assertThat(res.getResultSet().next(), is(true)); assertThat(res.getResultSet().getString("OPTIMIZER_VERSION"), is(equalTo(""))); subject.execute(Statement.of("set optimizer_version='1'")); res = subject.execute(Statement.of("show variable optimizer_version")); assertThat(res.getResultType(), is(equalTo(ResultType.RESULT_SET))); assertThat(res.getResultSet().next(), is(true)); assertThat(res.getResultSet().getString("OPTIMIZER_VERSION"), is(equalTo("1"))); } } @Test public void testExecuteSetOptimizerStatisticsPackage() { try (ConnectionImpl subject = createConnection( ConnectionOptions.newBuilder() .setCredentials(NoCredentials.getInstance()) .setUri(URI) .build())) { assertThat(subject.getOptimizerStatisticsPackage(), is(equalTo(""))); StatementResult res = subject.execute(Statement.of("set optimizer_statistics_package='custom-package'")); assertThat(res.getResultType(), is(equalTo(ResultType.NO_RESULT))); assertThat(subject.getOptimizerStatisticsPackage(), is(equalTo("custom-package"))); res = subject.execute(Statement.of("set optimizer_statistics_package=''")); 
assertThat(res.getResultType(), is(equalTo(ResultType.NO_RESULT))); assertThat(subject.getOptimizerStatisticsPackage(), is(equalTo(""))); } } @Test public void testExecuteSetOptimizerStatisticsPackageInvalidValue() { try (ConnectionImpl subject = createConnection( ConnectionOptions.newBuilder() .setCredentials(NoCredentials.getInstance()) .setUri(URI) .build())) { assertThat(subject.getOptimizerVersion(), is(equalTo(""))); try { subject.execute(Statement.of("set optimizer_statistics_package=' '")); fail("Missing expected exception"); } catch (SpannerException e) { assertThat(e.getErrorCode(), is(equalTo(ErrorCode.INVALID_ARGUMENT))); } } } @Test public void testExecuteGetOptimizerStatisticsPackage() { try (ConnectionImpl subject = createConnection( ConnectionOptions.newBuilder() .setCredentials(NoCredentials.getInstance()) .setUri(URI) .build())) { assertThat(subject.getOptimizerStatisticsPackage(), is(equalTo(""))); StatementResult res = subject.execute(Statement.of("show variable optimizer_statistics_package")); assertThat(res.getResultType(), is(equalTo(ResultType.RESULT_SET))); assertThat(res.getResultSet().next(), is(true)); assertThat(res.getResultSet().getString("OPTIMIZER_STATISTICS_PACKAGE"), is(equalTo(""))); subject.execute(Statement.of("set optimizer_statistics_package='custom-package'")); res = subject.execute(Statement.of("show variable optimizer_statistics_package")); assertThat(res.getResultType(), is(equalTo(ResultType.RESULT_SET))); assertThat(res.getResultSet().next(), is(true)); assertThat( res.getResultSet().getString("OPTIMIZER_STATISTICS_PACKAGE"), is(equalTo("custom-package"))); } } @Test public void testExecuteSetReturnCommitStats() { try (ConnectionImpl subject = createConnection( ConnectionOptions.newBuilder() .setCredentials(NoCredentials.getInstance()) .setUri(URI) .build())) { assertFalse(subject.isReturnCommitStats()); StatementResult result = subject.execute(Statement.of("set return_commit_stats=true")); 
assertEquals(ResultType.NO_RESULT, result.getResultType()); assertTrue(subject.isReturnCommitStats()); result = subject.execute(Statement.of("set return_commit_stats=false")); assertEquals(ResultType.NO_RESULT, result.getResultType()); assertFalse(subject.isReturnCommitStats()); } } @Test public void testExecuteSetReturnCommitStatsInvalidValue() { try (ConnectionImpl subject = createConnection( ConnectionOptions.newBuilder() .setCredentials(NoCredentials.getInstance()) .setUri(URI) .build())) { assertFalse(subject.isReturnCommitStats()); try { subject.execute(Statement.of("set return_commit_stats=yes")); fail("Missing expected exception"); } catch (SpannerException e) { assertEquals(ErrorCode.INVALID_ARGUMENT, e.getErrorCode()); } } } @Test public void testExecuteGetReturnCommitStats() { try (ConnectionImpl subject = createConnection( ConnectionOptions.newBuilder() .setCredentials(NoCredentials.getInstance()) .setUri(URI) .build())) { assertFalse(subject.isReturnCommitStats()); StatementResult returnCommitStatsFalse = subject.execute(Statement.of("show variable return_commit_stats")); assertEquals(ResultType.RESULT_SET, returnCommitStatsFalse.getResultType()); assertTrue(returnCommitStatsFalse.getResultSet().next()); assertFalse(returnCommitStatsFalse.getResultSet().getBoolean("RETURN_COMMIT_STATS")); subject.execute(Statement.of("set return_commit_stats=true")); StatementResult returnCommitStatsTrue = subject.execute(Statement.of("show variable return_commit_stats")); assertEquals(ResultType.RESULT_SET, returnCommitStatsTrue.getResultType()); assertTrue(returnCommitStatsTrue.getResultSet().next()); assertTrue(returnCommitStatsTrue.getResultSet().getBoolean("RETURN_COMMIT_STATS")); } } @Test public void testExecuteSetStatementTimeout() { try (ConnectionImpl subject = createConnection( ConnectionOptions.newBuilder() .setCredentials(NoCredentials.getInstance()) .setUri(URI) .build())) { assertThat(subject.getStatementTimeout(TimeUnit.MILLISECONDS), is(equalTo(0L))); 
for (TimeUnit unit : ReadOnlyStalenessUtil.SUPPORTED_UNITS) { for (Long timeout : new Long[] {1L, 100L, 10000L, 315576000000L}) { StatementResult res = subject.execute( Statement.of( String.format( "set statement_timeout='%d%s'", timeout, ReadOnlyStalenessUtil.getTimeUnitAbbreviation(unit)))); assertThat(res.getResultType(), is(equalTo(ResultType.NO_RESULT))); assertThat(subject.getStatementTimeout(unit), is(equalTo(timeout))); assertThat(subject.hasStatementTimeout(), is(true)); StatementResult resNoTimeout = subject.execute(Statement.of("set statement_timeout=null")); assertThat(resNoTimeout.getResultType(), is(equalTo(ResultType.NO_RESULT))); assertThat(subject.getStatementTimeout(unit), is(equalTo(0L))); assertThat(subject.hasStatementTimeout(), is(false)); } } } } @Test public void testExecuteSetStatementTimeoutInvalidValue() { try (ConnectionImpl subject = createConnection( ConnectionOptions.newBuilder() .setCredentials(NoCredentials.getInstance()) .setUri(URI) .build())) { assertThat(subject.getStatementTimeout(TimeUnit.MILLISECONDS), is(equalTo(0L))); ErrorCode expected = null; try { subject.execute(Statement.of("set statement_timeout=-1")); } catch (SpannerException e) { expected = e.getErrorCode(); } assertThat(expected, is(equalTo(ErrorCode.INVALID_ARGUMENT))); } } @Test public void testExecuteGetStatementTimeout() { try (ConnectionImpl subject = createConnection( ConnectionOptions.newBuilder() .setCredentials(NoCredentials.getInstance()) .setUri(URI) .build())) { assertThat(subject.getStatementTimeout(TimeUnit.MILLISECONDS), is(equalTo(0L))); for (TimeUnit unit : ReadOnlyStalenessUtil.SUPPORTED_UNITS) { for (Long timeout : new Long[] {1L, 100L, 10000L, 315576000000L}) { subject.execute( Statement.of( String.format( "set statement_timeout='%d%s'", timeout, ReadOnlyStalenessUtil.getTimeUnitAbbreviation(unit)))); StatementResult res = subject.execute(Statement.of("show variable statement_timeout")); assertThat(res.getResultType(), 
is(equalTo(ResultType.RESULT_SET))); assertThat(res.getResultSet().next(), is(true)); TimeUnit appropriateUnit = ReadOnlyStalenessUtil.getAppropriateTimeUnit(new StatementTimeoutGetter(subject)); assertThat( res.getResultSet().getString("STATEMENT_TIMEOUT"), is( equalTo( subject.getStatementTimeout(appropriateUnit) + ReadOnlyStalenessUtil.getTimeUnitAbbreviation(appropriateUnit)))); subject.execute(Statement.of("set statement_timeout=null")); StatementResult resNoTimeout = subject.execute(Statement.of("show variable statement_timeout")); assertThat(resNoTimeout.getResultType(), is(equalTo(ResultType.RESULT_SET))); assertThat(resNoTimeout.getResultSet().next(), is(true)); assertThat(resNoTimeout.getResultSet().isNull("STATEMENT_TIMEOUT"), is(true)); } } } } @Test public void testExecuteGetReadTimestamp() { try (ConnectionImpl subject = createConnection( ConnectionOptions.newBuilder() .setCredentials(NoCredentials.getInstance()) .setUri(URI) .build())) { subject.beginTransaction(); subject.setTransactionMode(TransactionMode.READ_ONLY_TRANSACTION); subject.executeQuery(Statement.of(AbstractConnectionImplTest.SELECT)); StatementResult res = subject.execute(Statement.of("show variable read_timestamp")); assertThat(res.getResultType(), is(equalTo(ResultType.RESULT_SET))); assertThat(res.getResultSet().next(), is(true)); assertThat(res.getResultSet().getTimestamp("READ_TIMESTAMP"), is(notNullValue())); subject.commit(); } } @Test public void testExecuteGetCommitTimestamp() { try (ConnectionImpl subject = createConnection( ConnectionOptions.newBuilder() .setCredentials(NoCredentials.getInstance()) .setUri(URI) .build())) { subject.beginTransaction(); subject.executeQuery(Statement.of(AbstractConnectionImplTest.SELECT)).next(); subject.commit(); StatementResult res = subject.execute(Statement.of("show variable commit_timestamp")); assertThat(res.getResultType(), is(equalTo(ResultType.RESULT_SET))); assertThat(res.getResultSet().next(), is(true)); 
assertThat(res.getResultSet().getTimestamp("COMMIT_TIMESTAMP"), is(notNullValue())); } } @Test public void testExecuteGetCommitResponse() { try (ConnectionImpl subject = createConnection( ConnectionOptions.newBuilder() .setCredentials(NoCredentials.getInstance()) .setUri(URI) .build())) { subject.beginTransaction(); subject.executeQuery(Statement.of(AbstractConnectionImplTest.SELECT)).next(); subject.commit(); StatementResult response = subject.execute(Statement.of("show variable commit_response")); assertEquals(ResultType.RESULT_SET, response.getResultType()); assertTrue(response.getResultSet().next()); assertNotNull(response.getResultSet().getTimestamp("COMMIT_TIMESTAMP")); assertTrue(response.getResultSet().isNull("MUTATION_COUNT")); assertFalse(response.getResultSet().next()); } try (ConnectionImpl subject = createConnection( ConnectionOptions.newBuilder() .setCredentials(NoCredentials.getInstance()) .setUri(URI + ";returnCommitStats=true") .build())) { subject.beginTransaction(); subject.executeQuery(Statement.of(AbstractConnectionImplTest.SELECT)).next(); subject.commit(); StatementResult response = subject.execute(Statement.of("show variable commit_response")); assertEquals(ResultType.RESULT_SET, response.getResultType()); assertTrue(response.getResultSet().next()); assertNotNull(response.getResultSet().getTimestamp("COMMIT_TIMESTAMP")); assertFalse(response.getResultSet().isNull("MUTATION_COUNT")); assertFalse(response.getResultSet().next()); } } private static final class StalenessDuration { private final long duration; private final TimeUnit unit; private StalenessDuration(long duration, TimeUnit unit) { this.duration = duration; this.unit = unit; } @Override public String toString() { GetExactStaleness getExactStalenessFunction = new GetExactStaleness(TimestampBound.ofExactStaleness(duration, unit)); return ReadOnlyStalenessUtil.durationToString(getExactStalenessFunction); } } @Test public void testExecuteGetReadOnlyStaleness() { Map<TimestampBound.Mode, 
Timestamp> timestamps = new HashMap<>(); timestamps.put(Mode.READ_TIMESTAMP, ReadOnlyStalenessUtil.parseRfc3339("2018-10-08T14:05:10Z")); timestamps.put( Mode.MIN_READ_TIMESTAMP, ReadOnlyStalenessUtil.parseRfc3339("2018-10-08T14:05:10.12345Z")); Map<TimestampBound.Mode, StalenessDuration> durations = new HashMap<>(); durations.put(Mode.EXACT_STALENESS, new StalenessDuration(1000L, TimeUnit.MILLISECONDS)); durations.put(Mode.MAX_STALENESS, new StalenessDuration(1234567L, TimeUnit.MICROSECONDS)); List<TimestampBound> stalenesses = Arrays.asList( TimestampBound.strong(), TimestampBound.ofReadTimestamp(timestamps.get(Mode.READ_TIMESTAMP)), TimestampBound.ofMinReadTimestamp(timestamps.get(Mode.MIN_READ_TIMESTAMP)), TimestampBound.ofExactStaleness( durations.get(Mode.EXACT_STALENESS).duration, durations.get(Mode.EXACT_STALENESS).unit), TimestampBound.ofMaxStaleness( durations.get(Mode.MAX_STALENESS).duration, durations.get(Mode.MAX_STALENESS).unit)); try (ConnectionImpl subject = createConnection( ConnectionOptions.newBuilder() .setCredentials(NoCredentials.getInstance()) .setUri(URI) .build())) { for (TimestampBound staleness : stalenesses) { subject.setReadOnlyStaleness(staleness); StatementResult res = subject.execute(Statement.of("show variable read_only_staleness")); assertThat(res.getResultType(), is(equalTo(ResultType.RESULT_SET))); assertThat(res.getResultSet().next(), is(true)); assertThat( res.getResultSet().getString("READ_ONLY_STALENESS"), is(equalTo(ReadOnlyStalenessUtil.timestampBoundToString(staleness)))); } } } @Test public void testExecuteSetReadOnlyStaleness() { Map<TimestampBound.Mode, Timestamp> timestamps = new HashMap<>(); timestamps.put(Mode.READ_TIMESTAMP, ReadOnlyStalenessUtil.parseRfc3339("2018-10-08T12:13:14Z")); timestamps.put( Mode.MIN_READ_TIMESTAMP, ReadOnlyStalenessUtil.parseRfc3339("2018-10-08T14:13:14.1234+02:00")); Map<TimestampBound.Mode, StalenessDuration> durations = new HashMap<>(); durations.put(Mode.EXACT_STALENESS, new 
StalenessDuration(1000L, TimeUnit.MILLISECONDS)); durations.put(Mode.MAX_STALENESS, new StalenessDuration(1234567L, TimeUnit.MICROSECONDS)); List<TimestampBound> stalenesses = Arrays.asList( TimestampBound.strong(), TimestampBound.ofReadTimestamp(timestamps.get(Mode.READ_TIMESTAMP)), TimestampBound.ofMinReadTimestamp(timestamps.get(Mode.MIN_READ_TIMESTAMP)), TimestampBound.ofExactStaleness( durations.get(Mode.EXACT_STALENESS).duration, durations.get(Mode.EXACT_STALENESS).unit), TimestampBound.ofMaxStaleness( durations.get(Mode.MAX_STALENESS).duration, durations.get(Mode.MAX_STALENESS).unit)); try (ConnectionImpl subject = createConnection( ConnectionOptions.newBuilder() .setCredentials(NoCredentials.getInstance()) .setUri(URI) .build())) { for (TimestampBound staleness : stalenesses) { StatementResult res = subject.execute( Statement.of( String.format( "set read_only_staleness='%s'", ReadOnlyStalenessUtil.timestampBoundToString(staleness)))); assertThat(res.getResultType(), is(equalTo(ResultType.NO_RESULT))); assertThat(subject.getReadOnlyStaleness(), is(equalTo(staleness))); } } } @Test public void testExecuteBeginTransaction() { try (ConnectionImpl subject = createConnection( ConnectionOptions.newBuilder() .setCredentials(NoCredentials.getInstance()) .setUri(URI) .build())) { assertThat(subject.isInTransaction(), is(false)); StatementResult res = subject.execute(Statement.of("begin transaction")); assertThat(res.getResultType(), is(equalTo(ResultType.NO_RESULT))); assertThat(subject.isInTransaction(), is(true)); } } @Test public void testExecuteCommitTransaction() { try (ConnectionImpl subject = createConnection( ConnectionOptions.newBuilder() .setCredentials(NoCredentials.getInstance()) .setUri(URI) .build())) { subject.execute(Statement.of("begin transaction")); assertThat(subject.isInTransaction(), is(true)); StatementResult res = subject.execute(Statement.of("commit")); assertThat(res.getResultType(), is(equalTo(ResultType.NO_RESULT))); 
assertThat(subject.isInTransaction(), is(false)); } } @Test public void testExecuteRollbackTransaction() { try (ConnectionImpl subject = createConnection( ConnectionOptions.newBuilder() .setCredentials(NoCredentials.getInstance()) .setUri(URI) .build())) { subject.execute(Statement.of("begin")); assertThat(subject.isInTransaction(), is(true)); StatementResult res = subject.execute(Statement.of("rollback")); assertThat(res.getResultType(), is(equalTo(ResultType.NO_RESULT))); assertThat(subject.isInTransaction(), is(false)); } } @Test public void testExecuteSetTransactionReadOnly() { try (ConnectionImpl subject = createConnection( ConnectionOptions.newBuilder() .setCredentials(NoCredentials.getInstance()) .setUri(URI) .build())) { subject.execute(Statement.of("begin")); assertThat(subject.getTransactionMode(), is(equalTo(TransactionMode.READ_WRITE_TRANSACTION))); assertThat(subject.isInTransaction(), is(true)); StatementResult res = subject.execute(Statement.of("set transaction read only")); assertThat(res.getResultType(), is(equalTo(ResultType.NO_RESULT))); assertThat(subject.getTransactionMode(), is(equalTo(TransactionMode.READ_ONLY_TRANSACTION))); } } @Test public void testExecuteSetTransactionReadWrite() { try (ConnectionImpl subject = createConnection( ConnectionOptions.newBuilder() .setCredentials(NoCredentials.getInstance()) .setUri(URI + ";readonly=true") .build())) { subject.execute(Statement.of("begin")); assertThat(subject.getTransactionMode(), is(equalTo(TransactionMode.READ_ONLY_TRANSACTION))); assertThat(subject.isInTransaction(), is(true)); // end the current temporary transaction and turn off read-only mode subject.execute(Statement.of("commit")); subject.execute(Statement.of("set readonly = false")); subject.execute(Statement.of("begin")); StatementResult res = subject.execute(Statement.of("set transaction read only")); assertThat(res.getResultType(), is(equalTo(ResultType.NO_RESULT))); assertThat(subject.getTransactionMode(), 
is(equalTo(TransactionMode.READ_ONLY_TRANSACTION))); res = subject.execute(Statement.of("set transaction read write")); assertThat(res.getResultType(), is(equalTo(ResultType.NO_RESULT))); assertThat(subject.getTransactionMode(), is(equalTo(TransactionMode.READ_WRITE_TRANSACTION))); } } @Test public void testExecuteStartDdlBatch() { try (ConnectionImpl subject = createConnection( ConnectionOptions.newBuilder() .setCredentials(NoCredentials.getInstance()) .setUri(URI) .build())) { StatementResult res = subject.execute(Statement.of("start batch ddl")); assertThat(res.getResultType(), is(equalTo(ResultType.NO_RESULT))); assertThat(subject.getUnitOfWorkType(), is(equalTo(UnitOfWorkType.DDL_BATCH))); assertThat(subject.isInTransaction(), is(false)); } } @Test public void testDefaultIsAutocommit() { try (ConnectionImpl subject = createConnection( ConnectionOptions.newBuilder() .setCredentials(NoCredentials.getInstance()) .setUri(URI) .build())) { assertThat(subject.isAutocommit(), is(true)); assertThat(subject.isInTransaction(), is(false)); } } @Test public void testDefaultIsReadWrite() { try (ConnectionImpl subject = createConnection( ConnectionOptions.newBuilder() .setCredentials(NoCredentials.getInstance()) .setUri(URI) .build())) { assertThat(subject.isReadOnly(), is(false)); } } @Test public void testDefaultTransactionIsReadWrite() { try (ConnectionImpl subject = createConnection( ConnectionOptions.newBuilder() .setCredentials(NoCredentials.getInstance()) .setUri(URI) .build())) { for (boolean autocommit : new Boolean[] {true, false}) { subject.setAutocommit(autocommit); subject.execute(Statement.of("begin")); assertThat( subject.getTransactionMode(), is(equalTo(TransactionMode.READ_WRITE_TRANSACTION))); subject.commit(); subject.execute(Statement.of("begin")); subject.execute(Statement.of("set transaction read only")); assertThat( subject.getTransactionMode(), is(equalTo(TransactionMode.READ_ONLY_TRANSACTION))); subject.commit(); 
subject.execute(Statement.of("begin")); assertThat( subject.getTransactionMode(), is(equalTo(TransactionMode.READ_WRITE_TRANSACTION))); subject.commit(); subject.execute(Statement.of("start batch ddl")); assertThat(subject.getUnitOfWorkType(), is(equalTo(UnitOfWorkType.DDL_BATCH))); subject.runBatch(); subject.execute(Statement.of("begin")); assertThat( subject.getTransactionMode(), is(equalTo(TransactionMode.READ_WRITE_TRANSACTION))); subject.commit(); } } } @Test public void testDefaultTransactionIsReadOnly() { try (ConnectionImpl subject = createConnection( ConnectionOptions.newBuilder() .setCredentials(NoCredentials.getInstance()) .setUri(URI + ";readOnly=true") .build())) { for (boolean autocommit : new Boolean[] {true, false}) { subject.setAutocommit(autocommit); subject.execute(Statement.of("begin")); assertThat( subject.getTransactionMode(), is(equalTo(TransactionMode.READ_ONLY_TRANSACTION))); subject.commit(); } } } /** * ReadOnlyStaleness is a session setting for a connection. However, certain settings are only * allowed when the connection is in autocommit mode. The setting therefore must be reset to its * default {@link TimestampBound#strong()} when the current setting is not compatible with * transactional mode. 
*/ @Test public void testResetReadOnlyStaleness() { try (ConnectionImpl subject = createConnection( ConnectionOptions.newBuilder() .setCredentials(NoCredentials.getInstance()) .setUri(URI) .build())) { assertThat(subject.isAutocommit(), is(true)); assertThat(subject.getReadOnlyStaleness().getMode(), is(equalTo(TimestampBound.Mode.STRONG))); // the following values are always allowed subject.setReadOnlyStaleness(TimestampBound.strong()); assertThat(subject.getReadOnlyStaleness().getMode(), is(equalTo(TimestampBound.Mode.STRONG))); subject.setAutocommit(false); assertThat(subject.getReadOnlyStaleness().getMode(), is(equalTo(TimestampBound.Mode.STRONG))); subject.setAutocommit(true); assertThat(subject.getReadOnlyStaleness().getMode(), is(equalTo(TimestampBound.Mode.STRONG))); subject.setReadOnlyStaleness(TimestampBound.ofReadTimestamp(Timestamp.MAX_VALUE)); subject.setAutocommit(false); assertThat( subject.getReadOnlyStaleness(), is(equalTo(TimestampBound.ofReadTimestamp(Timestamp.MAX_VALUE)))); subject.setAutocommit(true); assertThat( subject.getReadOnlyStaleness(), is(equalTo(TimestampBound.ofReadTimestamp(Timestamp.MAX_VALUE)))); subject.setReadOnlyStaleness(TimestampBound.ofExactStaleness(10L, TimeUnit.SECONDS)); subject.setAutocommit(false); assertThat( subject.getReadOnlyStaleness(), is(equalTo(TimestampBound.ofExactStaleness(10L, TimeUnit.SECONDS)))); subject.setAutocommit(true); assertThat( subject.getReadOnlyStaleness(), is(equalTo(TimestampBound.ofExactStaleness(10L, TimeUnit.SECONDS)))); // the following values are only allowed in autocommit mode. 
Turning off autocommit will // return the setting to its default subject.setReadOnlyStaleness(TimestampBound.ofMinReadTimestamp(Timestamp.MAX_VALUE)); assertThat( subject.getReadOnlyStaleness(), is(equalTo(TimestampBound.ofMinReadTimestamp(Timestamp.MAX_VALUE)))); subject.setAutocommit(false); assertThat(subject.getReadOnlyStaleness().getMode(), is(equalTo(TimestampBound.Mode.STRONG))); subject.setAutocommit(true); assertThat(subject.getReadOnlyStaleness().getMode(), is(equalTo(TimestampBound.Mode.STRONG))); subject.setReadOnlyStaleness(TimestampBound.ofMaxStaleness(10L, TimeUnit.SECONDS)); assertThat( subject.getReadOnlyStaleness(), is(equalTo(TimestampBound.ofMaxStaleness(10L, TimeUnit.SECONDS)))); subject.setAutocommit(false); assertThat(subject.getReadOnlyStaleness().getMode(), is(equalTo(TimestampBound.Mode.STRONG))); subject.setAutocommit(true); assertThat(subject.getReadOnlyStaleness().getMode(), is(equalTo(TimestampBound.Mode.STRONG))); } } @Test public void testChangeReadOnlyModeInAutocommit() { try (ConnectionImpl subject = createConnection( ConnectionOptions.newBuilder() .setCredentials(NoCredentials.getInstance()) .setUri(URI) .build())) { subject.execute(Statement.of(UPDATE)); assertThat(subject.getCommitTimestamp(), is(notNullValue())); // change to read-only subject.setReadOnly(true); expectSpannerException( "Updates should not be allowed in read-only mode", connection -> connection.execute(Statement.of(UPDATE)), subject); assertThat(subject.executeQuery(Statement.of(SELECT)), is(notNullValue())); // change back to read-write subject.setReadOnly(false); subject.execute(Statement.of(UPDATE)); assertThat(subject.getCommitTimestamp(), is(notNullValue())); // and back to read-only subject.setReadOnly(true); expectSpannerException( "DDL should not be allowed in read-only mode", connection -> connection.execute(Statement.of(DDL)), subject); assertThat(subject.executeQuery(Statement.of(SELECT)), is(notNullValue())); } } @Test public void 
testChangeReadOnlyModeInTransactionalMode() { try (ConnectionImpl subject = createConnection( ConnectionOptions.newBuilder() .setCredentials(NoCredentials.getInstance()) .setUri(URI) .build())) { subject.setAutocommit(false); subject.execute(Statement.of(UPDATE)); subject.commit(); assertThat(subject.getCommitTimestamp(), is(notNullValue())); // change to read-only subject.setReadOnly(true); expectSpannerException( "Updates should not be allowed in read-only mode", connection -> connection.execute(Statement.of(UPDATE)), subject); assertThat(subject.executeQuery(Statement.of(SELECT)), is(notNullValue())); subject.commit(); // change back to read-write subject.setReadOnly(false); subject.execute(Statement.of(UPDATE)); subject.commit(); assertThat(subject.getCommitTimestamp(), is(notNullValue())); // and back to read-only subject.setReadOnly(true); expectSpannerException( "DDL should not be allowed in read-only mode", connection -> connection.execute(Statement.of(DDL)), subject); assertThat(subject.executeQuery(Statement.of(SELECT)), is(notNullValue())); } } @Test public void testAddRemoveTransactionRetryListener() { try (ConnectionImpl subject = createConnection( ConnectionOptions.newBuilder() .setCredentials(NoCredentials.getInstance()) .setUri(URI) .build())) { assertThat(subject.getTransactionRetryListeners().hasNext(), is(false)); TransactionRetryListener listener = mock(TransactionRetryListener.class); subject.addTransactionRetryListener(listener); assertThat(subject.getTransactionRetryListeners().hasNext(), is(true)); assertThat(subject.removeTransactionRetryListener(listener), is(true)); assertThat(subject.getTransactionRetryListeners().hasNext(), is(false)); assertThat(subject.removeTransactionRetryListener(listener), is(false)); } } @Test public void testMergeQueryOptions() { ConnectionOptions connectionOptions = mock(ConnectionOptions.class); SpannerPool spannerPool = mock(SpannerPool.class); DdlClient ddlClient = mock(DdlClient.class); DatabaseClient 
dbClient = mock(DatabaseClient.class); final UnitOfWork unitOfWork = mock(UnitOfWork.class); when(unitOfWork.executeQueryAsync( any(ParsedStatement.class), any(AnalyzeMode.class), Mockito.<QueryOption>any())) .thenReturn(ApiFutures.immediateFuture(mock(ResultSet.class))); try (ConnectionImpl impl = new ConnectionImpl(connectionOptions, spannerPool, ddlClient, dbClient) { @Override UnitOfWork getCurrentUnitOfWorkOrStartNewUnitOfWork() { return unitOfWork; } }) { // Execute query with an optimizer version and statistics package set on the connection. impl.setOptimizerVersion("1"); impl.setOptimizerStatisticsPackage("custom-package-1"); impl.executeQuery(Statement.of("SELECT FOO FROM BAR")); verify(unitOfWork) .executeQueryAsync( StatementParser.INSTANCE.parse( Statement.newBuilder("SELECT FOO FROM BAR") .withQueryOptions( QueryOptions.newBuilder() .setOptimizerVersion("1") .setOptimizerStatisticsPackage("custom-package-1") .build()) .build()), AnalyzeMode.NONE); // Execute query with an optimizer version and statistics package set on the connection. impl.setOptimizerVersion("2"); impl.setOptimizerStatisticsPackage("custom-package-2"); impl.executeQuery(Statement.of("SELECT FOO FROM BAR")); verify(unitOfWork) .executeQueryAsync( StatementParser.INSTANCE.parse( Statement.newBuilder("SELECT FOO FROM BAR") .withQueryOptions( QueryOptions.newBuilder() .setOptimizerVersion("2") .setOptimizerStatisticsPackage("custom-package-2") .build()) .build()), AnalyzeMode.NONE); // Execute query with an optimizer version and statistics package set on the connection and // PrefetchChunks query // option specified for the query. 
QueryOption prefetchOption = Options.prefetchChunks(100); impl.setOptimizerVersion("3"); impl.setOptimizerStatisticsPackage("custom-package-3"); impl.executeQuery(Statement.of("SELECT FOO FROM BAR"), prefetchOption); verify(unitOfWork) .executeQueryAsync( StatementParser.INSTANCE.parse( Statement.newBuilder("SELECT FOO FROM BAR") .withQueryOptions( QueryOptions.newBuilder() .setOptimizerVersion("3") .setOptimizerStatisticsPackage("custom-package-3") .build()) .build()), AnalyzeMode.NONE, prefetchOption); // Execute query with an optimizer version and statistics package set on the connection, and // the same options also // passed in to the query. The specific options passed in to the query should take precedence. impl.setOptimizerVersion("4"); impl.setOptimizerStatisticsPackage("custom-package-4"); impl.executeQuery( Statement.newBuilder("SELECT FOO FROM BAR") .withQueryOptions( QueryOptions.newBuilder() .setOptimizerVersion("5") .setOptimizerStatisticsPackage("custom-package-5") .build()) .build(), prefetchOption); verify(unitOfWork) .executeQueryAsync( StatementParser.INSTANCE.parse( Statement.newBuilder("SELECT FOO FROM BAR") .withQueryOptions( QueryOptions.newBuilder() .setOptimizerVersion("5") .setOptimizerStatisticsPackage("custom-package-5") .build()) .build()), AnalyzeMode.NONE, prefetchOption); } } @Test public void testStatementTagAlwaysAllowed() { ConnectionOptions connectionOptions = mock(ConnectionOptions.class); when(connectionOptions.isAutocommit()).thenReturn(true); SpannerPool spannerPool = mock(SpannerPool.class); DdlClient ddlClient = mock(DdlClient.class); DatabaseClient dbClient = mock(DatabaseClient.class); final UnitOfWork unitOfWork = mock(UnitOfWork.class); when(unitOfWork.executeQueryAsync( any(ParsedStatement.class), any(AnalyzeMode.class), Mockito.<QueryOption>any())) .thenReturn(ApiFutures.immediateFuture(mock(ResultSet.class))); try (ConnectionImpl connection = new ConnectionImpl(connectionOptions, spannerPool, ddlClient, dbClient) { 
@Override UnitOfWork getCurrentUnitOfWorkOrStartNewUnitOfWork() { return unitOfWork; } }) { assertTrue(connection.isAutocommit()); assertNull(connection.getStatementTag()); connection.setStatementTag("tag"); assertEquals("tag", connection.getStatementTag()); connection.setStatementTag(null); assertNull(connection.getStatementTag()); connection.setAutocommit(false); connection.setStatementTag("tag"); assertEquals("tag", connection.getStatementTag()); connection.setStatementTag(null); assertNull(connection.getStatementTag()); // Start a transaction connection.execute(Statement.of("SELECT FOO FROM BAR")); connection.setStatementTag("tag"); assertEquals("tag", connection.getStatementTag()); connection.setStatementTag(null); assertNull(connection.getStatementTag()); } } @Test public void testTransactionTagAllowedInTransaction() { ConnectionOptions connectionOptions = mock(ConnectionOptions.class); when(connectionOptions.isAutocommit()).thenReturn(false); SpannerPool spannerPool = mock(SpannerPool.class); DdlClient ddlClient = mock(DdlClient.class); DatabaseClient dbClient = mock(DatabaseClient.class); try (ConnectionImpl connection = new ConnectionImpl(connectionOptions, spannerPool, ddlClient, dbClient)) { assertFalse(connection.isAutocommit()); assertNull(connection.getTransactionTag()); connection.setTransactionTag("tag"); assertEquals("tag", connection.getTransactionTag()); connection.setTransactionTag(null); assertNull(connection.getTransactionTag()); // Committing or rolling back a transaction should clear the transaction tag for the next // transaction. connection.setTransactionTag("tag"); assertEquals("tag", connection.getTransactionTag()); connection.commit(); assertNull(connection.getTransactionTag()); connection.setTransactionTag("tag"); assertEquals("tag", connection.getTransactionTag()); connection.rollback(); assertNull(connection.getTransactionTag()); // Temporary transactions should also allow transaction tags. 
connection.setAutocommit(false); connection.beginTransaction(); assertNull(connection.getTransactionTag()); connection.setTransactionTag("tag"); assertEquals("tag", connection.getTransactionTag()); connection.commit(); assertNull(connection.getTransactionTag()); } } @Test public void testTransactionTagNotAllowedWithoutTransaction() { ConnectionOptions connectionOptions = mock(ConnectionOptions.class); when(connectionOptions.isAutocommit()).thenReturn(true); SpannerPool spannerPool = mock(SpannerPool.class); DdlClient ddlClient = mock(DdlClient.class); DatabaseClient dbClient = mock(DatabaseClient.class); try (ConnectionImpl connection = new ConnectionImpl(connectionOptions, spannerPool, ddlClient, dbClient)) { assertTrue(connection.isAutocommit()); try { connection.setTransactionTag("tag"); fail("missing expected exception"); } catch (SpannerException e) { assertEquals(ErrorCode.FAILED_PRECONDITION, e.getErrorCode()); } } } @Test public void testTransactionTagNotAllowedAfterTransactionStarted() { ConnectionOptions connectionOptions = mock(ConnectionOptions.class); when(connectionOptions.isAutocommit()).thenReturn(false); SpannerPool spannerPool = mock(SpannerPool.class); DdlClient ddlClient = mock(DdlClient.class); DatabaseClient dbClient = mock(DatabaseClient.class); final UnitOfWork unitOfWork = mock(UnitOfWork.class); // Indicate that a transaction has been started. 
when(unitOfWork.getState()).thenReturn(UnitOfWorkState.STARTED); when(unitOfWork.executeQueryAsync( any(ParsedStatement.class), any(AnalyzeMode.class), Mockito.<QueryOption>any())) .thenReturn(ApiFutures.immediateFuture(mock(ResultSet.class))); when(unitOfWork.rollbackAsync()).thenReturn(ApiFutures.immediateFuture(null)); try (ConnectionImpl connection = new ConnectionImpl(connectionOptions, spannerPool, ddlClient, dbClient) { @Override UnitOfWork createNewUnitOfWork() { return unitOfWork; } }) { // Start a transaction connection.execute(Statement.of("SELECT FOO FROM BAR")); try { connection.setTransactionTag("tag"); fail("missing expected exception"); } catch (SpannerException e) { assertEquals(ErrorCode.FAILED_PRECONDITION, e.getErrorCode()); } assertNull(connection.getTransactionTag()); } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package org.apache.harmony.luni.tests.java.lang;

import java.io.UnsupportedEncodingException;
import java.lang.reflect.Constructor;

import junit.framework.TestCase;

/**
 * Tests for {@link java.lang.String}: constructors, content comparison and
 * code-point navigation. Where possible each case is run twice — once on a
 * plain {@code String} and once on a string built through the package-private
 * sharing constructor {@code String(int, int, char[])} — so that offset/shared
 * backing-array handling is exercised as well.
 */
public class StringTest extends TestCase {

    /**
     * The package-private {@code String(int offset, int count, char[] value)}
     * constructor that shares the caller's char array, or {@code null} when
     * the running VM does not provide it (e.g. on the RI).
     */
    private static final Constructor<String> UNSAFE_CONSTRUCTOR;

    static {
        Constructor<String> uc;
        try {
            uc = String.class.getDeclaredConstructor(new Class[] { int.class,
                    int.class, char[].class });
            uc.setAccessible(true);
        } catch (Exception e) {
            // Constructor not available on this VM; fall back to the public
            // copying constructor in newString().
            uc = null;
        }
        UNSAFE_CONSTRUCTOR = uc;
    }

    /**
     * Builds a string over {@code data[start, start + len)}. Uses the sharing
     * constructor when available so the result has a non-zero internal offset;
     * otherwise falls back to the equivalent public constructor.
     */
    private static String newString(int start, int len, char[] data) throws Exception {
        if (UNSAFE_CONSTRUCTOR == null) {
            return new String(data, start, len);
        }
        return UNSAFE_CONSTRUCTOR.newInstance(Integer.valueOf(start),
                Integer.valueOf(len), data);
    }

    /**
     * @tests java.lang.String#String()
     */
    public void test_Constructor() {
        assertEquals("Created incorrect string", "", new String());
    }

    /**
     * @tests java.lang.String#String(byte[])
     */
    public void test_Constructor$B() {
        assertEquals("Failed to create string", "HelloWorld", new String(
                "HelloWorld".getBytes()));
    }

    /**
     * @tests java.lang.String#String(byte[], int)
     */
    @SuppressWarnings("deprecation")
    public void test_Constructor$BI() {
        String s = new String(new byte[] { 65, 66, 67, 68, 69 }, 0);
        assertEquals("Incorrect string returned: " + s, "ABCDE", s);
        // A non-zero hibyte must change the resulting characters.
        s = new String(new byte[] { 65, 66, 67, 68, 69 }, 1);
        assertFalse("Did not use nonzero hibyte", s.equals("ABCDE"));
    }

    /**
     * @tests java.lang.String#String(byte[], int, int)
     */
    public void test_Constructor$BII() {
        byte[] hwba = "HelloWorld".getBytes();
        assertEquals("Failed to create string", "HelloWorld", new String(hwba,
                0, hwba.length));

        try {
            new String(new byte[0], 0, Integer.MAX_VALUE);
            fail("No IndexOutOfBoundsException");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }
    }

    /**
     * @tests java.lang.String#String(byte[], int, int, int)
     */
    @SuppressWarnings("deprecation")
    public void test_Constructor$BIII() {
        String s = new String(new byte[] { 65, 66, 67, 68, 69 }, 0, 1, 3);
        assertEquals("Incorrect string returned: " + s, "BCD", s);
        // A non-zero hibyte must change the resulting characters.
        s = new String(new byte[] { 65, 66, 67, 68, 69 }, 1, 0, 5);
        assertFalse("Did not use nonzero hibyte", s.equals("ABCDE"));
    }

    /**
     * @tests java.lang.String#String(byte[], int, int, java.lang.String)
     */
    public void test_Constructor$BIILjava_lang_String() throws Exception {
        String s = new String(new byte[] { 65, 66, 67, 68, 69 }, 0, 5, "8859_1");
        assertEquals("Incorrect string returned: " + s, "ABCDE", s);

        try {
            new String(new byte[] { 65, 66, 67, 68, 69 }, 0, 5, "");
            fail("Should throw UnsupportedEncodingException");
        } catch (UnsupportedEncodingException e) {
            // expected
        }
    }

    /**
     * @tests java.lang.String#String(byte[], java.lang.String)
     */
    public void test_Constructor$BLjava_lang_String() throws Exception {
        String s = new String(new byte[] { 65, 66, 67, 68, 69 }, "8859_1");
        assertEquals("Incorrect string returned: " + s, "ABCDE", s);
    }

    /**
     * @tests java.lang.String#String(char[])
     */
    public void test_Constructor$C() {
        assertEquals("Failed Constructor test", "World", new String(new char[] {
                'W', 'o', 'r', 'l', 'd' }));
    }

    /**
     * @tests java.lang.String#String(char[], int, int)
     */
    public void test_Constructor$CII() throws Exception {
        char[] buf = { 'H', 'e', 'l', 'l', 'o', 'W', 'o', 'r', 'l', 'd' };
        String s = new String(buf, 0, buf.length);
        assertEquals("Incorrect string created", "HelloWorld", s);

        try {
            new String(new char[0], 0, Integer.MAX_VALUE);
            fail("No IndexOutOfBoundsException");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }
    }

    /**
     * @tests java.lang.String#String(java.lang.String)
     */
    public void test_ConstructorLjava_lang_String() {
        String s = new String("Hello World");
        assertEquals("Failed to construct correct string", "Hello World", s);
    }

    /**
     * @tests java.lang.String#String(java.lang.StringBuffer)
     */
    public void test_ConstructorLjava_lang_StringBuffer() {
        StringBuffer sb = new StringBuffer();
        sb.append("HelloWorld");
        assertEquals("Created incorrect string", "HelloWorld", new String(sb));
    }

    /**
     * @tests java.lang.String#String(java.lang.StringBuilder)
     */
    public void test_ConstructorLjava_lang_StringBuilder() {
        StringBuilder sb = new StringBuilder(32);
        sb.append("HelloWorld");
        assertEquals("HelloWorld", new String(sb));

        try {
            new String((StringBuilder) null);
            fail("No NPE");
        } catch (NullPointerException e) {
            // expected
        }
    }

    /**
     * @tests java.lang.String#String(int[],int,int)
     */
    public void test_Constructor$III() {
        assertEquals("HelloWorld", new String(new int[] { 'H', 'e', 'l', 'l',
                'o', 'W', 'o', 'r', 'l', 'd' }, 0, 10));
        assertEquals("Hello", new String(new int[] { 'H', 'e', 'l', 'l', 'o',
                'W', 'o', 'r', 'l', 'd' }, 0, 5));
        assertEquals("World", new String(new int[] { 'H', 'e', 'l', 'l', 'o',
                'W', 'o', 'r', 'l', 'd' }, 5, 5));
        assertEquals("", new String(new int[] { 'H', 'e', 'l', 'l', 'o', 'W',
                'o', 'r', 'l', 'd' }, 5, 0));

        // Supplementary code points must expand to surrogate pairs.
        assertEquals("\uD800\uDC00", new String(new int[] { 0x010000 }, 0, 1));
        assertEquals("\uD800\uDC00a\uDBFF\uDFFF", new String(new int[] {
                0x010000, 'a', 0x010FFFF }, 0, 3));

        try {
            new String((int[]) null, 0, 1);
            fail("No NPE");
        } catch (NullPointerException e) {
            // expected
        }

        try {
            new String(new int[] { 'a', 'b' }, -1, 2);
            fail("No IOOBE, negative offset");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }

        try {
            new String(new int[] { 'a', 'b' }, 0, -1);
            fail("No IOOBE, negative count");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }

        try {
            new String(new int[] { 'a', 'b' }, 0, 3);
            fail("No IOOBE, too large");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }
    }

    /**
     * @tests java.lang.String#contentEquals(CharSequence)
     */
    public void test_contentEqualsLjava_lang_CharSequence() throws Exception {
        String s = "abc";
        assertTrue(s.contentEquals((CharSequence) new StringBuffer("abc")));
        assertFalse(s.contentEquals((CharSequence) new StringBuffer("def")));
        assertFalse(s.contentEquals((CharSequence) new StringBuffer("ghij")));

        // Same checks against a string with a non-zero internal offset.
        s = newString(1, 3, "_abc_".toCharArray());
        assertTrue(s.contentEquals((CharSequence) new StringBuffer("abc")));
        assertFalse(s.contentEquals((CharSequence) new StringBuffer("def")));
        assertFalse(s.contentEquals((CharSequence) new StringBuffer("ghij")));

        try {
            s.contentEquals((CharSequence) null);
            fail("No NPE");
        } catch (NullPointerException e) {
            // expected
        }
    }

    /**
     * @tests java.lang.String#contentEquals(StringBuffer)
     */
    @SuppressWarnings("nls")
    public void test_boolean_contentEquals_StringBuffer() throws Exception {
        String s = "abc";
        assertTrue(s.contentEquals(new StringBuffer("abc")));
        assertFalse(s.contentEquals(new StringBuffer("def")));
        assertFalse(s.contentEquals(new StringBuffer("ghij")));

        // Same checks against a string with a non-zero internal offset.
        s = newString(1, 3, "_abc_".toCharArray());
        assertTrue(s.contentEquals(new StringBuffer("abc")));
        assertFalse(s.contentEquals(new StringBuffer("def")));
        assertFalse(s.contentEquals(new StringBuffer("ghij")));

        try {
            s.contentEquals((StringBuffer) null);
            fail("Should throw a NullPointerException");
        } catch (NullPointerException e) {
            // expected
        }
    }

    /**
     * @tests java.lang.String#contains(CharSequence)
     */
    @SuppressWarnings("cast")
    public void test_containsLjava_lang_CharSequence() throws Exception {
        String s = "abcdefghijklmnopqrstuvwxyz";
        assertTrue(s.contains((CharSequence) new StringBuffer("abc")));
        assertTrue(s.contains((CharSequence) new StringBuffer("def")));
        assertFalse(s.contains((CharSequence) new StringBuffer("ac")));

        // Same checks against a string with a non-zero internal offset.
        s = newString(1, 26, "_abcdefghijklmnopqrstuvwxyz_".toCharArray());
        assertTrue(s.contains((CharSequence) new StringBuffer("abc")));
        assertTrue(s.contains((CharSequence) new StringBuffer("def")));
        assertFalse(s.contains((CharSequence) new StringBuffer("ac")));

        // Fixed copy-paste bug: this case previously called contentEquals(),
        // so the contains(null) path was never actually tested.
        try {
            s.contains((CharSequence) null);
            fail("No NPE");
        } catch (NullPointerException e) {
            // expected
        }
    }

    /**
     * @tests java.lang.String#offsetByCodePoints(int, int)
     */
    public void test_offsetByCodePoints_II() throws Exception {
        int result = new String("a\uD800\uDC00b").offsetByCodePoints(0, 2);
        assertEquals(3, result);

        result = new String("abcd").offsetByCodePoints(3, -1);
        assertEquals(2, result);

        result = new String("a\uD800\uDC00b").offsetByCodePoints(0, 3);
        assertEquals(4, result);

        result = new String("a\uD800\uDC00b").offsetByCodePoints(3, -1);
        assertEquals(1, result);

        result = new String("a\uD800\uDC00b").offsetByCodePoints(3, 0);
        assertEquals(3, result);

        result = new String("\uD800\uDC00bc").offsetByCodePoints(3, 0);
        assertEquals(3, result);

        // Unpaired surrogates count as a single code point each.
        result = new String("a\uDC00bc").offsetByCodePoints(3, -1);
        assertEquals(2, result);

        result = new String("a\uD800bc").offsetByCodePoints(3, -1);
        assertEquals(2, result);

        // Same checks against strings with a non-zero internal offset.
        result = newString(2, 4, "__a\uD800\uDC00b__".toCharArray())
                .offsetByCodePoints(0, 2);
        assertEquals(3, result);

        result = newString(2, 4, "__abcd__".toCharArray()).offsetByCodePoints(
                3, -1);
        assertEquals(2, result);

        result = newString(2, 4, "__a\uD800\uDC00b__".toCharArray())
                .offsetByCodePoints(0, 3);
        assertEquals(4, result);

        result = newString(2, 4, "__a\uD800\uDC00b__".toCharArray())
                .offsetByCodePoints(3, -1);
        assertEquals(1, result);

        result = newString(2, 4, "__a\uD800\uDC00b__".toCharArray())
                .offsetByCodePoints(3, 0);
        assertEquals(3, result);

        result = newString(2, 4, "__\uD800\uDC00bc__".toCharArray())
                .offsetByCodePoints(3, 0);
        assertEquals(3, result);

        result = newString(2, 4, "__a\uDC00bc__".toCharArray())
                .offsetByCodePoints(3, -1);
        assertEquals(2, result);

        result = newString(2, 4, "__a\uD800bc__".toCharArray())
                .offsetByCodePoints(3, -1);
        assertEquals(2, result);

        String s = "abc";
        try {
            s.offsetByCodePoints(-1, 1);
            fail("No IOOBE for negative index.");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }

        try {
            s.offsetByCodePoints(0, 4);
            fail("No IOOBE for offset that's too large.");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }

        try {
            s.offsetByCodePoints(3, -4);
            fail("No IOOBE for offset that's too small.");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }

        try {
            s.offsetByCodePoints(3, 1);
            fail("No IOOBE for index that's too large.");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }

        try {
            s.offsetByCodePoints(4, -1);
            fail("No IOOBE for index that's too large.");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }

        // Same boundary checks against a string with a non-zero offset.
        s = newString(2, 3, "__abc__".toCharArray());
        try {
            s.offsetByCodePoints(-1, 1);
            fail("No IOOBE for negative index.");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }

        try {
            s.offsetByCodePoints(0, 4);
            fail("No IOOBE for offset that's too large.");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }

        try {
            s.offsetByCodePoints(3, -4);
            fail("No IOOBE for offset that's too small.");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }

        try {
            s.offsetByCodePoints(3, 1);
            fail("No IOOBE for index that's too large.");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }

        try {
            s.offsetByCodePoints(4, -1);
            fail("No IOOBE for index that's too large.");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }
    }

    /**
     * @tests java.lang.String#codePointAt(int)
     */
    public void test_codePointAtI() throws Exception {
        String s = "abc";
        assertEquals('a', s.codePointAt(0));
        assertEquals('b', s.codePointAt(1));
        assertEquals('c', s.codePointAt(2));

        s = newString(2, 3, "__abc__".toCharArray());
        assertEquals('a', s.codePointAt(0));
        assertEquals('b', s.codePointAt(1));
        assertEquals('c', s.codePointAt(2));

        // At a high surrogate the full supplementary code point is returned;
        // at the low surrogate only the trailing unit is.
        s = "\uD800\uDC00";
        assertEquals(0x10000, s.codePointAt(0));
        assertEquals('\uDC00', s.codePointAt(1));

        s = newString(2, 2, "__\uD800\uDC00__".toCharArray());
        assertEquals(0x10000, s.codePointAt(0));
        assertEquals('\uDC00', s.codePointAt(1));

        s = "abc";
        try {
            s.codePointAt(-1);
            fail("No IOOBE on negative index.");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }

        try {
            s.codePointAt(s.length());
            fail("No IOOBE on index equal to length.");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }

        try {
            s.codePointAt(s.length() + 1);
            fail("No IOOBE on index greater than length.");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }

        s = newString(2, 3, "__abc__".toCharArray());
        try {
            s.codePointAt(-1);
            fail("No IOOBE on negative index.");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }

        try {
            s.codePointAt(s.length());
            fail("No IOOBE on index equal to length.");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }

        try {
            s.codePointAt(s.length() + 1);
            fail("No IOOBE on index greater than length.");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }
    }

    /**
     * @tests java.lang.String#codePointBefore(int)
     */
    public void test_codePointBeforeI() throws Exception {
        String s = "abc";
        assertEquals('a', s.codePointBefore(1));
        assertEquals('b', s.codePointBefore(2));
        assertEquals('c', s.codePointBefore(3));

        s = newString(2, 3, "__abc__".toCharArray());
        assertEquals('a', s.codePointBefore(1));
        assertEquals('b', s.codePointBefore(2));
        assertEquals('c', s.codePointBefore(3));

        // Before the index following a surrogate pair the full code point is
        // returned; before the low surrogate only the leading unit is.
        s = "\uD800\uDC00";
        assertEquals(0x10000, s.codePointBefore(2));
        assertEquals('\uD800', s.codePointBefore(1));

        s = newString(2, 2, "__\uD800\uDC00__".toCharArray());
        assertEquals(0x10000, s.codePointBefore(2));
        assertEquals('\uD800', s.codePointBefore(1));

        s = "abc";
        try {
            s.codePointBefore(0);
            fail("No IOOBE on zero index.");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }

        try {
            s.codePointBefore(-1);
            fail("No IOOBE on negative index.");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }

        try {
            s.codePointBefore(s.length() + 1);
            fail("No IOOBE on index greater than length.");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }

        s = newString(2, 3, "__abc__".toCharArray());
        try {
            s.codePointBefore(0);
            fail("No IOOBE on zero index.");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }

        try {
            s.codePointBefore(-1);
            fail("No IOOBE on negative index.");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }

        try {
            s.codePointBefore(s.length() + 1);
            fail("No IOOBE on index greater than length.");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }
    }

    /**
     * @tests java.lang.String#codePointCount(int, int)
     */
    public void test_codePointCountII() throws Exception {
        assertEquals(1, "\uD800\uDC00".codePointCount(0, 2));
        assertEquals(1, "\uD800\uDC01".codePointCount(0, 2));
        assertEquals(1, "\uD801\uDC01".codePointCount(0, 2));
        assertEquals(1, "\uDBFF\uDFFF".codePointCount(0, 2));

        assertEquals(3, "a\uD800\uDC00b".codePointCount(0, 4));
        // A trailing unpaired high surrogate counts as one code point.
        assertEquals(4, "a\uD800\uDC00b\uD800".codePointCount(0, 5));

        // Same checks against strings with a non-zero internal offset.
        assertEquals(1, newString(2, 2, "__\uD800\uDC00__".toCharArray()).codePointCount(0, 2));
        assertEquals(1, newString(2, 2, "__\uD800\uDC01__".toCharArray()).codePointCount(0, 2));
        assertEquals(1, newString(2, 2, "__\uD801\uDC01__".toCharArray()).codePointCount(0, 2));
        assertEquals(1, newString(2, 2, "__\uDBFF\uDFFF__".toCharArray()).codePointCount(0, 2));

        assertEquals(3, newString(2, 4, "__a\uD800\uDC00b__".toCharArray()).codePointCount(0, 4));
        assertEquals(4, newString(2, 5, "__a\uD800\uDC00b\uD800__".toCharArray()).codePointCount(0, 5));

        String s = "abc";
        try {
            s.codePointCount(-1, 2);
            fail("No IOOBE for negative begin index.");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }

        try {
            s.codePointCount(0, 4);
            fail("No IOOBE for end index that's too large.");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }

        try {
            s.codePointCount(3, 2);
            fail("No IOOBE for begin index larger than end index.");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }

        s = newString(2, 3, "__abc__".toCharArray());
        try {
            s.codePointCount(-1, 2);
            fail("No IOOBE for negative begin index.");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }

        try {
            s.codePointCount(0, 4);
            fail("No IOOBE for end index that's too large.");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }

        try {
            s.codePointCount(3, 2);
            fail("No IOOBE for begin index larger than end index.");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }
    }
}
package fasterDB; import fasterDB.store.MappedStorage; import fasterDB.util.ByteUtil; import fasterDB.vo.PageFaultException; import fasterDB.vo.Pair; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.Arrays; /** * Created by zn on 15/5/3. * * Structure on first page * * | isFirstPage | nextPageId | pageCount | keyLength | valueLength | key | value | * | 1 bit (unsigned) | 7 bit + 3 byte (unsigned) | 2 byte (unsigned) | 1 byte (unsigned) | 4 bytes (signed) | keyLength | some bytes | * * Structure on other page * * | isFirstPage | nextPageId | value | * | 1 bit (unsigned) | 7 bit + 3 byte (unsigned) | rest bytes | * */ public class Record { protected enum OP { GET(0), UPDATE(1), DEL(2); byte[] code; OP(int code) { this.code = new byte[] {(byte) code}; } static OP codeOf(int code) { switch (code) { case 0: return GET; case 1: return UPDATE; case 2: return DEL; } return null; } } private static final int IS_FIRST_PAGE_INDEX = 0; private static final int NEXT_PAGE_ID_INDEX = 0; private static final int PAGE_COUNT_INDEX = 4; private static final int KEY_LENGTH_INDEX = 6; private static final int VALUE_LENGTH_INDEX = 7; private static final int FIRST_PAGE_DATA_INDEX = 11; private static final int OTHER_PAGE_DATA_INDEX = 4; private static final int MAX_PAGE_COUNT_PER_RECORD = (1 << 16) - 1; private static final int MAX_KEY_LENGTH = (1 << 8) - 1; private static final int MAX_VALUE_LENGTH = Integer.MAX_VALUE; private static final byte IS_FIRST_PAGE_MASK = (byte) 0x80; private static final byte IS_NOT_FIRST_PAGE_MASK = (byte) 0x7f; private static final int NEXT_PAGE_ID_MASK = 0x7fffffff; private static final int DEL_MASK = 0x80000000; private static final int UPDATE_MASK = 0x40000000; private static final int REF_MASK = 0x3fffffff; private int[] pageIds; private int flag; private Record(int[] pageIds) { this.pageIds = pageIds; } public synchronized void lock(OP op) throws InterruptedException { switch (op) { case GET: for (;;) { if ((flag 
& UPDATE_MASK) == UPDATE_MASK || (flag & DEL_MASK) == DEL_MASK) { wait(); } else { flag++; return; } } case UPDATE: for (;;) { if ((flag & REF_MASK) != 0 || (flag & DEL_MASK) == DEL_MASK) { wait(); } else { flag |= UPDATE_MASK; return; } } case DEL: for (;;) { if ((flag & REF_MASK) != 0 || (flag & UPDATE_MASK) == UPDATE_MASK) { wait(); } else { flag |= DEL_MASK; return; } } } } public synchronized void unlock(OP op) { switch (op) { case GET: flag--; break; case UPDATE: flag |= UPDATE_MASK; break; case DEL: flag |= DEL_MASK; break; } notifyAll(); } public static final Pair<Object, Record> restoreRecord(Context context, MappedStorage mappedStorage, int pageId) throws PageFaultException, IOException { byte[] page = context.borrowPageCache(); int pageSize = context.getConfig().getPageSize(); try { int pageCount = 1; int[] pageIds = null; int keyLength = 0; byte[] keyBytes = null; int leftKeyBytes = 0; int currentPageId = pageId; for (int pageIndex = 0; pageIndex < pageCount; pageIndex++) { boolean valid = mappedStorage.getPage(pageId, page); if (!valid) { return null; } boolean isFirstPage = (page[IS_FIRST_PAGE_INDEX] & IS_FIRST_PAGE_MASK) == IS_FIRST_PAGE_MASK; if ((pageIndex == 0 && !isFirstPage) || (pageIndex != 0 && isFirstPage)) { return null; } if (isFirstPage) { pageCount = ByteUtil.getUnsignedShortBigEndian(page, PAGE_COUNT_INDEX); if (pageCount <= 0) { return null; } pageIds = new int[pageCount]; keyLength = ByteUtil.getUnsignedByte(page, KEY_LENGTH_INDEX); keyBytes = getKeyBytes(context, keyLength); leftKeyBytes = keyLength; } pageIds[pageIndex] = currentPageId; if (leftKeyBytes > 0) { int dataIndex = pageIndex == 0 ? 
FIRST_PAGE_DATA_INDEX : OTHER_PAGE_DATA_INDEX; int canStoreKey = Math.min(leftKeyBytes, pageSize - dataIndex); if (canStoreKey > 0) { System.arraycopy(page, dataIndex, keyBytes, keyLength - leftKeyBytes, canStoreKey); leftKeyBytes -= canStoreKey; } } int nextPageId = (int) (ByteUtil.getUnsignedIntBigEndian(page, NEXT_PAGE_ID_INDEX) & NEXT_PAGE_ID_MASK); if (nextPageId == currentPageId) { break; } pageIndex++; currentPageId = nextPageId; } Pair<Object, Record> pair = new Pair<Object, Record>(); pair.first = getKey(context, keyBytes, 0, keyLength); pair.second = new Record(pageIds); return pair; } finally { if (page != null) { context.returnPageCache(page); } } } private static byte[] getKeyBytes(Context context, int keyLength) { switch (context.getConfig().getKeyType()) { case STRING: return new byte[keyLength]; case SHORT: return context.getKeyBytesShortCache(); case INT: case FLOAT: return context.getKeyBytesIntCache(); case LONG: case DOUBLE: return context.getKeyBytesLongCache(); } throw new IllegalArgumentException("key 's type is invalid"); } public static final Record buildRecord(Context context, MappedStorage mappedStorage, byte[] keyBytes, byte[] valueBytes) throws Throwable { if (keyBytes == null || keyBytes.length == 0 || valueBytes == null || valueBytes.length == 0) { throw new IllegalArgumentException("key and value should not be empty"); } int pageSize = context.getConfig().getPageSize(); if (pageSize < FIRST_PAGE_DATA_INDEX) { throw new RuntimeException("pageSize should >= " + FIRST_PAGE_DATA_INDEX); } if (keyBytes.length > MAX_KEY_LENGTH) { throw new IllegalArgumentException("key 's length should <= " + MAX_KEY_LENGTH); } if (valueBytes.length > MAX_VALUE_LENGTH) { throw new IllegalArgumentException("key 's length should <= " + MAX_VALUE_LENGTH); } int pageCount = pageCount(keyBytes.length, valueBytes.length, pageSize); if (pageCount > MAX_PAGE_COUNT_PER_RECORD) { throw new IllegalArgumentException("key and value is too large"); } int leftKeyBytes = 
keyBytes.length; int leftValueBytes = valueBytes.length; int[] pageIds = context.borrowPageIds(pageCount); Arrays.sort(pageIds); byte[] page = context.borrowPageCache(); try { for (int pageIndex = 0; pageIndex < pageCount; pageIndex++) { int nextPageId = pageIndex == pageCount - 1 ? pageIds[pageIndex] : pageIds[pageIndex + 1]; ByteUtil.getUnsignedBytesBigEndian(nextPageId, page, NEXT_PAGE_ID_INDEX); if (pageIndex == 0) { page[IS_FIRST_PAGE_INDEX] |= IS_FIRST_PAGE_MASK; ByteUtil.getUnsignedBytesBigEndian((short) pageCount, page, PAGE_COUNT_INDEX); ByteUtil.getUnsignedByte((byte) keyBytes.length, page, KEY_LENGTH_INDEX); ByteUtil.getBytesBigEndian(valueBytes.length, page, VALUE_LENGTH_INDEX); int canStoreKeyBytes = Math.min(pageSize - FIRST_PAGE_DATA_INDEX, leftKeyBytes); if (canStoreKeyBytes > 0) { System.arraycopy(keyBytes, 0, page, FIRST_PAGE_DATA_INDEX, canStoreKeyBytes); leftKeyBytes -= canStoreKeyBytes; int canStoreValueBytes = Math.min(pageSize - FIRST_PAGE_DATA_INDEX - canStoreKeyBytes, leftValueBytes); if (canStoreValueBytes > 0) { System.arraycopy(valueBytes, 0, page, FIRST_PAGE_DATA_INDEX + canStoreKeyBytes, canStoreValueBytes); leftValueBytes -= canStoreValueBytes; } } } else { page[IS_FIRST_PAGE_INDEX] &= IS_NOT_FIRST_PAGE_MASK; int canStoreKeyBytes = Math.min(pageSize - OTHER_PAGE_DATA_INDEX, leftKeyBytes); if (canStoreKeyBytes > 0) { System.arraycopy(keyBytes, keyBytes.length - leftKeyBytes, page, OTHER_PAGE_DATA_INDEX, canStoreKeyBytes); leftKeyBytes -= canStoreKeyBytes; } int canStoreValueBytes = Math.min(pageSize - OTHER_PAGE_DATA_INDEX - canStoreKeyBytes, leftValueBytes); if (canStoreValueBytes > 0) { System.arraycopy(valueBytes, valueBytes.length - leftValueBytes, page, OTHER_PAGE_DATA_INDEX + canStoreKeyBytes, canStoreValueBytes); leftValueBytes -= canStoreValueBytes; } } mappedStorage.setPage(pageIds[pageIndex], page, false); } } catch (Throwable cause) { context.returnPageIds(pageIds); throw cause; } finally { context.returnPageCache(page); } 
return new Record(pageIds); } private static int pageCount(int keyBytes, int valueBytes, int pageSize) { int totalSize = FIRST_PAGE_DATA_INDEX + keyBytes + valueBytes; int pageCount = 1; totalSize -= pageSize; for (;;) { if (totalSize <= 0) { break; } pageCount++; totalSize -= pageSize - OTHER_PAGE_DATA_INDEX; } return pageCount; } public byte[] getValue(Context context, MappedStorage mappedStorage) throws InterruptedException, IOException { int pageCount = pageIds.length; byte[][] pageSegments = new byte[pageCount][]; try { try { lock(Record.OP.GET); for (int i = 0; i < pageCount; i++) { int pageId = pageIds[i]; pageSegments[i] = context.borrowPageCache(); boolean valid = mappedStorage.getPage(pageId, pageSegments[i]); if (!valid) { return null; } } } catch (PageFaultException e) { return null; } finally { unlock(Record.OP.GET); } int keyLength = ByteUtil.getUnsignedByte(pageSegments[0], KEY_LENGTH_INDEX); int valueLength = ByteUtil.getIntBigEndian(pageSegments[0], VALUE_LENGTH_INDEX); byte[] valueBytes = new byte[valueLength]; int pageSize = context.getConfig().getPageSize(); int leftKeyBytes = keyLength; int leftValueBytes = valueLength; for (int pageIndex = 0; pageIndex < pageCount; pageIndex++) { byte[] pageSegment = pageSegments[pageIndex]; int dataIndex = pageIndex == 0 ? 
FIRST_PAGE_DATA_INDEX : OTHER_PAGE_DATA_INDEX; int canStoreKeyBytes = Math.min(pageSize - dataIndex, leftKeyBytes); int canStoreValueBytes = Math.min(pageSize - dataIndex - canStoreKeyBytes, leftValueBytes); if (canStoreValueBytes > 0) { System.arraycopy(pageSegment, dataIndex + canStoreKeyBytes, valueBytes, valueLength - leftValueBytes, canStoreValueBytes); } leftKeyBytes -= canStoreKeyBytes; leftValueBytes -= canStoreValueBytes; } return valueBytes; } finally { for (byte[] page : pageSegments) { if (page != null) { context.returnPageCache(page); } } } } public int[] getPageIds() { return pageIds; } public int[] setPageIds(int[] pageIds) { int[] old = this.pageIds; this.pageIds = pageIds; flag = 0; return old; } public int[] removePageIds() { pageIds = null; flag = 0; return pageIds; } public static Object getKey(Context context, byte[] bytes, int from, int to) { switch (context.getConfig().getKeyType()) { case STRING: byte[] bs = from <= 0 ? bytes : Arrays.copyOfRange(bytes, from, to); return new String(bs, StandardCharsets.UTF_8); case SHORT: return ByteUtil.getShortBigEndian(bytes, from); case INT: return ByteUtil.getIntBigEndian(bytes, from); case FLOAT: return Float.intBitsToFloat(ByteUtil.getIntBigEndian(bytes, from)); case LONG: return ByteUtil.getLongBigEndian(bytes, from); case DOUBLE: return Double.longBitsToDouble(ByteUtil.getLongBigEndian(bytes, from)); } throw new IllegalArgumentException("key 's type is invalid"); } }
package org.jtrim2.access; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Objects; import org.jtrim2.event.ListenerRef; import org.jtrim2.property.PropertySource; import org.jtrim2.utils.ExceptionHelper; /** * Defines static utility methods to track the availability of rights of an * {@link AccessManager} as a {@link PropertySource} (with {@code Boolean} value). * <P> * Note that these properties are not intended for synchronization purposes but * they can be used to enable and disable GUI components or change their state * otherwise. * * @see org.jtrim2.property.swing.AutoDisplayState */ public final class AccessProperties { /** * Returns a property which tracks if the given {@code AccessRequest} is * available or not. That is, if it can be acquired or not from the * specified {@code AccessManager}. The returned property is not intended to * be used as a true synchronization utility, instead it can be used to * change the state of GUI components based on the availability of rights. * <P> * Note that listeners registered with the returned property are not * necessarily invoked on the Event Dispatch Thread. * * @param <RightType> the type of the rights managed by the specified * {@code AccessManager} * @param accessManager the {@code AccessManager} managing the availability * of rights. This argument cannot be {@code null}. * @param request the {@code AccessRequest} whose availability is to be * tracked. This argument cannot be {@code null}. * @return a property which tracks if the given read and write rights are * available or not. This method never returns {@code null}. * * @throws NullPointerException thrown if any of the arguments is * {@code null} */ public static <RightType> PropertySource<Boolean> trackRequestAvailable( AccessManager<?, ? super RightType> accessManager, AccessRequest<?, ? 
extends RightType> request) { return trackRightsAvailable(accessManager, request.getReadRights(), request.getWriteRights()); } /** * Returns a property which tracks if the given read rights are available * or not. That is, if they can be acquired or not from the specified * {@code AccessManager}. The returned property is not intended to be used * as a true synchronization utility, instead it can be used to change the * state of GUI components based on the availability of rights. * <P> * Note that listeners registered with the returned property are not * necessarily invoked on the Event Dispatch Thread. * * @param <RightType> the type of the rights managed by the specified * {@code AccessManager} * @param accessManager the {@code AccessManager} managing the availability * of rights. This argument cannot be {@code null}. * @param readRights the rights requiring read access. This argument * cannot be {@code null} (but can be an empty collection) and none of its * elements is allowed to be {@code null}. * @return a property which tracks if the given read rights are available * or not. This method never returns {@code null}. * * @throws NullPointerException thrown if any of the arguments is * {@code null} */ public static <RightType> PropertySource<Boolean> trackReadRightsAvailable( AccessManager<?, ? super RightType> accessManager, Collection<? extends RightType> readRights) { return trackRightsAvailable(accessManager, readRights, Collections.<RightType>emptySet()); } /** * Returns a property which tracks if the given read right is available or * not. That is, if it can be acquired or not from the specified * {@code AccessManager}. The returned property is not intended to be used * as a true synchronization utility, instead it can be used to change * the state of GUI components based on the availability of rights. * <P> * Note that listeners registered with the returned property are not * necessarily invoked on the Event Dispatch Thread. 
* * @param <RightType> the type of the rights managed by the specified * {@code AccessManager} * @param accessManager the {@code AccessManager} managing the availability * of rights. This argument cannot be {@code null}. * @param readRight the right requiring read access. This argument cannot * be {@code null} * @return a property which tracks if the given read right is available or * not. This method never returns {@code null}. * * @throws NullPointerException thrown if any of the arguments is * {@code null} */ public static <RightType> PropertySource<Boolean> trackReadRightAvailable( AccessManager<?, ? super RightType> accessManager, RightType readRight) { return trackRightsAvailable(accessManager, Collections.singleton(readRight), Collections.<RightType>emptySet()); } /** * Returns a property which tracks if the given write rights are available * or not. That is, if they can be acquired or not from the specified * {@code AccessManager}. The returned property is not intended to be used * as a true synchronization utility, instead it can be used to change the * state of GUI components based on the availability of rights. * <P> * Note that listeners registered with the returned property are not * necessarily invoked on the Event Dispatch Thread. * * @param <RightType> the type of the rights managed by the specified * {@code AccessManager} * @param accessManager the {@code AccessManager} managing the availability * of rights. This argument cannot be {@code null}. * @param writeRights the rights requiring write access. This argument * cannot be {@code null} (but can be an empty collection) and none of its * elements is allowed to be {@code null}. * @return a property which tracks if the given write rights are available * or not. This method never returns {@code null}. * * @throws NullPointerException thrown if any of the arguments is * {@code null} */ public static <RightType> PropertySource<Boolean> trackWriteRightsAvailable( AccessManager<?, ? 
super RightType> accessManager, Collection<? extends RightType> writeRights) { return trackRightsAvailable(accessManager, Collections.<RightType>emptySet(), writeRights); } /** * Returns a property which tracks if the given write right is available or * not. That is, if it can be acquired or not from the specified * {@code AccessManager}. The returned property is not intended to be used * as a true synchronization utility, instead it can be used to change * the state of GUI components based on the availability of rights. * <P> * Note that listeners registered with the returned property are not * necessarily invoked on the Event Dispatch Thread. * * @param <RightType> the type of the rights managed by the specified * {@code AccessManager} * @param accessManager the {@code AccessManager} managing the availability * of rights. This argument cannot be {@code null}. * @param writeRight the right requiring write access. This argument cannot * be {@code null} * @return a property which tracks if the given write right is available or * not. This method never returns {@code null}. * * @throws NullPointerException thrown if any of the arguments is * {@code null} */ public static <RightType> PropertySource<Boolean> trackWriteRightAvailable( AccessManager<?, ? super RightType> accessManager, RightType writeRight) { return trackRightsAvailable(accessManager, Collections.<RightType>emptySet(), Collections.singleton(writeRight)); } /** * Returns a property which tracks if the given read and write rights * are available or not. That is, if they can be acquired or not from the * specified {@code AccessManager}. The returned property is not intended to * be used as a true synchronization utility, instead it can be used to * change the state of GUI components based on the availability of rights. * <P> * Note that listeners registered with the returned property are not * necessarily invoked on the Event Dispatch Thread. 
* * @param <RightType> the type of the rights managed by the specified * {@code AccessManager} * @param accessManager the {@code AccessManager} managing the availability * of rights. This argument cannot be {@code null}. * @param readRights the rights requiring read access. This argument cannot * be {@code null} (but can be an empty collection) and none of its * elements is allowed to be {@code null}. * @param writeRights the rights requiring write access. This argument * cannot be {@code null} (but can be an empty collection) and none of its * elements is allowed to be {@code null}. * @return a property which tracks if the given read and write rights are * available or not. This method never returns {@code null}. * * @throws NullPointerException thrown if any of the arguments is * {@code null} */ public static <RightType> PropertySource<Boolean> trackRightsAvailable( final AccessManager<?, ? super RightType> accessManager, Collection<? extends RightType> readRights, Collection<? extends RightType> writeRights) { // The bridge method is created, so that we can provide a more generic // method call avoiding syntax errors. return trackRightsAvailableBridge(accessManager, readRights, writeRights); } private static <IDType, RightType> PropertySource<Boolean> trackRightsAvailableBridge( final AccessManager<IDType, RightType> accessManager, Collection<? extends RightType> readRights, Collection<? extends RightType> writeRights) { return new RightTrackerPropertySource<>(accessManager, readRights, writeRights); } private AccessProperties() { throw new AssertionError(); } private static final class RightTrackerPropertySource<IDType, RightType> implements PropertySource<Boolean> { private final AccessManager<IDType, RightType> accessManager; private final Collection<RightType> readRights; private final Collection<RightType> writeRights; public RightTrackerPropertySource( AccessManager<IDType, RightType> accessManager, Collection<? extends RightType> readRights, Collection<? 
extends RightType> writeRights) { Objects.requireNonNull(accessManager, "accessManager"); this.accessManager = accessManager; this.readRights = new ArrayList<>(readRights); this.writeRights = new ArrayList<>(writeRights); ExceptionHelper.checkNotNullElements(this.readRights, "readRights"); ExceptionHelper.checkNotNullElements(this.writeRights, "writeRights"); } @Override public Boolean getValue() { return accessManager.isAvailable(readRights, writeRights); } @Override public ListenerRef addChangeListener(final Runnable listener) { Objects.requireNonNull(listener, "listener"); return accessManager.addAccessChangeListener((request, acquired) -> listener.run()); } } }
/* * Copyright 2015 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.drools.compiler.integrationtests; import java.io.Serializable; import java.math.BigDecimal; import java.math.BigInteger; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import org.drools.compiler.Address; import org.drools.compiler.Cheese; import org.drools.compiler.Cheesery; import org.drools.compiler.CommonTestMethodBase; import org.drools.compiler.Person; import org.drools.compiler.TestEnum; import org.drools.core.base.ClassFieldReader; import org.drools.core.base.ClassObjectType; import org.drools.core.base.extractors.MVELObjectClassFieldReader; import org.drools.core.base.mvel.MVELDebugHandler; import org.drools.core.impl.InternalKnowledgeBase; import org.drools.core.impl.KnowledgeBaseFactory; import org.drools.core.reteoo.AlphaNode; import org.drools.core.reteoo.ObjectTypeNode; import org.drools.core.rule.constraint.MvelConstraint; import org.drools.core.spi.AlphaNodeFieldConstraint; import org.drools.core.spi.FieldValue; import org.drools.core.util.DateUtils; import org.junit.Test; import org.kie.api.KieBase; import org.kie.api.KieBaseConfiguration; import org.kie.api.conf.EqualityBehaviorOption; import org.kie.api.definition.type.FactType; import org.kie.api.io.ResourceType; import org.kie.api.runtime.KieSession; import org.kie.api.runtime.StatelessKieSession; import 
org.kie.internal.builder.KnowledgeBuilder; import org.kie.internal.builder.KnowledgeBuilderFactory; import org.kie.internal.io.ResourceFactory; import org.kie.internal.runtime.StatefulKnowledgeSession; import org.mvel2.MVEL; import org.mvel2.ParserContext; public class MVELTest extends CommonTestMethodBase { @Test public void testHelloWorld() { // read in the source final KieBase kbase = loadKnowledgeBase("test_mvel.drl"); final KieSession ksession = kbase.newKieSession(); final List list = new ArrayList(); ksession.setGlobal("list", list); final List list2 = new ArrayList(); ksession.setGlobal("list2", list2); final Cheese c = new Cheese("stilton", 10); ksession.insert(c); ksession.fireAllRules(); assertEquals(2, list.size()); assertEquals(BigInteger.valueOf(30), list.get(0)); assertEquals(22, list.get(1)); assertEquals("hello world", list2.get(0)); final Date dt = DateUtils.parseDate("10-Jul-1974"); assertEquals(dt, c.getUsedBy()); } @Test public void testIncrementOperator() { String str = ""; str += "package org.kie \n"; str += "global java.util.List list \n"; str += "rule rule1 \n"; str += " dialect \"mvel\" \n"; str += "when \n"; str += " $I : Integer() \n"; str += "then \n"; str += " i = $I.intValue(); \n"; str += " i += 5; \n"; str += " list.add( i ); \n"; str += "end \n"; final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); kbuilder.add(ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL); assertFalse(kbuilder.hasErrors()); final InternalKnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase(); kbase.addPackages(kbuilder.getKnowledgePackages()); final KieSession ksession = createKnowledgeSession(kbase); final List list = new ArrayList(); ksession.setGlobal("list", list); ksession.insert(5); ksession.fireAllRules(); assertEquals(1, list.size()); assertEquals(10, list.get(0)); } @Test public void testEvalWithBigDecimal() { String str = ""; str += "package org.kie \n"; str += "import java.math.BigDecimal; 
\n"; str += "global java.util.List list \n"; str += "rule rule1 \n"; str += " dialect \"mvel\" \n"; str += "when \n"; str += " $bd : BigDecimal() \n"; str += " eval( $bd.compareTo( BigDecimal.ZERO ) > 0 ) \n"; str += "then \n"; str += " list.add( $bd ); \n"; str += "end \n"; final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); kbuilder.add(ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL); if (kbuilder.hasErrors()) { System.err.println(kbuilder.getErrors()); } assertFalse(kbuilder.hasErrors()); final InternalKnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase(); kbase.addPackages(kbuilder.getKnowledgePackages()); final KieSession ksession = createKnowledgeSession(kbase); final List list = new ArrayList(); ksession.setGlobal("list", list); ksession.insert(new BigDecimal(1.5)); ksession.fireAllRules(); assertEquals(1, list.size()); assertEquals(new BigDecimal(1.5), list.get(0)); } @Test public void testLocalVariableMVELConsequence() { final KieBase kbase = loadKnowledgeBase("test_LocalVariableMVELConsequence.drl"); final KieSession ksession = kbase.newKieSession(); final List list = new ArrayList(); ksession.setGlobal("results", list); ksession.insert(new Person("bob", "stilton")); ksession.insert(new Person("mark", "brie")); try { ksession.fireAllRules(); assertEquals("should have fired twice", 2, list.size()); } catch (final Exception e) { e.printStackTrace(); fail("Should not raise any exception"); } } @Test public void testMVELUsingGlobalsInDebugMode() { MVELDebugHandler.setDebugMode(true); try { final KieBase kbase = loadKnowledgeBase("test_MVELGlobalDebug.drl"); KieSession ksession = kbase.newKieSession(); ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession(ksession, false); ksession.dispose(); MVELDebugHandler.setDebugMode(false); } catch (final Exception e) { MVELDebugHandler.setDebugMode(false); e.printStackTrace(); fail("Should not raise exceptions"); } } @Test public void 
testDuplicateLocalVariableMVELConsequence() { final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); kbuilder.add(ResourceFactory.newInputStreamResource(getClass().getResourceAsStream("test_DuplicateLocalVariableMVELConsequence.drl")), ResourceType.DRL); assertTrue( kbuilder.hasErrors() ); } @Test public void testArrays() { String text = "package test_mvel;\n"; text += "import " + TestObject.class.getCanonicalName() + ";\n"; text += "import function " + TestObject.class.getCanonicalName() + ".array;\n"; text += "no-loop true\n"; text += "dialect \"mvel\"\n"; text += "rule \"1\"\n"; text += "salience 1\n"; text += "when\n"; text += " $fact: TestObject()\n"; text += " eval($fact.checkHighestPriority(\"mvel\", 2))\n"; text += " eval($fact.stayHasDaysOfWeek(\"mvel\", false, new String[][]{{\"2008-04-01\", \"2008-04-10\"}}))\n"; text += "then\n"; text += " $fact.applyValueAddPromo(1,2,3,4,\"mvel\");\n"; text += "end"; final KieBase kieBase = loadKnowledgeBaseFromString(text.replaceAll("mvel", "java"), text); final StatelessKieSession statelessKieSession = kieBase.newStatelessKieSession(); final List<String> list = new ArrayList<String>(); statelessKieSession.execute(new TestObject(list)); assertEquals(6, list.size()); assertTrue(list.containsAll( Arrays.asList("TestObject.checkHighestPriority: java|2", "TestObject.stayHasDaysOfWeek: java|false|[2008-04-01, 2008-04-10]", "TestObject.checkHighestPriority: mvel|2", "TestObject.stayHasDaysOfWeek: mvel|false|[2008-04-01, 2008-04-10]", "TestObject.applyValueAddPromo: 1|2|3|4|mvel", "TestObject.applyValueAddPromo: 1|2|3|4|java") )); } @Test public void testPackageImports() { String str = ""; str += "package org.kie \n"; str += "dialect \"mvel\"\n"; str += "import org.acme.healthcare.* \n"; str += "import org.acme.insurance.* \n"; str += "import org.acme.sensors.SensorReading \n"; str += "rule rule1 \n"; str += " when \n"; str += " eval(true)\n"; str += " then \n"; str += " insert(new Claim()); // from 
org.acme.healthcare.* \n"; str += " insert(new Policy()); // from org.acme.insurance.* \n"; str += " insert(new SensorReading()); // from org.acme.sensor.SensorReading \n"; str += "end\n"; final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); kbuilder.add(ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL); if (kbuilder.hasErrors()) { throw new RuntimeException(kbuilder.getErrors().toString()); } final InternalKnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase(); kbase.addPackages(kbuilder.getKnowledgePackages()); final KieSession ksession = createKnowledgeSession(kbase); final int result = ksession.fireAllRules(); assertEquals(1, result); final Collection<? extends Object> insertedObjects = ksession.getObjects(); assertEquals(3, insertedObjects.size()); } @Test public void testSizeCheckInObject() { final String str = ""+ "package org.drools.compiler.test \n" + "import " + Triangle.class.getCanonicalName() + "\n" + "global java.util.List list \n" + "rule \"show\" \n" + "when \n" + " $m : Triangle( deliveries.size == 0) \n" + "then \n" + " list.add('r1'); \n" + "end \n"; final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); kbuilder.add(ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL); if (kbuilder.hasErrors()) { fail(kbuilder.getErrors().toString()); } } @Test public void testNestedEnum() { final String str = ""+ "package org.drools.compiler.test \n" + "import " + Triangle.class.getCanonicalName() + "\n" + "global java.util.List list \n" + "rule \"show\" \n" + "when \n" + " $t: Triangle(t == Triangle.Type.ACUTE) \n" + "then \n" + " list.add($t.getT()); \n" + "end \n"; final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); kbuilder.add(ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL); if (kbuilder.hasErrors()) { fail(kbuilder.getErrors().toString()); } final InternalKnowledgeBase kbase = 
KnowledgeBaseFactory.newKnowledgeBase(); kbase.addPackages(kbuilder.getKnowledgePackages()); final KieSession ksession = createKnowledgeSession(kbase); final List list = new ArrayList(); ksession.setGlobal("list", list); final Triangle t = new Triangle(Triangle.Type.ACUTE); ksession.insert(t); ksession.fireAllRules(); assertEquals(Triangle.Type.ACUTE, list.get(0)); } @Test public void testNestedEnumWithMap() { final String str = ""+ "package org.drools.compiler.test \n" + "import " + DMap.class.getCanonicalName() + " \n" + "import " + Triangle.class.getCanonicalName() + "\n" + "global java.util.List list \n" + "rule \"show\" \n" + "when \n" + " $m : DMap( this[Triangle.Type.ACUTE] == 'xxx') \n" + "then \n" + " list.add('r1'); \n" + "end \n"; final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); kbuilder.add(ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL); if (kbuilder.hasErrors()) { fail(kbuilder.getErrors().toString()); } final InternalKnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase(); kbase.addPackages(kbuilder.getKnowledgePackages()); final KieSession ksession = createKnowledgeSession(kbase); final List list = new ArrayList(); ksession.setGlobal("list", list); final DMap m = new DMap(); m.put(Triangle.Type.ACUTE, "xxx"); ksession.insert(m); ksession.fireAllRules(); assertEquals("r1", list.get(0)); } @Test public void testNewConstructor() { final String str = ""+ "package org.drools.compiler.test \n" + "import " + Person.class.getCanonicalName() + "\n" + "import " + Address.class.getCanonicalName() + "\n" + "global java.util.List list \n" + "rule \"show\" \n" + "when \n" + " $m : Person( address == new Address('s1')) \n" + "then \n" + " list.add('r1'); \n" + "end \n"; final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); kbuilder.add(ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL); if (kbuilder.hasErrors()) { fail(kbuilder.getErrors().toString()); } 
final InternalKnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase(); kbase.addPackages(kbuilder.getKnowledgePackages()); final KieSession ksession = createKnowledgeSession(kbase); final List list = new ArrayList(); ksession.setGlobal("list", list); final Person p = new Person("yoda"); p.setAddress(new Address("s1")); ksession.insert(p); ksession.fireAllRules(); assertEquals("r1", list.get(0)); // Check it was built with MVELReturnValueExpression constraint final List<ObjectTypeNode> nodes = ((InternalKnowledgeBase) kbase).getRete().getObjectTypeNodes(); ObjectTypeNode node = null; for (final ObjectTypeNode n : nodes) { if (((ClassObjectType) n.getObjectType()).getClassType() == Person.class) { node = n; break; } } final AlphaNode alphanode = (AlphaNode) node.getObjectSinkPropagator().getSinks()[0]; final AlphaNodeFieldConstraint constraint = alphanode.getConstraint(); if (constraint instanceof MvelConstraint) { assertTrue(((MvelConstraint) constraint).getFieldExtractor() instanceof ClassFieldReader); final FieldValue r = ((MvelConstraint) constraint).getField(); assertEquals(p.getAddress(), r.getValue()); } } @Test public void testArrayAccessorWithGenerics() { final String str = ""+ "package org.drools.compiler.test \n" + "import " + Person.class.getCanonicalName() + "\n" + "import " + Address.class.getCanonicalName() + "\n" + "global java.util.List list \n" + "rule \"show\" \n" + "when \n" + " $m : Person( addresses[0] == new Address('s1'), addresses[0].street == new Address('s1').street ) \n" + "then \n" + " list.add('r1'); \n" + "end \n"; final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); kbuilder.add(ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL); if (kbuilder.hasErrors()) { fail(kbuilder.getErrors().toString()); } final InternalKnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase(); kbase.addPackages(kbuilder.getKnowledgePackages()); final KieSession ksession = createKnowledgeSession(kbase); 
final List list = new ArrayList(); ksession.setGlobal("list", list); final Person p = new Person("yoda"); p.addAddress(new Address("s1")); ksession.insert(p); ksession.fireAllRules(); assertEquals("r1", list.get(0)); // Check it was built with MVELReturnValueExpression constraint final List<ObjectTypeNode> nodes = ((InternalKnowledgeBase) kbase).getRete().getObjectTypeNodes(); ObjectTypeNode node = null; for (final ObjectTypeNode n : nodes) { if (((ClassObjectType) n.getObjectType()).getClassType() == Person.class) { node = n; break; } } AlphaNode alphanode = (AlphaNode) node.getObjectSinkPropagator().getSinks()[0]; AlphaNodeFieldConstraint constraint = alphanode.getConstraint(); if (constraint instanceof MvelConstraint) { assertTrue(((MvelConstraint) constraint).getFieldExtractor() instanceof MVELObjectClassFieldReader); assertEquals(new Address("s1"), ((MvelConstraint) constraint).getField().getValue()); } alphanode = (AlphaNode) alphanode.getObjectSinkPropagator().getSinks()[0]; constraint = alphanode.getConstraint(); if (constraint instanceof MvelConstraint) { assertTrue(((MvelConstraint) constraint).getFieldExtractor() instanceof MVELObjectClassFieldReader); assertEquals(new Address("s1").getStreet(), ((MvelConstraint) constraint).getField().getValue()); } } @Test public void testArrayAccessorWithStaticFieldAccess() { final String str = ""+ "package org.drools.compiler.test \n" + "import " + Person.class.getCanonicalName() + "\n" + "import " + Address.class.getCanonicalName() + "\n" + "import " + Triangle.class.getCanonicalName() + "\n" + "global java.util.List list \n" + "rule \"show\" \n" + "when \n" + " $m : Person( addresses[Triangle.ZERO] == new Address('s1'), addresses[Triangle.ZERO].street == new Address('s1').street ) \n" + "then \n" + " list.add('r1'); \n" + "end \n"; final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); kbuilder.add(ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL); if 
(kbuilder.hasErrors()) { fail(kbuilder.getErrors().toString()); } final InternalKnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase(); kbase.addPackages(kbuilder.getKnowledgePackages()); final KieSession ksession = createKnowledgeSession(kbase); final List list = new ArrayList(); ksession.setGlobal("list", list); final Person p = new Person("yoda"); p.addAddress(new Address("s1")); ksession.insert(p); ksession.fireAllRules(); assertEquals("r1", list.get(0)); // Check it was built with MVELReturnValueExpression constraint final List<ObjectTypeNode> nodes = ((InternalKnowledgeBase) kbase).getRete().getObjectTypeNodes(); ObjectTypeNode node = null; for (final ObjectTypeNode n : nodes) { if (((ClassObjectType) n.getObjectType()).getClassType() == Person.class) { node = n; break; } } AlphaNode alphanode = (AlphaNode) node.getObjectSinkPropagator().getSinks()[0]; AlphaNodeFieldConstraint constraint = alphanode.getConstraint(); if (constraint instanceof MvelConstraint) { assertTrue(((MvelConstraint) alphanode.getConstraint()).getFieldExtractor() instanceof MVELObjectClassFieldReader); assertEquals(new Address("s1"), ((MvelConstraint) alphanode.getConstraint()).getField().getValue()); } alphanode = (AlphaNode) alphanode.getObjectSinkPropagator().getSinks()[0]; constraint = alphanode.getConstraint(); if (constraint instanceof MvelConstraint) { assertTrue(((MvelConstraint) alphanode.getConstraint()).getFieldExtractor() instanceof MVELObjectClassFieldReader); assertEquals(new Address("s1").getStreet(), ((MvelConstraint) alphanode.getConstraint()).getField().getValue()); } } @Test public void testMapAccessorWithStaticFieldAccess() { final String str = ""+ "package org.drools.compiler.test \n" + "import " + Person.class.getCanonicalName() + "\n" + "import " + Address.class.getCanonicalName() + "\n" + "import " + TestEnum.class.getCanonicalName() + "\n" + "global java.util.List list \n" + "rule \"show\" \n" + "when \n" + " $m : Person( namedAddresses[TestEnum.ONE] == new 
Address('s1'), namedAddresses[TestEnum.ONE].street == new Address('s1').street ) \n" + "then \n" + " list.add('r1'); \n" + "end \n"; final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); kbuilder.add(ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL); if (kbuilder.hasErrors()) { fail(kbuilder.getErrors().toString()); } final InternalKnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase(); kbase.addPackages(kbuilder.getKnowledgePackages()); final KieSession ksession = createKnowledgeSession(kbase); final List list = new ArrayList(); ksession.setGlobal("list", list); final Person p = new Person("yoda"); p.getNamedAddresses().put(TestEnum.ONE, new Address("s1")); ksession.insert(p); ksession.fireAllRules(); assertEquals("r1", list.get(0)); // Check it was built with MVELReturnValueExpression constraint final List<ObjectTypeNode> nodes = ((InternalKnowledgeBase) kbase).getRete().getObjectTypeNodes(); ObjectTypeNode node = null; for (final ObjectTypeNode n : nodes) { if (((ClassObjectType) n.getObjectType()).getClassType() == Person.class) { node = n; break; } } AlphaNode alphanode = (AlphaNode) node.getObjectSinkPropagator().getSinks()[0]; AlphaNodeFieldConstraint constraint = alphanode.getConstraint(); if (constraint instanceof MvelConstraint) { assertTrue(((MvelConstraint) alphanode.getConstraint()).getFieldExtractor() instanceof MVELObjectClassFieldReader); assertEquals(new Address("s1"), ((MvelConstraint) alphanode.getConstraint()).getField().getValue()); } alphanode = (AlphaNode) alphanode.getObjectSinkPropagator().getSinks()[0]; constraint = alphanode.getConstraint(); if (constraint instanceof MvelConstraint) { assertTrue(((MvelConstraint) alphanode.getConstraint()).getFieldExtractor() instanceof MVELObjectClassFieldReader); assertEquals(new Address("s1").getStreet(), ((MvelConstraint) alphanode.getConstraint()).getField().getValue()); } } @Test public void testArrayAccessorWithoutGenerics() { final String str 
= ""+ "package org.drools.compiler.test \n" + "import " + Person.class.getCanonicalName() + "\n" + "import " + Address.class.getCanonicalName() + "\n" + "global java.util.List list \n" + "rule \"show\" \n" + "when \n" + " $m : Person( addressesNoGenerics[0].street == new Address('s1').street) \n" + "then \n" + " list.add('r1'); \n" + "end \n"; final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); kbuilder.add(ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL); // This should fail as there are no generics for the List assertTrue(kbuilder.hasErrors()); } public static class DMap extends HashMap { } public static class Triangle { public static final int ZERO = 0; private List<Map<String, Object>> deliveries; public static enum Type { ACUTE, OBTUSE; } private Type t; public Triangle(final Type t) { this.t = t; } public Type getT() { return t; } public void setT(final Type t) { this.t = t; } public List<Map<String, Object>> getDeliveries() { return deliveries; } public void setDeliveries(final List<Map<String, Object>> deliveries) { this.deliveries = deliveries; } } public Object compiledExecute(final String ex) { final Serializable compiled = MVEL.compileExpression( ex ); return MVEL.executeExpression( compiled, new Object(), new HashMap() ); } @Test public void test1() { final ParserContext pc = new ParserContext(); pc.addInput("x", String.class); pc.setStrongTyping(true); final Object o = MVEL.compileExpression("x.startsWith('d')", pc); final Map vars = new HashMap(); vars.put("x", "d"); MVEL.executeExpression(o, vars); System.out.println(o); } @Test public void testTokensInString(){ //should query antldr DFA63 class but don't know how final String [] operators = {"," ,"=" , "|=", "*"}; //test various in consequence final String strBegin = "" + "package org.kie \n" + "import org.drools.compiler.Cheese \n" + "dialect \"mvel\"\n" + "rule rule1 \n" + "when \n" + "$c:Cheese(type==\"swiss\") \n" + "then \n"+ "modify($c){ 
type = \"swiss"; final String strEnd = "good\"};\n" + "end\n"; final StringBuilder failures = new StringBuilder(); for (final String oper : operators) { final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); final String rule = strBegin + oper + strEnd; System.out.print(rule); kbuilder.add(ResourceFactory.newByteArrayResource(rule.getBytes()), ResourceType.DRL); if (kbuilder.hasErrors()) { failures.append(kbuilder.getErrors().toString()); } } final String failStr = failures.toString(); if (failStr.length() > 0) { fail(failStr); } } @Test public void testGeneratedBeansMVEL() throws IllegalAccessException, InstantiationException { final KieBase kbase = loadKnowledgeBase("test_GeneratedBeansMVEL.drl"); // Retrieve the generated fact type final FactType pf = kbase.getFactType("mortgages", "Applicant"); final FactType af = kbase.getFactType("mortgages", "LoanApplication"); final Object person = pf.newInstance(); pf.set(person, "creditRating", "OK"); final Object application = af.newInstance(); final KieSession ksession = createKnowledgeSession(kbase); ksession.insert(person); ksession.insert(application); ksession.fireAllRules(); } @Test public void testMVELClassReferences() throws InstantiationException, IllegalAccessException { final String str = "package org.drools.compiler\n" + "declare Assignment\n" + " source : Class\n" + " target : Class\n" + "end\n" + "rule ObjectIsAssignable1\n" + "when\n" + " Assignment( $t: target == java.lang.Object.class || target == source )\n" + "then\n" + "end\n" + "rule ObjectIsAssignable2\n" + "when\n" + " Assignment( $t: target == source || target == java.lang.Object.class )\n" + "then\n" + "end"; final KieBase kbase = loadKnowledgeBaseFromString(str); final KieSession ksession = createKnowledgeSession(kbase); final FactType asgType = kbase.getFactType("org.drools.compiler", "Assignment"); final Object asg = asgType.newInstance(); asgType.set(asg, "source", Object.class); asgType.set(asg, "target", 
Object.class); ksession.insert(asg); final int rules = ksession.fireAllRules(); ksession.dispose(); assertEquals(2, rules); } @Test public void testMVELConstraintsWithFloatingPointNumbersInScientificNotation() { final String rule = "package test; \n" + "dialect \"mvel\"\n" + "global java.util.List list;" + "\n" + "declare Bean \n" + " field : double \n" + "end \n" + "\n" + "rule \"Init\" \n" + "when \n" + "then \n" + "\t insert( new Bean( 1.0E-2 ) ); \n" + "end \n" + "\n" + "rule \"Check\" \n" + "when \n" + "\t Bean( field < 1.0E-1 ) \n" + "then \n" + "\t list.add( \"OK\" ); \n" + "end"; final KieBase kbase = loadKnowledgeBaseFromString(rule); final KieSession kSession = kbase.newKieSession(); final List<String> list = new ArrayList<String>(); kSession.setGlobal("list", list); kSession.fireAllRules(); assertEquals(1, list.size()); } @Test public void testMvelDoubleInvocation() { final String rule = "package org.drools.compiler\n" + "import " + TestUtility.class.getCanonicalName() + ";\n" + "import " + TestFact.class.getCanonicalName() + ";\n" + "rule \"First Rule\"\n" + " when\n" + " $tf : TestFact(TestUtility.utilMethod(s, \"Value1\") == true\n" + " && i > 0\n" + " )\n" + " then\n" + " System.out.println(\"First Rule Fires\");\n" + "end\n" + "\n" + "rule \"Second Rule\"\n" + " when\n" + " $tf : TestFact(TestUtility.utilMethod(s, \"Value2\") == true\n" + " && i > 0\n" + " )\n" + " then\n" + " System.out.println(\"Second Rule Fires\");\n" + "end\n" + "\n" + "rule \"Third Rule\"\n" + " when\n" + " $tf : TestFact(TestUtility.utilMethod(s, \"Value3\") == true\n" + " && i > 0\n" + " )\n" + " then\n" + " System.out.println(\"Third Rule Fires\");\n" + "end "; final KieBase kbase = loadKnowledgeBaseFromString(rule); final KieSession ksession = createKnowledgeSession(kbase); final TestFact fact = new TestFact(); fact.setS("asdf"); fact.setI(10); ksession.insert(fact); ksession.fireAllRules(); ksession.dispose(); } public static class TestUtility { public static Boolean 
utilMethod(final String s1, final String s2) { Boolean result = null; if (s1 != null) { result = s1.equals(s2); } return result; } } public static class TestFact { private int i; private String s; public int getI() { return i; } public void setI(final int i) { this.i = i; } public String getS() { return s; } public void setS(final String s) { this.s = s; } } @Test public void testMVELSoundex() throws Exception { // read in the source final KieBase kbase = SerializationHelper.serializeObject(loadKnowledgeBase("MVEL_soundex.drl")); KieSession ksession = createKnowledgeSession(kbase); ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession(ksession, true); final Cheese c = new Cheese("fubar", 2); ksession.insert(c); ksession.fireAllRules(); assertEquals(42, c.getPrice()); } @Test public void testMVELSoundexNoCharParam() throws Exception { // read in the source final KieBase kbase = SerializationHelper.serializeObject(loadKnowledgeBase("MVEL_soundexNPE2500.drl")); KieSession ksession = createKnowledgeSession(kbase); ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession(ksession, true); final Cheese foobarCheese = new Cheese("foobar", 2); final Cheese nullCheese = new Cheese(null, 2); final Cheese starCheese = new Cheese("*", 2); ksession.insert(foobarCheese); ksession.insert(nullCheese); ksession.insert(starCheese); ksession.fireAllRules(); assertEquals(42, foobarCheese.getPrice()); assertEquals(2, nullCheese.getPrice()); assertEquals(2, starCheese.getPrice()); } @Test public void testMVELRewrite() throws Exception { // read in the source final KieBase kbase = SerializationHelper.serializeObject(loadKnowledgeBase("test_MVELrewrite.drl")); KieSession ksession = createKnowledgeSession(kbase); ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession(ksession, true); final List results = new ArrayList(); ksession.setGlobal("results", results); final Cheese brie = new Cheese("brie", 2); final Cheese stilton = new Cheese("stilton", 2); 
final Cheesery cheesery = new Cheesery(); cheesery.addCheese(brie); cheesery.addCheese(stilton); ksession.insert(cheesery); ksession.fireAllRules(); assertEquals(1, results.size()); assertEquals(cheesery, results.get(0)); } @Test public void testMVELTypeCoercion() { final String str = "package org.drools.compiler.test; \n" + "\n" + "global java.util.List list;" + "\n" + "declare Bean\n" + // NOTICE: THIS WORKS WHEN THE FIELD IS "LIST", BUT USED TO WORK WITH ARRAYLIST TOO " field : java.util.ArrayList\n" + "end\n" + "\n" + "\n" + "rule \"Init\"\n" + "when \n" + "then\n" + " insert( new Bean( new java.util.ArrayList( java.util.Arrays.asList( \"x\" ) ) ) );\n" + "end\n" + "\n" + "rule \"Check\"\n" + "when\n" + " $b : Bean( $fld : field == [\"x\"] )\n" + "then\n" + " System.out.println( $fld );\n" + " list.add( \"OK\" ); \n" + "end"; final KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); kbuilder.add(ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL); if (kbuilder.hasErrors()) { fail(kbuilder.getErrors().toString()); } final KieBaseConfiguration kbConf = KnowledgeBaseFactory.newKnowledgeBaseConfiguration(); kbConf.setOption(EqualityBehaviorOption.EQUALITY); final InternalKnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase(kbConf); kbase.addPackages(kbuilder.getKnowledgePackages()); final KieSession ksession = kbase.newKieSession(); final List list = new ArrayList(); ksession.setGlobal("list", list); ksession.fireAllRules(); assertTrue(list.contains("OK")); ksession.dispose(); } @Test public void testNoMvelSyntaxInFunctions() { // JBRULES-3433 final String str = "import java.util.*;\n" + "dialect \"mvel\"\n" + "function Integer englishToInt(String englishNumber) { \n" + " Map m = [\"one\":1, \"two\":2, \"three\":3, \"four\":4, \"five\":5]; \n" + " Object obj = m.get(englishNumber.toLowerCase()); \n" + " return Integer.parseInt(obj.toString()); \n" + "}\n"; final KnowledgeBuilder kbuilder = 
KnowledgeBuilderFactory.newKnowledgeBuilder();
        kbuilder.add(ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL);
        // The DRL function body uses MVEL map-literal syntax; per the test name
        // and JBRULES-3433, functions are not compiled as MVEL, so compilation
        // errors are expected here.
        assertTrue(kbuilder.hasErrors());
    }
}
/* * JBoss, Home of Professional Open Source * Copyright 2009, Red Hat, Inc. and/or its affiliates, and individual contributors * by the @authors tag. See the copyright.txt in the distribution for a * full listing of individual contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.hibernate.validator.internal.engine; import java.lang.annotation.ElementType; import java.lang.reflect.Constructor; import java.lang.reflect.Method; import java.lang.reflect.Type; import java.util.HashSet; import java.util.IdentityHashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import javax.validation.ConstraintValidatorFactory; import javax.validation.ConstraintViolation; import javax.validation.MessageInterpolator; import javax.validation.ParameterNameProvider; import javax.validation.Path; import javax.validation.TraversableResolver; import javax.validation.ValidationException; import javax.validation.metadata.ConstraintDescriptor; import com.fasterxml.classmate.ResolvedType; import com.fasterxml.classmate.TypeResolver; import org.hibernate.validator.internal.engine.constraintvalidation.ConstraintValidatorContextImpl; import org.hibernate.validator.internal.engine.constraintvalidation.ConstraintValidatorManager; import org.hibernate.validator.internal.engine.constraintvalidation.ConstraintViolationCreationContext; import org.hibernate.validator.internal.engine.path.PathImpl; import 
org.hibernate.validator.internal.metadata.core.MetaConstraint; import org.hibernate.validator.internal.metadata.raw.ExecutableElement; import org.hibernate.validator.internal.util.IdentitySet; import org.hibernate.validator.internal.util.TypeHelper; import org.hibernate.validator.internal.util.TypeResolutionHelper; import org.hibernate.validator.internal.util.logging.Log; import org.hibernate.validator.internal.util.logging.LoggerFactory; import org.hibernate.validator.spi.valuehandling.ValidatedValueUnwrapper; import static org.hibernate.validator.internal.util.CollectionHelper.newHashMap; import static org.hibernate.validator.internal.util.CollectionHelper.newHashSet; /** * Context object keeping track of all required data for a validation call. * * We use this object to collect all failing constraints, but also to have access to resources like * constraint validator factory, message interpolator, traversable resolver, etc. * * @author Hardy Ferentschik * @author Emmanuel Bernard * @author Gunnar Morling */ public class ValidationContext<T> { private static final Log log = LoggerFactory.make(); /** * Caches and manages life cycle of constraint validator instances. */ private final ConstraintValidatorManager constraintValidatorManager; /** * The root bean of the validation. */ private final T rootBean; /** * The root bean class of the validation. */ private final Class<T> rootBeanClass; /** * The method of the current validation call in case of executable validation. */ private final ExecutableElement executable; /** * The validated parameters in case of executable parameter validation. */ private final Object[] executableParameters; /** * The validated return value in case of executable return value validation. */ private final Object executableReturnValue; /** * Maps a group to an identity set to keep track of already validated objects. We have to make sure * that each object gets only validated once per group and property path. 
*/ private final Map<Class<?>, IdentitySet> processedBeansPerGroup; /** * Maps an object to a list of paths in which it has been validated. The objects are the bean instances. */ private final Map<Object, Set<PathImpl>> processedPathsPerBean; /** * Maps processed constraints to the bean and path for which they have been processed. */ private final Map<BeanAndPath, IdentitySet> processedMetaConstraints; /** * Contains all failing constraints so far. */ private final Set<ConstraintViolation<T>> failingConstraintViolations; /** * The message resolver which should be used in this context. */ private final MessageInterpolator messageInterpolator; /** * The constraint factory which should be used in this context. */ private final ConstraintValidatorFactory constraintValidatorFactory; /** * Allows a JPA provider to decide whether a property should be validated. */ private final TraversableResolver traversableResolver; /** * Parameter name provider which should be used in this context. */ private final ParameterNameProvider parameterNameProvider; /** * List of value un-wrappers. */ private final List<ValidatedValueUnwrapper<?>> validatedValueUnwrappers; /** * Used for resolving generic type information. */ private final TypeResolutionHelper typeResolutionHelper; /** * Whether or not validation should fail on the first constraint violation. 
*/ private final boolean failFast; private ValidationContext(ConstraintValidatorManager constraintValidatorManager, MessageInterpolator messageInterpolator, ConstraintValidatorFactory constraintValidatorFactory, TraversableResolver traversableResolver, ParameterNameProvider parameterNameProvider, List<ValidatedValueUnwrapper<?>> validatedValueUnwrappers, TypeResolutionHelper typeResolutionHelper, boolean failFast, T rootBean, Class<T> rootBeanClass, ExecutableElement executable, Object[] executableParameters, Object executableReturnValue) { this.constraintValidatorManager = constraintValidatorManager; this.messageInterpolator = messageInterpolator; this.constraintValidatorFactory = constraintValidatorFactory; this.traversableResolver = traversableResolver; this.parameterNameProvider = parameterNameProvider; this.validatedValueUnwrappers = validatedValueUnwrappers; this.typeResolutionHelper = typeResolutionHelper; this.failFast = failFast; this.rootBean = rootBean; this.rootBeanClass = rootBeanClass; this.executable = executable; this.executableParameters = executableParameters; this.executableReturnValue = executableReturnValue; this.processedBeansPerGroup = newHashMap(); this.processedPathsPerBean = new IdentityHashMap<Object, Set<PathImpl>>(); this.processedMetaConstraints = newHashMap(); this.failingConstraintViolations = newHashSet(); } public static ValidationContextBuilder getValidationContext( ConstraintValidatorManager constraintValidatorManager, MessageInterpolator messageInterpolator, ConstraintValidatorFactory constraintValidatorFactory, TraversableResolver traversableResolver, List<ValidatedValueUnwrapper<?>> validatedValueUnwrappers, TypeResolutionHelper typeResolutionHelper, boolean failFast) { return new ValidationContextBuilder( constraintValidatorManager, messageInterpolator, constraintValidatorFactory, traversableResolver, validatedValueUnwrappers, typeResolutionHelper, failFast ); } public T getRootBean() { return rootBean; } public Class<T> 
getRootBeanClass() { return rootBeanClass; } public ExecutableElement getExecutable() { return executable; } public TraversableResolver getTraversableResolver() { return traversableResolver; } public boolean isFailFastModeEnabled() { return failFast; } public ConstraintValidatorManager getConstraintValidatorManager() { return constraintValidatorManager; } /** * Returns a list with the current executable's parameter names as retrieved * from the current {@link ParameterNameProvider}. * * @return The current executable's parameter names,if this context was * created for parameter validation, {@code null} otherwise. */ public List<String> getParameterNames() { if ( parameterNameProvider == null ) { return null; } else if ( executable.getElementType() == ElementType.METHOD ) { return parameterNameProvider.getParameterNames( (Method) executable.getMember() ); } else { return parameterNameProvider.getParameterNames( (Constructor<?>) executable.getMember() ); } } public Set<ConstraintViolation<T>> createConstraintViolations(ValueContext<?, ?> localContext, ConstraintValidatorContextImpl constraintValidatorContext) { Set<ConstraintViolation<T>> constraintViolations = newHashSet(); for ( ConstraintViolationCreationContext constraintViolationCreationContext : constraintValidatorContext.getConstraintViolationCreationContexts() ) { ConstraintViolation<T> violation = createConstraintViolation( localContext, constraintViolationCreationContext, constraintValidatorContext.getConstraintDescriptor() ); constraintViolations.add( violation ); } return constraintViolations; } public ConstraintValidatorFactory getConstraintValidatorFactory() { return constraintValidatorFactory; } public boolean isBeanAlreadyValidated(Object value, Class<?> group, PathImpl path) { boolean alreadyValidated; alreadyValidated = isAlreadyValidatedForCurrentGroup( value, group ); if ( alreadyValidated ) { alreadyValidated = isAlreadyValidatedForPath( value, path ); } return alreadyValidated; } public void 
markCurrentBeanAsProcessed(ValueContext<?, ?> valueContext) { markCurrentBeanAsProcessedForCurrentGroup( valueContext.getCurrentBean(), valueContext.getCurrentGroup() ); markCurrentBeanAsProcessedForCurrentPath( valueContext.getCurrentBean(), valueContext.getPropertyPath() ); } public void addConstraintFailures(Set<ConstraintViolation<T>> failingConstraintViolations) { this.failingConstraintViolations.addAll( failingConstraintViolations ); } public Set<ConstraintViolation<T>> getFailingConstraints() { return failingConstraintViolations; } public ConstraintViolation<T> createConstraintViolation(ValueContext<?, ?> localContext, ConstraintViolationCreationContext constraintViolationCreationContext, ConstraintDescriptor<?> descriptor) { String messageTemplate = constraintViolationCreationContext.getMessage(); String interpolatedMessage = interpolate( messageTemplate, localContext.getCurrentValidatedValue(), descriptor, constraintViolationCreationContext.getExpressionVariables() ); // at this point we make a copy of the path to avoid side effects Path path = PathImpl.createCopy( constraintViolationCreationContext.getPath() ); if ( executableParameters != null ) { return ConstraintViolationImpl.forParameterValidation( messageTemplate, interpolatedMessage, getRootBeanClass(), getRootBean(), localContext.getCurrentBean(), localContext.getCurrentValidatedValue(), path, descriptor, localContext.getElementType(), executableParameters ); } else if ( executableReturnValue != null ) { return ConstraintViolationImpl.forReturnValueValidation( messageTemplate, interpolatedMessage, getRootBeanClass(), getRootBean(), localContext.getCurrentBean(), localContext.getCurrentValidatedValue(), path, descriptor, localContext.getElementType(), executableReturnValue ); } else { return ConstraintViolationImpl.forBeanValidation( messageTemplate, interpolatedMessage, getRootBeanClass(), getRootBean(), localContext.getCurrentBean(), localContext.getCurrentValidatedValue(), path, descriptor, 
localContext.getElementType() );
		}
	}

	/**
	 * Whether the given constraint has already been evaluated for the given
	 * bean/path combination.
	 */
	public boolean hasMetaConstraintBeenProcessed(Object bean, Path path, MetaConstraint<?> metaConstraint) {
		// TODO switch to proper multi key map (HF)
		IdentitySet processedConstraints = processedMetaConstraints.get( new BeanAndPath( bean, path ) );
		return processedConstraints != null && processedConstraints.contains( metaConstraint );
	}

	/**
	 * Records the given constraint as processed for the given bean/path
	 * combination so it is not evaluated again.
	 */
	public void markConstraintProcessed(Object bean, Path path, MetaConstraint<?> metaConstraint) {
		// TODO switch to proper multi key map (HF)
		BeanAndPath beanAndPath = new BeanAndPath( bean, path );
		if ( processedMetaConstraints.containsKey( beanAndPath ) ) {
			processedMetaConstraints.get( beanAndPath ).add( metaConstraint );
		}
		else {
			IdentitySet set = new IdentitySet();
			set.add( metaConstraint );
			processedMetaConstraints.put( beanAndPath, set );
		}
	}

	/**
	 * Returns the first validated value handler found which supports the given type.
	 * <p>
	 * If required this could be enhanced to search for the most-specific handler and raise an exception in case more
	 * than one matching handler is found (or a scheme of prioritizing handlers to process several handlers in order.
*
	 * @param type the type to be handled
	 *
	 * @return the handler for the given type or {@code null} if no matching handler was found
	 */
	public ValidatedValueUnwrapper<?> getValidatedValueUnwrapper(Type type) {
		TypeResolver typeResolver = typeResolutionHelper.getTypeResolver();

		// Linear scan; first handler whose ValidatedValueUnwrapper<T> type
		// parameter is assignable from the given type wins.
		for ( ValidatedValueUnwrapper<?> handler : validatedValueUnwrappers ) {
			ResolvedType handlerType = typeResolver.resolve( handler.getClass() );
			List<ResolvedType> typeParameters = handlerType.typeParametersFor( ValidatedValueUnwrapper.class );

			if ( TypeHelper.isAssignable( typeParameters.get( 0 ).getErasedType(), type ) ) {
				return handler;
			}
		}

		return null;
	}

	@Override
	public String toString() {
		final StringBuilder sb = new StringBuilder();
		sb.append( "ValidationContext" );
		sb.append( "{rootBean=" ).append( rootBean );
		sb.append( '}' );
		return sb.toString();
	}

	/**
	 * Interpolates the given message template, guarding against exceptions
	 * thrown by the configured message interpolator.
	 */
	private String interpolate(String messageTemplate,
			Object validatedValue,
			ConstraintDescriptor<?> descriptor,
			Map<String, Object> messageParameters) {
		MessageInterpolatorContext context = new MessageInterpolatorContext(
				descriptor,
				validatedValue,
				getRootBeanClass(),
				messageParameters
		);
		try {
			return messageInterpolator.interpolate( messageTemplate, context );
		}
		catch ( ValidationException ve ) {
			// Bean Validation exceptions are propagated unchanged.
			throw ve;
		}
		catch ( Exception e ) {
			// Anything else is wrapped so callers see a ValidationException.
			throw log.getExceptionOccurredDuringMessageInterpolationException( e );
		}
	}

	/**
	 * Whether the given bean was already validated for the given path, a
	 * sub-path or a super-path of it.
	 */
	private boolean isAlreadyValidatedForPath(Object value, PathImpl path) {
		Set<PathImpl> pathSet = processedPathsPerBean.get( value );
		if ( pathSet == null ) {
			return false;
		}

		for ( PathImpl p : pathSet ) {
			if ( path.isRootPath() || p.isRootPath() || isSubPathOf( path, p ) || isSubPathOf( p, path ) ) {
				return true;
			}
		}

		return false;
	}

	// True if p1's node sequence is a prefix of p2's node sequence.
	private boolean isSubPathOf(Path p1, Path p2) {
		Iterator<Path.Node> p1Iter = p1.iterator();
		Iterator<Path.Node> p2Iter = p2.iterator();
		while ( p1Iter.hasNext() ) {
			Path.Node p1Node = p1Iter.next();
			if ( !p2Iter.hasNext() ) {
				return false;
			}
			Path.Node p2Node = p2Iter.next();
			if ( !p1Node.equals( p2Node ) ) {
return false; } } return true; } private boolean isAlreadyValidatedForCurrentGroup(Object value, Class<?> group) { IdentitySet objectsProcessedInCurrentGroups = processedBeansPerGroup.get( group ); return objectsProcessedInCurrentGroups != null && objectsProcessedInCurrentGroups.contains( value ); } private void markCurrentBeanAsProcessedForCurrentPath(Object value, PathImpl path) { if ( processedPathsPerBean.containsKey( value ) ) { processedPathsPerBean.get( value ).add( path ); } else { Set<PathImpl> set = new HashSet<PathImpl>(); set.add( path ); processedPathsPerBean.put( value, set ); } } private void markCurrentBeanAsProcessedForCurrentGroup(Object value, Class<?> group) { if ( processedBeansPerGroup.containsKey( group ) ) { processedBeansPerGroup.get( group ).add( value ); } else { IdentitySet set = new IdentitySet(); set.add( value ); processedBeansPerGroup.put( group, set ); } } /** * Builder for creating {@link ValidationContext}s suited for the different kinds of validation. * * Retrieve a builder with all common attributes via {@link ValidationContext#getValidationContext(ConstraintValidatorManager, * MessageInterpolator, ConstraintValidatorFactory, TraversableResolver, List, TypeResolutionHelper, boolean)} and then invoke one of * the dedicated methods such as {@link #forValidate(Object)}. 
* * @author Gunnar Morling */ public static class ValidationContextBuilder { private final ConstraintValidatorManager constraintValidatorManager; private final MessageInterpolator messageInterpolator; private final ConstraintValidatorFactory constraintValidatorFactory; private final TraversableResolver traversableResolver; private final List<ValidatedValueUnwrapper<?>> validatedValueUnwrappers; private final TypeResolutionHelper typeResolutionHelper; private final boolean failFast; private ValidationContextBuilder( ConstraintValidatorManager constraintValidatorManager, MessageInterpolator messageInterpolator, ConstraintValidatorFactory constraintValidatorFactory, TraversableResolver traversableResolver, List<ValidatedValueUnwrapper<?>> validatedValueUnwrappers, TypeResolutionHelper typeResolutionHelper, boolean failFast) { this.constraintValidatorManager = constraintValidatorManager; this.messageInterpolator = messageInterpolator; this.constraintValidatorFactory = constraintValidatorFactory; this.traversableResolver = traversableResolver; this.validatedValueUnwrappers = validatedValueUnwrappers; this.typeResolutionHelper = typeResolutionHelper; this.failFast = failFast; } public <T> ValidationContext<T> forValidate(T rootBean) { @SuppressWarnings("unchecked") Class<T> rootBeanClass = (Class<T>) rootBean.getClass(); return new ValidationContext<T>( constraintValidatorManager, messageInterpolator, constraintValidatorFactory, traversableResolver, null, //parameter name provider, validatedValueUnwrappers, typeResolutionHelper, failFast, rootBean, rootBeanClass, null, //executable null, //executable parameters null //executable return value ); } public <T> ValidationContext<T> forValidateProperty(T rootBean) { @SuppressWarnings("unchecked") Class<T> rootBeanClass = (Class<T>) rootBean.getClass(); return new ValidationContext<T>( constraintValidatorManager, messageInterpolator, constraintValidatorFactory, traversableResolver, null, //parameter name provider, 
validatedValueUnwrappers, typeResolutionHelper, failFast, rootBean, rootBeanClass, null, //executable null, //executable parameters null //executable return value ); } public <T> ValidationContext<T> forValidateValue(Class<T> rootBeanClass) { return new ValidationContext<T>( constraintValidatorManager, messageInterpolator, constraintValidatorFactory, traversableResolver, null, //parameter name provider validatedValueUnwrappers, typeResolutionHelper, failFast, null, //root bean rootBeanClass, null, //executable null, //executable parameters null //executable return value ); } public <T> ValidationContext<T> forValidateParameters( ParameterNameProvider parameterNameProvider, T rootBean, ExecutableElement executable, Object[] executableParameters) { @SuppressWarnings("unchecked") Class<T> rootBeanClass = rootBean != null ? (Class<T>) rootBean.getClass() : (Class<T>) executable.getMember() .getDeclaringClass(); return new ValidationContext<T>( constraintValidatorManager, messageInterpolator, constraintValidatorFactory, traversableResolver, parameterNameProvider, validatedValueUnwrappers, typeResolutionHelper, failFast, rootBean, rootBeanClass, executable, executableParameters, null //executable return value ); } public <T> ValidationContext<T> forValidateReturnValue( T rootBean, ExecutableElement executable, Object executableReturnValue) { @SuppressWarnings("unchecked") Class<T> rootBeanClass = rootBean != null ? 
(Class<T>) rootBean.getClass() : (Class<T>) executable.getMember() .getDeclaringClass(); return new ValidationContext<T>( constraintValidatorManager, messageInterpolator, constraintValidatorFactory, traversableResolver, null, //parameter name provider validatedValueUnwrappers, typeResolutionHelper, failFast, rootBean, rootBeanClass, executable, null, //executable parameters executableReturnValue ); } } private static final class BeanAndPath { private final Object bean; private final Path path; private final int hashCode; private BeanAndPath(Object bean, Path path) { this.bean = bean; this.path = path; // pre-calculate hash code, the class is immutable and hashCode is needed often this.hashCode = createHashCode(); } @Override public boolean equals(Object o) { if ( this == o ) { return true; } if ( o == null || getClass() != o.getClass() ) { return false; } BeanAndPath that = (BeanAndPath) o; if ( bean != that.bean ) { // instance equality return false; } if ( !path.equals( that.path ) ) { return false; } return true; } @Override public int hashCode() { return hashCode; } private int createHashCode() { int result = System.identityHashCode( bean ); result = 31 * result + path.hashCode(); return result; } } }
/* * Copyright (c) 2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.identity.application.authenticator.samlsso; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.carbon.identity.application.authentication.framework.AbstractApplicationAuthenticator; import org.wso2.carbon.identity.application.authentication.framework.AuthenticatorStateInfo; import org.wso2.carbon.identity.application.authentication.framework.FederatedApplicationAuthenticator; import org.wso2.carbon.identity.application.authentication.framework.context.AuthenticationContext; import org.wso2.carbon.identity.application.authentication.framework.exception.AuthenticationFailedException; import org.wso2.carbon.identity.application.authentication.framework.exception.LogoutFailedException; import org.wso2.carbon.identity.application.authentication.framework.model.AuthenticatedUser; import org.wso2.carbon.identity.application.authentication.framework.util.FrameworkConstants; import org.wso2.carbon.identity.application.authentication.framework.util.FrameworkUtils; import org.wso2.carbon.identity.application.authenticator.samlsso.exception.SAMLSSOException; import org.wso2.carbon.identity.application.authenticator.samlsso.internal.SAMLSSOAuthenticatorServiceComponent; import 
org.wso2.carbon.identity.application.authenticator.samlsso.manager.DefaultSAML2SSOManager; import org.wso2.carbon.identity.application.authenticator.samlsso.manager.SAML2SSOManager; import org.wso2.carbon.identity.application.authenticator.samlsso.model.StateInfo; import org.wso2.carbon.identity.application.authenticator.samlsso.util.SSOConstants; import org.wso2.carbon.identity.application.authenticator.samlsso.util.SSOUtils; import org.wso2.carbon.identity.application.common.model.ClaimMapping; import org.wso2.carbon.identity.application.common.util.IdentityApplicationConstants; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.io.PrintWriter; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; import java.util.HashMap; import java.util.Map; public class SAMLSSOAuthenticator extends AbstractApplicationAuthenticator implements FederatedApplicationAuthenticator { private static final long serialVersionUID = -8097512332218044859L; public static final String AS_REQUEST = "AS_REQUEST"; private static Log log = LogFactory.getLog(SAMLSSOAuthenticator.class); @Override public boolean canHandle(HttpServletRequest request) { if (log.isTraceEnabled()) { log.trace("Inside canHandle()"); } if (request.getParameter("SAMLResponse") != null) { return true; } return false; } @Override protected void initiateAuthenticationRequest(HttpServletRequest request, HttpServletResponse response, AuthenticationContext context) throws AuthenticationFailedException { Map<String, String> authenticatorProperties = context.getAuthenticatorProperties(); String idpURL = authenticatorProperties .get(IdentityApplicationConstants.Authenticator.SAML2SSO.SSO_URL); String ssoUrl = ""; boolean isPost = false; try { String requestMethod = authenticatorProperties .get(IdentityApplicationConstants.Authenticator.SAML2SSO.REQUEST_METHOD); if (requestMethod != null && requestMethod.trim().length() != 0) { if 
(SSOConstants.POST.equalsIgnoreCase(requestMethod)) { isPost = true; } else if (SSOConstants.REDIRECT.equalsIgnoreCase(requestMethod)) { isPost = false; } else if (AS_REQUEST.equalsIgnoreCase(requestMethod)) { isPost = context.getAuthenticationRequest().isPost(); } } else { isPost = false; } if (isPost) { sendPostRequest(request, response, false, false, idpURL, context); return; } else { SAML2SSOManager saml2SSOManager = getSAML2SSOManagerInstance(); saml2SSOManager.init(context.getTenantDomain(), context.getAuthenticatorProperties(), context.getExternalIdP().getIdentityProvider()); ssoUrl = saml2SSOManager.buildRequest(request, false, false, idpURL, context); generateAuthenticationRequest(request, response, ssoUrl, authenticatorProperties); } } catch (SAMLSSOException e) { throw new AuthenticationFailedException(e.getMessage(), e); } return; } private void generateAuthenticationRequest(HttpServletRequest request, HttpServletResponse response, String ssoUrl, Map<String, String> authenticatorProperties) throws AuthenticationFailedException { try { String domain = request.getParameter("domain"); if (domain != null) { ssoUrl = ssoUrl + "&fidp=" + domain; } if (authenticatorProperties != null) { String queryString = authenticatorProperties .get(FrameworkConstants.QUERY_PARAMS); if (queryString != null) { if (!queryString.startsWith("&")) { ssoUrl = ssoUrl + "&" + queryString; } else { ssoUrl = ssoUrl + queryString; } } } response.sendRedirect(ssoUrl); } catch (IOException e) { throw new AuthenticationFailedException( "Error while sending the redirect to federated SAML IdP", e); } } @Override protected void processAuthenticationResponse(HttpServletRequest request, HttpServletResponse response, AuthenticationContext context) throws AuthenticationFailedException { try { SAML2SSOManager saml2SSOManager = getSAML2SSOManagerInstance(); saml2SSOManager.init(context.getTenantDomain(), context.getAuthenticatorProperties(), context.getExternalIdP().getIdentityProvider()); 
saml2SSOManager.processResponse(request); Map<ClaimMapping, String> receivedClaims = (Map<ClaimMapping, String>) request .getSession(false).getAttribute("samlssoAttributes"); String subject = null; String isSubjectInClaimsProp = context.getAuthenticatorProperties().get( IdentityApplicationConstants.Authenticator.SAML2SSO.IS_USER_ID_IN_CLAIMS); if ("true".equalsIgnoreCase(isSubjectInClaimsProp)) { subject = FrameworkUtils.getFederatedSubjectFromClaims( context.getExternalIdP().getIdentityProvider(), receivedClaims); if (subject == null) { log.warn("Subject claim could not be found amongst attribute statements. " + "Defaulting to Name Identifier."); } } if (subject == null) { subject = (String) request.getSession().getAttribute("username"); } if (subject == null) { throw new SAMLSSOException("Cannot find federated User Identifier"); } Object sessionIndexObj = request.getSession(false).getAttribute(SSOConstants.IDP_SESSION); String sessionIndex = null; if (sessionIndexObj != null) { sessionIndex = (String) sessionIndexObj; } StateInfo stateInfoDO = new StateInfo(); stateInfoDO.setSessionIndex(sessionIndex); stateInfoDO.setSubject(subject); context.setStateInfo(stateInfoDO); AuthenticatedUser authenticatedUser = AuthenticatedUser.createFederateAuthenticatedUserFromSubjectIdentifier(subject); authenticatedUser.setUserAttributes(receivedClaims); context.setSubject(authenticatedUser); } catch (SAMLSSOException e) { throw new AuthenticationFailedException(e.getMessage(), e); } } @Override public String getContextIdentifier(HttpServletRequest request) { if (log.isTraceEnabled()) { log.trace("Inside getContextIdentifier()"); } String identifier = request.getParameter("sessionDataKey"); if (identifier == null) { identifier = request.getParameter("RelayState"); if (identifier != null) { // TODO: SHOULD ensure that the value has not been tampered with by using a checksum, a pseudo-random value, or similar means. 
try { return URLDecoder.decode(identifier, "UTF-8"); } catch (UnsupportedEncodingException e) { log.error("Exception while URL decoding the Relay State", e); } } } return identifier; } @Override public String getFriendlyName() { return SSOConstants.AUTHENTICATOR_FRIENDLY_NAME; } @Override public String getName() { return SSOConstants.AUTHENTICATOR_NAME; } @Override protected void initiateLogoutRequest(HttpServletRequest request, HttpServletResponse response, AuthenticationContext context) throws LogoutFailedException { boolean logoutEnabled = false; String logoutEnabledProp = context.getAuthenticatorProperties().get( IdentityApplicationConstants.Authenticator.SAML2SSO.IS_LOGOUT_ENABLED); if (logoutEnabledProp != null && "true".equalsIgnoreCase(logoutEnabledProp)) { logoutEnabled = true; } if (logoutEnabled) { //send logout request to external idp String idpLogoutURL = context.getAuthenticatorProperties().get( IdentityApplicationConstants.Authenticator.SAML2SSO.LOGOUT_REQ_URL); if (idpLogoutURL == null || idpLogoutURL.trim().length() == 0) { idpLogoutURL = context.getAuthenticatorProperties().get( IdentityApplicationConstants.Authenticator.SAML2SSO.SSO_URL); } if (idpLogoutURL == null || idpLogoutURL.trim().length() == 0) { throw new LogoutFailedException( "Logout is enabled for the IdP but Logout URL is not configured"); } AuthenticatorStateInfo stateInfo = context.getStateInfo(); if (stateInfo instanceof StateInfo) { request.getSession().setAttribute("logoutSessionIndex", ((StateInfo) stateInfo).getSessionIndex()); request.getSession().setAttribute("logoutUsername", ((StateInfo) stateInfo).getSubject()); } try { SAML2SSOManager saml2SSOManager = getSAML2SSOManagerInstance(); saml2SSOManager.init(context.getTenantDomain(), context .getAuthenticatorProperties(), context.getExternalIdP() .getIdentityProvider()); boolean isPost = false; Map<String, String> authenticatorProperties = context.getAuthenticatorProperties(); String requestMethod = authenticatorProperties 
.get(IdentityApplicationConstants.Authenticator.SAML2SSO.REQUEST_METHOD); if (requestMethod != null && requestMethod.trim().length() != 0) { if ("POST".equalsIgnoreCase(requestMethod)) { isPost = true; } else if ("REDIRECT".equalsIgnoreCase(requestMethod)) { isPost = false; } else if ("AS_REQUEST".equalsIgnoreCase(requestMethod)) { isPost = context.getAuthenticationRequest().isPost(); } } else { isPost = false; } if (isPost) { sendPostRequest(request, response, true, false, idpLogoutURL, context); } else { String logoutURL = saml2SSOManager.buildRequest(request, true, false, idpLogoutURL, context); response.sendRedirect(logoutURL); } } catch (IOException e) { throw new LogoutFailedException(e.getMessage(), e); } catch (SAMLSSOException e) { throw new LogoutFailedException(e.getMessage(), e); } } else { throw new UnsupportedOperationException(); } } @Override protected void processLogoutResponse(HttpServletRequest request, HttpServletResponse response, AuthenticationContext context) throws LogoutFailedException { throw new UnsupportedOperationException(); } private void sendPostRequest(HttpServletRequest request, HttpServletResponse response, boolean isLogout, boolean isPassive, String loginPage, AuthenticationContext context) throws SAMLSSOException { SAML2SSOManager saml2SSOManager = getSAML2SSOManagerInstance(); saml2SSOManager.init(context.getTenantDomain(), context.getAuthenticatorProperties(), context.getExternalIdP().getIdentityProvider()); if (!(saml2SSOManager instanceof DefaultSAML2SSOManager)) { throw new SAMLSSOException("HTTP-POST is not supported"); } String encodedRequest = ((DefaultSAML2SSOManager) saml2SSOManager).buildPostRequest( request, isLogout, isPassive, loginPage); String relayState = context.getContextIdentifier(); Map<String, String> reqParamMap = getAdditionalRequestParams(request, context); String postPageInputs = buildPostPageInputs(encodedRequest, relayState, reqParamMap); printPostPage(response, loginPage, postPageInputs); } private 
SAML2SSOManager getSAML2SSOManagerInstance() throws SAMLSSOException { String managerClassName = getAuthenticatorConfig().getParameterMap() .get(SSOConstants.ServerConfig.SAML2_SSO_MANAGER); if (managerClassName != null) { try { Class clazz = Class.forName(managerClassName); return (SAML2SSOManager) clazz.newInstance(); } catch (ClassNotFoundException e) { throw new SAMLSSOException(e.getMessage(), e); } catch (InstantiationException e) { throw new SAMLSSOException(e.getMessage(), e); } catch (IllegalAccessException e) { throw new SAMLSSOException(e.getMessage(), e); } } else { return new DefaultSAML2SSOManager(); } } private String buildPostPageInputs(String encodedRequest, String relayState, Map<String, String> reqParamMap) { StringBuilder hiddenInputBuilder = new StringBuilder(""); hiddenInputBuilder.append("<input type='hidden' name='SAMLRequest' value='") .append(encodedRequest).append("'>"); if (relayState != null) { hiddenInputBuilder.append("<input type='hidden' name='RelayState' value='") .append(relayState).append("'>"); } for (Map.Entry<String, String> reqParam : reqParamMap.entrySet()) { String paramName = reqParam.getKey(); String paramValue = reqParam.getValue(); hiddenInputBuilder.append("<input type='hidden' name='").append(paramName) .append("' value='").append(paramValue).append("'>"); } return hiddenInputBuilder.toString(); } private Map<String, String> getAdditionalRequestParams(HttpServletRequest request, AuthenticationContext context) { Map<String, String> reqParamMap = new HashMap<String, String>(); Map<String, String> authenticatorProperties = context.getAuthenticatorProperties(); if (authenticatorProperties != null) { String queryString = authenticatorProperties.get(FrameworkConstants.QUERY_PARAMS); if (queryString != null) { reqParamMap = SSOUtils.getQueryMap(queryString); } } String fidp = request.getParameter("domain"); if (fidp != null) { reqParamMap.put("fidp", fidp); } return reqParamMap; } private void 
printPostPage(HttpServletResponse response, String url, String postPageInputs) throws SAMLSSOException { try { String postPage = SAMLSSOAuthenticatorServiceComponent.getPostPage(); if (postPage != null) { String pageWithURL = postPage.replace("$url", url); String finalPage = pageWithURL.replace("<!--$params-->", postPageInputs); PrintWriter out = response.getWriter(); out.print(finalPage); if (log.isDebugEnabled()) { log.debug("HTTP-POST page: " + finalPage); } } else { PrintWriter out = response.getWriter(); out.println("<html>"); out.println("<body>"); out.println("<p>You are now redirected to " + url); out.println(" If the redirection fails, please click the post button.</p>"); out.println("<form method='post' action='" + url + "'>"); out.println("<p>"); out.println(postPageInputs); out.println("<button type='submit'>POST</button>"); out.println("</p>"); out.println("</form>"); out.println("<script type='text/javascript'>"); out.println("document.forms[0].submit();"); out.println("</script>"); out.println("</body>"); out.println("</html>"); } } catch (Exception e) { throw new SAMLSSOException("Error while sending POST request", e); } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.flume.source;

import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.function.Supplier;

import javax.net.ssl.SSLEngine;

import com.google.common.annotations.VisibleForTesting;
import org.apache.flume.ChannelException;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.EventDrivenSource;
import org.apache.flume.conf.Configurable;
import org.apache.flume.conf.Configurables;
import org.apache.flume.instrumentation.SourceCounter;
import org.jboss.netty.bootstrap.ServerBootstrap;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelFactory;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.channel.ChannelPipelineFactory;
import org.jboss.netty.channel.Channels;
import org.jboss.netty.channel.MessageEvent;
import org.jboss.netty.channel.SimpleChannelHandler;
import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;
import org.jboss.netty.handler.ssl.SslHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Netty-based TCP syslog source: binds a server socket, parses incoming syslog
 * frames into Flume events and forwards them to the channel processor.
 *
 * @deprecated use {@link MultiportSyslogTCPSource} instead.
 */
@Deprecated
public class SyslogTcpSource extends SslContextAwareAbstractSource
    implements EventDrivenSource, Configurable {

  private static final Logger logger = LoggerFactory.getLogger(SyslogTcpSource.class);

  private int port;                          // TCP port to listen on (required)
  private String host = null;                // bind address; null means all interfaces
  private Channel nettyChannel;              // server channel; non-null while started
  private Integer eventSize;                 // max syslog event size passed to SyslogUtils
  private Map<String, String> formaterProp;  // custom format properties for SyslogUtils
  private SourceCounter sourceCounter;
  private Set<String> keepFields;            // syslog header fields to keep in the event body
  private String clientIPHeader;             // if set, header name to carry the client IP
  private String clientHostnameHeader;       // if set, header name to carry the client hostname

  /**
   * Per-connection Netty handler that extracts syslog events from the byte stream.
   * NOTE(review): one handler instance is created per pipeline (see PipelineFactory),
   * so the stateful SyslogUtils parser is not shared across connections.
   */
  public class syslogTcpHandler extends SimpleChannelHandler {

    private SyslogUtils syslogUtils = new SyslogUtils();
    private String clientIPHeader;
    private String clientHostnameHeader;

    public void setEventSize(int eventSize) {
      syslogUtils.setEventSize(eventSize);
    }

    public void setKeepFields(Set<String> keepFields) {
      syslogUtils.setKeepFields(keepFields);
    }

    public void setFormater(Map<String, String> prop) {
      syslogUtils.addFormats(prop);
    }

    public void setClientIPHeader(String clientIPHeader) {
      this.clientIPHeader = clientIPHeader;
    }

    public void setClientHostnameHeader(String clientHostnameHeader) {
      this.clientHostnameHeader = clientHostnameHeader;
    }

    @Override
    public void messageReceived(ChannelHandlerContext ctx, MessageEvent mEvent) {
      ChannelBuffer buff = (ChannelBuffer) mEvent.getMessage();
      // A single buffer may contain several (or partial) syslog events; drain it.
      while (buff.readable()) {
        Event e = syslogUtils.extractEvent(buff);
        if (e == null) {
          logger.debug("Parsed partial event, event will be generated when " +
              "rest of the event is received.");
          continue;
        }
        if (clientIPHeader != null) {
          e.getHeaders().put(clientIPHeader,
              SyslogUtils.getIP(ctx.getChannel().getRemoteAddress()));
        }

        if (clientHostnameHeader != null) {
          e.getHeaders().put(clientHostnameHeader,
              SyslogUtils.getHostname(ctx.getChannel().getRemoteAddress()));
        }

        sourceCounter.incrementEventReceivedCount();

        try {
          getChannelProcessor().processEvent(e);
          sourceCounter.incrementEventAcceptedCount();
        } catch (ChannelException ex) {
          // Event dropped, but keep draining the rest of the buffer.
          logger.error("Error writting to channel, event dropped", ex);
          sourceCounter.incrementChannelWriteFail();
        } catch (RuntimeException ex) {
          logger.error("Error parsing event from syslog stream, event dropped", ex);
          sourceCounter.incrementEventReadFail();
          // NOTE(review): the early return abandons any remaining readable bytes
          // in this buffer.
          return;
        }
      }
    }
  }

  /**
   * Starts the Netty server: builds the pipeline factory (with optional SSL) and
   * binds to the configured host/port.
   */
  @Override
  public void start() {
    ChannelFactory factory = new NioServerSocketChannelFactory(
        Executors.newCachedThreadPool(), Executors.newCachedThreadPool());

    ServerBootstrap serverBootstrap = new ServerBootstrap(factory);
    serverBootstrap.setPipelineFactory(new PipelineFactory(
        eventSize, formaterProp, keepFields, clientIPHeader, clientHostnameHeader,
        getSslEngineSupplier(false)
    ));
    logger.info("Syslog TCP Source starting...");

    if (host == null) {
      nettyChannel = serverBootstrap.bind(new InetSocketAddress(port));
    } else {
      nettyChannel = serverBootstrap.bind(new InetSocketAddress(host, port));
    }

    sourceCounter.start();
    super.start();
  }

  /**
   * Closes the server channel (waiting up to 60s for the close to complete) and
   * stops the counters.
   */
  @Override
  public void stop() {
    logger.info("Syslog TCP Source stopping...");
    logger.info("Metrics: {}", sourceCounter);

    if (nettyChannel != null) {
      nettyChannel.close();
      try {
        nettyChannel.getCloseFuture().await(60, TimeUnit.SECONDS);
      } catch (InterruptedException e) {
        logger.warn("netty server stop interrupted", e);
      } finally {
        nettyChannel = null;
      }
    }

    sourceCounter.stop();
    super.stop();
  }

  /**
   * Reads SSL, port (required), host, event size, format properties, keep-fields and
   * client IP/hostname header names from the Flume context.
   */
  @Override
  public void configure(Context context) {
    configureSsl(context);
    Configurables.ensureRequiredNonNull(context,
        SyslogSourceConfigurationConstants.CONFIG_PORT);
    port = context.getInteger(SyslogSourceConfigurationConstants.CONFIG_PORT);
    host = context.getString(SyslogSourceConfigurationConstants.CONFIG_HOST);
    eventSize = context.getInteger("eventSize", SyslogUtils.DEFAULT_SIZE);
    formaterProp = context.getSubProperties(
        SyslogSourceConfigurationConstants.CONFIG_FORMAT_PREFIX);
    keepFields = SyslogUtils.chooseFieldsToKeep(
        context.getString(
            SyslogSourceConfigurationConstants.CONFIG_KEEP_FIELDS,
            SyslogSourceConfigurationConstants.DEFAULT_KEEP_FIELDS));
    clientIPHeader =
        context.getString(SyslogSourceConfigurationConstants.CONFIG_CLIENT_IP_HEADER);
    clientHostnameHeader =
        context.getString(SyslogSourceConfigurationConstants.CONFIG_CLIENT_HOSTNAME_HEADER);

    if (sourceCounter == null) {
      sourceCounter = new SourceCounter(getName());
    }
  }

  // Exposes the actual bound address (useful when port 0 was configured in tests).
  @VisibleForTesting
  InetSocketAddress getBoundAddress() {
    SocketAddress localAddress = nettyChannel.getLocalAddress();
    if (!(localAddress instanceof InetSocketAddress)) {
      throw new IllegalArgumentException("Not bound to an internet address");
    }
    return (InetSocketAddress) localAddress;
  }

  @VisibleForTesting
  SourceCounter getSourceCounter() {
    return sourceCounter;
  }

  /**
   * Builds one pipeline per accepted connection: a fresh syslogTcpHandler, prefixed
   * by an SslHandler when an SSLEngine is supplied.
   */
  private class PipelineFactory implements ChannelPipelineFactory {

    private final Integer eventSize;
    private final Map<String, String> formaterProp;
    private final Set<String> keepFields;
    private String clientIPHeader;
    private String clientHostnameHeader;
    private Supplier<Optional<SSLEngine>> sslEngineSupplier;

    public PipelineFactory(Integer eventSize, Map<String, String> formaterProp,
                           Set<String> keepFields, String clientIPHeader,
                           String clientHostnameHeader,
                           Supplier<Optional<SSLEngine>> sslEngineSupplier) {
      this.eventSize = eventSize;
      this.formaterProp = formaterProp;
      this.keepFields = keepFields;
      this.clientIPHeader = clientIPHeader;
      this.clientHostnameHeader = clientHostnameHeader;
      this.sslEngineSupplier = sslEngineSupplier;
    }

    @Override
    public ChannelPipeline getPipeline() {
      syslogTcpHandler handler = new syslogTcpHandler();
      handler.setEventSize(eventSize);
      handler.setFormater(formaterProp);
      handler.setKeepFields(keepFields);
      handler.setClientIPHeader(clientIPHeader);
      handler.setClientHostnameHeader(clientHostnameHeader);

      ChannelPipeline pipeline = Channels.pipeline(handler);

      // SSL handler must sit first so decryption happens before syslog parsing.
      sslEngineSupplier.get().ifPresent(sslEngine -> {
        pipeline.addFirst("ssl", new SslHandler(sslEngine));
      });

      return pipeline;
    }
  }
}
/*
 * Copyright 2012 WSO2, Inc. (http://wso2.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.deserializer;

import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;

import org.apache.synapse.Mediator;
import org.apache.synapse.aspects.statistics.StatisticsConfigurable;
import org.apache.synapse.mediators.base.SequenceMediator;
import org.apache.synapse.mediators.builtin.SendMediator;
import org.apache.synapse.mediators.filters.InMediator;
import org.apache.synapse.mediators.filters.OutMediator;
import org.eclipse.core.runtime.Assert;
import org.eclipse.gmf.runtime.diagram.ui.editparts.GraphicalEditPart;
import org.eclipse.gmf.runtime.diagram.ui.editparts.IGraphicalEditPart;
import org.eclipse.gmf.runtime.emf.type.core.IElementType;
import org.eclipse.ui.part.FileEditorInput;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbFactory;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbNode;
import org.wso2.developerstudio.eclipse.gmf.esb.KeyType;
import org.wso2.developerstudio.eclipse.gmf.esb.MediatorFlow;
import org.wso2.developerstudio.eclipse.gmf.esb.NamespacedProperty;
import org.wso2.developerstudio.eclipse.gmf.esb.ProxyService;
import org.wso2.developerstudio.eclipse.gmf.esb.RegistryKeyProperty;
import org.wso2.developerstudio.eclipse.gmf.esb.Sequence;
import org.wso2.developerstudio.eclipse.gmf.esb.Sequences;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.utils.ElementDuplicator;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.TemplateTemplateCompartmentEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.providers.EsbElementTypes;

import static org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage.Literals.*;

import org.apache.commons.lang.StringUtils;

/**
 * Sequence mediator deserializer: maps a Synapse {@link SequenceMediator} onto the
 * corresponding ESB diagram model node(s).
 */
public class SequenceDeserializer extends AbstractEsbNodeDeserializer<SequenceMediator, EsbNode> {

    /**
     * Creates the diagram node for the given sequence. Three cases:
     * a key reference (static or dynamic/XPath), the special "main" sequence
     * (deserialized as a proxy-service-like node), or a named sequence definition.
     */
    @Override
    public EsbNode createNode(IGraphicalEditPart part,SequenceMediator sequence) {
        EsbNode node = null;
        if(sequence.getKey()!=null){
            // Reference to an existing sequence by key (static or dynamic).
            Sequence sequenceModel = (Sequence) DeserializerUtils.createNode(part, EsbElementTypes.Sequence_3503);
            setCommonProperties(sequence, sequenceModel);
            executeSetValueCommand(sequenceModel, SEQUENCE__NAME, sequence.getKey().getKeyValue());
            if (sequence.getKey().getExpression()!=null) {
                // Dynamic reference: key is computed from an XPath expression at runtime.
                executeSetValueCommand(sequenceModel, SEQUENCE__REFERRING_SEQUENCE_TYPE, KeyType.DYNAMIC);
                NamespacedProperty namespacedProperty = createNamespacedProperty(sequence.getKey().getExpression());
                executeSetValueCommand(sequenceModel, SEQUENCE__DYNAMIC_REFERENCE_KEY,namespacedProperty);
                //executeSetValueCommand(sequenceModel, SEQUENCE__NAME, sequence.getKey().getExpression());
            }else{
                executeSetValueCommand(sequenceModel, SEQUENCE__NAME, sequence.getKey().getKeyValue());
            }
            node = sequenceModel;
        } else if(sequence.getName()!=null){
            if ("main".equals(sequence.getName())) {
                // The Synapse "main" sequence gets special treatment.
                node = deserializeMainSequence(part,sequence);
            } else{
                setReversed(false);
                // Template compartments use a different Sequences element type.
                IElementType sequencesType = (part instanceof TemplateTemplateCompartmentEditPart) ?
                        EsbElementTypes.Sequences_3665 : EsbElementTypes.Sequences_3614;
                Sequences sequenceModel = (Sequences) DeserializerUtils.createNode(part, sequencesType);
                executeSetValueCommand(sequenceModel, SEQUENCES__NAME, sequence.getName());
                if(StringUtils.isNotBlank(sequence.getErrorHandler())){
                    executeSetValueCommand(sequenceModel.getOnError(), REGISTRY_KEY_PROPERTY__KEY_VALUE, sequence.getErrorHandler());
                }
                // Fixing TOOLS-2652
                if (sequence.getTraceState() == 1) {
                    executeSetValueCommand(sequenceModel,SEQUENCES__TRACE_ENABLED, new Boolean(true));
                } else {
                    executeSetValueCommand(sequenceModel,SEQUENCES__TRACE_ENABLED, new Boolean(false));
                }
                StatisticsConfigurable statisticsConfigurable = sequence.getAspectConfiguration();
                if (statisticsConfigurable != null && statisticsConfigurable.isStatisticsEnable()) {
                    executeSetValueCommand(sequenceModel,SEQUENCES__STATISTICS_ENABLED, new Boolean(true));
                }else{
                    executeSetValueCommand(sequenceModel,SEQUENCES__STATISTICS_ENABLED, new Boolean(false));
                }
                refreshEditPartMap();
                addRootInputConnector(sequenceModel.getInputConnector());
                IGraphicalEditPart compartment = (IGraphicalEditPart) getEditpart(
                        sequenceModel.getMediatorFlow()).getChildren().get(0);
                setRootCompartment((GraphicalEditPart)compartment);
                // Deserialize the child mediators into the output flow; the input flow gets
                // an empty sequence to create its counterpart.
                deserializeSequence(compartment, sequence, sequenceModel.getOutputConnector());
                deserializeSequence(compartment, new SequenceMediator(), sequenceModel.getInputConnector());
                addPairMediatorFlow(sequenceModel.getOutputConnector(), sequenceModel.getInputConnector());
                node = sequenceModel;
            }
        } else{
            Assert.isTrue(false, "Unsupported sequence mediator configuration");
        }
        return node;
    }

    /**
     * Deserializes the Synapse "main" sequence as a proxy-service node with
     * mainSequence=true, splitting its children into In/Out flows.
     */
    private ProxyService deserializeMainSequence(IGraphicalEditPart part, SequenceMediator sequence) {
        ProxyService sequenceModel = (ProxyService) DeserializerUtils.createNode(part,
                EsbElementTypes.ProxyService_3001);
        setElementToEdit(sequenceModel);
        refreshEditPartMap();
        executeSetValueCommand(PROXY_SERVICE__NAME,"main");
        executeSetValueCommand(PROXY_SERVICE__MAIN_SEQUENCE,true);
        addRootInputConnector(sequenceModel.getInputConnector());
        MediatorFlow mediatorFlow = sequenceModel.getContainer().getSequenceAndEndpointContainer()
                .getMediatorFlow();
        GraphicalEditPart compartment = (GraphicalEditPart) ((getEditpart(mediatorFlow))
                .getChildren().get(0));
        if(StringUtils.isNotBlank(sequence.getErrorHandler())){
            RegistryKeyProperty onErrorSeq = EsbFactory.eINSTANCE.createRegistryKeyProperty();
            onErrorSeq.setKeyValue(sequence.getErrorHandler());
            executeSetValueCommand(PROXY_SERVICE__ON_ERROR, onErrorSeq);
        }
        // Fixing TOOLS-2652
        if (sequence.getTraceState() == 1) {
            executeSetValueCommand(SEQUENCES__TRACE_ENABLED, new Boolean(true));
        } else {
            executeSetValueCommand(SEQUENCES__TRACE_ENABLED, new Boolean(false));
        }
        StatisticsConfigurable statisticsConfigurable = sequence.getAspectConfiguration();
        if (statisticsConfigurable != null && statisticsConfigurable.isStatisticsEnable()) {
            executeSetValueCommand(SEQUENCES__STATISTICS_ENABLED, new Boolean(true));
        }else{
            executeSetValueCommand(SEQUENCES__STATISTICS_ENABLED, new Boolean(false));
        }

        // In-mediator children feed the output connector flow.
        InMediator inMediator = getInMediator(sequence);
        SequenceMediator inSequence = new SequenceMediator();
        inSequence.addAll(inMediator.getList());
        setRootCompartment(compartment);
        deserializeSequence(compartment, inSequence, sequenceModel.getOutputConnector());
        setRootCompartment(null);

        // Out-mediator children feed the input connector flow.
        OutMediator outMediator = getOutMediator(sequence);
        SequenceMediator outSequence = new SequenceMediator();
        if (outMediator.getList().size() > 0) {
            outSequence.addAll(outMediator.getList());
        }
        setRootCompartment(compartment);
        deserializeSequence(compartment, outSequence, sequenceModel.getInputConnector());
        setRootCompartment(null);

        addPairMediatorFlow(sequenceModel.getOutputConnector(),sequenceModel.getInputConnector());
        return sequenceModel;
    }

    // NOTE(review): this method is truncated at the end of the visible chunk;
    // its body continues beyond this point.
    private InMediator getInMediator(SequenceMediator sequence) {
        InMediator inMediator = null;
        List<Mediator> mediatorList = sequence.getList();
for(Iterator<Mediator> i = mediatorList.iterator();i.hasNext();){ Mediator next = i.next(); if(next instanceof InMediator){ inMediator = (InMediator) next; break; } } if(inMediator == null){ inMediator = new InMediator(); } for(Iterator<Mediator> i = mediatorList.iterator();i.hasNext();){ Mediator next = i.next(); if(!(next instanceof OutMediator || next instanceof InMediator)){ inMediator.addChild(next); } } /* TOOLS-1510 LinkedList<Mediator> inMediatorList = new LinkedList<Mediator>(); inMediatorList.addAll(inMediator.getList()); Mediator last = null; if(inMediatorList.size()>0){ last = inMediatorList.getLast(); } if (last == null || !(last instanceof SendMediator)) { inMediator.addChild(new SendMediator()); }*/ return inMediator; } private OutMediator getOutMediator(SequenceMediator sequence) { OutMediator outMediator = null; List<Mediator> mediatorList = sequence.getList(); for(Iterator<Mediator> i = mediatorList.iterator();i.hasNext();){ Mediator next = i.next(); if(next instanceof OutMediator){ outMediator = (OutMediator) next; break; } } if(outMediator == null){ outMediator = new OutMediator(); } else { for(Iterator<Mediator> i = mediatorList.iterator();i.hasNext();){ Mediator next = i.next(); if(!(next instanceof InMediator || next instanceof OutMediator)){ outMediator.addChild(next); } } } /* TOOLS-1510 LinkedList<Mediator> outMediatorList = new LinkedList<Mediator>(); outMediatorList.addAll(outMediator.getList()); Mediator last = null; if(outMediatorList.size()>0){ last = outMediatorList.getLast(); } if (last == null || !(last instanceof SendMediator)) { outMediator.addChild(new SendMediator()); }*/ return outMediator; } private void duplicatorEndPoints(GraphicalEditPart rootCompartment, String key) { FileEditorInput input = (FileEditorInput) getDiagramEditor().getEditorInput(); ElementDuplicator duplicator = new ElementDuplicator(input.getFile().getProject()); //duplicator.duplicateEndPoints(rootCompartment, key); } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.mapper; import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.MultiFields; import org.apache.lucene.index.Term; import org.apache.lucene.index.Terms; import org.apache.lucene.queries.TermsQuery; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.FuzzyQuery; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.action.fieldstats.FieldStats; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.joda.DateMathParser; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.query.QueryShardContext; import 
org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.similarity.SimilarityProvider; import org.elasticsearch.search.DocValueFormat; import org.joda.time.DateTimeZone; import java.io.IOException; import java.util.List; import java.util.Objects; /** * This defines the core properties and functions to operate on a field. */ public abstract class MappedFieldType extends FieldType { private String name; private float boost; // TODO: remove this docvalues flag and use docValuesType private boolean docValues; private NamedAnalyzer indexAnalyzer; private NamedAnalyzer searchAnalyzer; private NamedAnalyzer searchQuoteAnalyzer; private SimilarityProvider similarity; private Object nullValue; private String nullValueAsString; // for sending null value to _all field private boolean eagerGlobalOrdinals; protected MappedFieldType(MappedFieldType ref) { super(ref); this.name = ref.name(); this.boost = ref.boost(); this.docValues = ref.hasDocValues(); this.indexAnalyzer = ref.indexAnalyzer(); this.searchAnalyzer = ref.searchAnalyzer(); this.searchQuoteAnalyzer = ref.searchQuoteAnalyzer(); this.similarity = ref.similarity(); this.nullValue = ref.nullValue(); this.nullValueAsString = ref.nullValueAsString(); this.eagerGlobalOrdinals = ref.eagerGlobalOrdinals; } public MappedFieldType() { setTokenized(true); setStored(false); setStoreTermVectors(false); setOmitNorms(false); setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); setBoost(1.0f); } @Override public abstract MappedFieldType clone(); /** Return a fielddata builder for this field. 
*/ public IndexFieldData.Builder fielddataBuilder() { throw new IllegalArgumentException("Fielddata is not supported on field [" + name() + "] of type [" + typeName() + "]"); } @Override public boolean equals(Object o) { if (!super.equals(o)) return false; MappedFieldType fieldType = (MappedFieldType) o; // check similarity first because we need to check the name, and it might be null // TODO: SimilarityProvider should have equals? if (similarity == null || fieldType.similarity == null) { if (similarity != fieldType.similarity) { return false; } } else { if (Objects.equals(similarity.name(), fieldType.similarity.name()) == false) { return false; } } return boost == fieldType.boost && docValues == fieldType.docValues && Objects.equals(name, fieldType.name) && Objects.equals(indexAnalyzer, fieldType.indexAnalyzer) && Objects.equals(searchAnalyzer, fieldType.searchAnalyzer) && Objects.equals(searchQuoteAnalyzer(), fieldType.searchQuoteAnalyzer()) && Objects.equals(eagerGlobalOrdinals, fieldType.eagerGlobalOrdinals) && Objects.equals(nullValue, fieldType.nullValue) && Objects.equals(nullValueAsString, fieldType.nullValueAsString); } @Override public int hashCode() { return Objects.hash(super.hashCode(), name, boost, docValues, indexAnalyzer, searchAnalyzer, searchQuoteAnalyzer, eagerGlobalOrdinals, similarity == null ? null : similarity.name(), nullValue, nullValueAsString); } // norelease: we need to override freeze() and add safety checks that all settings are actually set /** Returns the name of this type, as would be specified in mapping properties */ public abstract String typeName(); /** Checks this type is the same type as other. Adds a conflict if they are different. 
*/ private final void checkTypeName(MappedFieldType other) { if (typeName().equals(other.typeName()) == false) { throw new IllegalArgumentException("mapper [" + name + "] cannot be changed from type [" + typeName() + "] to [" + other.typeName() + "]"); } else if (getClass() != other.getClass()) { throw new IllegalStateException("Type names equal for class " + getClass().getSimpleName() + " and " + other.getClass().getSimpleName()); } } /** * Checks for any conflicts between this field type and other. * If strict is true, all properties must be equal. * Otherwise, only properties which must never change in an index are checked. */ public void checkCompatibility(MappedFieldType other, List<String> conflicts, boolean strict) { checkTypeName(other); boolean indexed = indexOptions() != IndexOptions.NONE; boolean mergeWithIndexed = other.indexOptions() != IndexOptions.NONE; // TODO: should be validating if index options go "up" (but "down" is ok) if (indexed != mergeWithIndexed || tokenized() != other.tokenized()) { conflicts.add("mapper [" + name() + "] has different [index] values"); } if (stored() != other.stored()) { conflicts.add("mapper [" + name() + "] has different [store] values"); } if (hasDocValues() != other.hasDocValues()) { conflicts.add("mapper [" + name() + "] has different [doc_values] values"); } if (omitNorms() && !other.omitNorms()) { conflicts.add("mapper [" + name() + "] has different [norms] values, cannot change from disable to enabled"); } if (storeTermVectors() != other.storeTermVectors()) { conflicts.add("mapper [" + name() + "] has different [store_term_vector] values"); } if (storeTermVectorOffsets() != other.storeTermVectorOffsets()) { conflicts.add("mapper [" + name() + "] has different [store_term_vector_offsets] values"); } if (storeTermVectorPositions() != other.storeTermVectorPositions()) { conflicts.add("mapper [" + name() + "] has different [store_term_vector_positions] values"); } if (storeTermVectorPayloads() != 
other.storeTermVectorPayloads()) { conflicts.add("mapper [" + name() + "] has different [store_term_vector_payloads] values"); } // null and "default"-named index analyzers both mean the default is used if (indexAnalyzer() == null || "default".equals(indexAnalyzer().name())) { if (other.indexAnalyzer() != null && "default".equals(other.indexAnalyzer().name()) == false) { conflicts.add("mapper [" + name() + "] has different [analyzer]"); } } else if (other.indexAnalyzer() == null || "default".equals(other.indexAnalyzer().name())) { conflicts.add("mapper [" + name() + "] has different [analyzer]"); } else if (indexAnalyzer().name().equals(other.indexAnalyzer().name()) == false) { conflicts.add("mapper [" + name() + "] has different [analyzer]"); } if (Objects.equals(similarity(), other.similarity()) == false) { conflicts.add("mapper [" + name() + "] has different [similarity]"); } if (strict) { if (omitNorms() != other.omitNorms()) { conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [omit_norms] across all types."); } if (boost() != other.boost()) { conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [boost] across all types."); } if (Objects.equals(searchAnalyzer(), other.searchAnalyzer()) == false) { conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [search_analyzer] across all types."); } if (Objects.equals(searchQuoteAnalyzer(), other.searchQuoteAnalyzer()) == false) { conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [search_quote_analyzer] across all types."); } if (Objects.equals(nullValue(), other.nullValue()) == false) { conflicts.add("mapper [" + name() + "] is used by multiple types. 
Set update_all_types to true to update [null_value] across all types."); } if (eagerGlobalOrdinals() != other.eagerGlobalOrdinals()) { conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [eager_global_ordinals] across all types."); } } } public String name() { return name; } public void setName(String name) { checkIfFrozen(); this.name = name; } public float boost() { return boost; } public void setBoost(float boost) { checkIfFrozen(); this.boost = boost; } public boolean hasDocValues() { return docValues; } public void setHasDocValues(boolean hasDocValues) { checkIfFrozen(); this.docValues = hasDocValues; } public NamedAnalyzer indexAnalyzer() { return indexAnalyzer; } public void setIndexAnalyzer(NamedAnalyzer analyzer) { checkIfFrozen(); this.indexAnalyzer = analyzer; } public NamedAnalyzer searchAnalyzer() { return searchAnalyzer; } public void setSearchAnalyzer(NamedAnalyzer analyzer) { checkIfFrozen(); this.searchAnalyzer = analyzer; } public NamedAnalyzer searchQuoteAnalyzer() { return searchQuoteAnalyzer == null ? searchAnalyzer : searchQuoteAnalyzer; } public void setSearchQuoteAnalyzer(NamedAnalyzer analyzer) { checkIfFrozen(); this.searchQuoteAnalyzer = analyzer; } public SimilarityProvider similarity() { return similarity; } public void setSimilarity(SimilarityProvider similarity) { checkIfFrozen(); this.similarity = similarity; } /** Returns the value that should be added when JSON null is found, or null if no value should be added */ public Object nullValue() { return nullValue; } /** Returns the null value stringified, so it can be used for e.g. _all field, or null if there is no null value */ public String nullValueAsString() { return nullValueAsString; } /** Sets the null value and initializes the string version */ public void setNullValue(Object nullValue) { checkIfFrozen(); this.nullValue = nullValue; this.nullValueAsString = nullValue == null ? 
null : nullValue.toString(); } /** Given a value that comes from the stored fields API, convert it to the * expected type. For instance a date field would store dates as longs and * format it back to a string in this method. */ public Object valueForSearch(Object value) { return value; } /** Returns the indexed value used to construct search "values". * This method is used for the default implementations of most * query factory methods such as {@link #termQuery}. */ protected BytesRef indexedValueForSearch(Object value) { return BytesRefs.toBytesRef(value); } /** Generates a query that will only match documents that contain the given value. * The default implementation returns a {@link TermQuery} over the value bytes, * boosted by {@link #boost()}. * @throws IllegalArgumentException if {@code value} cannot be converted to the expected data type */ public Query termQuery(Object value, @Nullable QueryShardContext context) { TermQuery query = new TermQuery(new Term(name(), indexedValueForSearch(value))); if (boost == 1f || (context != null && context.indexVersionCreated().before(Version.V_5_0_0_alpha1))) { return query; } return new BoostQuery(query, boost); } public Query termsQuery(List values, @Nullable QueryShardContext context) { BytesRef[] bytesRefs = new BytesRef[values.size()]; for (int i = 0; i < bytesRefs.length; i++) { bytesRefs[i] = indexedValueForSearch(values.get(i)); } return new TermsQuery(name(), bytesRefs); } public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) { return new TermRangeQuery(name(), lowerTerm == null ? null : indexedValueForSearch(lowerTerm), upperTerm == null ? 
null : indexedValueForSearch(upperTerm), includeLower, includeUpper); } public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { return new FuzzyQuery(new Term(name(), indexedValueForSearch(value)), fuzziness.asDistance(BytesRefs.toString(value)), prefixLength, maxExpansions, transpositions); } public Query prefixQuery(String value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryShardContext context) { PrefixQuery query = new PrefixQuery(new Term(name(), indexedValueForSearch(value))); if (method != null) { query.setRewriteMethod(method); } return query; } public Query regexpQuery(String value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryShardContext context) { throw new QueryShardException(context, "Can only use regular expression on keyword and text fields - not on [" + name + "] which is of type [" + typeName() + "]"); } public Query nullValueQuery() { if (nullValue == null) { return null; } return new ConstantScoreQuery(termQuery(nullValue, null)); } /** * @return a {@link FieldStats} instance that maps to the type of this * field or {@code null} if the provided index has no stats about the * current field */ public FieldStats stats(IndexReader reader) throws IOException { int maxDoc = reader.maxDoc(); Terms terms = MultiFields.getTerms(reader, name()); if (terms == null) { return null; } return new FieldStats.Text( maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), terms.getMin(), terms.getMax() ); } /** * An enum used to describe the relation between the range of terms in a * shard when compared with a query range */ public static enum Relation { WITHIN, INTERSECTS, DISJOINT; } /** Return whether all values of the given {@link IndexReader} are within the range, * outside the range or cross the range. 
The default implementation returns * {@link Relation#INTERSECTS}, which is always fine to return when there is * no way to check whether values are actually within bounds. */ public Relation isFieldWithinQuery( IndexReader reader, Object from, Object to, boolean includeLower, boolean includeUpper, DateTimeZone timeZone, DateMathParser dateMathParser) throws IOException { return Relation.INTERSECTS; } /** A term query to use when parsing a query string. Can return <tt>null</tt>. */ @Nullable public Query queryStringTermQuery(Term term) { return null; } protected final void failIfNoDocValues() { if (hasDocValues() == false) { throw new IllegalStateException("Can't load fielddata on [" + name() + "] because fielddata is unsupported on fields of type [" + typeName() + "]. Use doc values instead."); } } public boolean eagerGlobalOrdinals() { return eagerGlobalOrdinals; } public void setEagerGlobalOrdinals(boolean eagerGlobalOrdinals) { checkIfFrozen(); this.eagerGlobalOrdinals = eagerGlobalOrdinals; } /** Return a {@link DocValueFormat} that can be used to display and parse * values as returned by the fielddata API. * The default implementation returns a {@link DocValueFormat#RAW}. */ public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZone) { if (format != null) { throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom formats"); } if (timeZone != null) { throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom time zones"); } return DocValueFormat.RAW; } /** * Extract a {@link Term} from a query created with {@link #termQuery} by * recursively removing {@link BoostQuery} wrappers. 
* @throws IllegalArgumentException if the wrapped query is not a {@link TermQuery} */ public static Term extractTerm(Query termQuery) { while (termQuery instanceof BoostQuery) { termQuery = ((BoostQuery) termQuery).getQuery(); } if (termQuery instanceof TermQuery == false) { throw new IllegalArgumentException("Cannot extract a term from a query of type " + termQuery.getClass() + ": " + termQuery); } return ((TermQuery) termQuery).getTerm(); } }
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
/*
 * This code was generated by https://github.com/googleapis/google-api-java-client-services/
 * Modify at your own risk.
 */

package com.google.api.services.bigquery.model;

/**
 * Model definition for ProjectList.
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the BigQuery API.  For a detailed explanation see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * @author Google, Inc.
 */
// NOTE: generated source — do not hand-edit; field names map 1:1 to the JSON wire keys.
@SuppressWarnings("javadoc")
public final class ProjectList extends com.google.api.client.json.GenericJson {

  /**
   * A hash of the page of results
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String etag;

  /**
   * The type of list.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String kind;

  /**
   * A token to request the next page of results.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String nextPageToken;

  /**
   * Projects to which you have at least READ access.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<Projects> projects;

  static {
    // hack to force ProGuard to consider Projects used, since otherwise it would be stripped out
    // see https://github.com/google/google-api-java-client/issues/543
    com.google.api.client.util.Data.nullOf(Projects.class);
  }

  /**
   * The total number of projects in the list.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Integer totalItems;

  /**
   * A hash of the page of results
   * @return value or {@code null} for none
   */
  public java.lang.String getEtag() {
    return etag;
  }

  /**
   * A hash of the page of results
   * @param etag etag or {@code null} for none
   */
  public ProjectList setEtag(java.lang.String etag) {
    this.etag = etag;
    return this;
  }

  /**
   * The type of list.
   * @return value or {@code null} for none
   */
  public java.lang.String getKind() {
    return kind;
  }

  /**
   * The type of list.
   * @param kind kind or {@code null} for none
   */
  public ProjectList setKind(java.lang.String kind) {
    this.kind = kind;
    return this;
  }

  /**
   * A token to request the next page of results.
   * @return value or {@code null} for none
   */
  public java.lang.String getNextPageToken() {
    return nextPageToken;
  }

  /**
   * A token to request the next page of results.
   * @param nextPageToken nextPageToken or {@code null} for none
   */
  public ProjectList setNextPageToken(java.lang.String nextPageToken) {
    this.nextPageToken = nextPageToken;
    return this;
  }

  /**
   * Projects to which you have at least READ access.
   * @return value or {@code null} for none
   */
  public java.util.List<Projects> getProjects() {
    return projects;
  }

  /**
   * Projects to which you have at least READ access.
   * @param projects projects or {@code null} for none
   */
  public ProjectList setProjects(java.util.List<Projects> projects) {
    this.projects = projects;
    return this;
  }

  /**
   * The total number of projects in the list.
   * @return value or {@code null} for none
   */
  public java.lang.Integer getTotalItems() {
    return totalItems;
  }

  /**
   * The total number of projects in the list.
   * @param totalItems totalItems or {@code null} for none
   */
  public ProjectList setTotalItems(java.lang.Integer totalItems) {
    this.totalItems = totalItems;
    return this;
  }

  @Override
  public ProjectList set(String fieldName, Object value) {
    return (ProjectList) super.set(fieldName, value);
  }

  @Override
  public ProjectList clone() {
    return (ProjectList) super.clone();
  }

  /**
   * Model definition for ProjectListProjects.
   */
  public static final class Projects extends com.google.api.client.json.GenericJson {

    /**
     * A descriptive name for this project.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.String friendlyName;

    /**
     * An opaque ID of this project.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.String id;

    /**
     * The resource type.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.String kind;

    /**
     * The numeric ID of this project.
     * The value may be {@code null}.
     */
    // @JsonString: the BigInteger is transmitted as a JSON string to avoid
    // precision loss for 64-bit values in JavaScript clients.
    @com.google.api.client.util.Key
    @com.google.api.client.json.JsonString
    private java.math.BigInteger numericId;

    /**
     * A unique reference to this project.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private ProjectReference projectReference;

    /**
     * A descriptive name for this project.
     * @return value or {@code null} for none
     */
    public java.lang.String getFriendlyName() {
      return friendlyName;
    }

    /**
     * A descriptive name for this project.
     * @param friendlyName friendlyName or {@code null} for none
     */
    public Projects setFriendlyName(java.lang.String friendlyName) {
      this.friendlyName = friendlyName;
      return this;
    }

    /**
     * An opaque ID of this project.
     * @return value or {@code null} for none
     */
    public java.lang.String getId() {
      return id;
    }

    /**
     * An opaque ID of this project.
     * @param id id or {@code null} for none
     */
    public Projects setId(java.lang.String id) {
      this.id = id;
      return this;
    }

    /**
     * The resource type.
     * @return value or {@code null} for none
     */
    public java.lang.String getKind() {
      return kind;
    }

    /**
     * The resource type.
     * @param kind kind or {@code null} for none
     */
    public Projects setKind(java.lang.String kind) {
      this.kind = kind;
      return this;
    }

    /**
     * The numeric ID of this project.
     * @return value or {@code null} for none
     */
    public java.math.BigInteger getNumericId() {
      return numericId;
    }

    /**
     * The numeric ID of this project.
     * @param numericId numericId or {@code null} for none
     */
    public Projects setNumericId(java.math.BigInteger numericId) {
      this.numericId = numericId;
      return this;
    }

    /**
     * A unique reference to this project.
     * @return value or {@code null} for none
     */
    public ProjectReference getProjectReference() {
      return projectReference;
    }

    /**
     * A unique reference to this project.
     * @param projectReference projectReference or {@code null} for none
     */
    public Projects setProjectReference(ProjectReference projectReference) {
      this.projectReference = projectReference;
      return this;
    }

    @Override
    public Projects set(String fieldName, Object value) {
      return (Projects) super.set(fieldName, value);
    }

    @Override
    public Projects clone() {
      return (Projects) super.clone();
    }
  }
}
package YOUR_PACKAGE_NAME; import android.content.Context; import android.os.Handler; import android.os.Looper; import android.support.annotation.NonNull; import android.text.TextUtils; import android.util.Log; import android.view.View; import com.applovin.nativeAds.AppLovinNativeAd; import com.applovin.nativeAds.AppLovinNativeAdLoadListener; import com.applovin.sdk.AppLovinErrorCodes; import com.applovin.sdk.AppLovinPostbackListener; import com.applovin.sdk.AppLovinPrivacySettings; import com.applovin.sdk.AppLovinSdk; import com.applovin.sdk.AppLovinSdkSettings; import com.mopub.common.MoPub; import com.mopub.common.privacy.PersonalInfoManager; import com.mopub.nativeads.CustomEventNative; import com.mopub.nativeads.NativeErrorCode; import com.mopub.nativeads.NativeImageHelper; import com.mopub.nativeads.StaticNativeAd; import java.util.ArrayList; import java.util.List; import java.util.Map; import static android.util.Log.DEBUG; import static android.util.Log.ERROR; /** * AppLovin SDK native adapter for MoPub. * <p> * Created by Thomas So on 5/27/17. */ // // PLEASE NOTE: We have renamed this class from "YOUR_PACKAGE_NAME.AppLovinNativeAdapter" to "YOUR_PACKAGE_NAME.AppLovinCustomEventNative", you can use either classname in your MoPub account. 
// public class AppLovinCustomEventNative extends CustomEventNative implements AppLovinNativeAdLoadListener { private static final boolean LOGGING_ENABLED = true; private static final Handler UI_HANDLER = new Handler( Looper.getMainLooper() ); private AppLovinSdk sdk; private CustomEventNativeListener nativeListener; private Context context; // // MoPub Custom Event Methods // @Override public void loadNativeAd(final Context context, final CustomEventNativeListener customEventNativeListener, final Map<String, Object> localExtras, final Map<String, String> serverExtras) { log( DEBUG, "Requesting AppLovin native ad with server extras: " + serverExtras ); // Pass the user consent from the MoPub SDK as per GDPR PersonalInfoManager personalInfoManager = MoPub.getPersonalInformationManager(); if ( personalInfoManager != null && personalInfoManager.gdprApplies() ) { boolean canCollectPersonalInfo = personalInfoManager.canCollectPersonalInformation(); AppLovinPrivacySettings.setHasUserConsent( canCollectPersonalInfo, context ); } this.context = context; this.nativeListener = customEventNativeListener; sdk = retrieveSdk( serverExtras, context ); sdk.setPluginVersion( "MoPub-3.0.0" ); sdk.getNativeAdService().loadNativeAds( 1, this ); } // // Native Ad Load Listener // @Override public void onNativeAdsLoaded(final List nativeAds) { final AppLovinNativeAd nativeAd = (AppLovinNativeAd) nativeAds.get( 0 ); log( DEBUG, "Native ad did load ad: " + nativeAd.getAdId() ); final List<String> imageUrls = new ArrayList<>( 2 ); if ( nativeAd.getIconUrl() != null ) imageUrls.add( nativeAd.getIconUrl() ); if ( nativeAd.getImageUrl() != null ) imageUrls.add( nativeAd.getImageUrl() ); // Please note: If/when we add support for videos, we must use AppLovin SDK's built-in precaching mechanism runOnUiThread( new Runnable() { @Override public void run() { NativeImageHelper.preCacheImages( context, imageUrls, new NativeImageHelper.ImageListener() { @Override public void onImagesCached() { 
handleNativeAdFinishedCaching( nativeAd ); } @Override public void onImagesFailedToCache(NativeErrorCode nativeErrorCode) { handleNativeAdFinishedCaching( nativeAd ); } } ); } } ); } private void handleNativeAdFinishedCaching(final AppLovinNativeAd nativeAd) { log( DEBUG, "Native ad done precaching" ); final AppLovinMopubNativeAd appLovinMopubNativeAd = new AppLovinMopubNativeAd( nativeAd, context ); nativeListener.onNativeAdLoaded( appLovinMopubNativeAd ); } @Override public void onNativeAdsFailedToLoad(final int errorCode) { log( ERROR, "Native ad video failed to load with error: " + errorCode ); nativeListener.onNativeAdFailed( toMoPubErrorCode( errorCode ) ); } private class AppLovinMopubNativeAd extends StaticNativeAd { private final AppLovinNativeAd parentNativeAd; private final Context parentContext; private View parentView; AppLovinMopubNativeAd(final AppLovinNativeAd nativeAd, final Context context) { parentNativeAd = nativeAd; parentContext = context; setTitle( nativeAd.getTitle() ); setText( nativeAd.getDescriptionText() ); setIconImageUrl( nativeAd.getIconUrl() ); setMainImageUrl( nativeAd.getImageUrl() ); setCallToAction( nativeAd.getCtaText() ); setStarRating( (double) nativeAd.getStarRating() ); setClickDestinationUrl( nativeAd.getClickUrl() ); } @Override public void prepare(@NonNull final View view) { // PLEASE NOTE: Use the code below if you would like AppLovin to handle the ad clicks for you: /* final View.OnClickListener onClickListener = new View.OnClickListener() { @Override public void onClick(View v) { parentNativeAd.launchClickTarget( parentContext ); notifyAdClicked(); } }; parentView = view; parentView.setOnClickListener( onClickListener ); // If you need to make subviews of the view clickable (e.g. 
CTA button), apply the click listener to them: parentView.findViewById( R.id.ID_OF_SUBVIEW ).setOnClickListener( onClickListener ); */ // As of AppLovin SDK >=7.1.0, impression tracking convenience methods have been added to AppLovinNativeAd parentNativeAd.trackImpression( new AppLovinPostbackListener() { @Override public void onPostbackSuccess(String url) { log( DEBUG, "Native ad impression successfully executed." ); notifyAdImpressed(); } @Override public void onPostbackFailure(String url, int errorCode) { log( ERROR, "Native ad impression failed to execute." ); } } ); } @Override public void clear(@NonNull final View view) { parentView = null; } @Override public void destroy() { AppLovinCustomEventNative.this.nativeListener = null; } } // // Utility Methods // private static void log(final int priority, final String message) { if ( LOGGING_ENABLED ) { Log.println( priority, "AppLovinNative", message ); } } private static NativeErrorCode toMoPubErrorCode(final int applovinErrorCode) { if ( applovinErrorCode == AppLovinErrorCodes.NO_FILL ) { return NativeErrorCode.NETWORK_NO_FILL; } else if ( applovinErrorCode == AppLovinErrorCodes.UNSPECIFIED_ERROR ) { return NativeErrorCode.NETWORK_INVALID_STATE; } else if ( applovinErrorCode == AppLovinErrorCodes.NO_NETWORK ) { return NativeErrorCode.CONNECTION_ERROR; } else if ( applovinErrorCode == AppLovinErrorCodes.FETCH_AD_TIMEOUT ) { return NativeErrorCode.NETWORK_TIMEOUT; } else if ( applovinErrorCode == AppLovinErrorCodes.UNABLE_TO_PREPARE_NATIVE_AD ) { return NativeErrorCode.INVALID_RESPONSE; } else { return NativeErrorCode.UNSPECIFIED; } } /** * Performs the given runnable on the main thread. */ private static void runOnUiThread(final Runnable runnable) { if ( Looper.myLooper() == Looper.getMainLooper() ) { runnable.run(); } else { UI_HANDLER.post( runnable ); } } /** * Retrieves the appropriate instance of AppLovin's SDK from the SDK key given in the server parameters, or Android Manifest. 
*/ private static AppLovinSdk retrieveSdk(final Map<String, String> serverExtras, final Context context) { final String sdkKey = serverExtras != null ? serverExtras.get( "sdk_key" ) : null; final AppLovinSdk sdk; if ( !TextUtils.isEmpty( sdkKey ) ) { sdk = AppLovinSdk.getInstance( sdkKey, new AppLovinSdkSettings(), context ); } else { sdk = AppLovinSdk.getInstance( context ); } return sdk; } }
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: test_rpc_service.proto
// NOTE(review): this file is protoc output; regenerate from the .proto rather
// than hand-editing. Method index mapping throughout: 0=ping, 1=echo, 2=error.

package org.apache.hadoop.hbase.ipc.protobuf.generated;

public final class TestRpcServiceProtos {
  // Private constructor: generated outer class is a pure namespace holder.
  private TestRpcServiceProtos() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  /**
   * Protobuf service {@code TestProtobufRpcProto}
   *
   * <pre>
   **
   * A protobuf service for use in tests
   * </pre>
   */
  public static abstract class TestProtobufRpcProto
      implements com.google.protobuf.Service {
    protected TestProtobufRpcProto() {}

    // Callback-style (async) server interface for the three RPCs.
    public interface Interface {
      /**
       * <code>rpc ping(.EmptyRequestProto) returns (.EmptyResponseProto);</code>
       */
      public abstract void ping(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto> done);

      /**
       * <code>rpc echo(.EchoRequestProto) returns (.EchoResponseProto);</code>
       */
      public abstract void echo(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto> done);

      /**
       * <code>rpc error(.EmptyRequestProto) returns (.EmptyResponseProto);</code>
       */
      public abstract void error(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto> done);

    }

    // Wraps an async Interface implementation as a protobuf Service.
    public static com.google.protobuf.Service newReflectiveService(
        final Interface impl) {
      return new TestProtobufRpcProto() {
        @java.lang.Override
        public  void ping(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto> done) {
          impl.ping(controller, request, done);
        }

        @java.lang.Override
        public  void echo(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto> done) {
          impl.echo(controller, request, done);
        }

        @java.lang.Override
        public  void error(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto> done) {
          impl.error(controller, request, done);
        }

      };
    }

    // Wraps a BlockingInterface implementation as a BlockingService,
    // dispatching reflectively by method descriptor index.
    public static com.google.protobuf.BlockingService
        newReflectiveBlockingService(final BlockingInterface impl) {
      return new com.google.protobuf.BlockingService() {
        public final com.google.protobuf.Descriptors.ServiceDescriptor
            getDescriptorForType() {
          return getDescriptor();
        }

        public final com.google.protobuf.Message callBlockingMethod(
            com.google.protobuf.Descriptors.MethodDescriptor method,
            com.google.protobuf.RpcController controller,
            com.google.protobuf.Message request)
            throws com.google.protobuf.ServiceException {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.callBlockingMethod() given method descriptor for " +
              "wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return impl.ping(controller, (org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto)request);
            case 1:
              return impl.echo(controller, (org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto)request);
            case 2:
              return impl.error(controller, (org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto)request);
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final com.google.protobuf.Message
            getRequestPrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getRequestPrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto.getDefaultInstance();
            case 2:
              return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final com.google.protobuf.Message
            getResponsePrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getResponsePrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto.getDefaultInstance();
            case 2:
              return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

      };
    }

    /**
     * <code>rpc ping(.EmptyRequestProto) returns (.EmptyResponseProto);</code>
     */
    public abstract void ping(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto> done);

    /**
     * <code>rpc echo(.EchoRequestProto) returns (.EchoResponseProto);</code>
     */
    public abstract void echo(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto> done);

    /**
     * <code>rpc error(.EmptyRequestProto) returns (.EmptyResponseProto);</code>
     */
    public abstract void error(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto> done);

    // Service descriptor: this is the first (index 0) service in the file.
    public static final
        com.google.protobuf.Descriptors.ServiceDescriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.ipc.protobuf.generated.TestRpcServiceProtos.getDescriptor().getServices().get(0);
    }
    public final com.google.protobuf.Descriptors.ServiceDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }

    // Reflective async dispatch by method index (0=ping, 1=echo, 2=error).
    public final void callMethod(
        com.google.protobuf.Descriptors.MethodDescriptor method,
        com.google.protobuf.RpcController controller,
        com.google.protobuf.Message request,
        com.google.protobuf.RpcCallback<
          com.google.protobuf.Message> done) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.callMethod() given method descriptor for wrong " +
          "service type.");
      }
      switch(method.getIndex()) {
        case 0:
          this.ping(controller, (org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto>specializeCallback(
              done));
          return;
        case 1:
          this.echo(controller, (org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto>specializeCallback(
              done));
          return;
        case 2:
          this.error(controller, (org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto>specializeCallback(
              done));
          return;
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final com.google.protobuf.Message
        getRequestPrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getRequestPrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto.getDefaultInstance();
        case 2:
          return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final com.google.protobuf.Message
        getResponsePrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getResponsePrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto.getDefaultInstance();
        case 2:
          return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public static Stub newStub(
        com.google.protobuf.RpcChannel channel) {
      return new Stub(channel);
    }

    // Async client stub: forwards each call through the supplied RpcChannel.
    public static final class Stub extends org.apache.hadoop.hbase.ipc.protobuf.generated.TestRpcServiceProtos.TestProtobufRpcProto implements Interface {
      private Stub(com.google.protobuf.RpcChannel channel) {
        this.channel = channel;
      }

      private final com.google.protobuf.RpcChannel channel;

      public com.google.protobuf.RpcChannel getChannel() {
        return channel;
      }

      public  void ping(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.class,
            org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.getDefaultInstance()));
      }

      public  void echo(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto.class,
            org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto.getDefaultInstance()));
      }

      public  void error(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(2),
          controller,
          request,
          org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.class,
            org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.getDefaultInstance()));
      }
    }

    public static BlockingInterface newBlockingStub(
        com.google.protobuf.BlockingRpcChannel channel) {
      return new BlockingStub(channel);
    }

    // Synchronous client interface for the three RPCs.
    public interface BlockingInterface {
      public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto ping(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto echo(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto error(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request)
          throws com.google.protobuf.ServiceException;
    }

    // Blocking client stub: forwards each call through the BlockingRpcChannel.
    private static final class BlockingStub implements BlockingInterface {
      private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
        this.channel = channel;
      }

      private final com.google.protobuf.BlockingRpcChannel channel;

      public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto ping(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto echo(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto error(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(2),
          controller,
          request,
          org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.getDefaultInstance());
      }

    }

    // @@protoc_insertion_point(class_scope:TestProtobufRpcProto)
  }


  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    // Serialized FileDescriptorProto (octal-escaped); built once at class load.
    java.lang.String[] descriptorData = {
      "\n\026test_rpc_service.proto\032\ntest.proto2\250\001\n" +
      "\024TestProtobufRpcProto\022/\n\004ping\022\022.EmptyReq" +
      "uestProto\032\023.EmptyResponseProto\022-\n\004echo\022\021" +
      ".EchoRequestProto\032\022.EchoResponseProto\0220\n" +
      "\005error\022\022.EmptyRequestProto\032\023.EmptyRespon" +
      "seProtoBL\n.org.apache.hadoop.hbase.ipc.p" +
      "rotobuf.generatedB\024TestRpcServiceProtos\210" +
      "\001\001\240\001\001"
    };
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          return null;
        }
      };
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
          org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.getDescriptor(),
        }, assigner);
  }

  // @@protoc_insertion_point(outer_class_scope)
}
package com.newsblur.fragment;

import java.util.ArrayList;
import java.util.List;

import android.app.Activity;
import android.content.ContentResolver;
import android.content.ContentValues;
import android.content.Intent;
import android.content.SharedPreferences;
import android.database.Cursor;
import android.os.AsyncTask;
import android.os.Bundle;
import android.app.DialogFragment;
import android.app.Fragment;
import android.util.Log;
import android.view.ContextMenu;
import android.view.ContextMenu.ContextMenuInfo;
import android.view.Display;
import android.view.LayoutInflater;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnCreateContextMenuListener;
import android.view.ViewGroup;
import android.widget.ExpandableListView;
import android.widget.ExpandableListView.OnChildClickListener;
import android.widget.ExpandableListView.OnGroupClickListener;
import android.widget.Toast;

import com.newsblur.R;
import com.newsblur.activity.AllStoriesItemsList;
import com.newsblur.activity.FeedItemsList;
import com.newsblur.activity.ItemsList;
import com.newsblur.activity.NewsBlurApplication;
import com.newsblur.activity.SavedStoriesItemsList;
import com.newsblur.activity.SocialFeedItemsList;
import com.newsblur.database.DatabaseConstants;
import com.newsblur.database.FeedProvider;
import com.newsblur.database.MixedExpandableListAdapter;
import com.newsblur.network.APIManager;
import com.newsblur.network.MarkFeedAsReadTask;
import com.newsblur.network.MarkFolderAsReadTask;
import com.newsblur.util.AppConstants;
import com.newsblur.util.ImageLoader;
import com.newsblur.util.PrefConstants;
import com.newsblur.util.UIUtils;
import com.newsblur.view.FolderTreeViewBinder;
import com.newsblur.view.SocialFeedViewBinder;

/**
 * Fragment showing the folder/feed tree as an expandable list, including social
 * (blurblog) feeds and saved stories, with context-menu actions for marking
 * feeds/folders read and deleting feeds.
 */
public class FolderListFragment extends Fragment implements OnGroupClickListener, OnChildClickListener, OnCreateContextMenuListener {

    private ContentResolver resolver;
    private MixedExpandableListAdapter folderAdapter;
    private FolderTreeViewBinder groupViewBinder;
    private APIManager apiManager;
    // current unread-filter state; defaults to showing "some" unreads
    private int currentState = AppConstants.STATE_SOME;
    private SocialFeedViewBinder blogViewBinder;
    private SharedPreferences sharedPreferences;
    private ExpandableListView list;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // all cursors are initially queried in the "some" unread state to match the default view mode
        Cursor folderCursor = resolver.query(FeedProvider.FOLDERS_URI, null, null, new String[] { DatabaseConstants.getFolderSelectionFromState(AppConstants.STATE_SOME) }, null);
        Cursor socialFeedCursor = resolver.query(FeedProvider.SOCIAL_FEEDS_URI, null, DatabaseConstants.getBlogSelectionFromState(AppConstants.STATE_SOME), null, null);
        Cursor countCursor = resolver.query(FeedProvider.FEED_COUNT_URI, null, DatabaseConstants.getBlogSelectionFromState(AppConstants.STATE_SOME), null, null);
        Cursor sharedCountCursor = resolver.query(FeedProvider.SOCIALCOUNT_URI, null, DatabaseConstants.getBlogSelectionFromState(AppConstants.STATE_SOME), null, null);
        Cursor savedCountCursor = resolver.query(FeedProvider.STARRED_STORIES_COUNT_URI, null, null, null, null);

        ImageLoader imageLoader = ((NewsBlurApplication) getActivity().getApplicationContext()).getImageLoader();
        groupViewBinder = new FolderTreeViewBinder(imageLoader);
        blogViewBinder = new SocialFeedViewBinder(getActivity());

        folderAdapter = new MixedExpandableListAdapter(getActivity(), folderCursor, socialFeedCursor, countCursor, sharedCountCursor, savedCountCursor);
        folderAdapter.setViewBinders(groupViewBinder, blogViewBinder);
    }

    @Override
    public void onAttach(Activity activity) {
        // onAttach runs before onCreate, so these are ready when onCreate queries
        sharedPreferences = activity.getSharedPreferences(PrefConstants.PREFERENCES, 0);
        resolver = activity.getContentResolver();
        apiManager = new APIManager(activity);
        super.onAttach(activity);
    }

    @Override
    public void onStart() {
        super.onStart();
        hasUpdated();
    }

    /** Refreshes the adapter and re-applies the saved expand/collapse state. */
    public void hasUpdated() {
        folderAdapter.notifyDataSetChanged();
        checkOpenFolderPreferences();
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        // FIX: pass attachToRoot=false. The two-argument inflate(resource, root)
        // attaches the inflated view to the container and returns the *container*,
        // which is wrong in onCreateView — the fragment framework attaches the
        // returned view itself.
        View v = inflater.inflate(R.layout.fragment_folderfeedlist, container, false);
        list = (ExpandableListView) v.findViewById(R.id.folderfeed_list);
        list.setGroupIndicator(getResources().getDrawable(R.drawable.transparent));
        list.setOnCreateContextMenuListener(this);

        Display display = getActivity().getWindowManager().getDefaultDisplay();
        list.setIndicatorBounds(
                display.getWidth() - UIUtils.convertDPsToPixels(getActivity(), 20),
                display.getWidth() - UIUtils.convertDPsToPixels(getActivity(), 10));
        list.setChildDivider(getActivity().getResources().getDrawable(R.drawable.divider_light));
        list.setAdapter(folderAdapter);
        list.setOnGroupClickListener(this);
        list.setOnChildClickListener(this);
        return v;
    }

    /** Expands or collapses each group to match the per-folder preference flags. */
    public void checkOpenFolderPreferences() {
        // make sure we didn't beat construction
        if (this.list == null) return;
        if (sharedPreferences == null) {
            sharedPreferences = getActivity().getSharedPreferences(PrefConstants.PREFERENCES, 0);
        }
        for (int i = 0; i < folderAdapter.getGroupCount(); i++) {
            String groupName = folderAdapter.getGroupName(i);
            if (sharedPreferences.getBoolean(AppConstants.FOLDER_PRE + "_" + groupName, true)) {
                this.list.expandGroup(i);
            } else {
                this.list.collapseGroup(i);
            }
        }
    }

    @Override
    public void onCreateContextMenu(ContextMenu menu, View v, ContextMenuInfo menuInfo) {
        MenuInflater inflater = getActivity().getMenuInflater();
        ExpandableListView.ExpandableListContextMenuInfo info = (ExpandableListView.ExpandableListContextMenuInfo) menuInfo;
        int type = ExpandableListView.getPackedPositionType(info.packedPosition);

        switch (type) {
        case ExpandableListView.PACKED_POSITION_TYPE_GROUP:
            int groupPosition = ExpandableListView.getPackedPositionGroup(info.packedPosition);
            // the saved-stories row has no folder actions
            if (!folderAdapter.isRowSavedStories(groupPosition)) {
                inflater.inflate(R.menu.context_folder, menu);
            }
            break;
        case ExpandableListView.PACKED_POSITION_TYPE_CHILD:
            inflater.inflate(R.menu.context_feed, menu);
            break;
        }
    }

    @Override
    public boolean onContextItemSelected(MenuItem item) {
        final ExpandableListView.ExpandableListContextMenuInfo info = (ExpandableListView.ExpandableListContextMenuInfo) item.getMenuInfo();

        if (item.getItemId() == R.id.menu_mark_feed_as_read) {
            new MarkFeedAsReadTask(getActivity(), apiManager) {
                @Override
                protected void onPostExecute(Boolean result) {
                    // Boolean.TRUE.equals() avoids an NPE if the task delivers null
                    if (Boolean.TRUE.equals(result)) {
                        ContentValues values = new ContentValues();
                        values.put(DatabaseConstants.FEED_NEGATIVE_COUNT, 0);
                        values.put(DatabaseConstants.FEED_NEUTRAL_COUNT, 0);
                        values.put(DatabaseConstants.FEED_POSITIVE_COUNT, 0);
                        resolver.update(FeedProvider.FEEDS_URI.buildUpon().appendPath(Long.toString(info.id)).build(), values, null, null);
                        folderAdapter.notifyDataSetChanged();
                        UIUtils.safeToast(getActivity(), R.string.toast_marked_feed_as_read, Toast.LENGTH_SHORT);
                    } else {
                        UIUtils.safeToast(getActivity(), R.string.toast_error_marking_feed_as_read, Toast.LENGTH_LONG);
                    }
                }
            }.execute(Long.toString(info.id));
            return true;
        } else if (item.getItemId() == R.id.menu_delete_feed) {
            int childPosition = ExpandableListView.getPackedPositionChild(info.packedPosition);
            int groupPosition = ExpandableListView.getPackedPositionGroup(info.packedPosition);
            Cursor childCursor = folderAdapter.getChild(groupPosition, childPosition);
            String feedTitle = childCursor.getString(childCursor.getColumnIndex(DatabaseConstants.FEED_TITLE));
            // TODO: is there a better way to map group position onto folderName than asking the list adapter?
            Cursor folderCursor = ((MixedExpandableListAdapter) this.list.getExpandableListAdapter()).getGroup(groupPosition);
            String folderName = folderCursor.getString(folderCursor.getColumnIndex(DatabaseConstants.FOLDER_NAME));
            DialogFragment deleteFeedFragment = DeleteFeedFragment.newInstance(info.id, feedTitle, folderName);
            deleteFeedFragment.show(getFragmentManager(), "dialog");
            return true;
        } else if (item.getItemId() == R.id.menu_mark_folder_as_read) {
            int groupPosition = ExpandableListView.getPackedPositionGroup(info.packedPosition);
            // all folders but the root All Stories one use the simple method
            if (!folderAdapter.isFolderRoot(groupPosition)) {
                // TODO: is there a better way to get the folder ID for a group position than asking the list view?
                final Cursor folderCursor = ((MixedExpandableListAdapter) this.list.getExpandableListAdapter()).getGroup(groupPosition);
                String folderId = folderCursor.getString(folderCursor.getColumnIndex(DatabaseConstants.FOLDER_NAME));
                new MarkFolderAsReadTask(apiManager, resolver) {
                    @Override
                    protected void onPostExecute(Boolean result) {
                        if (Boolean.TRUE.equals(result)) {
                            folderAdapter.notifyDataSetChanged();
                            Toast.makeText(getActivity(), R.string.toast_marked_folder_as_read, Toast.LENGTH_SHORT).show();
                        } else {
                            Toast.makeText(getActivity(), R.string.toast_error_marking_feed_as_read, Toast.LENGTH_SHORT).show();
                        }
                    }
                }.execute(folderId);
            } else {
                // TODO is social feed actually all shared stories ? Should this be used for expandable and position == 0 ?
                /*final Cursor socialFeedCursor = ((MixedExpandableListAdapter) list.getExpandableListAdapter()).getGroup(groupPosition);
                String socialFeedId = socialFeedCursor.getString(socialFeedCursor.getColumnIndex(DatabaseConstants.SOCIAL_FEED_ID));
                new MarkSocialFeedAsReadTask(apiManager, resolver){
                    @Override
                    protected void onPostExecute(Boolean result) {
                        if (result.booleanValue()) {
                            folderAdapter.notifyDataSetChanged();
                            Toast.makeText(getActivity(), R.string.toast_marked_socialfeed_as_read, Toast.LENGTH_SHORT).show();
                        } else {
                            Toast.makeText(getActivity(), R.string.toast_error_marking_feed_as_read, Toast.LENGTH_LONG).show();
                        }
                    }
                }.execute(socialFeedId);*/

                // root folder: mark everything read server-side, then zero local counts
                new AsyncTask<Void, Void, Boolean>() {
                    @Override
                    protected Boolean doInBackground(Void... arg) {
                        return apiManager.markAllAsRead();
                    }

                    @Override
                    protected void onPostExecute(Boolean result) {
                        if (Boolean.TRUE.equals(result)) {
                            ContentValues values = new ContentValues();
                            values.put(DatabaseConstants.FEED_NEGATIVE_COUNT, 0);
                            values.put(DatabaseConstants.FEED_NEUTRAL_COUNT, 0);
                            values.put(DatabaseConstants.FEED_POSITIVE_COUNT, 0);
                            resolver.update(FeedProvider.FEEDS_URI, values, null, null);
                            folderAdapter.notifyDataSetChanged();
                            UIUtils.safeToast(getActivity(), R.string.toast_marked_all_stories_as_read, Toast.LENGTH_SHORT);
                        } else {
                            UIUtils.safeToast(getActivity(), R.string.toast_error_marking_feed_as_read, Toast.LENGTH_SHORT);
                        }
                    }
                }.execute();
            }
            return true;
        }
        return super.onContextItemSelected(item);
    }

    /** Requeries all cursors for the given unread-filter state and refreshes the tree. */
    public void changeState(int state) {
        groupViewBinder.setState(state);
        blogViewBinder.setState(state);
        currentState = state;

        String groupSelection = DatabaseConstants.getFolderSelectionFromState(state);
        String blogSelection = DatabaseConstants.getBlogSelectionFromState(state);
        // the countCursor always counts neutral/"some" unreads, no matter what mode we are in
        String countSelection = DatabaseConstants.getBlogSelectionFromState(AppConstants.STATE_SOME);
        folderAdapter.currentState = state;
        Cursor cursor = resolver.query(FeedProvider.FOLDERS_URI, null, null, new String[] { groupSelection }, null);
        Cursor blogCursor = resolver.query(FeedProvider.SOCIAL_FEEDS_URI, null, blogSelection, null, null);
        Cursor countCursor = resolver.query(FeedProvider.FEED_COUNT_URI, null, countSelection, null, null);

        folderAdapter.setBlogCursor(blogCursor);
        folderAdapter.setGroupCursor(cursor);
        folderAdapter.setCountCursor(countCursor);
        folderAdapter.notifyDataSetChanged();
        checkOpenFolderPreferences();
    }

    @Override
    public boolean onGroupClick(ExpandableListView list, View group, int groupPosition, long id) {
        // The root "All Stories" folder goes to a special activity
        if (folderAdapter.isFolderRoot(groupPosition)) {
            Intent i = new Intent(getActivity(), AllStoriesItemsList.class);
            i.putExtra(AllStoriesItemsList.EXTRA_STATE, currentState);
            startActivity(i);
            return true;
        } else if (folderAdapter.isRowSavedStories(groupPosition)) {
            Intent i = new Intent(getActivity(), SavedStoriesItemsList.class);
            startActivity(i);
            return true;
        } else {
            // toggle the per-folder summary row and persist the expand/collapse choice
            if ((group != null) && (group.findViewById(R.id.row_foldersums) != null)) {
                String groupName = folderAdapter.getGroupName(groupPosition);
                if (list.isGroupExpanded(groupPosition)) {
                    group.findViewById(R.id.row_foldersums).setVisibility(View.VISIBLE);
                    sharedPreferences.edit().putBoolean(AppConstants.FOLDER_PRE + "_" + groupName, false).commit();
                } else {
                    group.findViewById(R.id.row_foldersums).setVisibility(View.INVISIBLE);
                    sharedPreferences.edit().putBoolean(AppConstants.FOLDER_PRE + "_" + groupName, true).commit();
                }
            }
            // returning false lets the list perform the default expand/collapse
            return false;
        }
    }

    @Override
    public boolean onChildClick(ExpandableListView list, View childView, int groupPosition, int childPosition, long id) {
        if (groupPosition == 0) {
            // group 0 holds the social (blurblog) feeds
            Cursor blurblogCursor = folderAdapter.getBlogCursor(childPosition);
            String username = blurblogCursor.getString(blurblogCursor.getColumnIndex(DatabaseConstants.SOCIAL_FEED_USERNAME));
            String userIcon = blurblogCursor.getString(blurblogCursor.getColumnIndex(DatabaseConstants.SOCIAL_FEED_ICON));
            String userId = blurblogCursor.getString(blurblogCursor.getColumnIndex(DatabaseConstants.SOCIAL_FEED_ID));
            String blurblogTitle = blurblogCursor.getString(blurblogCursor.getColumnIndex(DatabaseConstants.SOCIAL_FEED_TITLE));
            final Intent intent = new Intent(getActivity(), SocialFeedItemsList.class);
            intent.putExtra(ItemsList.EXTRA_BLURBLOG_USER_ICON, userIcon);
            intent.putExtra(ItemsList.EXTRA_BLURBLOG_USERNAME, username);
            intent.putExtra(ItemsList.EXTRA_BLURBLOG_TITLE, blurblogTitle);
            intent.putExtra(ItemsList.EXTRA_BLURBLOG_USERID, userId);
            intent.putExtra(ItemsList.EXTRA_STATE, currentState);
            getActivity().startActivity(intent);
        } else {
            final Intent intent = new Intent(getActivity(), FeedItemsList.class);
            Cursor childCursor = folderAdapter.getChild(groupPosition, childPosition);
            String feedId = childCursor.getString(childCursor.getColumnIndex(DatabaseConstants.FEED_ID));
            String feedTitle = childCursor.getString(childCursor.getColumnIndex(DatabaseConstants.FEED_TITLE));
            final Cursor folderCursor = ((MixedExpandableListAdapter) list.getExpandableListAdapter()).getGroup(groupPosition);
            String folderName = folderCursor.getString(folderCursor.getColumnIndex(DatabaseConstants.FOLDER_NAME));
            intent.putExtra(FeedItemsList.EXTRA_FEED, feedId);
            intent.putExtra(FeedItemsList.EXTRA_FEED_TITLE, feedTitle);
            intent.putExtra(FeedItemsList.EXTRA_FOLDER_NAME, folderName);
            intent.putExtra(ItemsList.EXTRA_STATE, currentState);
            getActivity().startActivity(intent);
        }
        return true;
    }
}
/* * Copyright (C) 2016 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.exoplayer2.source.hls.playlist; import android.net.Uri; import androidx.annotation.IntDef; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.drm.DrmInitData; import com.google.android.exoplayer2.offline.StreamKey; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.util.ArrayList; import java.util.List; import java.util.Map; /** Represents an HLS media playlist. */ public final class HlsMediaPlaylist extends HlsPlaylist { /** Server control attributes. */ public static final class ServerControl { /** * The skip boundary for delta updates in microseconds, or {@link C#TIME_UNSET} if delta updates * are not supported. */ public final long skipUntilUs; /** * Whether the playlist can produce delta updates that skip older #EXT-X-DATERANGE tags in * addition to media segments. */ public final boolean canSkipDateRanges; /** * The server-recommended live offset in microseconds, or {@link C#TIME_UNSET} if none defined. */ public final long holdBackUs; /** * The server-recommended live offset in microseconds in low-latency mode, or {@link * C#TIME_UNSET} if none defined. 
*/ public final long partHoldBackUs; /** Whether the server supports blocking playlist reload. */ public final boolean canBlockReload; /** * Creates a new instance. * * @param skipUntilUs See {@link #skipUntilUs}. * @param canSkipDateRanges See {@link #canSkipDateRanges}. * @param holdBackUs See {@link #holdBackUs}. * @param partHoldBackUs See {@link #partHoldBackUs}. * @param canBlockReload See {@link #canBlockReload}. */ public ServerControl( long skipUntilUs, boolean canSkipDateRanges, long holdBackUs, long partHoldBackUs, boolean canBlockReload) { this.skipUntilUs = skipUntilUs; this.canSkipDateRanges = canSkipDateRanges; this.holdBackUs = holdBackUs; this.partHoldBackUs = partHoldBackUs; this.canBlockReload = canBlockReload; } } /** Media segment reference. */ @SuppressWarnings("ComparableType") public static final class Segment extends SegmentBase { /** The human readable title of the segment. */ public final String title; /** The parts belonging to this segment. */ public final List<Part> parts; /** * Creates an instance to be used as init segment. * * @param uri See {@link #url}. * @param byteRangeOffset See {@link #byteRangeOffset}. * @param byteRangeLength See {@link #byteRangeLength}. * @param fullSegmentEncryptionKeyUri See {@link #fullSegmentEncryptionKeyUri}. * @param encryptionIV See {@link #encryptionIV}. */ public Segment( String uri, long byteRangeOffset, long byteRangeLength, @Nullable String fullSegmentEncryptionKeyUri, @Nullable String encryptionIV) { this( uri, /* initializationSegment= */ null, /* title= */ "", /* durationUs= */ 0, /* relativeDiscontinuitySequence= */ -1, /* relativeStartTimeUs= */ C.TIME_UNSET, /* drmInitData= */ null, fullSegmentEncryptionKeyUri, encryptionIV, byteRangeOffset, byteRangeLength, /* hasGapTag= */ false, /* parts= */ ImmutableList.of()); } /** * Creates an instance. * * @param url See {@link #url}. * @param initializationSegment See {@link #initializationSegment}. * @param title See {@link #title}. 
* @param durationUs See {@link #durationUs}. * @param relativeDiscontinuitySequence See {@link #relativeDiscontinuitySequence}. * @param relativeStartTimeUs See {@link #relativeStartTimeUs}. * @param drmInitData See {@link #drmInitData}. * @param fullSegmentEncryptionKeyUri See {@link #fullSegmentEncryptionKeyUri}. * @param encryptionIV See {@link #encryptionIV}. * @param byteRangeOffset See {@link #byteRangeOffset}. * @param byteRangeLength See {@link #byteRangeLength}. * @param hasGapTag See {@link #hasGapTag}. * @param parts See {@link #parts}. */ public Segment( String url, @Nullable Segment initializationSegment, String title, long durationUs, int relativeDiscontinuitySequence, long relativeStartTimeUs, @Nullable DrmInitData drmInitData, @Nullable String fullSegmentEncryptionKeyUri, @Nullable String encryptionIV, long byteRangeOffset, long byteRangeLength, boolean hasGapTag, List<Part> parts) { super( url, initializationSegment, durationUs, relativeDiscontinuitySequence, relativeStartTimeUs, drmInitData, fullSegmentEncryptionKeyUri, encryptionIV, byteRangeOffset, byteRangeLength, hasGapTag); this.title = title; this.parts = ImmutableList.copyOf(parts); } public Segment copyWith(long relativeStartTimeUs, int relativeDiscontinuitySequence) { List<Part> updatedParts = new ArrayList<>(); long relativePartStartTimeUs = relativeStartTimeUs; for (int i = 0; i < parts.size(); i++) { Part part = parts.get(i); updatedParts.add(part.copyWith(relativePartStartTimeUs, relativeDiscontinuitySequence)); relativePartStartTimeUs += part.durationUs; } return new Segment( url, initializationSegment, title, durationUs, relativeDiscontinuitySequence, relativeStartTimeUs, drmInitData, fullSegmentEncryptionKeyUri, encryptionIV, byteRangeOffset, byteRangeLength, hasGapTag, updatedParts); } } /** A media part. */ public static final class Part extends SegmentBase { /** Whether the part is independent. */ public final boolean isIndependent; /** Whether the part is a preloading part. 
*/ public final boolean isPreload; /** * Creates an instance. * * @param url See {@link #url}. * @param initializationSegment See {@link #initializationSegment}. * @param durationUs See {@link #durationUs}. * @param relativeDiscontinuitySequence See {@link #relativeDiscontinuitySequence}. * @param relativeStartTimeUs See {@link #relativeStartTimeUs}. * @param drmInitData See {@link #drmInitData}. * @param fullSegmentEncryptionKeyUri See {@link #fullSegmentEncryptionKeyUri}. * @param encryptionIV See {@link #encryptionIV}. * @param byteRangeOffset See {@link #byteRangeOffset}. * @param byteRangeLength See {@link #byteRangeLength}. * @param hasGapTag See {@link #hasGapTag}. * @param isIndependent See {@link #isIndependent}. * @param isPreload See {@link #isPreload}. */ public Part( String url, @Nullable Segment initializationSegment, long durationUs, int relativeDiscontinuitySequence, long relativeStartTimeUs, @Nullable DrmInitData drmInitData, @Nullable String fullSegmentEncryptionKeyUri, @Nullable String encryptionIV, long byteRangeOffset, long byteRangeLength, boolean hasGapTag, boolean isIndependent, boolean isPreload) { super( url, initializationSegment, durationUs, relativeDiscontinuitySequence, relativeStartTimeUs, drmInitData, fullSegmentEncryptionKeyUri, encryptionIV, byteRangeOffset, byteRangeLength, hasGapTag); this.isIndependent = isIndependent; this.isPreload = isPreload; } public Part copyWith(long relativeStartTimeUs, int relativeDiscontinuitySequence) { return new Part( url, initializationSegment, durationUs, relativeDiscontinuitySequence, relativeStartTimeUs, drmInitData, fullSegmentEncryptionKeyUri, encryptionIV, byteRangeOffset, byteRangeLength, hasGapTag, isIndependent, isPreload); } } /** The base for a {@link Segment} or a {@link Part} required for playback. */ @SuppressWarnings("ComparableType") public static class SegmentBase implements Comparable<Long> { /** The url of the segment. 
*/ public final String url; /** * The media initialization section for this segment, as defined by #EXT-X-MAP. May be null if * the media playlist does not define a media initialization section for this segment. The same * instance is used for all segments that share an EXT-X-MAP tag. */ @Nullable public final Segment initializationSegment; /** The duration of the segment in microseconds, as defined by #EXTINF or #EXT-X-PART. */ public final long durationUs; /** The number of #EXT-X-DISCONTINUITY tags in the playlist before the segment. */ public final int relativeDiscontinuitySequence; /** The start time of the segment in microseconds, relative to the start of the playlist. */ public final long relativeStartTimeUs; /** * DRM initialization data for sample decryption, or null if the segment does not use CDM-DRM * protection. */ @Nullable public final DrmInitData drmInitData; /** * The encryption identity key uri as defined by #EXT-X-KEY, or null if the segment does not use * full segment encryption with identity key. */ @Nullable public final String fullSegmentEncryptionKeyUri; /** * The encryption initialization vector as defined by #EXT-X-KEY, or null if the segment is not * encrypted. */ @Nullable public final String encryptionIV; /** * The segment's byte range offset, as defined by #EXT-X-BYTERANGE, #EXT-X-PART or * #EXT-X-PRELOAD-HINT. */ public final long byteRangeOffset; /** * The segment's byte range length, as defined by #EXT-X-BYTERANGE, #EXT-X-PART or * #EXT-X-PRELOAD-HINT, or {@link C#LENGTH_UNSET} if no byte range is specified or the byte * range is open-ended. */ public final long byteRangeLength; /** Whether the segment is marked as a gap. 
*/ public final boolean hasGapTag; private SegmentBase( String url, @Nullable Segment initializationSegment, long durationUs, int relativeDiscontinuitySequence, long relativeStartTimeUs, @Nullable DrmInitData drmInitData, @Nullable String fullSegmentEncryptionKeyUri, @Nullable String encryptionIV, long byteRangeOffset, long byteRangeLength, boolean hasGapTag) { this.url = url; this.initializationSegment = initializationSegment; this.durationUs = durationUs; this.relativeDiscontinuitySequence = relativeDiscontinuitySequence; this.relativeStartTimeUs = relativeStartTimeUs; this.drmInitData = drmInitData; this.fullSegmentEncryptionKeyUri = fullSegmentEncryptionKeyUri; this.encryptionIV = encryptionIV; this.byteRangeOffset = byteRangeOffset; this.byteRangeLength = byteRangeLength; this.hasGapTag = hasGapTag; } @Override public int compareTo(Long relativeStartTimeUs) { return this.relativeStartTimeUs > relativeStartTimeUs ? 1 : (this.relativeStartTimeUs < relativeStartTimeUs ? -1 : 0); } } /** * A rendition report for an alternative rendition defined in another media playlist. * * <p>See RFC 8216, section 4.4.5.1.4. */ public static final class RenditionReport { /** The URI of the media playlist of the reported rendition. */ public final Uri playlistUri; /** The last media sequence that is in the playlist of the reported rendition. */ public final long lastMediaSequence; /** * The last part index that is in the playlist of the reported rendition, or {@link * C#INDEX_UNSET} if the rendition does not contain partial segments. */ public final int lastPartIndex; /** * Creates a new instance. * * @param playlistUri See {@link #playlistUri}. * @param lastMediaSequence See {@link #lastMediaSequence}. * @param lastPartIndex See {@link #lastPartIndex}. 
*/ public RenditionReport(Uri playlistUri, long lastMediaSequence, int lastPartIndex) { this.playlistUri = playlistUri; this.lastMediaSequence = lastMediaSequence; this.lastPartIndex = lastPartIndex; } } /** * Type of the playlist, as defined by #EXT-X-PLAYLIST-TYPE. One of {@link * #PLAYLIST_TYPE_UNKNOWN}, {@link #PLAYLIST_TYPE_VOD} or {@link #PLAYLIST_TYPE_EVENT}. */ @Documented @Retention(RetentionPolicy.SOURCE) @IntDef({PLAYLIST_TYPE_UNKNOWN, PLAYLIST_TYPE_VOD, PLAYLIST_TYPE_EVENT}) public @interface PlaylistType {} public static final int PLAYLIST_TYPE_UNKNOWN = 0; public static final int PLAYLIST_TYPE_VOD = 1; public static final int PLAYLIST_TYPE_EVENT = 2; /** * The type of the playlist. See {@link PlaylistType}. */ @PlaylistType public final int playlistType; /** * The start offset in microseconds, as defined by #EXT-X-START. */ public final long startOffsetUs; /** * If {@link #hasProgramDateTime} is true, contains the datetime as microseconds since epoch. * Otherwise, contains the aggregated duration of removed segments up to this snapshot of the * playlist. */ public final long startTimeUs; /** * Whether the playlist contains the #EXT-X-DISCONTINUITY-SEQUENCE tag. */ public final boolean hasDiscontinuitySequence; /** * The discontinuity sequence number of the first media segment in the playlist, as defined by * #EXT-X-DISCONTINUITY-SEQUENCE. */ public final int discontinuitySequence; /** * The media sequence number of the first media segment in the playlist, as defined by * #EXT-X-MEDIA-SEQUENCE. */ public final long mediaSequence; /** * The compatibility version, as defined by #EXT-X-VERSION. */ public final int version; /** * The target duration in microseconds, as defined by #EXT-X-TARGETDURATION. */ public final long targetDurationUs; /** * The target duration for segment parts, as defined by #EXT-X-PART-INF, or {@link C#TIME_UNSET} * if undefined. */ public final long partTargetDurationUs; /** Whether the playlist contains the #EXT-X-ENDLIST tag. 
*/ public final boolean hasEndTag; /** * Whether the playlist contains a #EXT-X-PROGRAM-DATE-TIME tag. */ public final boolean hasProgramDateTime; /** * Contains the CDM protection schemes used by segments in this playlist. Does not contain any key * acquisition data. Null if none of the segments in the playlist is CDM-encrypted. */ @Nullable public final DrmInitData protectionSchemes; /** * The list of segments in the playlist. */ public final List<Segment> segments; /** * The list of parts at the end of the playlist for which the segment is not in the playlist yet. */ public final List<Part> trailingParts; /** The rendition reports of alternative rendition playlists. */ public final Map<Uri, RenditionReport> renditionReports; /** The total duration of the playlist in microseconds. */ public final long durationUs; /** The attributes of the #EXT-X-SERVER-CONTROL header. */ public final ServerControl serverControl; /** * @param playlistType See {@link #playlistType}. * @param baseUri See {@link #baseUri}. * @param tags See {@link #tags}. * @param startOffsetUs See {@link #startOffsetUs}. * @param startTimeUs See {@link #startTimeUs}. * @param hasDiscontinuitySequence See {@link #hasDiscontinuitySequence}. * @param discontinuitySequence See {@link #discontinuitySequence}. * @param mediaSequence See {@link #mediaSequence}. * @param version See {@link #version}. * @param targetDurationUs See {@link #targetDurationUs}. * @param hasIndependentSegments See {@link #hasIndependentSegments}. * @param hasEndTag See {@link #hasEndTag}. * @param hasProgramDateTime See {@link #hasProgramDateTime}. * @param protectionSchemes See {@link #protectionSchemes}. * @param segments See {@link #segments}. * @param trailingParts See {@link #trailingParts}. * @param serverControl See {@link #serverControl} * @param renditionReports See {@link #renditionReports}. 
*/ public HlsMediaPlaylist( @PlaylistType int playlistType, String baseUri, List<String> tags, long startOffsetUs, long startTimeUs, boolean hasDiscontinuitySequence, int discontinuitySequence, long mediaSequence, int version, long targetDurationUs, long partTargetDurationUs, boolean hasIndependentSegments, boolean hasEndTag, boolean hasProgramDateTime, @Nullable DrmInitData protectionSchemes, List<Segment> segments, List<Part> trailingParts, ServerControl serverControl, Map<Uri, RenditionReport> renditionReports) { super(baseUri, tags, hasIndependentSegments); this.playlistType = playlistType; this.startTimeUs = startTimeUs; this.hasDiscontinuitySequence = hasDiscontinuitySequence; this.discontinuitySequence = discontinuitySequence; this.mediaSequence = mediaSequence; this.version = version; this.targetDurationUs = targetDurationUs; this.partTargetDurationUs = partTargetDurationUs; this.hasEndTag = hasEndTag; this.hasProgramDateTime = hasProgramDateTime; this.protectionSchemes = protectionSchemes; this.segments = ImmutableList.copyOf(segments); this.trailingParts = ImmutableList.copyOf(trailingParts); this.renditionReports = ImmutableMap.copyOf(renditionReports); if (!trailingParts.isEmpty()) { Part lastPart = Iterables.getLast(trailingParts); durationUs = lastPart.relativeStartTimeUs + lastPart.durationUs; } else if (!segments.isEmpty()) { Segment lastSegment = Iterables.getLast(segments); durationUs = lastSegment.relativeStartTimeUs + lastSegment.durationUs; } else { durationUs = 0; } this.startOffsetUs = startOffsetUs == C.TIME_UNSET ? C.TIME_UNSET : startOffsetUs >= 0 ? startOffsetUs : durationUs + startOffsetUs; this.serverControl = serverControl; } @Override public HlsMediaPlaylist copy(List<StreamKey> streamKeys) { return this; } /** * Returns whether this playlist is newer than {@code other}. * * @param other The playlist to compare. * @return Whether this playlist is newer than {@code other}. 
*/ public boolean isNewerThan(@Nullable HlsMediaPlaylist other) { if (other == null || mediaSequence > other.mediaSequence) { return true; } if (mediaSequence < other.mediaSequence) { return false; } // The media sequences are equal. int segmentCountDifference = segments.size() - other.segments.size(); if (segmentCountDifference != 0) { return segmentCountDifference > 0; } int partCount = trailingParts.size(); int otherPartCount = other.trailingParts.size(); return partCount > otherPartCount || (partCount == otherPartCount && hasEndTag && !other.hasEndTag); } /** * Returns the result of adding the duration of the playlist to its start time. */ public long getEndTimeUs() { return startTimeUs + durationUs; } /** * Returns a playlist identical to this one except for the start time, the discontinuity sequence * and {@code hasDiscontinuitySequence} values. The first two are set to the specified values, * {@code hasDiscontinuitySequence} is set to true. * * @param startTimeUs The start time for the returned playlist. * @param discontinuitySequence The discontinuity sequence for the returned playlist. * @return An identical playlist including the provided discontinuity and timing information. */ public HlsMediaPlaylist copyWith(long startTimeUs, int discontinuitySequence) { return new HlsMediaPlaylist( playlistType, baseUri, tags, startOffsetUs, startTimeUs, /* hasDiscontinuitySequence= */ true, discontinuitySequence, mediaSequence, version, targetDurationUs, partTargetDurationUs, hasIndependentSegments, hasEndTag, hasProgramDateTime, protectionSchemes, segments, trailingParts, serverControl, renditionReports); } /** * Returns a playlist identical to this one except that an end tag is added. If an end tag is * already present then the playlist will return itself. 
*/ public HlsMediaPlaylist copyWithEndTag() { if (this.hasEndTag) { return this; } return new HlsMediaPlaylist( playlistType, baseUri, tags, startOffsetUs, startTimeUs, hasDiscontinuitySequence, discontinuitySequence, mediaSequence, version, targetDurationUs, partTargetDurationUs, hasIndependentSegments, /* hasEndTag= */ true, hasProgramDateTime, protectionSchemes, segments, trailingParts, serverControl, renditionReports); } }
package edu.berkeley.nlp.assignments; import edu.berkeley.nlp.io.PennTreebankReader; import edu.berkeley.nlp.ling.Tree; import edu.berkeley.nlp.ling.Trees; import edu.berkeley.nlp.parser.EnglishPennTreebankParseEvaluator; import edu.berkeley.nlp.util.*; import java.lang.reflect.Array; import java.util.*; import java.util.concurrent.*; /** * Harness for PCFG Parser project. * * @author Dan Klein */ public class PCFGParserTester { /** * Parsers are required to map sentences to trees. How a parser is constructed and trained is not specified. */ static interface Parser { Tree<String> getBestParse(List<String> sentence); } /** * Baseline parser (though not a baseline I've ever seen before). Tags the sentence using the baseline tagging * method, then either retrieves a known parse of that tag sequence, or builds a right-branching parse for unknown tag * sequences. */ static class BaselineParser implements Parser { CounterMap<List<String>, Tree<String>> knownParses; CounterMap<Integer, String> spanToCategories; Lexicon lexicon; public Tree<String> getBestParse(List<String> sentence) { List<String> tags = getBaselineTagging(sentence); Tree<String> annotatedBestParse = null; if (knownParses.keySet().contains(tags)) { annotatedBestParse = getBestKnownParse(tags); } else { annotatedBestParse = buildRightBranchParse(sentence, tags); } return TreeAnnotations.unAnnotateTree(annotatedBestParse); } private Tree<String> buildRightBranchParse(List<String> words, List<String> tags) { int currentPosition = words.size() - 1; Tree<String> rightBranchTree = buildTagTree(words, tags, currentPosition); while (currentPosition > 0) { currentPosition--; rightBranchTree = merge(buildTagTree(words, tags, currentPosition), rightBranchTree); } rightBranchTree = addRoot(rightBranchTree); return rightBranchTree; } private Tree<String> merge(Tree<String> leftTree, Tree<String> rightTree) { int span = leftTree.getYield().size() + rightTree.getYield().size(); String mostFrequentLabel = 
spanToCategories.getCounter(span).argMax(); List<Tree<String>> children = new ArrayList<Tree<String>>(); children.add(leftTree); children.add(rightTree); return new Tree<String>(mostFrequentLabel, children); } private Tree<String> addRoot(Tree<String> tree) { return new Tree<String>("ROOT", Collections.singletonList(tree)); } private Tree<String> buildTagTree(List<String> words, List<String> tags, int currentPosition) { Tree<String> leafTree = new Tree<String>(words.get(currentPosition)); Tree<String> tagTree = new Tree<String>(tags.get(currentPosition), Collections.singletonList(leafTree)); return tagTree; } private Tree<String> getBestKnownParse(List<String> tags) { return knownParses.getCounter(tags).argMax(); } private List<String> getBaselineTagging(List<String> sentence) { List<String> tags = new ArrayList<String>(); for (String word : sentence) { String tag = getBestTag(word); tags.add(tag); } return tags; } private String getBestTag(String word) { double bestScore = Double.NEGATIVE_INFINITY; String bestTag = null; for (String tag : lexicon.getAllTags()) { double score = lexicon.scoreTagging(word, tag); if (bestTag == null || score > bestScore) { bestScore = score; bestTag = tag; } } return bestTag; } public BaselineParser(List<Tree<String>> trainTrees) { System.out.print("Annotating / binarizing training trees ... "); List<Tree<String>> annotatedTrainTrees = annotateTrees(trainTrees); System.out.println("done."); System.out.print("Building grammar ... "); Grammar grammar = new Grammar(annotatedTrainTrees); System.out.println("done. (" + grammar.getStates().size() + " states)"); UnaryClosure uc = new UnaryClosure(grammar); System.out.println(uc); System.out.print("Discarding grammar and setting up a baseline parser ... 
"); lexicon = new Lexicon(annotatedTrainTrees); knownParses = new CounterMap<List<String>, Tree<String>>(); spanToCategories = new CounterMap<Integer, String>(); for (Tree<String> trainTree : annotatedTrainTrees) { List<String> tags = trainTree.getPreTerminalYield(); knownParses.incrementCount(tags, trainTree, 1.0); tallySpans(trainTree, 0); } System.out.println("done."); } private List<Tree<String>> annotateTrees(List<Tree<String>> trees) { List<Tree<String>> annotatedTrees = new ArrayList<Tree<String>>(); for (Tree<String> tree : trees) { annotatedTrees.add(TreeAnnotations.annotateTree(tree)); } return annotatedTrees; } private int tallySpans(Tree<String> tree, int start) { if (tree.isLeaf() || tree.isPreTerminal()) return 1; int end = start; for (Tree<String> child : tree.getChildren()) { int childSpan = tallySpans(child, end); end += childSpan; } String category = tree.getLabel(); if (!category.equals("ROOT")) spanToCategories.incrementCount(end - start, category, 1.0); return end - start; } } static class CKYParser implements Parser { Map<Integer, CounterMap<Integer, String>> pi; Map<Integer, CounterMap<Integer, String>> piUnary; Map<Integer, CounterMap<Integer, String>> piBinary; Map<Integer, HashMap<Integer, HashMap<String, UnaryRule>>> bpPi; Map<Integer, HashMap<Integer, HashMap<String, UnaryRule>>> bpUnary; Map<Integer, HashMap<Integer, HashMap<String, BinaryRule>>> bpBinary; Lexicon lexicon; Grammar grammar; UnaryClosure uc; public Tree<String> getBestParse(List<String> sentence) { pi = new HashMap<>(); bpPi = new HashMap<>(); piUnary = new HashMap<>(); piBinary = new HashMap<>(); bpUnary = new HashMap<>(); bpBinary = new HashMap<>(); // Initialize PIs for (int i = 1; i < sentence.size() + 1; i++) { pi.put(i, new CounterMap<Integer, String>()); bpPi.put(i, new HashMap<Integer, HashMap<String, UnaryRule>>()); piBinary.put(i, new CounterMap<Integer, String>()); bpBinary.put(i, new HashMap<Integer, HashMap<String, BinaryRule>>()); // Step 1 // Since the 
grammar doesn't have terminals, initialize all non-terminals with 0 // but the tags found by the lexicon. //for (BinaryRule binaryRule : grammar.getBinaryRules()) //{ // pi.get(i).setCount(i, binaryRule.getParent(), 0); //} //for (UnaryRule unaryRule : grammar.getUnaryRules()) //{ // pi.get(i).setCount(i, unaryRule.getParent(), 0); //} for (String tag : lexicon.getAllTags()) { if (lexicon.scoreTagging(sentence.get(i - 1), tag) > 0) { pi.get(i).setCount(i, tag, lexicon.scoreTagging(sentence.get(i - 1), tag)); UnaryRule preTerminal = new UnaryRule(tag, sentence.get(i - 1)); if (!bpPi.get(i).containsKey(i)) { bpPi.get(i).put(i, new HashMap<String, UnaryRule>()); } bpPi.get(i).get(i).put(preTerminal.getParent(), preTerminal); } } // Step 2 // Initialize all non-terminals with 0 but the ones // that produce the tags. Since all of them are 0 in // the pi Map, the max are the ones that produce the tags. piUnary.put(i, new CounterMap<Integer, String>()); bpUnary.put(i, new HashMap<Integer, HashMap<String, UnaryRule>>()); //for (BinaryRule binaryRule : grammar.getBinaryRules()) //{ // piUnary.get(i).setCount(i, binaryRule.getParent(), 0); //} //for (UnaryRule unaryRule : grammar.getUnaryRules()) //{ // piUnary.get(i).setCount(i, unaryRule.getParent(), 0); //} for (String tag : lexicon.getAllTags()) { if (!pi.get(i).getCounter(i).containsKey(tag)) { continue; } for (UnaryRule unaryRuleClosed : uc.getClosedUnaryRulesByChild(tag)) { piUnary.get(i).setCount(i, unaryRuleClosed.getParent(), unaryRuleClosed.getScore() * pi.get(i).getCount(i, tag)); if (!bpUnary.get(i).containsKey(i)) { bpUnary.get(i).put(i, new HashMap<String, UnaryRule>()); } bpUnary.get(i).get(i).put(unaryRuleClosed.getParent(), unaryRuleClosed); } UnaryRule preTerminal = new UnaryRule(tag, sentence.get(i - 1)); preTerminal.setScore(pi.get(i).getCount(i, tag)); piUnary.get(i).setCount(i, preTerminal.getParent(), preTerminal.getScore()); if (!bpUnary.get(i).containsKey(i)) { bpUnary.get(i).put(i, new 
HashMap<String, UnaryRule>()); } bpUnary.get(i).get(i).put(preTerminal.getParent(), preTerminal); } } // Iterate through all phrase lengths for (int l = 1; l < sentence.size(); l++) { // Iterate through all phrases of length l for (int i = 1, j = i + l; i < sentence.size() + 1 - l; i++, j++) { // Step 1 (Binary) for (String parent : grammar.binaryRulesByParent.keySet()) { double maxScore = 0; BinaryRule maxArg = null; for (BinaryRule binaryRuleX : grammar.getBinaryRulesByParent(parent)) { for (int s = i; s < j; s++) { if (!piUnary.get(i).getCounter(s).containsKey(binaryRuleX.getLeftChild()) || !piUnary.get(s + 1).getCounter(j).containsKey(binaryRuleX.getRightChild())) { continue; } double score = binaryRuleX.getScore() * piUnary.get(i).getCount(s, binaryRuleX.getLeftChild()) * piUnary.get(s + 1).getCount(j, binaryRuleX.getRightChild()); if (score > maxScore) { maxScore = score; // Copy the rule to a new object so the backtrack indexes don't get overwritten. maxArg = new BinaryRule(binaryRuleX.getParent(), binaryRuleX.getLeftChild(), binaryRuleX.getRightChild()); maxArg.setScore(binaryRuleX.getScore()); maxArg.setLeftChildLeftIndex(i); maxArg.setLeftChildRightIndex(s); maxArg.setRightChildLeftIndex(s + 1); maxArg.setRightChildRightIndex(j); } } } if (maxArg != null) { piBinary.get(i).setCount(j, parent, maxScore); if (!bpBinary.get(i).containsKey(j)) { bpBinary.get(i).put(j, new HashMap<String, BinaryRule>()); } bpBinary.get(i).get(j).put(parent, maxArg); } } // Step 2: Unary for (String parent : uc.closedUnaryRulesByParent.keySet()) { double maxScore = 0; UnaryRule maxArg = null; for (UnaryRule unaryRuleClosure : uc.getClosedUnaryRulesByParent(parent)) { if (!piBinary.get(i).getCounter(j).containsKey(unaryRuleClosure.getChild())) { continue; } double score = unaryRuleClosure.getScore() * piBinary.get(i).getCount(j, unaryRuleClosure.getChild()); if (score > maxScore) { maxScore = score; maxArg = unaryRuleClosure; } } if (maxArg != null) { piUnary.get(i).setCount(j, 
parent, maxScore); if (!bpUnary.get(i).containsKey(j)) { bpUnary.get(i).put(j, new HashMap<String, UnaryRule>()); } bpUnary.get(i).get(j).put(parent, maxArg); } } } } // Build tree again ArrayList<Tree<String>> children = new ArrayList<>(); String root = ""; if (sentence.size() <= 3 && piUnary.get(1) != null && piUnary.get(1).getCounter(sentence.size()) != null) { root = piUnary.get(1).getCounter(sentence.size()).argMax(); } else { root = "S"; //root = "S=ROOT"; } //children.add(buildTree(bpPi, bpUnary, bpBinary, root, 1, sentence.size(), false)); children.add(buildTree(bpPi, bpUnary, bpBinary, root, 1, sentence.size(), false)); Tree<String> annotatedBestParse = new Tree<>("ROOT", children); return TreeAnnotations.unAnnotateTree(annotatedBestParse); } private Tree<String> buildTree( Map<Integer, HashMap<Integer, HashMap<String, UnaryRule>>> bpPi, Map<Integer, HashMap<Integer, HashMap<String, UnaryRule>>> bpUnary, Map<Integer, HashMap<Integer, HashMap<String, BinaryRule>>> bpBinary, String root, int leftIndex, int rightIndex, boolean binary ) { ArrayList<Tree<String>> children = new ArrayList<>(); if (leftIndex == rightIndex && bpPi.get(leftIndex).get(rightIndex).containsKey(root)) { UnaryRule argMax = bpPi.get(leftIndex).get(rightIndex).get(root); Tree<String> terminal = new Tree<>(argMax.getChild()); children.add(terminal); } else if (binary && leftIndex != rightIndex && (bpBinary.get(leftIndex) == null || bpBinary.get(leftIndex).get(rightIndex) == null)) { System.out.println("Invalid! Binary rule could not be found in [" + leftIndex + ", " + rightIndex + "]. 
Looking for " + root); } else if (binary && leftIndex != rightIndex && bpBinary.get(leftIndex).get(rightIndex).containsKey(root)) { BinaryRule argMax = bpBinary.get(leftIndex).get(rightIndex).get(root); children.add(buildTree(bpPi, bpUnary, bpBinary, argMax.getLeftChild(), argMax.getLeftChildLeftIndex(), argMax.getLeftChildRightIndex(), false)); children.add(buildTree(bpPi, bpUnary, bpBinary, argMax.getRightChild(), argMax.getRightChildLeftIndex(), argMax.getRightChildRightIndex(), false)); } else if (!binary && (bpUnary.get(leftIndex) == null || bpUnary.get(leftIndex).get(rightIndex) == null)) { System.out.println("Invalid! Unary rule could not be found in [" + leftIndex + ", " + rightIndex + "]. Looking for " + root); } else if (!binary && bpUnary.get(leftIndex).get(rightIndex).containsKey(root)) { UnaryRule argMax = bpUnary.get(leftIndex).get(rightIndex).get(root); children.add(buildTree(bpPi, bpUnary, bpBinary, argMax.getChild(), leftIndex, rightIndex, true)); } Tree<String> treeRoot = new Tree<>(root, children); return treeRoot; } public CKYParser(List<Tree<String>> trainTrees) { pi = new HashMap<>(); piUnary = new HashMap<>(); System.out.print("Annotating / binarizing training trees ... "); List<Tree<String>> annotatedTrainTrees = annotateTrees(trainTrees); System.out.println("done."); System.out.print("Building grammar ... "); grammar = new Grammar(annotatedTrainTrees); System.out.println("done. (" + grammar.getStates().size() + " states)"); System.out.println("Build unary closures ... "); uc = new UnaryClosure(grammar); System.out.println("done."); //System.out.println(uc); System.out.println("Training Lexicon ... "); lexicon = new Lexicon(annotatedTrainTrees); System.out.println("done."); } private List<Tree<String>> annotateTrees(List<Tree<String>> trees) { for (Tree<String> tree : trees) { // Replace in place to avoid running out of memory. 
        Tree<String> annotatedTree = TreeAnnotations.annotateTree(tree);
        //Tree<String> annotatedTree = TreeAnnotationsVertical2Order.annotateTree(tree);
        //Tree<String> annotatedTree = TreeAnnotationsHorizontal1Order.annotateTree(tree);
        //Tree<String> annotatedTree = TreeAnnotationsHorizontal2Order.annotateTree(tree);
        //Tree<String> annotatedTree = TreeAnnotationsVertical2OrderHorizontal2Order.annotateTree(tree);
        tree.setLabel(annotatedTree.getLabel());
        tree.setChildren(annotatedTree.getChildren());
    }
    return trees;
}
}

/**
 * Class which contains code for annotating and binarizing trees for the parser's use, and debinarizing and
 * unannotating them for scoring.
 */
static class TreeAnnotations {

    /** Annotates a raw treebank tree; currently just a lossless binarization. */
    public static Tree<String> annotateTree(Tree<String> unAnnotatedTree) {
        // Currently, the only annotation done is a lossless binarization
        // TODO : change the annotation from a lossless binarization to a finite-order markov process (try at least 1st and 2nd order)
        // TODO : mark nodes with the label of their parent nodes, giving a second order vertical markov process
        return binarizeTree(unAnnotatedTree);
    }

    /** Losslessly binarizes a tree: n-ary nodes become right-branching chains of "@" nodes. */
    private static Tree<String> binarizeTree(Tree<String> tree) {
        String label = tree.getLabel();
        if (tree.isLeaf()) return new Tree<String>(label);
        if (tree.getChildren().size() == 1) {
            return new Tree<String>(label,
                    Collections.singletonList(binarizeTree(tree.getChildren().get(0))));
        }
        // otherwise, it's a binary-or-more local tree, so decompose it into a sequence of binary and unary trees.
        String intermediateLabel = "@" + label + "->";
        Tree<String> intermediateTree = binarizeTreeHelper(tree, 0, intermediateLabel);
        return new Tree<String>(label, intermediateTree.getChildren());
    }

    /**
     * Builds the right-branching chain for one n-ary node; the intermediate label
     * accumulates the already-generated child labels, keeping the binarization lossless.
     */
    private static Tree<String> binarizeTreeHelper(Tree<String> tree, int numChildrenGenerated, String intermediateLabel) {
        Tree<String> leftTree = tree.getChildren().get(numChildrenGenerated);
        List<Tree<String>> children = new ArrayList<Tree<String>>();
        children.add(binarizeTree(leftTree));
        if (numChildrenGenerated < tree.getChildren().size() - 1) {
            Tree<String> rightTree = binarizeTreeHelper(tree, numChildrenGenerated + 1,
                    intermediateLabel + "_" + leftTree.getLabel());
            children.add(rightTree);
        }
        return new Tree<String>(intermediateLabel, children);
    }

    /** Undoes annotation for scoring: splices out "@" nodes, then strips function tags. */
    public static Tree<String> unAnnotateTree(Tree<String> annotatedTree) {
        // Remove intermediate nodes (labels beginning with "@"
        // Remove all material on node labels which follow their base symbol (cuts at the leftmost -, ^, or : character)
        // Examples: a node with label @NP->DT_JJ will be spliced out, and a node with label NP^S will be reduced to NP
        Tree<String> debinarizedTree = Trees.spliceNodes(annotatedTree, new Filter<String>() {
            public boolean accept(String s) {
                return s.startsWith("@");
            }
        });
        Tree<String> unAnnotatedTree = (new Trees.FunctionNodeStripper()).transformTree(debinarizedTree);
        return unAnnotatedTree;
    }
}

/**
 * Binarization with 2nd-order vertical markovization: each label is suffixed
 * with "=parent" so nodes are conditioned on their parent symbol.
 */
static class TreeAnnotationsVertical2Order {

    /** Annotates a raw treebank tree with parent annotation plus binarization. */
    public static Tree<String> annotateTree(Tree<String> unAnnotatedTree) {
        // Currently, the only annotation done is a lossless binarization
        // TODO : change the annotation from a lossless binarization to a finite-order markov process (try at least 1st and 2nd order)
        return binarizeTree(unAnnotatedTree, "");
    }

    /** Binarizes while threading the parent label into every produced node label. */
    private static Tree<String> binarizeTree(Tree<String> tree, String parent) {
        if (tree.isLeaf()) return tree;
        String label = tree.getLabel() + "=" + parent;
        if (tree.getChildren().size() == 1) {
            return new Tree<>(label,
        // vertical markov process
        return binarizeTree(unAnnotatedTree);
    }

    /** Binarizes with 1st-order horizontal markovization (see helper below). */
    private static Tree<String> binarizeTree(Tree<String> tree) {
        String label = tree.getLabel();
        if (tree.isLeaf()) return new Tree<>(label);
        if (tree.getChildren().size() == 1) {
            return new Tree<>(label,
                    Collections.singletonList(binarizeTree(tree.getChildren().get(0))));
        }
        // otherwise, it's a binary-or-more local tree, so decompose it into a sequence of binary and unary trees.
        String intermediateLabel = "@" + label + "->";
        Tree<String> intermediateTree = binarizeTreeHelper(tree, 0, intermediateLabel, "");
        return new Tree<>(label, intermediateTree.getChildren());
    }

    /**
     * Chain-builder keeping only the single most recent sibling label
     * ("..._<leftLabel>") instead of the full generated-children history,
     * i.e. horizontal markovization of order 1.
     */
    private static Tree<String> binarizeTreeHelper(Tree<String> tree, int numChildrenGenerated, String intermediateLabel, String leftLabel) {
        Tree<String> leftTree = tree.getChildren().get(numChildrenGenerated);
        List<Tree<String>> children = new ArrayList<>();
        children.add(binarizeTree(leftTree));
        if (numChildrenGenerated < tree.getChildren().size() - 1) {
            Tree<String> rightTree = binarizeTreeHelper(tree, numChildrenGenerated + 1,
                    intermediateLabel, leftTree.getLabel());
            children.add(rightTree);
        }
        return new Tree<String>(intermediateLabel + "..._" + leftLabel, children);
    }

    /** Undoes annotation for scoring: splices out "@" nodes, then strips function tags. */
    public static Tree<String> unAnnotateTree(Tree<String> annotatedTree) {
        // Remove intermediate nodes (labels beginning with "@"
        // Remove all material on node labels which follow their base symbol (cuts at the leftmost -, ^, or : character)
        // Examples: a node with label @NP->DT_JJ will be spliced out, and a node with label NP^S will be reduced to NP
        Tree<String> debinarizedTree = Trees.spliceNodes(annotatedTree, new Filter<String>() {
            public boolean accept(String s) {
                return s.startsWith("@");
            }
        });
        Tree<String> unAnnotatedTree = (new Trees.FunctionNodeStripper()).transformTree(debinarizedTree);
        return unAnnotatedTree;
    }
}

/**
 * Binarization with 2nd-order horizontal markovization: intermediate labels
 * remember the two most recent sibling labels.
 */
static class TreeAnnotationsHorizontal2Order {

    /** Annotates a raw treebank tree. */
    public static Tree<String> annotateTree(Tree<String> unAnnotatedTree) {
        // Currently, the only annotation done is a lossless binarization
        // TODO : mark nodes with the label of their parent nodes, giving a second order vertical markov process
        return binarizeTree(unAnnotatedTree);
    }

    /** Binarizes with order-2 horizontal markovization (see helper below). */
    private static Tree<String> binarizeTree(Tree<String> tree) {
        String label = tree.getLabel();
        if (tree.isLeaf()) return new Tree<>(label);
        if (tree.getChildren().size() == 1) {
            return new Tree<>(label,
                    Collections.singletonList(binarizeTree(tree.getChildren().get(0))));
        }
        // otherwise, it's a binary-or-more local tree, so decompose it into a sequence of binary and unary trees.
        String intermediateLabel = "@" + label + "->";
        Tree<String> intermediateTree = binarizeTreeHelper(tree, 0, intermediateLabel, "", "");
        return new Tree<>(label, intermediateTree.getChildren());
    }

    /**
     * Chain-builder keeping the two most recent sibling labels.
     * NOTE(review): the recursive call passes (leftLabel = leftLeftLabel,
     * leftLeftLabel = current child) — this ordering looks inverted relative to
     * "shift the window by one"; confirm intended sibling-window semantics.
     */
    private static Tree<String> binarizeTreeHelper(Tree<String> tree, int numChildrenGenerated, String intermediateLabel, String leftLabel, String leftLeftLabel) {
        Tree<String> leftTree = tree.getChildren().get(numChildrenGenerated);
        List<Tree<String>> children = new ArrayList<>();
        children.add(binarizeTree(leftTree));
        if (numChildrenGenerated < tree.getChildren().size() - 1) {
            Tree<String> rightTree = binarizeTreeHelper(tree, numChildrenGenerated + 1,
                    intermediateLabel, leftLeftLabel, leftTree.getLabel());
            children.add(rightTree);
        }
        String newLabel = intermediateLabel + "...";
        newLabel += leftLabel.equals("") ? "" : "_" + leftLabel;
        return new Tree<>(newLabel + "_" + leftLeftLabel, children);
    }

    /** Undoes annotation for scoring: splices out "@" nodes, then strips function tags. */
    public static Tree<String> unAnnotateTree(Tree<String> annotatedTree) {
        // Remove intermediate nodes (labels beginning with "@"
        // Remove all material on node labels which follow their base symbol (cuts at the leftmost -, ^, or : character)
        // Examples: a node with label @NP->DT_JJ will be spliced out, and a node with label NP^S will be reduced to NP
        Tree<String> debinarizedTree = Trees.spliceNodes(annotatedTree, new Filter<String>() {
            public boolean accept(String s) {
                return s.startsWith("@");
            }
        });
        Tree<String> unAnnotatedTree = (new Trees.FunctionNodeStripper()).transformTree(debinarizedTree);
        return unAnnotatedTree;
    }
}

/**
 * Combined annotation: 2nd-order vertical ("=parent" suffixes) plus
 * 2nd-order horizontal (two-sibling windows on intermediate labels).
 */
static class TreeAnnotationsVertical2OrderHorizontal2Order {

    /** Annotates a raw treebank tree. */
    public static Tree<String> annotateTree(Tree<String> unAnnotatedTree) {
        // Currently, the only annotation done is a lossless binarization
        // TODO : mark nodes with the label of their parent nodes, giving a second order vertical markov process
        return binarizeTree(unAnnotatedTree, "");
    }

    /** Binarizes while threading the parent label into every produced node label. */
    private static Tree<String> binarizeTree(Tree<String> tree, String parent) {
        if (tree.isLeaf()) return tree;
        String label = tree.getLabel() + "=" + parent;
        if (tree.getChildren().size() == 1) {
            return new Tree<>(label,
                    Collections.singletonList(binarizeTree(tree.getChildren().get(0), tree.getLabel())));
        }
        // otherwise, it's a binary-or-more local tree, so decompose it into a sequence of binary and unary trees.
        String intermediateLabel = "@" + tree.getLabel() + "->";
        Tree<String> intermediateTree =
                binarizeTreeHelper(tree, 0, intermediateLabel, "", "", tree.getLabel(), tree.getLabel());
        return new Tree<>(label, intermediateTree.getChildren());
    }

    /**
     * Chain-builder combining the two-sibling horizontal window with the
     * "=parent" vertical annotation on every intermediate node.
     */
    private static Tree<String> binarizeTreeHelper(Tree<String> tree, int numChildrenGenerated, String intermediateLabel, String leftLabel, String leftLeftLabel, String leftParent, String rightParent) {
        Tree<String> leftTree = tree.getChildren().get(numChildrenGenerated);
        List<Tree<String>> children = new ArrayList<>();
        children.add(binarizeTree(leftTree, leftParent));
        // Label used as the *parent* annotation of the right chain node.
        String newLabel = intermediateLabel + "...";
        newLabel += leftLeftLabel.equals("") ? leftLeftLabel : "_" + leftLeftLabel;
        newLabel += "_" + leftTree.getLabel();
        if (numChildrenGenerated < tree.getChildren().size() - 1) {
            Tree<String> rightTree = binarizeTreeHelper(tree, numChildrenGenerated + 1,
                    intermediateLabel, leftLeftLabel, leftTree.getLabel(), newLabel, leftParent);
            children.add(rightTree);
        }
        // Rebuild this node's own label from the sibling window, then add the parent suffix.
        newLabel = intermediateLabel + "...";
        newLabel += leftLabel.equals("") ? "" : "_" + leftLabel;
        newLabel += "_" + leftLeftLabel;
        return new Tree<>(newLabel + "=" + rightParent, children);
    }

    /** Undoes annotation for scoring: splices out "@" nodes, then strips function tags. */
    public static Tree<String> unAnnotateTree(Tree<String> annotatedTree) {
        // Remove intermediate nodes (labels beginning with "@"
        // Remove all material on node labels which follow their base symbol (cuts at the leftmost -, ^, or : character)
        // Examples: a node with label @NP->DT_JJ will be spliced out, and a node with label NP^S will be reduced to NP
        Tree<String> debinarizedTree = Trees.spliceNodes(annotatedTree, new Filter<String>() {
            public boolean accept(String s) {
                return s.startsWith("@");
            }
        });
        Tree<String> unAnnotatedTree = (new Trees.FunctionNodeStripper()).transformTree(debinarizedTree);
        return unAnnotatedTree;
    }
}

/**
 * Simple default implementation of a lexicon, which scores word, tag pairs with a smoothed estimate of
 * P(tag|word)/P(tag).
 */
static class Lexicon {
    // word -> tag counts from training data
    CounterMap<String, String> wordToTagCounters = new CounterMap<String, String>();
    double totalTokens = 0.0;
    double totalWordTypes = 0.0;
    Counter<String> tagCounter = new Counter<String>();
    Counter<String> wordCounter = new Counter<String>();
    // how many distinct word types were first seen with each tag (used for unknown-word smoothing)
    Counter<String> typeTagCounter = new Counter<String>();

    public Set<String> getAllTags() {
        return tagCounter.keySet();
    }

    public boolean isKnown(String word) {
        return wordCounter.keySet().contains(word);
    }

    /**
     * Scores a (word, tag) pair as a smoothed P(tag|word)/P(tag) estimate;
     * rare/unknown words (count < 10) are smoothed with type-level tag counts.
     */
    public double scoreTagging(String word, String tag) {
        double p_tag = tagCounter.getCount(tag) / totalTokens;
        double c_word = wordCounter.getCount(word);
        double c_tag_and_word = wordToTagCounters.getCount(word, tag);
        if (c_word < 10) { // rare or unknown
            c_word += 1.0;
            c_tag_and_word += typeTagCounter.getCount(tag) / totalWordTypes;
        }
        double p_word = (1.0 + c_word) / (totalTokens + 1.0);
        double p_tag_given_word = c_tag_and_word / c_word;
        return p_tag_given_word / p_tag * p_word;
    }

    /** Tallies every (word, preterminal-tag) pair in the training trees. */
    public Lexicon(List<Tree<String>> trainTrees) {
        for (Tree<String> trainTree : trainTrees) {
            List<String> words = trainTree.getYield();
            List<String> tags = trainTree.getPreTerminalYield();
            for (int position = 0; position < words.size(); position++) {
                String word = words.get(position);
                String tag = tags.get(position);
                tallyTagging(word, tag);
            }
        }
    }

    /** Updates all counters for a single observed (word, tag) pair. */
    private void tallyTagging(String word, String tag) {
        if (!isKnown(word)) {
            totalWordTypes += 1.0;
            typeTagCounter.incrementCount(tag, 1.0);
        }
        totalTokens += 1.0;
        tagCounter.incrementCount(tag, 1.0);
        wordCounter.incrementCount(word, 1.0);
        wordToTagCounters.incrementCount(word, tag, 1.0);
    }
}

/**
 * Simple implementation of a PCFG grammar, offering the ability to look up rules by their child symbols. Rule
 * probability estimates are just relative frequency estimates off of training trees.
 */
static class Grammar {
    // Binary rules indexed three ways for fast lookup during parsing.
    Map<String, List<BinaryRule>> binaryRulesByLeftChild = new HashMap<String, List<BinaryRule>>();
    Map<String, List<BinaryRule>> binaryRulesByRightChild = new HashMap<String, List<BinaryRule>>();
    Map<String, List<BinaryRule>> binaryRulesByParent = new HashMap<String, List<BinaryRule>>();
    List<BinaryRule> binaryRules = new ArrayList<BinaryRule>();
    Map<String, List<UnaryRule>> unaryRulesByChild = new HashMap<String, List<UnaryRule>>();
    Map<String, List<UnaryRule>> unaryRulesByParent = new HashMap<String, List<UnaryRule>>();
    List<UnaryRule> unaryRules = new ArrayList<UnaryRule>();
    // All non-terminal symbols seen in any rule.
    Set<String> states = new HashSet<String>();

    public List<BinaryRule> getBinaryRulesByLeftChild(String leftChild) {
        return CollectionUtils.getValueList(binaryRulesByLeftChild, leftChild);
    }

    public List<BinaryRule> getBinaryRulesByRightChild(String rightChild) {
        return CollectionUtils.getValueList(binaryRulesByRightChild, rightChild);
    }

    public List<BinaryRule> getBinaryRulesByParent(String parent) {
        return CollectionUtils.getValueList(binaryRulesByParent, parent);
    }

    public List<BinaryRule> getBinaryRules() {
        return binaryRules;
    }

    public List<UnaryRule> getUnaryRulesByChild(String child) {
        return CollectionUtils.getValueList(unaryRulesByChild, child);
    }

    public List<UnaryRule> getUnaryRulesByParent(String parent) {
        return CollectionUtils.getValueList(unaryRulesByParent, parent);
    }

    public List<UnaryRule> getUnaryRules() {
        return unaryRules;
    }

    public Set<String> getStates() {
        return states;
    }

    /** Renders every rule (with score), sorted, one per line. */
    public String toString() {
        StringBuilder sb = new StringBuilder();
        List<String> ruleStrings = new ArrayList<String>();
        for (String parent : binaryRulesByParent.keySet()) {
            for (BinaryRule binaryRule : getBinaryRulesByParent(parent)) {
                ruleStrings.add(binaryRule.toString());
            }
        }
        for (String parent : unaryRulesByParent.keySet()) {
            for (UnaryRule unaryRule : getUnaryRulesByParent(parent)) {
                ruleStrings.add(unaryRule.toString());
            }
        }
        for (String ruleString : CollectionUtils.sort(ruleStrings)) {
            sb.append(ruleString);
            sb.append("\n");
        }
        return sb.toString();
    }

    /** Registers a binary rule in all indexes and records its symbols as states. */
    private void addBinary(BinaryRule binaryRule) {
        states.add(binaryRule.getParent());
        states.add(binaryRule.getLeftChild());
        states.add(binaryRule.getRightChild());
        binaryRules.add(binaryRule);
        CollectionUtils.addToValueList(binaryRulesByParent, binaryRule.getParent(), binaryRule);
        CollectionUtils.addToValueList(binaryRulesByLeftChild, binaryRule.getLeftChild(), binaryRule);
        CollectionUtils.addToValueList(binaryRulesByRightChild, binaryRule.getRightChild(), binaryRule);
    }

    /** Registers a unary rule in all indexes and records its symbols as states. */
    private void addUnary(UnaryRule unaryRule) {
        states.add(unaryRule.getParent());
        states.add(unaryRule.getChild());
        unaryRules.add(unaryRule);
        CollectionUtils.addToValueList(unaryRulesByParent, unaryRule.getParent(), unaryRule);
        CollectionUtils.addToValueList(unaryRulesByChild, unaryRule.getChild(), unaryRule);
    }

    /**
     * Estimates rule probabilities by relative frequency over the (binarized)
     * training trees: count(rule) / count(parent symbol).
     */
    public Grammar(List<Tree<String>> trainTrees) {
        Counter<UnaryRule> unaryRuleCounter = new Counter<UnaryRule>();
        Counter<BinaryRule> binaryRuleCounter = new Counter<BinaryRule>();
        Counter<String> symbolCounter = new Counter<String>();
        for (Tree<String> trainTree : trainTrees) {
            tallyTree(trainTree, symbolCounter, unaryRuleCounter, binaryRuleCounter);
        }
        for (UnaryRule unaryRule : unaryRuleCounter.keySet()) {
            double unaryProbability =
                    unaryRuleCounter.getCount(unaryRule) / symbolCounter.getCount(unaryRule.getParent());
            unaryRule.setScore(unaryProbability);
            addUnary(unaryRule);
        }
        for (BinaryRule binaryRule : binaryRuleCounter.keySet()) {
            double binaryProbability =
                    binaryRuleCounter.getCount(binaryRule) / symbolCounter.getCount(binaryRule.getParent());
            binaryRule.setScore(binaryProbability);
            addBinary(binaryRule);
        }
    }

    /** Counts rule and parent-symbol occurrences; rejects non-binarized trees. */
    private void tallyTree(Tree<String> tree, Counter<String> symbolCounter, Counter<UnaryRule> unaryRuleCounter, Counter<BinaryRule> binaryRuleCounter) {
        if (tree.isLeaf()) return;
        if (tree.isPreTerminal()) return;
        if (tree.getChildren().size() == 1) {
            UnaryRule unaryRule = makeUnaryRule(tree);
            symbolCounter.incrementCount(tree.getLabel(), 1.0);
            unaryRuleCounter.incrementCount(unaryRule, 1.0);
        }
        if (tree.getChildren().size() == 2) {
            BinaryRule binaryRule = makeBinaryRule(tree);
            symbolCounter.incrementCount(tree.getLabel(), 1.0);
            binaryRuleCounter.incrementCount(binaryRule, 1.0);
        }
        if (tree.getChildren().size() < 1 || tree.getChildren().size() > 2) {
            throw new RuntimeException("Attempted to construct a Grammar with an illegal tree (unbinarized?): " + tree);
        }
        for (Tree<String> child : tree.getChildren()) {
            tallyTree(child, symbolCounter, unaryRuleCounter, binaryRuleCounter);
        }
    }

    private UnaryRule makeUnaryRule(Tree<String> tree) {
        return new UnaryRule(tree.getLabel(), tree.getChildren().get(0).getLabel());
    }

    private BinaryRule makeBinaryRule(Tree<String> tree) {
        return new BinaryRule(tree.getLabel(), tree.getChildren().get(0).getLabel(),
                tree.getChildren().get(1).getLabel());
    }
}

/** A scored PCFG binary rule, extended with the span indexes of the best split for backtracking. */
static class BinaryRule {
    String parent;
    String leftChild;
    String rightChild;
    double score;

    // Added: Let the Rule know about its position in the tree.
    int leftChildLeftIndex;
    int leftChildRightIndex;
    int rightChildLeftIndex;
    int rightChildRightIndex;

    public int getLeftChildLeftIndex() {
        return leftChildLeftIndex;
    }

    public void setLeftChildLeftIndex(int leftChildLeftIndex) {
        this.leftChildLeftIndex = leftChildLeftIndex;
    }

    public int getLeftChildRightIndex() {
        return leftChildRightIndex;
    }

    public void setLeftChildRightIndex(int leftChildRightIndex) {
        this.leftChildRightIndex = leftChildRightIndex;
    }

    public int getRightChildLeftIndex() {
        return rightChildLeftIndex;
    }

    public void setRightChildLeftIndex(int rightChildLeftIndex) {
        this.rightChildLeftIndex = rightChildLeftIndex;
    }

    public int getRightChildRightIndex() {
        return rightChildRightIndex;
    }

    public void setRightChildRightIndex(int rightChildRightIndex) {
        this.rightChildRightIndex = rightChildRightIndex;
    }
    // end Added.
    public String getParent() {
        return parent;
    }

    public String getLeftChild() {
        return leftChild;
    }

    public String getRightChild() {
        return rightChild;
    }

    public double getScore() {
        return score;
    }

    public void setScore(double score) {
        this.score = score;
    }

    // Equality/hash intentionally ignore score and the backtrack indexes,
    // so rules with the same symbols collapse when used as Counter keys.
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof BinaryRule)) return false;
        final BinaryRule binaryRule = (BinaryRule) o;
        if (leftChild != null ? !leftChild.equals(binaryRule.leftChild) : binaryRule.leftChild != null) return false;
        if (parent != null ? !parent.equals(binaryRule.parent) : binaryRule.parent != null) return false;
        if (rightChild != null ? !rightChild.equals(binaryRule.rightChild) : binaryRule.rightChild != null) return false;
        return true;
    }

    public int hashCode() {
        int result;
        result = (parent != null ? parent.hashCode() : 0);
        result = 29 * result + (leftChild != null ? leftChild.hashCode() : 0);
        result = 29 * result + (rightChild != null ? rightChild.hashCode() : 0);
        return result;
    }

    public String toString() {
        return parent + " -> " + leftChild + " " + rightChild + " %% " + score;
    }

    public BinaryRule(String parent, String leftChild, String rightChild) {
        this.parent = parent;
        this.leftChild = leftChild;
        this.rightChild = rightChild;
    }
}

/** A scored PCFG unary rule (parent -> child). */
static class UnaryRule {
    String parent;
    String child;
    double score;

    public String getParent() {
        return parent;
    }

    public String getChild() {
        return child;
    }

    public double getScore() {
        return score;
    }

    public void setScore(double score) {
        this.score = score;
    }

    // Equality/hash intentionally ignore score so symbol-identical rules collapse.
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof UnaryRule)) return false;
        final UnaryRule unaryRule = (UnaryRule) o;
        if (child != null ? !child.equals(unaryRule.child) : unaryRule.child != null) return false;
        if (parent != null ? !parent.equals(unaryRule.parent) : unaryRule.parent != null) return false;
        return true;
    }

    public int hashCode() {
        int result;
        result = (parent != null ? parent.hashCode() : 0);
        result = 29 * result + (child != null ? child.hashCode() : 0);
        return result;
    }

    public String toString() {
        return parent + " -> " + child + " %% " + score;
    }

    public UnaryRule(String parent, String child) {
        this.parent = parent;
        this.child = child;
    }
}

/**
 * Calculates and provides accessors for the REFLEXIVE, TRANSITIVE closure of the unary rules in the provided Grammar.
 * Each rule in this closure stands for zero or more unary rules in the original grammar. Use the getPath() method to
 * retrieve the full sequence of symbols (from parent to child) which support that path.
 */
static class UnaryClosure {
    Map<String, List<UnaryRule>> closedUnaryRulesByChild = new HashMap<String, List<UnaryRule>>();
    Map<String, List<UnaryRule>> closedUnaryRulesByParent = new HashMap<String, List<UnaryRule>>();
    // Closure rule -> the full parent..child symbol path that realizes it.
    Map<UnaryRule, List<String>> pathMap = new HashMap<UnaryRule, List<String>>();

    public List<UnaryRule> getClosedUnaryRulesByChild(String child) {
        return CollectionUtils.getValueList(closedUnaryRulesByChild, child);
    }

    public List<UnaryRule> getClosedUnaryRulesByParent(String parent) {
        return CollectionUtils.getValueList(closedUnaryRulesByParent, parent);
    }

    public List<String> getPath(UnaryRule unaryRule) {
        return pathMap.get(unaryRule);
    }

    /** Renders every closure rule with its supporting path. */
    public String toString() {
        StringBuilder sb = new StringBuilder();
        for (String parent : closedUnaryRulesByParent.keySet()) {
            for (UnaryRule unaryRule : getClosedUnaryRulesByParent(parent)) {
                List<String> path = getPath(unaryRule);
//                if (path.size() == 2) continue;
                sb.append(unaryRule);
                sb.append(" ");
                sb.append(path);
                sb.append("\n");
            }
        }
        return sb.toString();
    }

    /** Builds the closure from a raw collection of unary rules. */
    public UnaryClosure(Collection<UnaryRule> unaryRules) {
        Map<UnaryRule, List<String>> closureMap = computeUnaryClosure(unaryRules);
        for (UnaryRule unaryRule : closureMap.keySet()) {
            addUnary(unaryRule, closureMap.get(unaryRule));
        }
    }

    /** Builds the closure from a Grammar, then patches in missing reflexive rules. */
    public UnaryClosure(Grammar grammar) {
        this(grammar.getUnaryRules());
        // This class won't add Reflexive rules to non-terminals that don't appear in unary rules
        // Add them manually.
for (BinaryRule binaryRule : grammar.getBinaryRules()) { boolean insert = true; for (UnaryRule uRule : this.getClosedUnaryRulesByChild(binaryRule.getParent())) { if (uRule.getParent() == uRule.getChild()) { insert = false; break; } } if (insert) { UnaryRule reflexiveRule = new UnaryRule(binaryRule.getParent(), binaryRule.getParent()); reflexiveRule.setScore(1); ArrayList<String> path = new ArrayList<>(); path.add(binaryRule.getParent()); addUnary(reflexiveRule, path); } } } private void addUnary(UnaryRule unaryRule, List<String> path) { CollectionUtils.addToValueList(closedUnaryRulesByChild, unaryRule.getChild(), unaryRule); CollectionUtils.addToValueList(closedUnaryRulesByParent, unaryRule.getParent(), unaryRule); pathMap.put(unaryRule, path); } private static Map<UnaryRule, List<String>> computeUnaryClosure(Collection<UnaryRule> unaryRules) { Map<UnaryRule, String> intermediateStates = new HashMap<UnaryRule, String>(); Counter<UnaryRule> pathCosts = new Counter<UnaryRule>(); Map<String, List<UnaryRule>> closedUnaryRulesByChild = new HashMap<String, List<UnaryRule>>(); Map<String, List<UnaryRule>> closedUnaryRulesByParent = new HashMap<String, List<UnaryRule>>(); Set<String> states = new HashSet<String>(); for (UnaryRule unaryRule : unaryRules) { relax(pathCosts, intermediateStates, closedUnaryRulesByChild, closedUnaryRulesByParent, unaryRule, null, unaryRule.getScore()); states.add(unaryRule.getParent()); states.add(unaryRule.getChild()); } for (String intermediateState : states) { List<UnaryRule> incomingRules = closedUnaryRulesByChild.get(intermediateState); List<UnaryRule> outgoingRules = closedUnaryRulesByParent.get(intermediateState); if (incomingRules == null || outgoingRules == null) continue; for (UnaryRule incomingRule : incomingRules) { for (UnaryRule outgoingRule : outgoingRules) { UnaryRule rule = new UnaryRule(incomingRule.getParent(), outgoingRule.getChild()); double newScore = pathCosts.getCount(incomingRule) * pathCosts.getCount(outgoingRule); 
relax(pathCosts, intermediateStates, closedUnaryRulesByChild, closedUnaryRulesByParent,
                            rule, intermediateState, newScore);
                    }
                }
            }
            // Every state can reach itself through an empty chain of unary
            // rules: seed the closure with probability-1.0 self-loops.
            for (String state : states) {
                UnaryRule selfLoopRule = new UnaryRule(state, state);
                relax(pathCosts, intermediateStates, closedUnaryRulesByChild, closedUnaryRulesByParent,
                        selfLoopRule, null, 1.0);
            }
            // Materialize the closure: attach the best score found to each
            // rule and record the state path that realizes it (used later to
            // restore collapsed unary chains in parse trees).
            Map<UnaryRule, List<String>> closureMap = new HashMap<UnaryRule, List<String>>();
            for (UnaryRule unaryRule : pathCosts.keySet()) {
                unaryRule.setScore(pathCosts.getCount(unaryRule));
                List<String> path = extractPath(unaryRule, intermediateStates);
                closureMap.put(unaryRule, path);
            }
            System.out.println("SIZE: " + closureMap.keySet().size());
            return closureMap;
        }

        /**
         * Reconstructs the sequence of states realizing the best unary chain
         * for {@code unaryRule} by recursively expanding the intermediate
         * state recorded during relaxation. The result starts with the rule's
         * parent and ends with its child; a pure self-loop (no intermediate)
         * yields a single-element path.
         */
        private static List<String> extractPath(UnaryRule unaryRule, Map<UnaryRule, String> intermediateStates) {
            List<String> path = new ArrayList<String>();
            path.add(unaryRule.getParent());
            String intermediateState = intermediateStates.get(unaryRule);
            if (intermediateState != null) {
                // Interior states of the parent -> intermediate sub-path
                // (loop bounds skip the endpoints, which are added separately).
                List<String> parentPath =
                        extractPath(new UnaryRule(unaryRule.getParent(), intermediateState), intermediateStates);
                for (int i = 1; i < parentPath.size() - 1; i++) {
                    String state = parentPath.get(i);
                    path.add(state);
                }
                path.add(intermediateState);
                // Interior states of the intermediate -> child sub-path.
                List<String> childPath =
                        extractPath(new UnaryRule(intermediateState, unaryRule.getChild()), intermediateStates);
                for (int i = 1; i < childPath.size() - 1; i++) {
                    String state = childPath.get(i);
                    path.add(state);
                }
            }
            // Self-loop with no intermediate: the path is just [X], not [X, X].
            if (path.size() == 1 && unaryRule.getParent().equals(unaryRule.getChild())) return path;
            path.add(unaryRule.getChild());
            return path;
        }

        /**
         * One relaxation step of the (Floyd-Warshall style) closure
         * computation: if routing {@code unaryRule} through
         * {@code intermediateState} with probability {@code newScore} is at
         * least as good as the best recorded so far, store the new score and
         * remember the intermediate state. Newly discovered rules are also
         * indexed by parent and by child.
         */
        private static void relax(Counter<UnaryRule> pathCosts, Map<UnaryRule, String> intermediateStates,
                Map<String, List<UnaryRule>> closedUnaryRulesByChild,
                Map<String, List<UnaryRule>> closedUnaryRulesByParent, UnaryRule unaryRule,
                String intermediateState, double newScore) {
            // Routing a chain through one of its own endpoints adds nothing.
            if (intermediateState != null
                    && (intermediateState.equals(unaryRule.getParent())
                            || intermediateState.equals(unaryRule.getChild()))) return;
            boolean isNewRule = !pathCosts.containsKey(unaryRule);
            double oldScore = (isNewRule ? Double.NEGATIVE_INFINITY : pathCosts.getCount(unaryRule));
            // Keep the existing entry when it is strictly better.
            if (oldScore > newScore) return;
            if (isNewRule) {
                CollectionUtils.addToValueList(closedUnaryRulesByChild, unaryRule.getChild(), unaryRule);
                CollectionUtils.addToValueList(closedUnaryRulesByParent, unaryRule.getParent(), unaryRule);
            }
            pathCosts.setCount(unaryRule, newScore);
            intermediateStates.put(unaryRule, intermediateState);
        }
    }

    /**
     * Entry point: parses command-line flags, loads Penn Treebank training
     * and test trees, trains a parser, and evaluates it.
     *
     * Recognized flags: -path, -test, -maxTrainLength, -maxTestLength,
     * -verbose, -quiet.
     */
    public static void main(String[] args) {
        // Parse command line flags and arguments
        Map<String, String> argMap = CommandLineUtils.simpleCommandLineParser(args);

        // Set up default parameters and settings
        String basePath = ".";
        boolean verbose = true;
        String testMode = "test";
        int maxTrainLength = 1000;
        int maxTestLength = 40;

        // Update defaults using command line specifications
        if (argMap.containsKey("-path")) {
            basePath = argMap.get("-path");
            System.out.println("Using base path: " + basePath);
        }
        if (argMap.containsKey("-test")) {
            testMode = "test";
            System.out.println("Testing on final test data.");
        } else {
            System.out.println("Testing on validation data.");
        }
        if (argMap.containsKey("-maxTrainLength")) {
            maxTrainLength = Integer.parseInt(argMap.get("-maxTrainLength"));
        }
        System.out.println("Maximum length for training sentences: " + maxTrainLength);
        if (argMap.containsKey("-maxTestLength")) {
            maxTestLength = Integer.parseInt(argMap.get("-maxTestLength"));
        }
        System.out.println("Maximum length for test sentences: " + maxTestLength);
        if (argMap.containsKey("-verbose")) {
            verbose = true;
        }
        if (argMap.containsKey("-quiet")) {
            verbose = false;
        }

        System.out.print("Loading training trees (sections 2-21) ... ");
        List<Tree<String>> trainTrees = readTrees(basePath, 200, 2199, maxTrainLength);
        System.out.println("done. (" + trainTrees.size() + " trees)");

        List<Tree<String>> testTrees = null;
        if (testMode.equalsIgnoreCase("validate")) {
            System.out.print("Loading validation trees (section 22) ... ");
            testTrees = readTrees(basePath, 2200, 2299, maxTestLength);
        } else {
            System.out.print("Loading test trees (section 23) ... ");
            testTrees = readTrees(basePath, 2300, 2399, maxTestLength);
        }
        System.out.println("done. (" + testTrees.size() + " trees)");

        // TODO : Build a better parser!
        //final Parser parser = new BaselineParser(trainTrees);
        Parser parser = new CKYParser(trainTrees);
        testParser(parser, testTrees, verbose);
    }

    /**
     * Parses every test sentence with {@code parser} and reports labeled
     * constituent precision/recall/F1, ignoring the ROOT label and
     * punctuation labels.
     */
    private static void testParser(final Parser parser, List<Tree<String>> testTrees, final boolean verbose) {
        final EnglishPennTreebankParseEvaluator.LabeledConstituentEval<String> eval =
                new EnglishPennTreebankParseEvaluator.LabeledConstituentEval<String>(Collections.singleton("ROOT"),
                        new HashSet<String>(Arrays.asList(new String[]{"''", "``", ".", ":", ","})));
        for (Tree<String> testTree : testTrees) {
            List<String> testSentence = testTree.getYield();
            Tree<String> guessedTree = parser.getBestParse(testSentence);
            if (verbose) {
                System.out.println("Guess:\n" + Trees.PennTreeRenderer.render(guessedTree));
                System.out.println("Gold:\n" + Trees.PennTreeRenderer.render(testTree));
            }
            eval.evaluate(guessedTree, testTree);
        }
        eval.display(true);
    }

    /**
     * Reads treebank sections [low, high] from {@code basePath}, normalizes
     * each tree, and drops trees whose yield exceeds {@code maxLength}.
     */
    private static List<Tree<String>> readTrees(String basePath, int low, int high, int maxLength) {
        Collection<Tree<String>> trees = PennTreebankReader.readTrees(basePath, low, high);
        // normalize trees
        Trees.TreeTransformer<String> treeTransformer = new Trees.StandardTreeNormalizer();
        List<Tree<String>> normalizedTreeList = new ArrayList<Tree<String>>();
        for (Tree<String> tree : trees) {
            Tree<String> normalizedTree = treeTransformer.transformTree(tree);
            if (normalizedTree.getYield().size() > maxLength) continue;
            normalizedTreeList.add(normalizedTree);
        }
        return normalizedTreeList;
    }
}
package org.apache.maven.plugin.descriptor; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import org.apache.maven.plugin.Mojo; import org.codehaus.plexus.component.repository.ComponentDescriptor; import org.codehaus.plexus.configuration.PlexusConfiguration; import org.codehaus.plexus.configuration.xml.XmlPlexusConfiguration; /** * The bean containing the Mojo descriptor. * <br/> * For more information about the usage tag, have a look to: * <a href="http://maven.apache.org/developers/mojo-api-specification.html"> * http://maven.apache.org/developers/mojo-api-specification.html</a> * * @todo is there a need for the delegation of MavenMojoDescriptor to this? * Why not just extend ComponentDescriptor here? 
*/ public class MojoDescriptor extends ComponentDescriptor<Mojo> implements Cloneable { /** The Plexus component type */ public static final String MAVEN_PLUGIN = "maven-plugin"; /** "once-per-session" execution strategy */ public static final String SINGLE_PASS_EXEC_STRATEGY = "once-per-session"; /** "always" execution strategy */ public static final String MULTI_PASS_EXEC_STRATEGY = "always"; private static final String DEFAULT_INSTANTIATION_STRATEGY = "per-lookup"; private static final String DEFAULT_LANGUAGE = "java"; private List<Parameter> parameters; private Map<String, Parameter> parameterMap; /** By default, the execution strategy is "once-per-session" */ private String executionStrategy = SINGLE_PASS_EXEC_STRATEGY; /** * The goal name for the Mojo, that users will reference from the command line to execute the Mojo directly, or * inside a POM in order to provide Mojo-specific configuration. */ private String goal; /** * Defines a default phase to bind a mojo execution to if the user does not explicitly set a phase in the POM. * <i>Note:</i> This will not automagically make a mojo run when the plugin declaration is added to the POM. It * merely enables the user to omit the <code>&lt;phase&gt;</code> element from the surrounding * <code>&lt;execution&gt;</code> element. */ private String phase; /** Specify the version when the Mojo was added to the API. Similar to Javadoc since. */ private String since; /** Reference the invocation phase of the Mojo. */ private String executePhase; /** Reference the invocation goal of the Mojo. */ private String executeGoal; /** Reference the invocation lifecycle of the Mojo. */ private String executeLifecycle; /** * Specify the version when the Mojo was deprecated to the API. Similar to Javadoc deprecated. This will trigger a * warning when a user tries to configure a parameter marked as deprecated. */ private String deprecated; /** * Flags this Mojo to run it in a multi module way, i.e. 
aggregate the build with the set of projects listed as * modules. By default, no need to aggregate the Maven project and its child modules */ private boolean aggregator = false; // ---------------------------------------------------------------------- // // ---------------------------------------------------------------------- /** Specify the required dependencies in a specified scope */ private String dependencyResolutionRequired = null; /** The scope of (transitive) dependencies that should be collected but not resolved. */ private String dependencyCollectionRequired; /** By default, the Mojo needs a Maven project to be executed */ private boolean projectRequired = true; /** By default, the Mojo is assumed to work offline as well */ private boolean onlineRequired = false; /** Plugin configuration */ private PlexusConfiguration mojoConfiguration; /** Plugin descriptor */ private PluginDescriptor pluginDescriptor; /** By default, the Mojo is inherited */ private boolean inheritedByDefault = true; /** By default, the Mojo cannot be invoked directly */ private boolean directInvocationOnly = false; /** By default, the Mojo don't need reports to run */ private boolean requiresReports = false; /** By default, mojos are not threadsafe */ private boolean threadSafe = false; /** * Default constructor. */ public MojoDescriptor() { setInstantiationStrategy( DEFAULT_INSTANTIATION_STRATEGY ); setComponentFactory( DEFAULT_LANGUAGE ); } // ---------------------------------------------------------------------- // // ---------------------------------------------------------------------- /** * @return the language of this Mojo, i.e. <code>java</code> */ public String getLanguage() { return getComponentFactory(); } /** * @param language the new language */ public void setLanguage( String language ) { setComponentFactory( language ); } /** * @return <code>true</code> if the Mojo is deprecated, <code>false</code> otherwise. 
*/ public String getDeprecated() { return deprecated; } /** * @param deprecated <code>true</code> to deprecate the Mojo, <code>false</code> otherwise. */ public void setDeprecated( String deprecated ) { this.deprecated = deprecated; } /** * @return the list of parameters */ public List<Parameter> getParameters() { return parameters; } /** * @param parameters the new list of parameters * @throws DuplicateParameterException if any */ public void setParameters( List<Parameter> parameters ) throws DuplicateParameterException { for ( Parameter parameter : parameters ) { addParameter( parameter ); } } /** * @param parameter add a new parameter * @throws DuplicateParameterException if any */ public void addParameter( Parameter parameter ) throws DuplicateParameterException { if ( parameters != null && parameters.contains( parameter ) ) { throw new DuplicateParameterException( parameter.getName() + " has been declared multiple times in mojo with goal: " + getGoal() + " (implementation: " + getImplementation() + ")" ); } if ( parameters == null ) { parameters = new LinkedList<Parameter>(); } parameters.add( parameter ); } /** * @return the list parameters as a Map */ public Map<String, Parameter> getParameterMap() { if ( parameterMap == null ) { parameterMap = new HashMap<String, Parameter>(); if ( parameters != null ) { for ( Parameter pd : parameters ) { parameterMap.put( pd.getName(), pd ); } } } return parameterMap; } // ---------------------------------------------------------------------- // Dependency requirement // ---------------------------------------------------------------------- /** * @param requiresDependencyResolution the new required dependencies in a specified scope */ public void setDependencyResolutionRequired( String requiresDependencyResolution ) { this.dependencyResolutionRequired = requiresDependencyResolution; } public String getDependencyResolutionRequired() { return dependencyResolutionRequired; } /** * @return the required dependencies in a 
specified scope * @TODO the name is not intelligible */ @Deprecated public String isDependencyResolutionRequired() { return dependencyResolutionRequired; } public void setDependencyCollectionRequired( String requiresDependencyCollection ) { this.dependencyCollectionRequired = requiresDependencyCollection; } /** * Gets the scope of (transitive) dependencies that should be collected. Dependency collection refers to the process * of calculating the complete dependency tree in terms of artifact coordinates. In contrast to dependency * resolution, this does not include the download of the files for the dependency artifacts. It is meant for mojos * that only want to analyze the set of transitive dependencies, in particular during early lifecycle phases where * full dependency resolution might fail due to projects which haven't been built yet. * * @return The scope of (transitive) dependencies that should be collected or {@code null} if none. */ public String getDependencyCollectionRequired() { return dependencyCollectionRequired; } // ---------------------------------------------------------------------- // Project requirement // ---------------------------------------------------------------------- /** * @param requiresProject <code>true</code> if the Mojo needs a Maven project to be executed, <code>false</code> * otherwise. */ public void setProjectRequired( boolean requiresProject ) { this.projectRequired = requiresProject; } /** * @return <code>true</code> if the Mojo needs a Maven project to be executed, <code>false</code> otherwise. */ public boolean isProjectRequired() { return projectRequired; } // ---------------------------------------------------------------------- // Online vs. Offline requirement // ---------------------------------------------------------------------- /** * @param requiresOnline <code>true</code> if the Mojo is online, <code>false</code> otherwise. 
*/ public void setOnlineRequired( boolean requiresOnline ) { this.onlineRequired = requiresOnline; } /** * @return <code>true</code> if the Mojo is online, <code>false</code> otherwise. */ // blech! this isn't even intelligible as a method name. provided for // consistency... public boolean isOnlineRequired() { return onlineRequired; } /** * @return <code>true</code> if the Mojo is online, <code>false</code> otherwise. */ // more english-friendly method...keep the code clean! :) public boolean requiresOnline() { return onlineRequired; } /** * @return the binded phase name of the Mojo */ public String getPhase() { return phase; } /** * @param phase the new binded phase name of the Mojo */ public void setPhase( String phase ) { this.phase = phase; } /** * @return the version when the Mojo was added to the API */ public String getSince() { return since; } /** * @param since the new version when the Mojo was added to the API */ public void setSince( String since ) { this.since = since; } /** * @return The goal name of the Mojo */ public String getGoal() { return goal; } /** * @param goal The new goal name of the Mojo */ public void setGoal( String goal ) { this.goal = goal; } /** * @return the invocation phase of the Mojo */ public String getExecutePhase() { return executePhase; } /** * @param executePhase the new invocation phase of the Mojo */ public void setExecutePhase( String executePhase ) { this.executePhase = executePhase; } /** * @return <code>true</code> if the Mojo uses <code>always</code> for the <code>executionStrategy</code> */ public boolean alwaysExecute() { return MULTI_PASS_EXEC_STRATEGY.equals( executionStrategy ); } /** * @return the execution strategy */ public String getExecutionStrategy() { return executionStrategy; } /** * @param executionStrategy the new execution strategy */ public void setExecutionStrategy( String executionStrategy ) { this.executionStrategy = executionStrategy; } /** * @return the mojo configuration */ public 
PlexusConfiguration getMojoConfiguration() { if ( mojoConfiguration == null ) { mojoConfiguration = new XmlPlexusConfiguration( "configuration" ); } return mojoConfiguration; } /** * @param mojoConfiguration a new mojo configuration */ public void setMojoConfiguration( PlexusConfiguration mojoConfiguration ) { this.mojoConfiguration = mojoConfiguration; } /** {@inheritDoc} */ public String getRole() { return Mojo.ROLE; } /** {@inheritDoc} */ public String getRoleHint() { return getId(); } /** * @return the id of the mojo, based on the goal name */ public String getId() { return getPluginDescriptor().getId() + ":" + getGoal(); } /** * @return the full goal name * @see PluginDescriptor#getGoalPrefix() * @see #getGoal() */ public String getFullGoalName() { return getPluginDescriptor().getGoalPrefix() + ":" + getGoal(); } /** {@inheritDoc} */ public String getComponentType() { return MAVEN_PLUGIN; } /** * @return the plugin descriptor */ public PluginDescriptor getPluginDescriptor() { return pluginDescriptor; } /** * @param pluginDescriptor the new plugin descriptor */ public void setPluginDescriptor( PluginDescriptor pluginDescriptor ) { this.pluginDescriptor = pluginDescriptor; } /** * @return <code>true</code> if the Mojo is herited, <code>false</code> otherwise. */ public boolean isInheritedByDefault() { return inheritedByDefault; } /** * @param inheritedByDefault <code>true</code> if the Mojo is herited, <code>false</code> otherwise. 
*/ public void setInheritedByDefault( boolean inheritedByDefault ) { this.inheritedByDefault = inheritedByDefault; } /** {@inheritDoc} */ public boolean equals( Object object ) { if ( this == object ) { return true; } if ( object instanceof MojoDescriptor ) { MojoDescriptor other = (MojoDescriptor) object; if ( !compareObjects( getPluginDescriptor(), other.getPluginDescriptor() ) ) { return false; } if ( !compareObjects( getGoal(), other.getGoal() ) ) { return false; } return true; } return false; } private boolean compareObjects( Object first, Object second ) { if ( ( first == null && second != null ) || ( first != null && second == null ) ) { return false; } return first.equals( second ); } /** {@inheritDoc} */ public int hashCode() { int result = 1; String goal = getGoal(); if ( goal != null ) { result += goal.hashCode(); } PluginDescriptor pd = getPluginDescriptor(); if ( pd != null ) { result -= pd.hashCode(); } return result; } /** * @return the invocation lifecycle of the Mojo */ public String getExecuteLifecycle() { return executeLifecycle; } /** * @param executeLifecycle the new invocation lifecycle of the Mojo */ public void setExecuteLifecycle( String executeLifecycle ) { this.executeLifecycle = executeLifecycle; } /** * @param aggregator <code>true</code> if the Mojo uses the Maven project and its child modules, * <code>false</code> otherwise. */ public void setAggregator( boolean aggregator ) { this.aggregator = aggregator; } /** * @return <code>true</code> if the Mojo uses the Maven project and its child modules, * <code>false</code> otherwise. */ public boolean isAggregator() { return aggregator; } /** * @return <code>true</code> if the Mojo cannot be invoked directly, <code>false</code> otherwise. */ public boolean isDirectInvocationOnly() { return directInvocationOnly; } /** * @param directInvocationOnly <code>true</code> if the Mojo cannot be invoked directly, * <code>false</code> otherwise. 
*/ public void setDirectInvocationOnly( boolean directInvocationOnly ) { this.directInvocationOnly = directInvocationOnly; } /** * @return <code>true</code> if the Mojo needs reports to run, <code>false</code> otherwise. */ public boolean isRequiresReports() { return requiresReports; } /** * @param requiresReports <code>true</code> if the Mojo needs reports to run, <code>false</code> otherwise. */ public void setRequiresReports( boolean requiresReports ) { this.requiresReports = requiresReports; } /** * @param executeGoal the new invocation goal of the Mojo */ public void setExecuteGoal( String executeGoal ) { this.executeGoal = executeGoal; } /** * @return the invocation goal of the Mojo */ public String getExecuteGoal() { return executeGoal; } /** * @return True if the <code>Mojo</code> is thread-safe and can be run safely in parallel */ public boolean isThreadSafe() { return threadSafe; } /** * @param threadSafe indicates that the mojo is thread-safe and can be run safely in parallel */ public void setThreadSafe( boolean threadSafe ) { this.threadSafe = threadSafe; } /** * @return {@code true} if this mojo forks either a goal or the lifecycle, {@code false} otherwise. */ public boolean isForking() { return ( getExecuteGoal() != null && getExecuteGoal().length() > 0 ) || ( getExecutePhase() != null && getExecutePhase().length() > 0 ); } /** * Creates a shallow copy of this mojo descriptor. */ @Override public MojoDescriptor clone() { try { return (MojoDescriptor) super.clone(); } catch ( CloneNotSupportedException e ) { throw new UnsupportedOperationException( e ); } } }
package org.zalando.nakadi.controller; import com.codahale.metrics.Counter; import com.codahale.metrics.MetricRegistry; import com.google.common.collect.ImmutableList; import org.hamcrest.MatcherAssert; import org.junit.Before; import org.junit.Test; import org.mockito.ArgumentCaptor; import org.mockito.Mockito; import org.mockito.exceptions.base.MockitoException; import org.springframework.http.HttpStatus; import org.springframework.http.converter.StringHttpMessageConverter; import org.springframework.test.web.servlet.MockMvc; import org.springframework.web.servlet.mvc.method.annotation.StreamingResponseBody; import org.zalando.nakadi.cache.EventTypeCache; import org.zalando.nakadi.config.SecuritySettings; import org.zalando.nakadi.domain.CursorError; import org.zalando.nakadi.domain.EventType; import org.zalando.nakadi.domain.EventTypeBase; import org.zalando.nakadi.domain.NakadiCursor; import org.zalando.nakadi.domain.PartitionStatistics; import org.zalando.nakadi.domain.Timeline; import org.zalando.nakadi.domain.storage.Storage; import org.zalando.nakadi.exceptions.runtime.AccessDeniedException; import org.zalando.nakadi.exceptions.runtime.InvalidCursorException; import org.zalando.nakadi.exceptions.runtime.NoSuchEventTypeException; import org.zalando.nakadi.exceptions.runtime.ServiceTemporarilyUnavailableException; import org.zalando.nakadi.plugin.api.authz.AuthorizationService; import org.zalando.nakadi.repository.EventConsumer; import org.zalando.nakadi.repository.TopicRepository; import org.zalando.nakadi.repository.kafka.KafkaPartitionStatistics; import org.zalando.nakadi.security.Client; import org.zalando.nakadi.security.ClientResolver; import org.zalando.nakadi.security.FullAccessClient; import org.zalando.nakadi.security.NakadiClient; import org.zalando.nakadi.service.AdminService; import org.zalando.nakadi.service.AuthorizationValidator; import org.zalando.nakadi.service.EventStream; import org.zalando.nakadi.service.EventStreamChecks; import 
org.zalando.nakadi.service.EventStreamConfig; import org.zalando.nakadi.service.EventStreamFactory; import org.zalando.nakadi.service.EventTypeChangeListener; import org.zalando.nakadi.service.converter.CursorConverterImpl; import org.zalando.nakadi.service.timeline.TimelineService; import org.zalando.nakadi.util.ThreadUtils; import org.zalando.problem.Problem; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.ByteArrayOutputStream; import java.io.Closeable; import java.io.IOException; import java.io.OutputStream; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.Collections; import java.util.Date; import java.util.LinkedList; import java.util.List; import java.util.Optional; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; import static org.springframework.test.web.servlet.setup.MockMvcBuilders.standaloneSetup; import static org.zalando.nakadi.config.SecuritySettings.AuthMode.OFF; import static org.zalando.nakadi.metrics.MetricUtils.metricNameFor; import static org.zalando.nakadi.utils.TestUtils.JACKSON_2_HTTP_MESSAGE_CONVERTER; import static org.zalando.nakadi.utils.TestUtils.JSON_TEST_HELPER; import static org.zalando.nakadi.utils.TestUtils.OBJECT_MAPPER; import 
static org.zalando.nakadi.utils.TestUtils.buildDefaultEventType; import static org.zalando.nakadi.utils.TestUtils.buildTimeline; import static org.zalando.nakadi.utils.TestUtils.mockAccessDeniedException; import static org.zalando.nakadi.utils.TestUtils.waitFor; import static org.zalando.problem.Status.BAD_REQUEST; import static org.zalando.problem.Status.FORBIDDEN; import static org.zalando.problem.Status.INTERNAL_SERVER_ERROR; import static org.zalando.problem.Status.NOT_FOUND; import static org.zalando.problem.Status.PRECONDITION_FAILED; import static org.zalando.problem.Status.SERVICE_UNAVAILABLE; import static org.zalando.problem.Status.UNPROCESSABLE_ENTITY; public class EventStreamControllerTest { private static final String TEST_EVENT_TYPE_NAME = "test"; private static final String TEST_TOPIC = "test-topic"; private static final EventType EVENT_TYPE = buildDefaultEventType(); private static final String CLIENT_ID = "clientId"; private static final Client FULL_ACCESS_CLIENT = new FullAccessClient(CLIENT_ID); private static final String KAFKA_CLIENT_ID = CLIENT_ID + "-" + TEST_EVENT_TYPE_NAME; private HttpServletRequest requestMock; private HttpServletResponse responseMock; private TopicRepository topicRepositoryMock; private EventStreamFactory eventStreamFactoryMock; private EventStreamController controller; private MetricRegistry metricRegistry; private MetricRegistry streamMetrics; private SecuritySettings settings; private EventStreamChecks eventStreamChecks; private EventTypeCache eventTypeCache; private TimelineService timelineService; private MockMvc mockMvc; private Timeline timeline; private AuthorizationValidator authorizationValidator; private EventTypeChangeListener eventTypeChangeListener; private AdminService adminService; private AuthorizationService authorizationService; @Before public void setup() throws UnknownHostException, InvalidCursorException { EVENT_TYPE.setName(TEST_EVENT_TYPE_NAME); timeline = buildTimeline(TEST_EVENT_TYPE_NAME, 
TEST_TOPIC, new Date()); topicRepositoryMock = mock(TopicRepository.class); adminService = mock(AdminService.class); authorizationService = mock(AuthorizationService.class); when(authorizationService.getSubject()).thenReturn(Optional.empty()); when(topicRepositoryMock.topicExists(TEST_TOPIC)).thenReturn(true); eventStreamFactoryMock = mock(EventStreamFactory.class); eventTypeCache = mock(EventTypeCache.class); requestMock = mock(HttpServletRequest.class); when(requestMock.getRemoteAddr()).thenReturn(InetAddress.getLoopbackAddress().getHostAddress()); when(requestMock.getRemotePort()).thenReturn(12345); responseMock = mock(HttpServletResponse.class); metricRegistry = new MetricRegistry(); streamMetrics = new MetricRegistry(); final EventConsumer.LowLevelConsumer eventConsumerMock = mock(EventConsumer.LowLevelConsumer.class); when(topicRepositoryMock.createEventConsumer( eq(KAFKA_CLIENT_ID), any(), any())) .thenReturn(eventConsumerMock); eventStreamChecks = Mockito.mock(EventStreamChecks.class); Mockito.when(eventStreamChecks.isConsumptionBlocked(any(), any())).thenReturn(false); timelineService = mock(TimelineService.class); when(timelineService.getTopicRepository((Timeline) any())).thenReturn(topicRepositoryMock); when(timelineService.getTopicRepository((EventTypeBase) any())).thenReturn(topicRepositoryMock); when(timelineService.getTopicRepository((Storage) any())).thenReturn(topicRepositoryMock); when(timelineService.getActiveTimelinesOrdered(any())).thenReturn(Collections.singletonList(timeline)); when(timelineService.getAllTimelinesOrdered(any())).thenReturn(Collections.singletonList(timeline)); authorizationValidator = mock(AuthorizationValidator.class); eventTypeChangeListener = mock(EventTypeChangeListener.class); when(eventTypeChangeListener.registerListener(any(), any())).thenReturn(mock(Closeable.class)); controller = new EventStreamController( eventTypeCache, timelineService, OBJECT_MAPPER, eventStreamFactoryMock, metricRegistry, streamMetrics, 
eventStreamChecks, new CursorConverterImpl(eventTypeCache, timelineService), authorizationValidator, eventTypeChangeListener, null); settings = mock(SecuritySettings.class); when(settings.getAuthMode()).thenReturn(OFF); when(settings.getAdminClientId()).thenReturn("org/zalando/nakadi"); mockMvc = standaloneSetup(controller) .setMessageConverters(new StringHttpMessageConverter(), JACKSON_2_HTTP_MESSAGE_CONVERTER) .setCustomArgumentResolvers(new ClientResolver(settings, authorizationService)) .build(); } @Test public void testCursorsForNulls() throws Exception { when(eventTypeCache.getEventType(TEST_EVENT_TYPE_NAME)).thenReturn(EVENT_TYPE); MatcherAssert.assertThat( responseToString(createStreamingResponseBody("[{\"partition\":null,\"offset\":\"0\"}]")), JSON_TEST_HELPER.matchesObject( Problem.valueOf(PRECONDITION_FAILED, "partition must not be null"))); MatcherAssert.assertThat( responseToString(createStreamingResponseBody("[{\"partition\":\"0\",\"offset\":null}]")), JSON_TEST_HELPER.matchesObject( Problem.valueOf(PRECONDITION_FAILED, "offset must not be null"))); } @Test @SuppressWarnings("unchecked") public void whenNoParamsThenDefaultsAreUsed() throws Exception { final ArgumentCaptor<EventStreamConfig> configCaptor = ArgumentCaptor.forClass(EventStreamConfig.class); final EventConsumer.LowLevelConsumer eventConsumerMock = mock(EventConsumer.LowLevelConsumer.class); when(topicRepositoryMock.createEventConsumer( any(), any(), any())) .thenReturn(eventConsumerMock); final EventStream eventStreamMock = mock(EventStream.class); when(eventStreamFactoryMock.createEventStream(any(), any(), configCaptor.capture(), any())) .thenReturn(eventStreamMock); when(eventTypeCache.getEventType(TEST_EVENT_TYPE_NAME)).thenReturn(EVENT_TYPE); mockMvc.perform( get(String.format("/event-types/%s/events", TEST_EVENT_TYPE_NAME)) .header("X-nakadi-cursors", "[{\"partition\":\"0\",\"offset\":\"000000000000000000\"}]")) .andExpect(status().isOk()); // we have to retry here as mockMvc exits 
// NOTE(review): this excerpt begins inside a test method whose opening
// statements are above this chunk; the waitFor below polls until the mocked
// factory has captured the EventStreamConfig, then asserts the defaulted
// limits/timeouts that the controller builds when the client supplies none
// at the very beginning, before the body starts streaming
        waitFor(() -> {
            final EventStreamConfig actualConfig = configCaptor.getValue();
            assertThat(actualConfig.getBatchLimit(), equalTo(1));
            assertThat(actualConfig.getBatchTimeout(), equalTo(30));
            assertThat(actualConfig.getCursors(), equalTo(ImmutableList.of(
                    NakadiCursor.of(timeline, "0", "000000000000000000"))));
            assertThat(actualConfig.getStreamKeepAliveLimit(), equalTo(0));
            assertThat(actualConfig.getStreamLimit(), equalTo(0));
            // stream timeout is randomized near the max; allow the observed jitter
            assertThat(actualConfig.getStreamTimeout(),
                    greaterThanOrEqualTo(EventStreamConfig.MAX_STREAM_TIMEOUT - 1200));
            assertThat(actualConfig.getStreamTimeout(),
                    lessThanOrEqualTo(EventStreamConfig.MAX_STREAM_TIMEOUT));
        }, 2000, 50, MockitoException.class);
    }

    // Unknown event type name must surface as a 404 problem body.
    @Test
    public void whenTopicNotExistsThenTopicNotFound() throws IOException {
        when(eventTypeCache.getEventType(TEST_EVENT_TYPE_NAME)).thenThrow(NoSuchEventTypeException.class);

        final StreamingResponseBody responseBody = createStreamingResponseBody();

        final Problem expectedProblem = Problem.valueOf(NOT_FOUND, "topic not found");
        MatcherAssert.assertThat(responseToString(responseBody), JSON_TEST_HELPER.matchesObject(expectedProblem));
    }

    // stream_limit < batch_limit is rejected with 422 before any streaming starts.
    @Test
    public void whenStreamLimitLowerThanBatchLimitThenUnprocessableEntity() throws IOException {
        when(eventTypeCache.getEventType(TEST_EVENT_TYPE_NAME)).thenReturn(EVENT_TYPE);

        final StreamingResponseBody responseBody = createStreamingResponseBody(20, 10, 0, 0, 0, null);

        final Problem expectedProblem = Problem.valueOf(UNPROCESSABLE_ENTITY,
                "stream_limit can't be lower than batch_limit");
        MatcherAssert.assertThat(responseToString(responseBody), JSON_TEST_HELPER.matchesObject(expectedProblem));
    }

    // stream_timeout < batch_flush_timeout is rejected with 422.
    @Test
    public void whenStreamTimeoutLowerThanBatchTimeoutThenUnprocessableEntity() throws IOException {
        when(eventTypeCache.getEventType(TEST_EVENT_TYPE_NAME)).thenReturn(EVENT_TYPE);

        final StreamingResponseBody responseBody = createStreamingResponseBody(0, 0, 20, 10, 0, null);

        final Problem expectedProblem = Problem.valueOf(UNPROCESSABLE_ENTITY,
                "stream_timeout can't be lower than batch_flush_timeout");
        MatcherAssert.assertThat(responseToString(responseBody), JSON_TEST_HELPER.matchesObject(expectedProblem));
    }

    // batch_limit of 0 (below the minimum of 1) is rejected with 422.
    @Test
    public void whenBatchLimitLowerThan1ThenUnprocessableEntity() throws IOException {
        when(eventTypeCache.getEventType(TEST_EVENT_TYPE_NAME)).thenReturn(EVENT_TYPE);

        final StreamingResponseBody responseBody = createStreamingResponseBody(0, 0, 0, 0, 0, null);

        final Problem expectedProblem = Problem.valueOf(UNPROCESSABLE_ENTITY,
                "batch_limit can't be lower than 1");
        MatcherAssert.assertThat(responseToString(responseBody), JSON_TEST_HELPER.matchesObject(expectedProblem));
    }

    // Unparseable X-nakadi-cursors header yields 400.
    @Test
    public void whenWrongCursorsFormatThenBadRequest() throws IOException {
        when(eventTypeCache.getEventType(TEST_EVENT_TYPE_NAME)).thenReturn(EVENT_TYPE);

        final StreamingResponseBody responseBody =
                createStreamingResponseBody(0, 0, 0, 0, 0, "cursors_with_wrong_format");

        final Problem expectedProblem = Problem.valueOf(BAD_REQUEST,
                "incorrect syntax of X-nakadi-cursors header");
        MatcherAssert.assertThat(responseToString(responseBody), JSON_TEST_HELPER.matchesObject(expectedProblem));
    }

    // A cursor pointing at data past retention is reported as 412 with a hint
    // to PATCH the subscription cursor.
    @Test
    public void whenInvalidCursorsThenPreconditionFailed() throws Exception {
        final NakadiCursor cursor = NakadiCursor.of(timeline, "0", "000000000000000000");
        when(eventTypeCache.getEventType(TEST_EVENT_TYPE_NAME)).thenReturn(EVENT_TYPE);
        when(timelineService.createEventConsumer(eq(KAFKA_CLIENT_ID), any()))
                .thenThrow(new InvalidCursorException(CursorError.UNAVAILABLE, cursor));

        final StreamingResponseBody responseBody = createStreamingResponseBody(1, 0, 0, 0, 0,
                "[{\"partition\":\"0\",\"offset\":\"00000000000000000\"}]");

        final Problem expectedProblem = Problem.valueOf(PRECONDITION_FAILED,
                "offset 000000000000000000 for partition 0 event type " + TEST_EVENT_TYPE_NAME
                        + " is unavailable as retention time of data elapsed. "
                        + "PATCH partition offset with valid and available offset");
        MatcherAssert.assertThat(responseToString(responseBody), JSON_TEST_HELPER.matchesObject(expectedProblem));
    }

    // Without an X-nakadi-cursors header the controller must start from the
    // latest offsets reported by the topic repository.
    @Test
    public void whenNoCursorsThenLatestOffsetsAreUsed() throws IOException, InvalidCursorException {
        when(eventTypeCache.getEventType(TEST_EVENT_TYPE_NAME)).thenReturn(EVENT_TYPE);
        final List<PartitionStatistics> tps2 = ImmutableList.of(
                new KafkaPartitionStatistics(timeline, 0, 0, 87),
                new KafkaPartitionStatistics(timeline, 1, 0, 34));
        when(timelineService.getActiveTimeline(any(EventType.class))).thenReturn(timeline);
        when(topicRepositoryMock.loadTopicStatistics(eq(Collections.singletonList(timeline))))
                .thenReturn(tps2);

        final ArgumentCaptor<EventStreamConfig> configCaptor = ArgumentCaptor.forClass(EventStreamConfig.class);
        final EventStream eventStreamMock = mock(EventStream.class);
        when(eventStreamFactoryMock.createEventStream(any(), any(), configCaptor.capture(), any()))
                .thenReturn(eventStreamMock);

        final StreamingResponseBody responseBody = createStreamingResponseBody(1, 0, 1, 1, 0, null);
        responseBody.writeTo(new ByteArrayOutputStream());

        final EventStreamConfig streamConfig = configCaptor.getValue();
        assertThat(
                streamConfig.getCursors(),
                equalTo(tps2.stream().map(PartitionStatistics::getLast).collect(Collectors.toList())));
    }

    // Happy path: all query parameters flow into EventStreamConfig, the stream
    // is started, and the response is flushed and closed.
    @Test
    public void whenNormalCaseThenParametersArePassedToConfigAndStreamStarted() throws Exception {
        final EventConsumer eventConsumerMock = mock(EventConsumer.class);
        when(eventTypeCache.getEventType(TEST_EVENT_TYPE_NAME)).thenReturn(EVENT_TYPE);
        when(timelineService.createEventConsumer(
                eq(KAFKA_CLIENT_ID),
                eq(ImmutableList.of(NakadiCursor.of(timeline, "0", "000000000000000000")))))
                .thenReturn(eventConsumerMock);
        when(timelineService.getActiveTimeline(eq(EVENT_TYPE))).thenReturn(timeline);

        final ArgumentCaptor<Integer> statusCaptor = getStatusCaptor();
        final ArgumentCaptor<String> contentTypeCaptor = getContentTypeCaptor();

        final ArgumentCaptor<EventStreamConfig> configCaptor = ArgumentCaptor.forClass(EventStreamConfig.class);
        final EventStream eventStreamMock = mock(EventStream.class);
        when(eventStreamFactoryMock.createEventStream(any(), any(), configCaptor.capture(), any()))
                .thenReturn(eventStreamMock);

        final StreamingResponseBody responseBody = createStreamingResponseBody(1, 2, 3, 4, 5,
                "[{\"partition\":\"0\",\"offset\":\"000000000000000000\"}]");
        final OutputStream outputStream = mock(OutputStream.class);
        responseBody.writeTo(outputStream);

        final EventStreamConfig streamConfig = configCaptor.getValue();
        assertThat(
                streamConfig,
                equalTo(EventStreamConfig
                        .builder()
                        .withCursors(ImmutableList.of(
                                NakadiCursor.of(timeline, "0", "000000000000000000")))
                        .withBatchLimit(1)
                        .withStreamLimit(2)
                        .withBatchTimeout(3)
                        .withStreamTimeout(4)
                        .withStreamKeepAliveLimit(5)
                        .build()
                ));

        assertThat(statusCaptor.getValue(), equalTo(HttpStatus.OK.value()));
        assertThat(contentTypeCaptor.getValue(), equalTo("application/x-json-stream"));

        verify(timelineService, times(1)).createEventConsumer(eq(KAFKA_CLIENT_ID),
                eq(ImmutableList.of(NakadiCursor.of(timeline, "0", "000000000000000000"))));
        verify(eventStreamFactoryMock, times(1)).createEventStream(eq(outputStream),
                eq(eventConsumerMock), eq(streamConfig), any());
        verify(eventStreamMock, times(1)).streamEvents(any());
        // flushed twice (headers + body) and closed exactly once
        verify(outputStream, times(2)).flush();
        verify(outputStream, times(1)).close();
    }

    // Transient backend unavailability maps to a 503 problem body.
    @Test
    public void whenNakadiExceptionIsThrownThenServiceUnavailable() throws IOException {
        when(eventTypeCache.getEventType(TEST_EVENT_TYPE_NAME))
                .thenThrow(ServiceTemporarilyUnavailableException.class);

        final StreamingResponseBody responseBody = createStreamingResponseBody();

        final Problem expectedProblem = Problem.valueOf(SERVICE_UNAVAILABLE);
        MatcherAssert.assertThat(responseToString(responseBody), JSON_TEST_HELPER.matchesObject(expectedProblem));
    }

    // Any unexpected runtime exception maps to a 500 problem body.
    @Test
    public void whenExceptionIsThrownThenInternalServerError() throws IOException {
        when(eventTypeCache.getEventType(TEST_EVENT_TYPE_NAME)).thenThrow(NullPointerException.class);

        final StreamingResponseBody responseBody = createStreamingResponseBody();

        final Problem expectedProblem = Problem.valueOf(INTERNAL_SERVER_ERROR);
        MatcherAssert.assertThat(responseToString(responseBody), JSON_TEST_HELPER.matchesObject(expectedProblem));
    }

    // The consumers-count metric must track connects and disconnects: three
    // client threads are attached one by one, then interrupted one by one,
    // asserting the counter after every step.
    @Test
    public void reportCurrentNumberOfConsumers() throws Exception {
        when(eventTypeCache.getEventType(TEST_EVENT_TYPE_NAME)).thenReturn(EVENT_TYPE);
        final EventStream eventStream = mock(EventStream.class);

        // block to simulate the streaming until thread is interrupted
        Mockito.doAnswer(invocation -> {
            while (!Thread.interrupted()) {
                ThreadUtils.sleep(100);
            }
            return null;
        }).when(eventStream).streamEvents(any());
        when(eventStreamFactoryMock.createEventStream(any(), any(), any(), any())).thenReturn(eventStream);

        // "connect" to the server
        final StreamingResponseBody responseBody = createStreamingResponseBody();

        final LinkedList<Thread> clients = new LinkedList<>();
        final Counter counter = metricRegistry.counter(metricNameFor(TEST_EVENT_TYPE_NAME,
                EventStreamController.CONSUMERS_COUNT_METRIC_NAME));

        // create clients...
        for (int i = 0; i < 3; i++) {
            final Thread client = new Thread(() -> {
                try {
                    responseBody.writeTo(new ByteArrayOutputStream());
                } catch (final IOException e) {
                    throw new RuntimeException(e);
                }
            });
            client.start();
            clients.add(client);

            // metric updates asynchronously once the client is registered
            waitFor(
                    () -> assertThat(counter.getCount(), equalTo((long) clients.size())),
                    TimeUnit.SECONDS.toMillis(5)
            );
        }

        // ...and disconnect them one by one
        while (!clients.isEmpty()) {
            final Thread client = clients.pop();
            client.interrupt();
            client.join();

            assertThat(counter.getCount(), equalTo((long) clients.size()));
        }
    }

    // Authorized read succeeds with 200 and the json-stream content type.
    @Test
    public void testRead() throws Exception {
        prepareScopeRead();

        final ArgumentCaptor<Integer> statusCaptor = getStatusCaptor();
        final ArgumentCaptor<String> contentTypeCaptor = getContentTypeCaptor();

        when(eventStreamFactoryMock.createEventStream(any(), any(), any(), any()))
                .thenReturn(mock(EventStream.class));

        writeStream();

        assertThat(statusCaptor.getValue(), equalTo(HttpStatus.OK.value()));
        assertThat(contentTypeCaptor.getValue(), equalTo("application/x-json-stream"));
    }

    // Denied authorization maps to a 403 problem body.
    // NOTE(review): authorizeStreamRead is stubbed twice here — the first
    // doThrow(AccessDeniedException.class) is immediately overridden by the
    // doThrow(mockAccessDeniedException()) below; the first stubbing appears
    // redundant — confirm before removing.
    @Test
    public void testAccessDenied() throws Exception {
        Mockito.doThrow(AccessDeniedException.class).when(authorizationValidator)
                .authorizeStreamRead(any());
        when(eventTypeCache.getEventType(TEST_EVENT_TYPE_NAME)).thenReturn(EVENT_TYPE);
        Mockito.doThrow(mockAccessDeniedException()).when(authorizationValidator).authorizeStreamRead(any());

        final StreamingResponseBody responseBody = createStreamingResponseBody(0, 0, 0, 0, 0, null);

        final Problem expectedProblem = Problem.valueOf(FORBIDDEN,
                "Access on READ some-type:some-name denied");
        MatcherAssert.assertThat(responseToString(responseBody), JSON_TEST_HELPER.matchesObject(expectedProblem));
    }

    // When the validator allows access, streaming proceeds and the validator
    // is consulted exactly once.
    @Test
    public void testAccessAllowedForAllDataAccess() throws Exception {
        doNothing().when(authorizationValidator).authorizeStreamRead(any());
        prepareScopeRead();

        final ArgumentCaptor<Integer> statusCaptor = getStatusCaptor();
        final ArgumentCaptor<String> contentTypeCaptor = getContentTypeCaptor();

        when(eventStreamFactoryMock.createEventStream(any(), any(), any(), any()))
                .thenReturn(mock(EventStream.class));

        writeStream();

        assertThat(statusCaptor.getValue(), equalTo(HttpStatus.OK.value()));
        assertThat(contentTypeCaptor.getValue(), equalTo("application/x-json-stream"));
        verify(authorizationValidator, times(1)).authorizeStreamRead(any());
    }

    // Helper: stream to a mocked OutputStream on behalf of a named client.
    private void writeStream() throws Exception {
        final StreamingResponseBody responseBody = createStreamingResponseBody(new NakadiClient("clientId", ""));
        final OutputStream outputStream = mock(OutputStream.class);
        responseBody.writeTo(outputStream);
    }

    // Helper: captures the Content-Type set on the mocked servlet response.
    private ArgumentCaptor<String> getContentTypeCaptor() {
        final ArgumentCaptor<String> contentTypeCaptor = ArgumentCaptor.forClass(String.class);
        doNothing().when(responseMock).setContentType(contentTypeCaptor.capture());
        return contentTypeCaptor;
    }

    // Helper: captures the HTTP status set on the mocked servlet response.
    private ArgumentCaptor<Integer> getStatusCaptor() {
        final ArgumentCaptor<Integer> statusCaptor = ArgumentCaptor.forClass(Integer.class);
        doNothing().when(responseMock).setStatus(statusCaptor.capture());
        return statusCaptor;
    }

    // Helper: stubs event type lookup and a low-level consumer for partition 0.
    private void prepareScopeRead() throws InvalidCursorException {
        final EventConsumer.LowLevelConsumer eventConsumerMock = mock(EventConsumer.LowLevelConsumer.class);
        when(eventTypeCache.getEventType(TEST_EVENT_TYPE_NAME)).thenReturn(EVENT_TYPE);
        when(topicRepositoryMock.createEventConsumer(
                eq(KAFKA_CLIENT_ID), eq(ImmutableList.of(NakadiCursor.of(timeline, "0", "0"))), any()))
                .thenReturn(eventConsumerMock);
    }

    // Helper: drains the streaming body into a String for problem-body asserts.
    protected String responseToString(final StreamingResponseBody responseBody) throws IOException {
        final ByteArrayOutputStream out = new ByteArrayOutputStream();
        responseBody.writeTo(out);
        return out.toString();
    }

    // Helper: invoke the controller with default parameters and no cursors.
    protected StreamingResponseBody createStreamingResponseBody() throws IOException {
        return controller.streamEvents(TEST_EVENT_TYPE_NAME, 1, 0, 0, 0, 0, null, requestMock,
                responseMock, FULL_ACCESS_CLIENT);
    }

    // Helper: invoke the controller as a specific client with fixed parameters.
    private StreamingResponseBody createStreamingResponseBody(final Client client) throws Exception {
        return controller.streamEvents(
                TEST_EVENT_TYPE_NAME, 1, 2, 3, 4, 5,
                "[{\"partition\":\"0\",\"offset\":\"000000000000000000\"}]",
                requestMock, responseMock, client);
    }

    // Helper: invoke the controller with a caller-supplied cursors header.
    private StreamingResponseBody createStreamingResponseBody(final String cursorsStr) throws Exception {
        return controller.streamEvents(TEST_EVENT_TYPE_NAME, 1, 2, 3, 4, 5, cursorsStr, requestMock,
                responseMock, FULL_ACCESS_CLIENT);
    }

    // Helper: invoke the controller with every stream parameter explicit.
    private StreamingResponseBody createStreamingResponseBody(final Integer batchLimit,
                                                              final Integer streamLimit,
                                                              final Integer batchTimeout,
                                                              final Integer streamTimeout,
                                                              final Integer streamKeepAliveLimit,
                                                              final String cursorsStr) throws IOException {
        return controller.streamEvents(TEST_EVENT_TYPE_NAME, batchLimit, streamLimit, batchTimeout,
                streamTimeout, streamKeepAliveLimit, cursorsStr, requestMock, responseMock,
                FULL_ACCESS_CLIENT);
    }
}
package com.laytonsmith.PureUtilities.VirtualFS;

import com.laytonsmith.PureUtilities.Common.FileUtil;
import com.laytonsmith.PureUtilities.Common.StreamUtils;
import com.laytonsmith.PureUtilities.Common.StringUtils;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.yaml.snakeyaml.DumperOptions;
import org.yaml.snakeyaml.Yaml;

/**
 * A collection of file system settings are tied to a glob, and
 * are passed in upon creation of the VirtualFileSystem. Any files
 * or folders that exist (or are attempted to be created) and match the glob
 * are first checked against these settings, upon which the request will either
 * be allowed, or be denied.
 *
 */
public class VirtualFileSystemSettings {

	/**
	 * DO NOT CHANGE THIS. It will break future integrations if you do.
	 *
	 * This is the line that denotes the divider between the top and bottom parts
	 * of a settings file.
	 */
	public static final String GENERATED_LINE = "This file is automatically generated, to keep up-to-date with new features.\n"
			+ "# Comments you add to the file will not be retained.\n";

	// Pre-rendered "# Setting name / Default value / Description" documentation
	// for every known setting; substituted into the example settings template.
	private static final String SETTING_TYPES;

	static {
		List<String> list = new ArrayList<String>();
		for(VirtualFileSystemSetting setting : VirtualFileSystemSetting.values()) {
			String s = "Setting name: " + setting.getName() + "\n"
					+ "# Default value: " + setting.getDef().toString() + "\n"
					+ "# Description: " + setting.getDescription() + "\n";
			list.add(s);
		}
		SETTING_TYPES = StringUtils.Join(list, "\n# ");
	}

	/**
	 * Returns the contents of the default settings file: the bundled
	 * example_settings.yml template with the generated-file banner and the
	 * per-setting documentation substituted in.
	 *
	 * @return the rendered default settings text
	 */
	public static String getDefaultSettingsString() {
		return StreamUtils.GetString(VirtualFileSystemSettings.class.getResourceAsStream("example_settings.yml"))
				.replace("%%SETTING_TYPES%%", SETTING_TYPES)
				.replace("%%GENERATED_LINE%%", GENERATED_LINE);
	}

	/**
	 * The set of settings that can be applied to a glob in a virtual file
	 * system settings file.
	 */
	public static enum VirtualFileSystemSetting {
		HIDDEN("hidden", false, "If true, the file system will not allow the file or directory to be created, and if a file or directory already exists, it will not"
				+ " be exposed. This is essentially a way to revoke both read and write privileges."),
		QUOTA("quota", -1, "Sets the quota for the total list of files or folders that match this glob. Quotas for a cordoned off file system will only affect files"
				+ " that are in the virtual file system, and file sizes of externally created files won't count, but in a uncordoned file system, all files that match"
				+ " this glob are calculated. Due to real time changes in file system size, for directory based globs, this quota may not be enforced precisely, however,"
				+ " it should generally be close. If the quota is set to -1, the quota is unrestricted, and if 0, it is \"full\". The unit of measure is bytes, so 1024 is a KB."
				+ " This value is only applicable to the glob **, meaning that the quota can only be applied per entire virtual file system."),
		READONLY("readonly", false, "If true, this file or folder will not be writable."),
		CORDONED_OFF("cordoned-off", false, "If true, files and folders in this directory will not appear to the virtual file system, unless the file was created from within"
				+ " the virtual file system. This glob must be the ** glob, meaning that either the whole file system is cordoned off, or the whole file system is not cordoned"
				+ " off."),
		FOLDER_DEPTH("folder-depth", -1, "The number of folders deep that will be allowed to be created in this directory. The glob must be a directory if this is anything"
				+ " other than -1. -1 means that the number of sub folders is unrestricted, 0 means that no folders can be created inside this one. This does not"
				+ " affect existing folder structure."),
		;

		// The key under which the setting appears in the YAML file.
		private String name;
		// The value used when a glob does not configure this setting.
		private Object def;
		// Human readable description, rendered into the generated settings file.
		private String description;

		private VirtualFileSystemSetting(String name, Object def, String description) {
			this.name = name;
			this.def = def;
			this.description = description;
		}

		/**
		 * Returns the YAML key for this setting.
		 */
		public String getName() {
			return name;
		}

		/**
		 * Returns the default value used when the setting is not configured.
		 */
		public Object getDef() {
			return def;
		}

		/**
		 * Returns the human readable description of this setting.
		 */
		public String getDescription() {
			return description;
		}

		/**
		 * Looks up a setting by its YAML key.
		 *
		 * @param name the YAML key, e.g. "readonly"
		 * @return the matching setting, or null if the name is unknown
		 */
		static VirtualFileSystemSetting getSettingByName(String name) {
			for(VirtualFileSystemSetting s : VirtualFileSystemSetting.values()) {
				if(s.getName().equals(name)) {
					return s;
				}
			}
			return null;
		}
	}

	/**
	 * Serializes a glob-to-settings map into YAML, suitable for writing back
	 * to a settings file.
	 *
	 * @param settings the settings to serialize
	 * @return the YAML document text
	 */
	public static String serialize(Map<VirtualGlob, SettingGroup> settings) {
		DumperOptions options = new DumperOptions();
		options.setPrettyFlow(true);
		Yaml yaml = new Yaml(options);
		Map<String, Map<String, Object>> serializable = new HashMap<String, Map<String, Object>>();
		for(VirtualGlob glob : settings.keySet()) {
			Map<String, Object> inner = new HashMap<String, Object>();
			for(VirtualFileSystemSetting setting : settings.get(glob).settingGroup.keySet()) {
				inner.put(setting.getName(), settings.get(glob).get(setting));
			}
			serializable.put(glob.toString(), inner);
		}
		return yaml.dump(serializable);
	}

	/**
	 * Parses the YAML text of a settings file into a glob-to-settings map.
	 * Unknown setting names are logged and skipped rather than aborting the
	 * whole parse.
	 *
	 * @param settings the YAML document text; may be empty
	 * @return the parsed settings, possibly empty, never null
	 */
	public static Map<VirtualGlob, SettingGroup> deserialize(String settings) {
		Yaml yaml = new Yaml();
		Map<String, Map<String, Object>> unserialized = (Map) yaml.load(settings);
		Map<VirtualGlob, SettingGroup> parsedSettings = new HashMap<VirtualGlob, SettingGroup>();
		if(unserialized != null) {
			for(String glob : unserialized.keySet()) {
				VirtualGlob vglob = new VirtualGlob(glob);
				Map<String, Object> settingGroup = (Map) unserialized.get(glob);
				SettingGroup group = new SettingGroup();
				for(String settingName : settingGroup.keySet()) {
					VirtualFileSystemSetting s = VirtualFileSystemSetting.getSettingByName(settingName);
					if(s == null) {
						// BUGFIX: previously a null key was handed straight to the
						// backing EnumMap, which throws a bare NullPointerException
						// with no indication of what went wrong. Warn and skip the
						// unknown setting instead, so old files with newer (or
						// misspelled) settings still load.
						Logger.getLogger(VirtualFileSystemSettings.class.getName()).log(Level.WARNING,
								"Unknown setting \"" + settingName + "\" for glob " + glob + " is being ignored.");
						continue;
					}
					Object value = settingGroup.get(settingName);
					group.set(s, value);
				}
				parsedSettings.put(vglob, group);
			}
		}
		return parsedSettings;
	}

	/**
	 * A group of settings values, keyed by setting. Reads of unconfigured
	 * settings fall back to the setting's default.
	 */
	public static class SettingGroup {

		private Map<VirtualFileSystemSetting, Object> settingGroup;

		public SettingGroup() {
			this.settingGroup = new EnumMap<VirtualFileSystemSetting, Object>(VirtualFileSystemSetting.class);
		}

		public SettingGroup(Map<VirtualFileSystemSetting, Object> settingGroup) {
			this.settingGroup = settingGroup;
		}

		/**
		 * Sets the value for a setting in this group.
		 */
		public void set(VirtualFileSystemSetting setting, Object value) {
			settingGroup.put(setting, value);
		}

		/**
		 * Returns the configured value for the setting, or the setting's
		 * default if this group does not configure it.
		 */
		public Object get(VirtualFileSystemSetting setting) {
			if(settingGroup.containsKey(setting)) {
				return settingGroup.get(setting);
			} else {
				return setting.getDef();
			}
		}

		@Override
		public String toString() {
			StringBuilder b = new StringBuilder();
			for(VirtualFileSystemSetting s : settingGroup.keySet()) {
				b.append(s.getName()).append(": ").append(settingGroup.get(s)).append("; ");
			}
			return b.toString().trim();
		}
	}

	// Settings forced onto the file system's meta directory: it is always hidden.
	private static final Map<VirtualFileSystemSetting, Object> META_DIRECTORY_SETTINGS
			= new EnumMap<VirtualFileSystemSetting, Object>(VirtualFileSystemSetting.class);

	static {
		META_DIRECTORY_SETTINGS.put(VirtualFileSystemSetting.HIDDEN, true);
	}

	private Map<VirtualGlob, SettingGroup> settings;
	// True once a non-negative quota is configured on the root (**) glob.
	private boolean hasQuota = false;
	// True once cordoned-off is configured on the root (**) glob.
	private boolean cordonedOff = false;

	/**
	 * Loads the settings from a settings file (an absent file is treated as
	 * empty settings), then rewrites the file so that it always carries the
	 * generated header and documents any newly added settings.
	 *
	 * @param settingsFile the settings file to read and rewrite
	 * @throws IOException if the file cannot be read or written
	 */
	public VirtualFileSystemSettings(File settingsFile) throws IOException {
		this(settingsFile.exists() ? FileUtil.read(settingsFile) : "");
		//Here, we will also output the serialized file, with the comment
		//block at top
		FileUtil.write(getDefaultSettingsString() + serialize(settings), settingsFile);
	}

	/**
	 * Parses settings from the raw YAML text of a settings file.
	 *
	 * @param unparsedSettings the YAML text; may be empty
	 */
	public VirtualFileSystemSettings(String unparsedSettings) {
		// BUGFIX: previously this constructor only assigned the parsed map, so
		// settings loaded from a file/string never computed hasQuota/cordonedOff
		// and never received the hidden meta-directory settings, unlike map-based
		// construction. Delegating to the map constructor unifies both paths.
		this(deserialize(unparsedSettings));
	}

	/**
	 * Creates the settings from an explicit glob-to-settings map. The hidden
	 * meta directory settings are always added, and the root-only flags
	 * (quota, cordoned-off) are computed here; configuring either on a
	 * non-root glob is logged and ignored.
	 *
	 * @param settings the settings map; a defensive copy is taken
	 */
	public VirtualFileSystemSettings(Map<VirtualGlob, SettingGroup> settings) {
		this.settings = new HashMap<VirtualGlob, SettingGroup>(settings);
		this.settings.put(new VirtualGlob(VirtualFileSystem.META_DIRECTORY), new SettingGroup(META_DIRECTORY_SETTINGS));
		for(VirtualGlob g : settings.keySet()) {
			SettingGroup s = settings.get(g);
			if(s.settingGroup.containsKey(VirtualFileSystemSetting.QUOTA)) {
				if((Integer) s.settingGroup.get(VirtualFileSystemSetting.QUOTA) >= 0) {
					if(g.matches(new VirtualFile("/"))) {
						hasQuota = true;
					} else {
						Logger.getLogger(VirtualFileSystemSettings.class.getName()).log(Level.WARNING,
								"The \"quota\" setting can only be applied to the root of the "
								+ "file system at this time. The quota setting for " + g.toString() + " is being ignored.");
					}
				}
			}
			if(s.settingGroup.containsKey(VirtualFileSystemSetting.CORDONED_OFF)) {
				if((Boolean) s.settingGroup.get(VirtualFileSystemSetting.CORDONED_OFF)) {
					if(g.matches(new VirtualFile("/"))) {
						cordonedOff = true;
					} else {
						Logger.getLogger(VirtualFileSystemSettings.class.getName()).log(Level.WARNING,
								"The \"cordoned-off\" setting can only be applied to the root"
								+ " of the file system at this time. The setting for " + g.toString() + " is being ignored.");
					}
				}
			}
		}
	}

	/**
	 * Gets the most specific value for the specified setting, for the specified file.
	 * File specificity will match whatever is closest, so if this matches both the globs: ** and file/**,
	 * then the file/** glob settings will win.
	 *
	 * @param file the file being checked
	 * @param setting the setting to look up
	 * @return the configured value, or the setting's default if no glob matches
	 */
	public Object getSetting(VirtualFile file, VirtualFileSystemSetting setting) {
		SortedSet<VirtualGlob> matchedGlobs = new TreeSet<VirtualGlob>();
		for(VirtualGlob glob : settings.keySet()) {
			if(glob.matches(file)) {
				matchedGlobs.add(glob);
			}
		}
		if(matchedGlobs.isEmpty()) {
			//trivial state
			return setting.getDef();
		} else if(matchedGlobs.size() == 1) {
			//trivial state
			return settings.get(matchedGlobs.first()).get(setting);
		} else {
			// Specificity-based resolution among multiple matching globs is
			// not implemented yet.
			throw new UnsupportedOperationException("Not supported yet.");
		}
	}

	/**
	 * Returns true if a quota is configured on the root of the file system.
	 */
	public boolean hasQuota() {
		return hasQuota;
	}

	/**
	 * Returns true if the root of the file system is cordoned off.
	 */
	public boolean isCordonedOff() {
		return cordonedOff;
	}
}
/*
 * This file is part of ThermalRecycling, licensed under the MIT License (MIT).
 *
 * Copyright (c) OreCruncher
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package org.blockartistry.mod.ThermalRecycling.support;

import java.util.ArrayList;
import java.util.List;

import org.blockartistry.mod.ThermalRecycling.ItemManager;
import org.blockartistry.mod.ThermalRecycling.ModLog;
import org.blockartistry.mod.ThermalRecycling.ModOptions;
import org.blockartistry.mod.ThermalRecycling.data.AutoDetect;
import org.blockartistry.mod.ThermalRecycling.data.ScrapHandler;
import org.blockartistry.mod.ThermalRecycling.data.ScrapValue;
import org.blockartistry.mod.ThermalRecycling.data.ScrappingTables;
import org.blockartistry.mod.ThermalRecycling.data.registry.ItemRegistry;
import org.blockartistry.mod.ThermalRecycling.items.Material;
import org.blockartistry.mod.ThermalRecycling.support.recipe.RecipeDecomposition;
import org.blockartistry.mod.ThermalRecycling.util.ItemStackHelper;
import org.blockartistry.mod.ThermalRecycling.util.ItemStackWeightTable.ItemStackItem;

import com.google.common.collect.ImmutableList;

import cpw.mods.fml.common.registry.GameRegistry;

import org.blockartistry.mod.ThermalRecycling.util.OreDictionaryHelper;
import org.blockartistry.mod.ThermalRecycling.util.PreferredItemStacks;

import net.minecraft.init.Blocks;
import net.minecraft.init.Items;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.item.crafting.CraftingManager;
import net.minecraft.item.crafting.IRecipe;
import net.minecraftforge.common.config.Configuration;
import net.minecraftforge.oredict.OreDictionary;
import net.minecraftforge.oredict.ShapelessOreRecipe;

/**
 * Plugin for ThermalRecycling itself: registers scrap values for common ore
 * dictionary materials, extraction recipes for scrap tiers and soylent items,
 * and scans the crafting recipe list during postInit to auto-register
 * recycling recipes.
 */
public final class ModThermalRecycling extends ModPlugin {

	// Ore dictionary names excluded from the blocked-from-scrapping scan in
	// initialize() below.
	private final static List<String> oreNamesToIgnore = ImmutableList.<String> builder()
			.add("blockHopper", "blockCloth", "blockWool").build();

	/**
	 * Describes a shapeless recipe that combines one item registered under
	 * {@code ore} with {@code input} redstone dust to produce {@code output}
	 * Energetic Redstone Dust.
	 */
	private static class EnergeticRedstoneRecipes {

		protected final String ore;
		protected final int input;
		protected final int output;

		public EnergeticRedstoneRecipes(final String ore, final int input, final int output) {
			this.ore = ore;
			this.input = input;
			this.output = output;
		}

		// True when at least one item is registered under the ore name, i.e.
		// some mod providing it is installed.
		public boolean areOresAvailable() {
			return !OreDictionary.getOres(this.ore).isEmpty();
		}

		// Registers the shapeless crafting recipe, if the ore is available.
		public void register() {
			if (!areOresAvailable())
				return;

			final List<Object> ingredients = new ArrayList<Object>();
			ingredients.add(this.ore);
			for (int i = 0; i < input; i++)
				ingredients.add(Items.redstone);

			final ShapelessOreRecipe recipe = new ShapelessOreRecipe(
					new ItemStack(ItemManager.energeticRedstoneDust, output),
					ingredients.toArray());

			GameRegistry.addRecipe(recipe);
		}
	}

	// Uranium-based variants of the Energetic Redstone Dust recipe; only
	// registered when the corresponding ores exist (see initialize()).
	private static final EnergeticRedstoneRecipes[] energeticUraniumRecipes = new EnergeticRedstoneRecipes[] {
			new EnergeticRedstoneRecipes("dustUranium", 2, 3),
			new EnergeticRedstoneRecipes("crushedUranium", 2, 3),
			new EnergeticRedstoneRecipes("crushedPurifiedUranium", 4, 6) };

	public ModThermalRecycling() {
		super(SupportedMod.THERMAL_RECYCLING);
	}

	/**
	 * Pre-init phase: registers the configured Vacuum Enchant block blacklist.
	 */
	@Override
	public boolean preInit(final Configuration config) {

		// Vacuum Enchant blocks
		registerBlockFromVacuum(true, ModOptions.getVacuumItemBlacklist());

		return true;
	}

	/**
	 * Init phase: assigns scrap values via the Forge ore dictionary, blocks
	 * raw materials from being scrapped, and registers the extraction,
	 * recycle-to-dust, soylent, RTG and Energetic Redstone recipes.
	 */
	@Override
	public boolean initialize() {

		// Use the Forge dictionary to find equivalent ore to set the
		// appropriate scrap value.
		registerScrapValuesForge(ScrapValue.NONE, "dustSulfur", "dustCoal", "dustWood");

		registerScrapValuesForge(ScrapValue.STANDARD, "ingotIron", "ingotGold",
				"ingotCopper", "ingotTin", "ingotSilver", "ingotLead",
				"ingotNickle", "ingotPlatinum", "ingotManaInfused",
				"ingotElectrum", "ingotInvar", "ingotBronze", "ingotSignalum",
				"ingotEnderium", "ingotSteel");

		registerScrapValuesForge(ScrapValue.STANDARD, "dustIron", "dustGold",
				"dustCopper", "dustTin", "dustSilver", "dustLead",
				"dustNickle", "dustPlatinum", "dustManaInfused",
				"dustElectrum", "dustInvar", "dustBronze", "dustSignalum",
				"dustEnderium", "dustSteel");

		registerScrapValuesForge(ScrapValue.SUPERIOR, "blockIron", "blockGold",
				"blockCopper", "blockTin", "blockSilver", "blockLead",
				"blockNickle", "blockPlatinum", "blockManaInfused",
				"blockElectrum", "blockInvar", "blockBronze", "blockSignalum",
				"blockEnderium", "blockSteel");

		registerScrapValuesForge(ScrapValue.STANDARD, "oreIron", "oreGold",
				"oreCopper", "oreTin", "oreSilver", "oreLead", "oreNickle",
				"orePlatinum", "oreManaInfused", "oreElectrum", "oreInvar",
				"oreBronze", "oreSignalum", "oreEnderium");

		registerScrapValuesForge(ScrapValue.POOR, "nuggetIron", "nuggetGold",
				"nuggetCopper", "nuggetTin", "nuggetSilver", "nuggetLead",
				"nuggetNickle", "nuggetPlatinum", "nuggetManaInfused",
				"nuggetElectrum", "nuggetInvar", "nuggetBronze",
				"nuggetSignalum", "nuggetEnderium", "nuggetSteel",
				"dustObsidian");

		registerScrapValuesForge(ScrapValue.SUPERIOR, "gemDiamond",
				"gemEmerald", "oreDiamond", "oreEmerald", "blockDiamond",
				"blockEmerald");

		registerScrapValuesForge(ScrapValue.STANDARD, "nuggetDiamond", "nuggetEmerald");

		// Tiny Piles from IC2
		if (SupportedMod.INDUSTRIAL_CRAFT.isLoaded()) {
			registerScrapValuesForge(ScrapValue.NONE, "dustTinySulfur", "dustTinyLapis", "dustTinyObsidian");
			registerScrapValuesForge(ScrapValue.POOR, "dustTinyIron",
					"dustTinyCopper", "dustTinyGold", "dustTinyTin",
					"dustTinySilver", "dustTinyLead", "dustTinyBronze",
					"dustTinyLithium");
		}

		// Scan the OreDictionary looking for blocks/items that we want
		// to prevent from being scrapped.
		for (final String oreName : OreDictionaryHelper.getOreNames()) {
			if (oreName.startsWith("block") || oreName.startsWith("dust")
					|| oreName.startsWith("ingot") || oreName.startsWith("nugget")) {
				// skip names explicitly exempted above
				if(oreNamesToIgnore.contains(oreName))
					continue;
				for (final ItemStack stack : OreDictionaryHelper.getOres(oreName)) {
					ItemRegistry.setBlockedFromScrapping(stack, true);
				}
			}
		}

		// Recycle-to-wood-dust yields: 1 log -> dust, 2 planks -> dust, 8 saplings -> dust
		registerRecycleToWoodDustForge(1, "logWood");
		registerRecycleToWoodDustForge(2, "plankWood");
		registerRecycleToWoodDustForge(8, "treeSapling");
		registerRecipesToIgnoreForge("logWood", "plankWood", "treeSapling");

		// Configure extraction recipes
		// Weighted outcome tables: a null stack entry is an "empty" (no-drop) slot.
		registerExtractionRecipe(ScrappingTables.poorScrap,
				new ItemStackItem(null, 120),
				new ItemStackItem(ScrappingTables.standardScrap, 60),
				new ItemStackItem(ItemStackHelper.getItemStack("minecraft:dye:15").get(), 10),
				new ItemStackItem(PreferredItemStacks.instance.dustCoal, 10),
				new ItemStackItem(PreferredItemStacks.instance.dustCharcoal, 10),
				new ItemStackItem(PreferredItemStacks.instance.sulfer, 10),
				new ItemStackItem(PreferredItemStacks.instance.dustIron, 20),
				new ItemStackItem(PreferredItemStacks.instance.dustTin, 20),
				new ItemStackItem(PreferredItemStacks.instance.dustCopper, 20),
				new ItemStackItem(PreferredItemStacks.instance.dustNickel, 20));

		registerExtractionRecipe(ScrappingTables.standardScrap,
				new ItemStackItem(null, 78),
				new ItemStackItem(ScrappingTables.superiorScrap, 52),
				new ItemStackItem(PreferredItemStacks.instance.dustCoal, 10),
				new ItemStackItem(ItemStackHelper.getItemStack("ThermalFoundation:material:17").get(), 10),
				new ItemStackItem(PreferredItemStacks.instance.dustIron, 20),
				new ItemStackItem(PreferredItemStacks.instance.dustTin, 20),
				new ItemStackItem(PreferredItemStacks.instance.dustCopper, 20),
				new ItemStackItem(PreferredItemStacks.instance.dustSilver, 20),
				new ItemStackItem(PreferredItemStacks.instance.dustLead, 20),
				new ItemStackItem(PreferredItemStacks.instance.dustGold, 10));

		registerExtractionRecipe(ScrappingTables.superiorScrap,
				new ItemStackItem(PreferredItemStacks.instance.dustGold, 20),
				new ItemStackItem(PreferredItemStacks.instance.dustPlatinum, 20),
				new ItemStackItem(PreferredItemStacks.instance.dustElectrum, 20),
				new ItemStackItem(PreferredItemStacks.instance.dustSignalum, 10),
				new ItemStackItem(PreferredItemStacks.instance.dustLumium, 10),
				new ItemStackItem(PreferredItemStacks.instance.dustEnderium, 10));

		// Extracting a scrap box (any damage value) keeps the box itself.
		registerExtractionRecipe(new ItemStack(ItemManager.recyclingScrapBox, 1, OreDictionaryHelper.WILDCARD_VALUE),
				new ItemStackItem(ScrappingTables.KEEP, 1));

		// Soylent Red and Yellow
		registerExtractionRecipe(new ItemStack(Blocks.pumpkin, 6),
				new ItemStackItem(new ItemStack(ItemManager.soylentYellow), 1));
		registerExtractionRecipe(new ItemStack(Items.carrot, 12),
				new ItemStackItem(new ItemStack(ItemManager.soylentYellow), 1));
		registerExtractionRecipe(new ItemStack(Items.potato, 16),
				new ItemStackItem(new ItemStack(ItemManager.soylentYellow), 1));
		registerExtractionRecipe(new ItemStack(Items.apple, 12),
				new ItemStackItem(new ItemStack(ItemManager.soylentYellow), 1));
		registerExtractionRecipe(new ItemStack(Items.beef, 6),
				new ItemStackItem(new ItemStack(ItemManager.soylentRed), 1));
		registerExtractionRecipe(new ItemStack(Items.porkchop, 8),
				new ItemStackItem(new ItemStack(ItemManager.soylentRed), 1));
		registerExtractionRecipe(new ItemStack(Items.fish, 12),
				new ItemStackItem(new ItemStack(ItemManager.soylentRed), 1));
		registerExtractionRecipe(new ItemStack(Items.chicken, 8),
				new ItemStackItem(new ItemStack(ItemManager.soylentRed), 1));
		registerExtractionRecipe(new ItemStack(Items.rotten_flesh, 16),
				new ItemStackItem(new ItemStack(ItemManager.soylentGreen), 1));

		// RTG - Extract an RTG Energy Cell to a Housing - loses anything
		// energy, etc.
		registerExtractionRecipe(new ItemStack(ItemManager.energyCell, 1, OreDictionaryHelper.WILDCARD_VALUE),
				new ItemStackItem(new ItemStack(ItemManager.material, 1, Material.RTG_HOUSING), 1));

		// RTG - Extract a Depleted RTG Energy Cell to a Housing
		registerExtractionRecipe(new ItemStack(ItemManager.material, 1, Material.RTG_DEPLETED),
				new ItemStackItem(new ItemStack(ItemManager.material, 1, Material.RTG_HOUSING), 1));

		// ////////////////////
		//
		// Add recipe blacklist items first
		// before processing!
		//
		// ////////////////////

		// Apply the blacklist from the configuration. We need to fix up
		// each entry with a ^ so the underlying routine just does what it
		// needs to do.
		for (final String s : ModOptions.getRecyclerBlacklist()) {
			registerItemBlockedFromScrapping(true, "^" + s);
		}

		// If there is uranium dust in the ore dictionary create a crafting
		// recipe for Energetic Redstone Dust.
		if (ModOptions.getEnergeticRedstoneUraniumCrafting()) {
			for (final EnergeticRedstoneRecipes r : energeticUraniumRecipes)
				r.register();
		}

		return true;
	}

	/**
	 * Scans the given crafting recipe list and registers a recycling recipe
	 * for every whitelisted, non-ignored output that can be decomposed.
	 * Failures on individual recipes are logged and skipped (deliberate
	 * best-effort: one broken third-party recipe must not abort the scan).
	 *
	 * @param recipes     the raw CraftingManager recipe list
	 * @param vanillaOnly when true, only vanilla outputs (with all-vanilla
	 *                    decompositions) are processed in this pass
	 */
	private void processRecipeList(final List<Object> recipes, final boolean vanillaOnly) {

		// Process all registered recipes
		for (final Object o : recipes) {

			final IRecipe recipe = (IRecipe) o;
			final ItemStack stack = recipe.getRecipeOutput();

			try {

				// Check to see if this item should have a recipe in
				// the list. This does not mean that something later
				// on can't add one - just means by default it will
				// not be included.
				if (stack != null && (!vanillaOnly || ItemStackHelper.isVanilla(stack))) {
					if (!ItemRegistry.isRecipeIgnored(stack)) {

						// If the name is prefixed with any of the mods
						// we know about then we can create the recipe.
						final String name = Item.itemRegistry.getNameForObject(stack.getItem());

						if (SupportedMod.isModWhitelisted(name)) {
							final List<ItemStack> output = RecipeDecomposition.decompose(recipe);
							if (output != null && !output.isEmpty()) {
								// defer mixed vanilla/mod decompositions to the
								// second (non-vanilla) pass
								if (vanillaOnly && !ItemStackHelper.isVanilla(output))
									continue;
								recycler.useRecipe(recipe).save();
							}
						}
					}
				}
			} catch (Throwable t) {
				ModLog.warn("processRecipeList: Unable to register recipe for [%s]",
						ItemStackHelper.resolveName(stack));
			}
		}
	}

	/**
	 * Post-init phase: processes the full crafting recipe list (vanilla pass
	 * first, then everything else), freezes the scrap tables, and runs the
	 * scrap-value auto detection.
	 */
	@SuppressWarnings("unchecked")
	@Override
	public boolean postInit() {

		// ////////////////////
		//
		// Process the recipes
		//
		// ////////////////////
		final List<Object> recipes = CraftingManager.getInstance().getRecipeList();
		processRecipeList(recipes, true);
		processRecipeList(recipes, false);

		// Lock our tables
		ScrapHandler.freeze();

		// AutoDetect scrap values
		AutoDetect.detect();

		return true;
	}
}
package org.hl7.fhir.dstu3.model; /* Copyright (c) 2011+, HL7, Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of HL7 nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/
// Generated on Sat, Jan 30, 2016 09:18-0500 for FHIR v1.3.0

import java.util.*;

import org.hl7.fhir.utilities.Utilities;

import ca.uhn.fhir.model.api.annotation.Child;
import ca.uhn.fhir.model.api.annotation.Description;
import ca.uhn.fhir.model.api.annotation.DatatypeDef;
import ca.uhn.fhir.model.api.annotation.Block;
import org.hl7.fhir.dstu3.exceptions.FHIRException;
import org.hl7.fhir.dstu3.model.Enumerations.*;
import org.hl7.fhir.instance.model.api.*;

/**
 * A digital signature along with supporting context. The signature may be
 * electronic/cryptographic in nature, or a graphical image representing a
 * hand-written signature, or a signature process. Different Signature
 * approaches have different utilities.
 *
 * NOTE(review): generated code (see header above) - keep hand edits minimal.
 * The "who" element is a choice type: either a UriType or a Reference (see
 * getWhoUriType()/getWhoReference() below).
 */
@DatatypeDef(name="Signature")
public class Signature extends Type implements ICompositeType {

    /**
     * An indication of the reason that the entity signed this document. This may be explicitly included as part of the signature information and can be used when determining accountability for various actions concerning the document.
     */
    @Child(name = "type", type = {Coding.class}, order=0, min=1, max=Child.MAX_UNLIMITED, modifier=false, summary=true)
    @Description(shortDefinition="Indication of the reason the entity signed the object(s)", formalDefinition="An indication of the reason that the entity signed this document. This may be explicitly included as part of the signature information and can be used when determining accountability for various actions concerning the document." )
    protected List<Coding> type;

    /**
     * When the digital signature was signed.
     */
    @Child(name = "when", type = {InstantType.class}, order=1, min=1, max=1, modifier=false, summary=true)
    @Description(shortDefinition="When the signature was created", formalDefinition="When the digital signature was signed." )
    protected InstantType when;

    /**
     * A reference to an application-usable description of the person that signed the certificate (e.g. the signature used their private key).
     */
    @Child(name = "who", type = {UriType.class, Practitioner.class, RelatedPerson.class, Patient.class, Device.class, Organization.class}, order=2, min=1, max=1, modifier=false, summary=true)
    @Description(shortDefinition="Who signed the signature", formalDefinition="A reference to an application-usable description of the person that signed the certificate (e.g. the signature used their private key)." )
    protected Type who;

    /**
     * A mime type that indicates the technical format of the signature. Important mime types are application/signature+xml for XML DigSig, application/jwt for JWT, and image/* for a graphical image of a signature.
     */
    @Child(name = "contentType", type = {CodeType.class}, order=3, min=1, max=1, modifier=false, summary=true)
    @Description(shortDefinition="The technical format of the signature", formalDefinition="A mime type that indicates the technical format of the signature. Important mime types are application/signature+xml for X ML DigSig, application/jwt for JWT, and image/* for a graphical image of a signature." )
    protected CodeType contentType;

    /**
     * The base64 encoding of the Signature content.
     */
    @Child(name = "blob", type = {Base64BinaryType.class}, order=4, min=1, max=1, modifier=false, summary=true)
    @Description(shortDefinition="The actual signature content (XML DigSig. JWT, picture, etc.)", formalDefinition="The base64 encoding of the Signature content." )
    protected Base64BinaryType blob;

    private static final long serialVersionUID = -452432714L;

  /**
   * Constructor
   */
    public Signature() {
      super();
    }

  /**
   * Constructor
   */
    public Signature(InstantType when, Type who, CodeType contentType, Base64BinaryType blob) {
      super();
      this.when = when;
      this.who = who;
      this.contentType = contentType;
      this.blob = blob;
    }

    /**
     * @return {@link #type} (An indication of the reason that the entity signed this document. This may be explicitly included as part of the signature information and can be used when determining accountability for various actions concerning the document.)
     */
    public List<Coding> getType() {
      // Lazily create the backing list; never returns null.
      if (this.type == null)
        this.type = new ArrayList<Coding>();
      return this.type;
    }

    public boolean hasType() {
      // True only when at least one element in the list is non-empty.
      if (this.type == null)
        return false;
      for (Coding item : this.type)
        if (!item.isEmpty())
          return true;
      return false;
    }

    /**
     * @return {@link #type} (An indication of the reason that the entity signed this document. This may be explicitly included as part of the signature information and can be used when determining accountability for various actions concerning the document.)
     */
    // syntactic sugar
    public Coding addType() { //3
      Coding t = new Coding();
      if (this.type == null)
        this.type = new ArrayList<Coding>();
      this.type.add(t);
      return t;
    }

    // syntactic sugar
    public Signature addType(Coding t) { //3
      if (t == null)
        return this;
      if (this.type == null)
        this.type = new ArrayList<Coding>();
      this.type.add(t);
      return this;
    }

    /**
     * @return {@link #when} (When the digital signature was signed.). This is the underlying object with id, value and extensions. The accessor "getWhen" gives direct access to the value
     */
    public InstantType getWhenElement() {
      if (this.when == null)
        if (Configuration.errorOnAutoCreate())
          throw new Error("Attempt to auto-create Signature.when");
        else if (Configuration.doAutoCreate())
          this.when = new InstantType(); // bb
      return this.when;
    }

    public boolean hasWhenElement() {
      return this.when != null && !this.when.isEmpty();
    }

    public boolean hasWhen() {
      return this.when != null && !this.when.isEmpty();
    }

    /**
     * @param value {@link #when} (When the digital signature was signed.). This is the underlying object with id, value and extensions. The accessor "getWhen" gives direct access to the value
     */
    public Signature setWhenElement(InstantType value) {
      this.when = value;
      return this;
    }

    /**
     * @return When the digital signature was signed.
     */
    public Date getWhen() {
      return this.when == null ? null : this.when.getValue();
    }

    /**
     * @param value When the digital signature was signed.
     */
    public Signature setWhen(Date value) {
      if (this.when == null)
        this.when = new InstantType();
      this.when.setValue(value);
      return this;
    }

    /**
     * @return {@link #who} (A reference to an application-usable description of the person that signed the certificate (e.g. the signature used their private key).)
     */
    public Type getWho() {
      return this.who;
    }

    /**
     * @return {@link #who} (A reference to an application-usable description of the person that signed the certificate (e.g. the signature used their private key).)
     */
    public UriType getWhoUriType() throws FHIRException {
      if (!(this.who instanceof UriType))
        throw new FHIRException("Type mismatch: the type UriType was expected, but "+this.who.getClass().getName()+" was encountered");
      return (UriType) this.who;
    }

    public boolean hasWhoUriType() {
      return this.who instanceof UriType;
    }

    /**
     * @return {@link #who} (A reference to an application-usable description of the person that signed the certificate (e.g. the signature used their private key).)
     */
    public Reference getWhoReference() throws FHIRException {
      if (!(this.who instanceof Reference))
        throw new FHIRException("Type mismatch: the type Reference was expected, but "+this.who.getClass().getName()+" was encountered");
      return (Reference) this.who;
    }

    public boolean hasWhoReference() {
      return this.who instanceof Reference;
    }

    public boolean hasWho() {
      return this.who != null && !this.who.isEmpty();
    }

    /**
     * @param value {@link #who} (A reference to an application-usable description of the person that signed the certificate (e.g. the signature used their private key).)
     */
    public Signature setWho(Type value) {
      this.who = value;
      return this;
    }

    /**
     * @return {@link #contentType} (A mime type that indicates the technical format of the signature. Important mime types are application/signature+xml for XML DigSig, application/jwt for JWT, and image/* for a graphical image of a signature.). This is the underlying object with id, value and extensions. The accessor "getContentType" gives direct access to the value
     */
    public CodeType getContentTypeElement() {
      if (this.contentType == null)
        if (Configuration.errorOnAutoCreate())
          throw new Error("Attempt to auto-create Signature.contentType");
        else if (Configuration.doAutoCreate())
          this.contentType = new CodeType(); // bb
      return this.contentType;
    }

    public boolean hasContentTypeElement() {
      return this.contentType != null && !this.contentType.isEmpty();
    }

    public boolean hasContentType() {
      return this.contentType != null && !this.contentType.isEmpty();
    }

    /**
     * @param value {@link #contentType} (A mime type that indicates the technical format of the signature. Important mime types are application/signature+xml for XML DigSig, application/jwt for JWT, and image/* for a graphical image of a signature.). This is the underlying object with id, value and extensions. The accessor "getContentType" gives direct access to the value
     */
    public Signature setContentTypeElement(CodeType value) {
      this.contentType = value;
      return this;
    }

    /**
     * @return A mime type that indicates the technical format of the signature. Important mime types are application/signature+xml for XML DigSig, application/jwt for JWT, and image/* for a graphical image of a signature.
     */
    public String getContentType() {
      return this.contentType == null ? null : this.contentType.getValue();
    }

    /**
     * @param value A mime type that indicates the technical format of the signature. Important mime types are application/signature+xml for XML DigSig, application/jwt for JWT, and image/* for a graphical image of a signature.
     */
    public Signature setContentType(String value) {
      if (this.contentType == null)
        this.contentType = new CodeType();
      this.contentType.setValue(value);
      return this;
    }

    /**
     * @return {@link #blob} (The base64 encoding of the Signature content.). This is the underlying object with id, value and extensions. The accessor "getBlob" gives direct access to the value
     */
    public Base64BinaryType getBlobElement() {
      if (this.blob == null)
        if (Configuration.errorOnAutoCreate())
          throw new Error("Attempt to auto-create Signature.blob");
        else if (Configuration.doAutoCreate())
          this.blob = new Base64BinaryType(); // bb
      return this.blob;
    }

    public boolean hasBlobElement() {
      return this.blob != null && !this.blob.isEmpty();
    }

    public boolean hasBlob() {
      return this.blob != null && !this.blob.isEmpty();
    }

    /**
     * @param value {@link #blob} (The base64 encoding of the Signature content.). This is the underlying object with id, value and extensions. The accessor "getBlob" gives direct access to the value
     */
    public Signature setBlobElement(Base64BinaryType value) {
      this.blob = value;
      return this;
    }

    /**
     * @return The base64 encoding of the Signature content.
     */
    public byte[] getBlob() {
      return this.blob == null ? null : this.blob.getValue();
    }

    /**
     * @param value The base64 encoding of the Signature content.
     */
    public Signature setBlob(byte[] value) {
      if (this.blob == null)
        this.blob = new Base64BinaryType();
      this.blob.setValue(value);
      return this;
    }

    protected void listChildren(List<Property> childrenList) {
      super.listChildren(childrenList);
      childrenList.add(new Property("type", "Coding", "An indication of the reason that the entity signed this document. This may be explicitly included as part of the signature information and can be used when determining accountability for various actions concerning the document.", 0, java.lang.Integer.MAX_VALUE, type));
      childrenList.add(new Property("when", "instant", "When the digital signature was signed.", 0, java.lang.Integer.MAX_VALUE, when));
      childrenList.add(new Property("who[x]", "uri|Reference(Practitioner|RelatedPerson|Patient|Device|Organization)", "A reference to an application-usable description of the person that signed the certificate (e.g. the signature used their private key).", 0, java.lang.Integer.MAX_VALUE, who));
      childrenList.add(new Property("contentType", "code", "A mime type that indicates the technical format of the signature. Important mime types are application/signature+xml for X ML DigSig, application/jwt for JWT, and image/* for a graphical image of a signature.", 0, java.lang.Integer.MAX_VALUE, contentType));
      childrenList.add(new Property("blob", "base64Binary", "The base64 encoding of the Signature content.", 0, java.lang.Integer.MAX_VALUE, blob));
    }

    @Override
    public void setProperty(String name, Base value) throws FHIRException {
      if (name.equals("type"))
        this.getType().add(castToCoding(value));
      else if (name.equals("when"))
        this.when = castToInstant(value); // InstantType
      else if (name.equals("who[x]"))
        this.who = (Type) value; // Type
      else if (name.equals("contentType"))
        this.contentType = castToCode(value); // CodeType
      else if (name.equals("blob"))
        this.blob = castToBase64Binary(value); // Base64BinaryType
      else
        super.setProperty(name, value);
    }

    @Override
    public Base addChild(String name) throws FHIRException {
      if (name.equals("type")) {
        return addType();
      }
      else if (name.equals("when")) {
        throw new FHIRException("Cannot call addChild on a primitive type Signature.when");
      }
      else if (name.equals("whoUri")) {
        this.who = new UriType();
        return this.who;
      }
      else if (name.equals("whoReference")) {
        this.who = new Reference();
        return this.who;
      }
      else if (name.equals("contentType")) {
        throw new FHIRException("Cannot call addChild on a primitive type Signature.contentType");
      }
      else if (name.equals("blob")) {
        throw new FHIRException("Cannot call addChild on a primitive type Signature.blob");
      }
      else
        return super.addChild(name);
    }

  public String fhirType() {
    return "Signature";
  }

      public Signature copy() {
        // Deep copy: list elements and each child element are copied individually.
        Signature dst = new Signature();
        copyValues(dst);
        if (type != null) {
          dst.type = new ArrayList<Coding>();
          for (Coding i : type)
            dst.type.add(i.copy());
        };
        dst.when = when == null ? null : when.copy();
        dst.who = who == null ? null : who.copy();
        dst.contentType = contentType == null ? null : contentType.copy();
        dst.blob = blob == null ? null : blob.copy();
        return dst;
      }

      protected Signature typedCopy() {
        return copy();
      }

      @Override
      public boolean equalsDeep(Base other) {
        if (!super.equalsDeep(other))
          return false;
        if (!(other instanceof Signature))
          return false;
        Signature o = (Signature) other;
        return compareDeep(type, o.type, true) && compareDeep(when, o.when, true) && compareDeep(who, o.who, true)
           && compareDeep(contentType, o.contentType, true) && compareDeep(blob, o.blob, true);
      }

      @Override
      public boolean equalsShallow(Base other) {
        // Shallow comparison covers primitive-valued children only (when,
        // contentType, blob); "type" and "who" are compared by equalsDeep.
        if (!super.equalsShallow(other))
          return false;
        if (!(other instanceof Signature))
          return false;
        Signature o = (Signature) other;
        return compareValues(when, o.when, true) && compareValues(contentType, o.contentType, true) && compareValues(blob, o.blob, true)
          ;
      }

      public boolean isEmpty() {
        return super.isEmpty() && (type == null || type.isEmpty()) && (when == null || when.isEmpty())
           && (who == null || who.isEmpty()) && (contentType == null || contentType.isEmpty()) && (blob == null || blob.isEmpty())
          ;
      }

}
/* ****************************************************************************** * Copyright (c) 2019 Konduit K.K. * * This program and the accompanying materials are made available under the * terms of the Apache License, Version 2.0 which is available at * https://www.apache.org/licenses/LICENSE-2.0. * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * * SPDX-License-Identifier: Apache-2.0 ******************************************************************************/ //================== GENERATED CODE - DO NOT MODIFY THIS FILE ================== package org.nd4j.linalg.factory.ops; import static org.nd4j.linalg.factory.NDValidation.isSameType; import org.nd4j.base.Preconditions; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.factory.NDValidation; import org.nd4j.linalg.factory.Nd4j; public class NDNN { public NDNN() { } /** * Neural network batch normalization operation.<br> * For details, see <a href="https://arxiv.org/abs/1502.03167">https://arxiv.org/abs/1502.03167</a><br> * * @param input Input variable. (NUMERIC type) * @param mean Mean value. For 1d axis, this should match input.size(axis) (NUMERIC type) * @param variance Variance value. For 1d axis, this should match input.size(axis) (NUMERIC type) * @param gamma Gamma value. For 1d axis, this should match input.size(axis) (NUMERIC type) * @param beta Beta value. For 1d axis, this should match input.size(axis) (NUMERIC type) * @param epsilon Epsilon constant for numerical stability (to avoid division by 0) * @param axis For 2d CNN activations: 1 for NCHW format activations, or 3 for NHWC format activations. 
* For 3d CNN activations: 1 for NCDHW format, 4 for NDHWC * For 1d/RNN activations: 1 for NCW format, 2 for NWC (Size: AtLeast(min=1)) * @return output variable for batch normalization (NUMERIC type) */ public INDArray batchNorm(INDArray input, INDArray mean, INDArray variance, INDArray gamma, INDArray beta, double epsilon, int... axis) { NDValidation.validateNumerical("batchNorm", "input", input); NDValidation.validateNumerical("batchNorm", "mean", mean); NDValidation.validateNumerical("batchNorm", "variance", variance); NDValidation.validateNumerical("batchNorm", "gamma", gamma); NDValidation.validateNumerical("batchNorm", "beta", beta); Preconditions.checkArgument(axis.length >= 1, "axis has incorrect size/length. Expected: axis.length >= 1, got %s", axis.length); return Nd4j.exec(new org.nd4j.linalg.api.ops.impl.layers.convolution.BatchNorm(input, mean, variance, gamma, beta, epsilon, axis))[0]; } /** * Bias addition operation: a special case of addition, typically used with CNN 4D activations and a 1D bias vector<br> * * @param input 4d input variable (NUMERIC type) * @param bias 1d bias (NUMERIC type) * @param nchw The format - nchw=true means [minibatch, channels, height, width] format; nchw=false - [minibatch, height, width, channels]. 
* Unused for 2d inputs * @return output Output variable, after applying bias add operation (NUMERIC type) */ public INDArray biasAdd(INDArray input, INDArray bias, boolean nchw) { NDValidation.validateNumerical("biasAdd", "input", input); NDValidation.validateNumerical("biasAdd", "bias", bias); return Nd4j.exec(new org.nd4j.linalg.api.ops.impl.broadcast.BiasAdd(input, bias, nchw))[0]; } /** * This operation performs dot product attention on the given timeseries input with the given queries<br> * out = sum(similarity(k_i, q) * v_i)<br> * <br> * similarity(k, q) = softmax(k * q) where x * q is the dot product of x and q<br> * <br> * Optionally with normalization step:<br> * similarity(k, q) = softmax(k * q / sqrt(size(q))<br> * <br> * See also "Attention is all you need" (https://arxiv.org/abs/1706.03762, p. 4, eq. 1)<br> * <br> * Note: This supports multiple queries at once, if only one query is available the queries vector still has to<br> * be 3D but can have queryCount = 1<br> * <br> * Note: keys and values usually is the same array. If you want to use it as the same array, simply pass it for<br> * both.<br> * <br> * Note: Queries, keys and values must either be all rank 3 or all rank 4 arrays. Mixing them doesn't work. 
The<br> * output rank will depend on the input rank.<br> * * @param queries input 3D array "queries" of shape [batchSize, featureKeys, queryCount] * or 4D array of shape [batchSize, numHeads, featureKeys, queryCount] (NUMERIC type) * @param keys input 3D array "keys" of shape [batchSize, featureKeys, timesteps] * or 4D array of shape [batchSize, numHeads, featureKeys, timesteps] (NUMERIC type) * @param values input 3D array "values" of shape [batchSize, featureValues, timesteps] * or 4D array of shape [batchSize, numHeads, featureValues, timesteps] (NUMERIC type) * @param mask OPTIONAL; array that defines which values should be skipped of shape [batchSize, timesteps] (NUMERIC type) * @param scaled normalization, false -> do not apply normalization, true -> apply normalization * @return output Attention result arrays of shape [batchSize, featureValues, queryCount] or [batchSize, numHeads, featureValues, queryCount], * (optionally) Attention Weights of shape [batchSize, timesteps, queryCount] or [batchSize, numHeads, timesteps, queryCount] (NUMERIC type) */ public INDArray dotProductAttention(INDArray queries, INDArray keys, INDArray values, INDArray mask, boolean scaled) { NDValidation.validateNumerical("dotProductAttention", "queries", queries); NDValidation.validateNumerical("dotProductAttention", "keys", keys); NDValidation.validateNumerical("dotProductAttention", "values", values); NDValidation.validateNumerical("dotProductAttention", "mask", mask); return Nd4j.exec(new org.nd4j.linalg.api.ops.impl.transforms.custom.DotProductAttention(queries, keys, values, mask, scaled))[0]; } /** * Dropout operation<br> * * @param input Input array (NUMERIC type) * @param inputRetainProbability Probability of retaining an input (set to 0 with probability 1-p) * @return output Output (NUMERIC type) */ public INDArray dropout(INDArray input, double inputRetainProbability) { NDValidation.validateNumerical("dropout", "input", input); return Nd4j.exec(new 
org.nd4j.linalg.api.ops.random.impl.DropOut(input, inputRetainProbability)); } /** * Element-wise exponential linear unit (ELU) function:<br> * out = x if x > 0<br> * out = a * (exp(x) - 1) if x <= 0<br> * with constant a = 1.0<br> * <p><br> * See: <a href="https://arxiv.org/abs/1511.07289">https://arxiv.org/abs/1511.07289</a><br> * * @param x Input variable (NUMERIC type) * @return output Output variable (NUMERIC type) */ public INDArray elu(INDArray x) { NDValidation.validateNumerical("elu", "x", x); return Nd4j.exec(new org.nd4j.linalg.api.ops.impl.transforms.strict.ELU(x))[0]; } /** * GELU activation function - Gaussian Error Linear Units<br> * For more details, see <i>Gaussian Error Linear Units (GELUs)</i> - <a href="https://arxiv.org/abs/1606.08415">https://arxiv.org/abs/1606.08415</a><br> * This method uses the sigmoid approximation<br> * * @param x Input variable (NUMERIC type) * @return output Output variable (NUMERIC type) */ public INDArray gelu(INDArray x) { NDValidation.validateNumerical("gelu", "x", x); return Nd4j.exec(new org.nd4j.linalg.api.ops.impl.transforms.strict.GELU(x)); } /** * Element-wise hard sigmoid function:<br> * out[i] = 0 if in[i] <= -2.5<br> * out[1] = 0.2*in[i]+0.5 if -2.5 < in[i] < 2.5<br> * out[i] = 1 if in[i] >= 2.5<br> * * @param x Input variable (NUMERIC type) * @return output Output variable (NUMERIC type) */ public INDArray hardSigmoid(INDArray x) { NDValidation.validateNumerical("hardSigmoid", "x", x); return Nd4j.exec(new org.nd4j.linalg.api.ops.impl.transforms.strict.HardSigmoid(x)); } /** * Element-wise hard tanh function:<br> * out[i] = -1 if in[i] <= -1<br> * out[1] = in[i] if -1 < in[i] < 1<br> * out[i] = 1 if in[i] >= 1<br> * * @param x Input variable (NUMERIC type) * @return output Output variable (NUMERIC type) */ public INDArray hardTanh(INDArray x) { NDValidation.validateNumerical("hardTanh", "x", x); return Nd4j.exec(new org.nd4j.linalg.api.ops.impl.transforms.strict.HardTanh(x)); } /** * Derivative (dOut/dIn) 
of the element-wise hard Tanh function - hardTanh(INDArray)<br> * * @param x Input variable (NUMERIC type) * @return output Output variable (NUMERIC type) */ public INDArray hardTanhDerivative(INDArray x) { NDValidation.validateNumerical("hardTanhDerivative", "x", x); return Nd4j.exec(new org.nd4j.linalg.api.ops.impl.transforms.gradient.HardTanhDerivative(x)); } /** * Apply Layer Normalization<br> * <br> * y = gain * standardize(x) + bias<br> * * @param input Input variable (NUMERIC type) * @param gain Gain (NUMERIC type) * @param bias Bias (NUMERIC type) * @param channelsFirst For 2D input - unused. True for NCHW (minibatch, channels, height, width), false for NHWC data * @param dimensions Dimensions to perform layer norm over - dimension=1 for 2d/MLP data, dimension=1,2,3 for CNNs (Size: AtLeast(min=1)) * @return output Output variable (NUMERIC type) */ public INDArray layerNorm(INDArray input, INDArray gain, INDArray bias, boolean channelsFirst, int... dimensions) { NDValidation.validateNumerical("layerNorm", "input", input); NDValidation.validateNumerical("layerNorm", "gain", gain); NDValidation.validateNumerical("layerNorm", "bias", bias); Preconditions.checkArgument(dimensions.length >= 1, "dimensions has incorrect size/length. Expected: dimensions.length >= 1, got %s", dimensions.length); return Nd4j.exec(new org.nd4j.linalg.api.ops.impl.transforms.custom.LayerNorm(input, gain, bias, channelsFirst, dimensions))[0]; } /** * Apply Layer Normalization<br> * <br> * y = gain * standardize(x) + bias<br> * * @param input Input variable (NUMERIC type) * @param gain Gain (NUMERIC type) * @param channelsFirst For 2D input - unused. 
True for NCHW (minibatch, channels, height, width), false for NHWC data * @param dimensions Dimensions to perform layer norm over - dimension=1 for 2d/MLP data, dimension=1,2,3 for CNNs (Size: AtLeast(min=1)) * @return output Output variable (NUMERIC type) */ public INDArray layerNorm(INDArray input, INDArray gain, boolean channelsFirst, int... dimensions) { NDValidation.validateNumerical("layerNorm", "input", input); NDValidation.validateNumerical("layerNorm", "gain", gain); Preconditions.checkArgument(dimensions.length >= 1, "dimensions has incorrect size/length. Expected: dimensions.length >= 1, got %s", dimensions.length); return Nd4j.exec(new org.nd4j.linalg.api.ops.impl.transforms.custom.LayerNorm(input, gain, channelsFirst, dimensions))[0]; } /** * Element-wise leaky ReLU function:<br> * out = x if x >= 0.0<br> * out = alpha * x if x < cutoff<br> * Alpha value is most commonly set to 0.01<br> * * @param x Input variable (NUMERIC type) * @param alpha Cutoff - commonly 0.01 (NUMERIC type) * @return output Output variable (NUMERIC type) */ public INDArray leakyRelu(INDArray x, INDArray alpha) { NDValidation.validateNumerical("leakyRelu", "x", x); NDValidation.validateNumerical("leakyRelu", "alpha", alpha); return Nd4j.exec(new org.nd4j.linalg.api.ops.impl.scalar.LeakyReLU(x, alpha)); } /** * Leaky ReLU derivative: dOut/dIn given input.<br> * * @param x Input variable (NUMERIC type) * @param alpha Cutoff - commonly 0.01 (NUMERIC type) * @return output Output variable (NUMERIC type) */ public INDArray leakyReluDerivative(INDArray x, INDArray alpha) { NDValidation.validateNumerical("leakyReluDerivative", "x", x); NDValidation.validateNumerical("leakyReluDerivative", "alpha", alpha); return Nd4j.exec(new org.nd4j.linalg.api.ops.impl.transforms.gradient.LeakyReLUDerivative(x, alpha)); } /** * Linear layer operation: out = mmul(in,w) + bias<br> * Note that bias array is optional<br> * * @param input Input data (NUMERIC type) * @param weights Weights variable, shape 
[nIn, nOut] (NUMERIC type)
 * @param bias Optional bias variable (may be null) (NUMERIC type)
 * @return output Output variable (NUMERIC type)
 */
public INDArray linear(INDArray input, INDArray weights, INDArray bias) {
  NDValidation.validateNumerical("linear", "input", input);
  NDValidation.validateNumerical("linear", "weights", weights);
  // NOTE(review): javadoc says bias may be null, but it is validated unconditionally —
  // confirm NDValidation.validateNumerical tolerates a null argument.
  NDValidation.validateNumerical("linear", "bias", bias);
  return Nd4j.exec(new org.nd4j.linalg.api.ops.impl.transforms.custom.XwPlusB(input, weights, bias))[0];
}

/**
 * Element-wise log-sigmoid function: out[i] = log(sigmoid(in[i]))<br>
 *
 * @param x Input variable (NUMERIC type)
 * @return output Output variable (NUMERIC type)
 */
public INDArray logSigmoid(INDArray x) {
  NDValidation.validateNumerical("logSigmoid", "x", x);
  return Nd4j.exec(new org.nd4j.linalg.api.ops.impl.transforms.strict.LogSigmoid(x));
}

/**
 * Log softmax activation<br>
 *
 * @param x (NUMERIC type)
 * @return output (NUMERIC type)
 */
public INDArray logSoftmax(INDArray x) {
  NDValidation.validateNumerical("logSoftmax", "x", x);
  return Nd4j.exec(new org.nd4j.linalg.api.ops.impl.transforms.custom.LogSoftMax(x))[0];
}

/**
 * Log softmax activation<br>
 *
 * @param x Input (NUMERIC type)
 * @param dimension Dimension along which to apply log softmax
 * @return output Output - log(softmax(input)) (NUMERIC type)
 */
public INDArray logSoftmax(INDArray x, int dimension) {
  NDValidation.validateNumerical("logSoftmax", "x", x);
  return Nd4j.exec(new org.nd4j.linalg.api.ops.impl.transforms.custom.LogSoftMax(x, dimension))[0];
}

/**
 * This performs multi-headed dot product attention on the given timeseries input<br>
 * out = concat(head_1, head_2, ..., head_n) * Wo<br>
 * head_i = dot_product_attention(Wq_i*q, Wk_i*k, Wv_i*v)<br>
 * <br>
 * Optionally with normalization when calculating the attention for each head.<br>
 * <br>
 * See also "Attention is all you need" (https://arxiv.org/abs/1706.03762, pp. 4,5, "3.2.2 Multi-Head Attention")<br>
 * <br>
 * This makes use of dot_product_attention OP support for rank 4 inputs.<br>
 * see dotProductAttention(INDArray, INDArray, INDArray, INDArray, boolean, boolean)<br>
 *
 * @param queries input 3D array "queries" of shape [batchSize, featureKeys, queryCount] (NUMERIC type)
 * @param keys input 3D array "keys" of shape [batchSize, featureKeys, timesteps] (NUMERIC type)
 * @param values input 3D array "values" of shape [batchSize, featureValues, timesteps] (NUMERIC type)
 * @param Wq input query projection weights of shape [numHeads, projectedKeys, featureKeys] (NUMERIC type)
 * @param Wk input key projection weights of shape [numHeads, projectedKeys, featureKeys] (NUMERIC type)
 * @param Wv input value projection weights of shape [numHeads, projectedValues, featureValues] (NUMERIC type)
 * @param Wo output projection weights of shape [numHeads * projectedValues, outSize] (NUMERIC type)
 * @param mask OPTIONAL; array that defines which values should be skipped of shape [batchSize, timesteps] (NUMERIC type)
 * @param scaled normalization, false -> do not apply normalization, true -> apply normalization
 * @return output Attention result arrays of shape [batchSize, outSize, queryCount]
 * (optionally) Attention Weights of shape [batchSize, numHeads, timesteps, queryCount] (NUMERIC type)
 */
public INDArray multiHeadDotProductAttention(INDArray queries, INDArray keys, INDArray values,
    INDArray Wq, INDArray Wk, INDArray Wv, INDArray Wo, INDArray mask, boolean scaled) {
  NDValidation.validateNumerical("multiHeadDotProductAttention", "queries", queries);
  NDValidation.validateNumerical("multiHeadDotProductAttention", "keys", keys);
  NDValidation.validateNumerical("multiHeadDotProductAttention", "values", values);
  NDValidation.validateNumerical("multiHeadDotProductAttention", "Wq", Wq);
  NDValidation.validateNumerical("multiHeadDotProductAttention", "Wk", Wk);
  NDValidation.validateNumerical("multiHeadDotProductAttention", "Wv", Wv);
  NDValidation.validateNumerical("multiHeadDotProductAttention", "Wo", Wo);
  // NOTE(review): mask is documented as OPTIONAL but validated unconditionally — confirm
  // NDValidation.validateNumerical tolerates null.
  NDValidation.validateNumerical("multiHeadDotProductAttention", "mask", mask);
  return Nd4j.exec(new org.nd4j.linalg.api.ops.impl.transforms.custom.MultiHeadDotProductAttention(queries, keys, values, Wq, Wk, Wv, Wo, mask, scaled))[0];
}

/**
 * PReLU (Parameterized Rectified Linear Unit) operation. Like LeakyReLU with a learnable alpha:<br>
 * out[i] = in[i] if in[i] >= 0<br>
 * out[i] = in[i] * alpha[i] otherwise<br>
 * <br>
 * sharedAxes allows you to share learnable parameters along axes.<br>
 * For example, if the input has shape [batchSize, channels, height, width]<br>
 * and you want each channel to have its own cutoff, use sharedAxes = [2, 3] and an<br>
 * alpha with shape [channels].<br>
 *
 * @param input Input data (NUMERIC type)
 * @param alpha The cutoff variable. Note that the batch dimension (the 0th, whether it is batch or not) should not be part of alpha. (NUMERIC type)
 * @param sharedAxes Which axes to share cutoff parameters along. (Size: AtLeast(min=1))
 * @return output Output (NUMERIC type)
 */
public INDArray prelu(INDArray input, INDArray alpha, int... sharedAxes) {
  NDValidation.validateNumerical("prelu", "input", input);
  NDValidation.validateNumerical("prelu", "alpha", alpha);
  Preconditions.checkArgument(sharedAxes.length >= 1, "sharedAxes has incorrect size/length. Expected: sharedAxes.length >= 1, got %s", sharedAxes.length);
  return Nd4j.exec(new org.nd4j.linalg.api.ops.impl.scalar.PRelu(input, alpha, sharedAxes))[0];
}

/**
 * Element-wise rectified linear function with specified cutoff:<br>
 * out[i] = in[i] if in[i] >= cutoff<br>
 * out[i] = 0 otherwise<br>
 *
 * @param x Input (NUMERIC type)
 * @param cutoff Cutoff value for ReLU operation - x > cutoff ? x : 0.
Usually 0
 * @return output Output (NUMERIC type)
 */
public INDArray relu(INDArray x, double cutoff) {
  NDValidation.validateNumerical("relu", "x", x);
  return Nd4j.exec(new org.nd4j.linalg.api.ops.impl.scalar.RectifiedLinear(x, cutoff));
}

/**
 * Element-wise "rectified linear 6" function with specified cutoff:<br>
 * out[i] = min(max(in, cutoff), 6)<br>
 *
 * @param x Input (NUMERIC type)
 * @param cutoff Cutoff value for ReLU operation. Usually 0
 * @return output Output (NUMERIC type)
 */
public INDArray relu6(INDArray x, double cutoff) {
  NDValidation.validateNumerical("relu6", "x", x);
  return Nd4j.exec(new org.nd4j.linalg.api.ops.impl.scalar.Relu6(x, cutoff));
}

/**
 * ReLU (Rectified Linear Unit) layer operation: out = relu(mmul(in,w) + bias)<br>
 * Note that bias array is optional<br>
 *
 * @param input Input data (NUMERIC type)
 * @param weights Weights variable (NUMERIC type)
 * @param bias Optional bias variable (may be null) (NUMERIC type)
 * @return output Output variable (NUMERIC type)
 */
public INDArray reluLayer(INDArray input, INDArray weights, INDArray bias) {
  NDValidation.validateNumerical("reluLayer", "input", input);
  NDValidation.validateNumerical("reluLayer", "weights", weights);
  NDValidation.validateNumerical("reluLayer", "bias", bias);
  return Nd4j.exec(new org.nd4j.linalg.api.ops.impl.transforms.ReluLayer(input, weights, bias))[0];
}

/**
 * Element-wise SeLU function - Scaled exponential Linear Unit: see <a href="https://arxiv.org/abs/1706.02515">Self-Normalizing Neural Networks</a><br>
 * <br>
 * out[i] = scale * in[i] if in[i] > 0, or scale * alpha * (exp(in[i]) - 1) if in[i] <= 0<br>
 * Uses default scale and alpha values.<br>
 *
 * @param x Input variable (NUMERIC type)
 * @return output Output variable (NUMERIC type)
 */
public INDArray selu(INDArray x) {
  NDValidation.validateNumerical("selu", "x", x);
  return Nd4j.exec(new org.nd4j.linalg.api.ops.impl.transforms.strict.SELU(x));
}

/**
 * Element-wise sigmoid function: out[i] = 1.0/(1+exp(-in[i]))<br>
 *
 * @param x Input variable (NUMERIC type)
 * @return output Output variable (NUMERIC type)
 */
public INDArray sigmoid(INDArray x) {
  NDValidation.validateNumerical("sigmoid", "x", x);
  return Nd4j.exec(new org.nd4j.linalg.api.ops.impl.transforms.strict.Sigmoid(x));
}

/**
 * Element-wise sigmoid function derivative: dL/dIn given input and dL/dOut<br>
 *
 * @param x Input Variable (NUMERIC type)
 * @param wrt Gradient at the output - dL/dOut. Must have same shape as the input (NUMERIC type)
 * @return output Output (gradient at input of sigmoid) (NUMERIC type)
 */
public INDArray sigmoidDerivative(INDArray x, INDArray wrt) {
  NDValidation.validateNumerical("sigmoidDerivative", "x", x);
  NDValidation.validateNumerical("sigmoidDerivative", "wrt", wrt);
  return Nd4j.exec(new org.nd4j.linalg.api.ops.impl.transforms.gradient.SigmoidDerivative(x, wrt))[0];
}

/**
 * Softmax activation, along the specified dimension<br>
 *
 * @param x Input (NUMERIC type)
 * @param dimension Dimension along which to apply softmax
 * @return output Output variable (NUMERIC type)
 */
public INDArray softmax(INDArray x, int dimension) {
  NDValidation.validateNumerical("softmax", "x", x);
  return Nd4j.exec(new org.nd4j.linalg.api.ops.impl.transforms.custom.SoftMax(x, dimension))[0];
}

/**
 * Softmax derivative function<br>
 *
 * @param x Softmax input (NUMERIC type)
 * @param wrt Gradient at output, dL/dx (NUMERIC type)
 * @param dimension Softmax dimension
 * @return output (NUMERIC type)
 */
public INDArray softmaxDerivative(INDArray x, INDArray wrt, int dimension) {
  NDValidation.validateNumerical("softmaxDerivative", "x", x);
  NDValidation.validateNumerical("softmaxDerivative", "wrt", wrt);
  return Nd4j.exec(new org.nd4j.linalg.api.ops.impl.transforms.gradient.SoftmaxBp(x, wrt, dimension))[0];
}

/**
 * Element-wise softplus function: out = log(exp(x) + 1)<br>
 *
 * @param x Input variable (NUMERIC type)
 * @return output Output variable (NUMERIC type)
 */
public INDArray softplus(INDArray x) {
  NDValidation.validateNumerical("softplus", "x", x);
  return Nd4j.exec(new org.nd4j.linalg.api.ops.impl.transforms.strict.SoftPlus(x));
}

/**
 * Element-wise softsign function: out = x / (abs(x) + 1)<br>
 *
 * @param x Input variable (NUMERIC type)
 * @return output Output variable (NUMERIC type)
 */
public INDArray softsign(INDArray x) {
  NDValidation.validateNumerical("softsign", "x", x);
  return Nd4j.exec(new org.nd4j.linalg.api.ops.impl.transforms.strict.SoftSign(x));
}

/**
 * Element-wise derivative (dOut/dIn) of the softsign function softsign(INDArray)<br>
 *
 * @param x Input variable (NUMERIC type)
 * @return output Output (NUMERIC type)
 */
public INDArray softsignDerivative(INDArray x) {
  NDValidation.validateNumerical("softsignDerivative", "x", x);
  return Nd4j.exec(new org.nd4j.linalg.api.ops.impl.transforms.gradient.SoftSignDerivative(x));
}

/**
 * Element-wise "swish" function: out = x * sigmoid(b*x) with b=1.0<br>
 * See: <a href="https://arxiv.org/abs/1710.05941">https://arxiv.org/abs/1710.05941</a><br>
 *
 * @param x Input variable (NUMERIC type)
 * @return output Output variable (NUMERIC type)
 */
public INDArray swish(INDArray x) {
  NDValidation.validateNumerical("swish", "x", x);
  return Nd4j.exec(new org.nd4j.linalg.api.ops.impl.transforms.strict.Swish(x));
}
}
// Copyright (C) 2008 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.gerrit.reviewdb.client;

import static com.google.gerrit.reviewdb.client.RefNames.REFS_CHANGES;

import com.google.gerrit.extensions.client.ChangeStatus;
import com.google.gwtorm.client.Column;
import com.google.gwtorm.client.IntKey;
import com.google.gwtorm.client.RowVersion;
import com.google.gwtorm.client.StringKey;

import java.sql.Timestamp;
import java.util.Arrays;

/**
 * A change proposed to be merged into a {@link Branch}.
 * <p>
 * The data graph rooted below a Change can be quite complex:
 *
 * <pre>
 *   {@link Change}
 *     |
 *     +- {@link ChangeMessage}: &quot;cover letter&quot; or general comment.
 *     |
 *     +- {@link PatchSet}: a single variant of this change.
 *          |
 *          +- {@link PatchSetApproval}: a +/- vote on the change's current state.
 *          |
 *          +- {@link PatchLineComment}: comment about a specific line
 * </pre>
 * <p>
 * <h5>PatchSets</h5>
 * <p>
 * Every change has at least one PatchSet. A change starts out with one
 * PatchSet, the initial proposal put forth by the change owner. This
 * {@link Account} is usually also listed as the author and committer in the
 * PatchSetInfo.
 * <p>
 * Each PatchSet contains zero or more Patch records, detailing the file paths
 * impacted by the change (otherwise known as the file paths the author
 * added/deleted/modified). Sometimes a merge commit can contain zero patches,
 * if the merge has no conflicts, or has no impact other than to cut off a line
 * of development.
 * <p>
 * Each PatchLineComment is a draft or a published comment about a single line
 * of the associated file. These are the inline comment entities created by
 * users as they perform a review.
 * <p>
 * When additional PatchSets appear under a change, these PatchSets reference
 * <i>replacement</i> commits; alternative commits that could be made to the
 * project instead of the original commit referenced by the first PatchSet.
 * <p>
 * A change has at most one current PatchSet. The current PatchSet is updated
 * when a new replacement PatchSet is uploaded. When a change is submitted, the
 * current patch set is what is merged into the destination branch.
 * <p>
 * <h5>ChangeMessage</h5>
 * <p>
 * The ChangeMessage entity is a general free-form comment about the whole
 * change, rather than PatchLineComment's file and line specific context. The
 * ChangeMessage appears at the start of any email generated by Gerrit, and is
 * shown on the change overview page, rather than in a file-specific context.
 * Users often use this entity to describe general remarks about the overall
 * concept proposed by the change.
 * <p>
 * <h5>PatchSetApproval</h5>
 * <p>
 * PatchSetApproval entities exist to fill in the <i>cells</i> of the approvals
 * table in the web UI. That is, a single PatchSetApproval record's key is the
 * tuple {@code (PatchSet,Account,ApprovalCategory)}. Each PatchSetApproval
 * carries with it a small score value, typically within the range -2..+2.
 * <p>
 * If an Account has created only PatchSetApprovals with a score value of 0, the
 * Change shows in their dashboard, and they are said to be CC'd (carbon copied)
 * on the Change, but are not a direct reviewer. This often happens when an
 * account was specified at upload time with the {@code --cc} command line flag,
 * or have published comments, but left the approval scores at 0 ("No Score").
 * <p>
 * If an Account has one or more PatchSetApprovals with a score != 0, the Change
 * shows in their dashboard, and they are said to be an active reviewer. Such
 * individuals are highlighted when notice of a replacement patch set is sent,
 * or when notice of the change submission occurs.
 */
public final class Change {
  /** Legacy numeric identifier of a change; also encoded in change refs. */
  public static class Id extends IntKey<com.google.gwtorm.client.Key<?>> {
    private static final long serialVersionUID = 1L;

    @Column(id = 1)
    public int id;

    protected Id() {
    }

    public Id(final int id) {
      this.id = id;
    }

    @Override
    public int get() {
      return id;
    }

    @Override
    protected void set(int newValue) {
      id = newValue;
    }

    /** Ref prefix for this change, e.g. {@code refs/changes/23/1223/}. */
    public String toRefPrefix() {
      return refPrefixBuilder().toString();
    }

    StringBuilder refPrefixBuilder() {
      StringBuilder r = new StringBuilder(32).append(REFS_CHANGES);
      // Shard directory: last two decimal digits of the id, zero-padded.
      int m = id % 100;
      if (m < 10) {
        r.append('0');
      }
      return r.append(m).append('/').append(id).append('/');
    }

    /** Parse a Change.Id out of a string representation. */
    public static Id parse(final String str) {
      final Id r = new Id();
      r.fromString(str);
      return r;
    }

    /**
     * Extract the change id from a full change ref, or null if the ref is not
     * a valid meta ref or patch-set ref under refs/changes/.
     */
    public static Id fromRef(String ref) {
      int cs = startIndex(ref);
      if (cs < 0) {
        return null;
      }
      int ce = nextNonDigit(ref, cs);
      if (ref.substring(ce).equals(RefNames.META_SUFFIX)
          || PatchSet.Id.fromRef(ref, ce) >= 0) {
        return new Change.Id(Integer.parseInt(ref.substring(cs, ce)));
      }
      return null;
    }

    /** Extract the change id from the change part of an edit ref. */
    public static Id fromEditRefPart(String ref) {
      int startChangeId = ref.indexOf(RefNames.EDIT_PREFIX) + RefNames.EDIT_PREFIX.length();
      int endChangeId = nextNonDigit(ref, startChangeId);
      String id = ref.substring(startChangeId, endChangeId);
      return new Change.Id(Integer.parseInt(id));
    }

    public static Id fromRefPart(String ref) {
      Integer id = RefNames.parseShardedRefPart(ref);
      return id != null ? new Change.Id(id) : null;
    }

    /**
     * Index of the first digit of the change id within a sharded change ref,
     * or -1 if the ref is not well-formed (wrong prefix, bad shard, leading
     * zero, or shard digits inconsistent with the id digits).
     */
    static int startIndex(String ref) {
      if (ref == null || !ref.startsWith(REFS_CHANGES)) {
        return -1;
      }

      // Last 2 digits.
      int ls = REFS_CHANGES.length();
      int le = nextNonDigit(ref, ls);
      if (le - ls != 2 || le >= ref.length() || ref.charAt(le) != '/') {
        return -1;
      }

      // Change ID.
      int cs = le + 1;
      if (cs >= ref.length() || ref.charAt(cs) == '0') {
        return -1;
      }
      int ce = nextNonDigit(ref, cs);
      if (ce >= ref.length() || ref.charAt(ce) != '/') {
        return -1;
      }
      // Verify the shard directory actually matches the id's last digits.
      switch (ce - cs) {
        case 0:
          return -1;
        case 1:
          // Single-digit id: shard must be '0' + that digit.
          if (ref.charAt(ls) != '0' || ref.charAt(ls + 1) != ref.charAt(cs)) {
            return -1;
          }
          break;
        default:
          if (ref.charAt(ls) != ref.charAt(ce - 2)
              || ref.charAt(ls + 1) != ref.charAt(ce - 1)) {
            return -1;
          }
          break;
      }
      return cs;
    }

    /** First index at or after {@code i} that is not an ASCII digit. */
    static int nextNonDigit(String s, int i) {
      while (i < s.length() && s.charAt(i) >= '0' && s.charAt(i) <= '9') {
        i++;
      }
      return i;
    }
  }

  /** Globally unique identification of this change. */
  public static class Key extends StringKey<com.google.gwtorm.client.Key<?>> {
    private static final long serialVersionUID = 1L;

    @Column(id = 1, length = 60)
    protected String id;

    protected Key() {
    }

    public Key(final String id) {
      this.id = id;
    }

    @Override
    public String get() {
      return id;
    }

    @Override
    protected void set(String newValue) {
      id = newValue;
    }

    /** Construct a key that is after all keys prefixed by this key. */
    public Key max() {
      final StringBuilder revEnd = new StringBuilder(get().length() + 1);
      revEnd.append(get());
      // High code point so the result sorts after any key sharing this prefix.
      revEnd.append('\u9fa5');
      return new Key(revEnd.toString());
    }

    /** Obtain a shorter version of this key string, using a leading prefix. */
    public String abbreviate() {
      final String s = get();
      return s.substring(0, Math.min(s.length(), 9));
    }

    /** Parse a Change.Key out of a string representation. */
    public static Key parse(final String str) {
      final Key r = new Key();
      r.fromString(str);
      return r;
    }
  }

  /** Minimum database status constant for an open change. */
  private static final char MIN_OPEN = 'a';
  /** Database constant for {@link Status#NEW}. */
  public static final char STATUS_NEW = 'n';
  /** Database constant for {@link Status#DRAFT}. */
  public static final char STATUS_DRAFT = 'd';
  /** Maximum database status constant for an open change. */
  private static final char MAX_OPEN = 'z';

  /** Database constant for {@link Status#MERGED}. */
  public static final char STATUS_MERGED = 'M';

  /** ID number of the first patch set in a change. */
  public static final int INITIAL_PATCH_SET_ID = 1;

  /**
   * Current state within the basic workflow of the change.
   *
   * <p>
   * Within the database, lower case codes ('a'..'z') indicate a change that is
   * still open, and that can be modified/refined further, while upper case
   * codes ('A'..'Z') indicate a change that is closed and cannot be further
   * modified.
   */
  public enum Status {
    /**
     * Change is open and pending review, or review is in progress.
     *
     * <p>
     * This is the default state assigned to a change when it is first created
     * in the database. A change stays in the NEW state throughout its review
     * cycle, until the change is submitted or abandoned.
     *
     * <p>
     * Changes in the NEW state can be moved to:
     * <ul>
     * <li>{@link #MERGED} - when the Submit Patch Set action is used;
     * <li>{@link #ABANDONED} - when the Abandon action is used.
     * </ul>
     */
    NEW(STATUS_NEW, ChangeStatus.NEW),

    /**
     * Change is a draft change that only consists of draft patchsets.
     *
     * <p>
     * This is a change that is not meant to be submitted or reviewed yet. If
     * the uploader publishes the change, it becomes a NEW change.
     * Publishing is a one-way action, a change cannot return to DRAFT status.
     * Draft changes are only visible to the uploader and those explicitly
     * added as reviewers.
     *
     * <p>
     * Changes in the DRAFT state can be moved to:
     * <ul>
     * <li>{@link #NEW} - when the change is published, it becomes a new change;
     * </ul>
     */
    DRAFT(STATUS_DRAFT, ChangeStatus.DRAFT),

    /**
     * Change is closed, and submitted to its destination branch.
     *
     * <p>
     * Once a change has been merged, it cannot be further modified by adding a
     * replacement patch set. Draft comments however may be published,
     * supporting a post-submit review.
     */
    MERGED(STATUS_MERGED, ChangeStatus.MERGED),

    /**
     * Change is closed, but was not submitted to its destination branch.
     *
     * <p>
     * Once a change has been abandoned, it cannot be further modified by adding
     * a replacement patch set, and it cannot be merged. Draft comments however
     * may be published, permitting reviewers to send constructive feedback.
     */
    ABANDONED('A', ChangeStatus.ABANDONED);

    // Fail fast at class load if this enum ever diverges from the API-facing
    // ChangeStatus enum (same size, same constant names, same order-by-name).
    static {
      boolean ok = true;
      if (Status.values().length != ChangeStatus.values().length) {
        ok = false;
      }
      for (Status s : Status.values()) {
        ok &= s.name().equals(s.changeStatus.name());
      }
      if (!ok) {
        throw new IllegalStateException("Mismatched status mapping: "
            + Arrays.asList(Status.values()) + " != "
            + Arrays.asList(ChangeStatus.values()));
      }
    }

    private final char code;
    private final boolean closed;
    private final ChangeStatus changeStatus;

    Status(char c, ChangeStatus cs) {
      code = c;
      // Lower-case codes are open; everything else is closed (see class doc).
      closed = !(MIN_OPEN <= c && c <= MAX_OPEN);
      changeStatus = cs;
    }

    public char getCode() {
      return code;
    }

    public boolean isOpen() {
      return !closed;
    }

    public boolean isClosed() {
      return closed;
    }

    public ChangeStatus asChangeStatus() {
      return changeStatus;
    }

    /** Look up a Status by its single-char database code, or null if unknown. */
    public static Status forCode(final char c) {
      for (final Status s : Status.values()) {
        if (s.code == c) {
          return s;
        }
      }
      return null;
    }

    public static Status forChangeStatus(ChangeStatus cs) {
      for (Status s : Status.values()) {
        if (s.changeStatus == cs) {
          return s;
        }
      }
      return null;
    }
  }

  /** Locally assigned unique identifier of the change */
  @Column(id = 1)
  protected Id changeId;

  /** Globally assigned unique identifier of the change */
  @Column(id = 2)
  protected Key changeKey;

  /** optimistic locking */
  @Column(id = 3)
  @RowVersion
  protected int rowVersion;

  /** When this change was first introduced into the database. */
  @Column(id = 4)
  protected Timestamp createdOn;

  /**
   * When was a meaningful modification last made to this record's data
   * <p>
   * Note, this update timestamp includes its children.
   */
  @Column(id = 5)
  protected Timestamp lastUpdatedOn;

  // DELETED: id = 6 (sortkey)

  @Column(id = 7, name = "owner_account_id")
  protected Account.Id owner;

  /** The branch (and project) this change merges into. */
  @Column(id = 8)
  protected Branch.NameKey dest;

  // DELETED: id = 9 (open)

  /** Current state code; see {@link Status}. */
  @Column(id = 10)
  protected char status;

  // DELETED: id = 11 (nbrPatchSets)

  /** The current patch set. */
  @Column(id = 12)
  protected int currentPatchSetId;

  /** Subject from the current patch set. */
  @Column(id = 13)
  protected String subject;

  /** Topic name assigned by the user, if any. */
  @Column(id = 14, notNull = false)
  protected String topic;

  // DELETED: id = 15 (lastSha1MergeTested)
  // DELETED: id = 16 (mergeable)

  /**
   * First line of first patch set's commit message.
   * <p>
   * Unlike {@link #subject}, this string does not change if future patch sets
   * change the first line.
   */
  @Column(id = 17, notNull = false)
  protected String originalSubject;

  /**
   * Unique id for the changes submitted together assigned during merging.
   * Only set if the status is MERGED.
   */
  @Column(id = 18, notNull = false)
  protected String submissionId;

  /** @see com.google.gerrit.server.notedb.NoteDbChangeState */
  @Column(id = 101, notNull = false, length = Integer.MAX_VALUE)
  protected String noteDbState;

  protected Change() {
  }

  public Change(Change.Key newKey, Change.Id newId, Account.Id ownedBy,
      Branch.NameKey forBranch, Timestamp ts) {
    changeKey = newKey;
    changeId = newId;
    createdOn = ts;
    lastUpdatedOn = createdOn;
    owner = ownedBy;
    dest = forBranch;
    setStatus(Status.NEW);
  }

  /** Field-by-field copy constructor. */
  public Change(Change other) {
    changeId = other.changeId;
    changeKey = other.changeKey;
    rowVersion = other.rowVersion;
    createdOn = other.createdOn;
    lastUpdatedOn = other.lastUpdatedOn;
    owner = other.owner;
    dest = other.dest;
    status = other.status;
    currentPatchSetId = other.currentPatchSetId;
    subject = other.subject;
    originalSubject = other.originalSubject;
    submissionId = other.submissionId;
    topic = other.topic;
    noteDbState = other.noteDbState;
  }

  /** Legacy 32 bit integer identity for a change. */
  public Change.Id getId() {
    return changeId;
  }

  /** Legacy 32 bit integer identity for a change. */
  public int getChangeId() {
    return changeId.get();
  }

  /** The Change-Id tag out of the initial commit, or a natural key. */
  public Change.Key getKey() {
    return changeKey;
  }

  public void setKey(final Change.Key k) {
    changeKey = k;
  }

  public Timestamp getCreatedOn() {
    return createdOn;
  }

  public void setCreatedOn(Timestamp ts) {
    createdOn = ts;
  }

  public Timestamp getLastUpdatedOn() {
    return lastUpdatedOn;
  }

  public void setLastUpdatedOn(Timestamp now) {
    lastUpdatedOn = now;
  }

  public int getRowVersion() {
    return rowVersion;
  }

  public Account.Id getOwner() {
    return owner;
  }

  public void setOwner(Account.Id owner) {
    this.owner = owner;
  }

  public Branch.NameKey getDest() {
    return dest;
  }

  public void setDest(Branch.NameKey dest) {
    this.dest = dest;
  }

  public Project.NameKey getProject() {
    return dest.getParentKey();
  }

  public String getSubject() {
    return subject;
  }

  /** Original subject, falling back to the current subject when unset. */
  public String getOriginalSubject() {
    return originalSubject != null ? originalSubject : subject;
  }

  public String getOriginalSubjectOrNull() {
    return originalSubject;
  }

  /** Get the id of the most current {@link PatchSet} in this change. */
  public PatchSet.Id currentPatchSetId() {
    if (currentPatchSetId > 0) {
      return new PatchSet.Id(changeId, currentPatchSetId);
    }
    return null;
  }

  public void setCurrentPatchSet(final PatchSetInfo ps) {
    if (originalSubject == null && subject != null) {
      // Change was created before schema upgrade. Use the last subject
      // associated with this change, as the most recent discussion will
      // be under that thread in an email client such as GMail.
      originalSubject = subject;
    }

    currentPatchSetId = ps.getKey().get();
    subject = ps.getSubject();

    if (originalSubject == null) {
      // Newly created changes remember the first commit's subject.
      originalSubject = subject;
    }
  }

  public void setCurrentPatchSet(PatchSet.Id psId, String subject,
      String originalSubject) {
    if (!psId.getParentKey().equals(changeId)) {
      throw new IllegalArgumentException(
          "patch set ID " + psId + " is not for change " + changeId);
    }
    currentPatchSetId = psId.get();
    this.subject = subject;
    this.originalSubject = originalSubject;
  }

  public void clearCurrentPatchSet() {
    currentPatchSetId = 0;
    subject = null;
    originalSubject = null;
  }

  public String getSubmissionId() {
    return submissionId;
  }

  public void setSubmissionId(String id) {
    this.submissionId = id;
  }

  public Status getStatus() {
    return Status.forCode(status);
  }

  public void setStatus(Status newStatus) {
    status = newStatus.getCode();
  }

  public String getTopic() {
    return topic;
  }

  public void setTopic(String topic) {
    this.topic = topic;
  }

  public String getNoteDbState() {
    return noteDbState;
  }

  public void setNoteDbState(String state) {
    noteDbState = state;
  }

  @Override
  public String toString() {
    return new StringBuilder(getClass().getSimpleName())
        .append('{').append(changeId)
        .append(" (").append(changeKey).append("), ")
        .append("dest=").append(dest).append(", ")
        .append("status=").append(status).append('}')
        .toString();
  }
}
package com.github.neuralnetworks.builder.layer;

import com.github.neuralnetworks.architecture.ConnectionFactory;
import com.github.neuralnetworks.architecture.Conv2DConnection;
import com.github.neuralnetworks.architecture.Layer;
import com.github.neuralnetworks.architecture.NeuralNetworkImpl;
import com.github.neuralnetworks.builder.activation.ActivationType;
import com.github.neuralnetworks.builder.activation.LayerUtil;
import com.github.neuralnetworks.builder.activation.TransferFunctionType;
import com.github.neuralnetworks.builder.layer.structure.BiasLayerConnectable;
import com.github.neuralnetworks.builder.layer.structure.DropOutableLayer;
import com.github.neuralnetworks.builder.layer.structure.KernelUsageOptions;
import com.github.neuralnetworks.builder.layer.structure.LearnableLayer;
import com.github.neuralnetworks.builder.layer.structure.MainFunctionsChangeable;
import com.github.neuralnetworks.builder.layer.structure.NamedSingleInputLayerBuilder;
import com.github.neuralnetworks.calculation.LayerCalculatorImpl;
import com.github.neuralnetworks.calculation.operations.ConnectionCalculatorImpl;
import com.github.neuralnetworks.calculation.operations.OperationsFactory;
import com.github.neuralnetworks.calculation.operations.cpu.ConstantConnectionCalculator;
import com.github.neuralnetworks.training.Hyperparameters;
import com.github.neuralnetworks.training.random.RandomInitializer;
import com.github.neuralnetworks.training.random.WeightInitializerFactory;
import com.github.neuralnetworks.util.Constants;

/**
 * Builder for a 2D convolutional layer (optionally with a bias layer),
 * configuring filter geometry, stride, padding, activation/transfer functions,
 * per-connection hyperparameters, dropout, and weight initialization.
 *
 * @author tmey
 */
public class ConvolutionalLayerBuilder extends NamedSingleInputLayerBuilder
        implements LearnableLayer, BiasLayerConnectable, DropOutableLayer, MainFunctionsChangeable, KernelUsageOptions {

    /**
     * 0 means stride = filterSize
     */
    // NOTE(review): build() actually defaults a 0 stride to 1, not to the filter
    // size — confirm which behavior is intended and reconcile doc vs. code.
    private int strideRows = 0;
    private int strideColumns = 0;

    private int paddingRows = 0;
    private int paddingColumns = 0;

    private int filterRows = 3;
    private int filterColumns = 3;
    private int featureMaps = 1;

    private ActivationType activationType = ActivationType.Nothing;
    private TransferFunctionType transferFunctionType = TransferFunctionType.Conv2D;

    // hyper parameters (-1 means "unset; keep the network default")
    private float learningRate = -1;
    private float momentum = -1;
    private float l1weightDecay = -1;
    private float l2weightDecay = -1;
    private float dropoutRate = -1;

    private boolean addBias = true;
    private float biasLearningRate = -1;
    private float biasMomentum = -1;
    private float biasL1weightDecay = -1;
    private float biasL2weightDecay = -1;

    private RandomInitializer weightInitializer = null;
    private RandomInitializer biasWeightInitializer = null;

    /** Square filter of the given size with the given number of feature maps. */
    public ConvolutionalLayerBuilder(int filterSize, int featureMaps)
    {
        super("ConvolutionalLayer");

        this.setFilterSize(filterSize);
        this.setFeatureMaps(featureMaps);
    }

    /** Rectangular filter (height x width) with the given number of feature maps. */
    public ConvolutionalLayerBuilder(int filterSizeH, int filterSizeW, int featureMaps)
    {
        super("ConvolutionalLayer");

        this.setFilterColumns(filterSizeW);
        this.setFilterRows(filterSizeH);
        this.setFeatureMaps(featureMaps);
    }

    /**
     * Creates the convolutional layer (and optional bias layer), wires it to
     * {@code inputLayer} via the network's ConnectionFactory, applies
     * activation/transfer function and dropout, registers the layer(s) with
     * the network, and records any explicitly-set hyperparameters.
     */
    @Override
    protected Layer build(NeuralNetworkImpl neuralNetwork, String newLayerName, Layer inputLayer, Hyperparameters hyperparameters)
    {
        ConnectionFactory cf = neuralNetwork.getProperties().getParameter(Constants.CONNECTION_FACTORY);

        Layer newLayer = new Layer();
        Layer biasLayer = null;

        // initialize default parameter the right way
        int localStrideRows = strideRows;
        int localStrideColumns = strideColumns;
        if (localStrideColumns == 0)
        {
            localStrideColumns = 1;
        }
        if (localStrideRows == 0)
        {
            localStrideRows = 1;
        }

        // search last connection
        int inputFMRows = -1;
        int inputFMCols = -1;
        int inputFilters = -1;

        int[] layerDimension = inputLayer.getLayerDimension();
        if (layerDimension.length != 3)
        {
            // NOTE(review): message is garbled — the check requires exactly a
            // 3-dimensional input layer, so it should read something like
            // "must be connected to a 3 dimensional layer".
            throw new IllegalStateException("The current layer should be connected to a not 3 dimensional layer (" + layerDimension.length + ")");
        }
        inputFMRows = layerDimension[0];
        inputFMCols = layerDimension[1];
        inputFilters = layerDimension[2];

        if (inputFMRows < 1 || inputFMCols < 1 || inputFilters < 1)
        {
            // NOTE(review): exception message is truncated ("The inputLayer is")
            // — it should describe the invalid dimensions.
            throw new IllegalStateException("The inputLayer is");
        }

        // connect layer
        Conv2DConnection convConnection = cf.conv2d(inputLayer, newLayer, inputFMRows, inputFMCols, inputFilters, filterRows, filterColumns, featureMaps, localStrideRows, localStrideColumns,
                paddingRows, paddingColumns);
        newLayer.setLayerDimension(new int[] { convConnection.getOutputFeatureMapRowsWithPadding(), convConnection.getOutputFeatureMapColumnsWithPadding(), convConnection.getOutputFilters() });
        newLayer.setName(newLayerName);

        if (weightInitializer != null)
        {
            WeightInitializerFactory.initializeWeights(convConnection, weightInitializer);
        }

        if (neuralNetwork.getLayerCalculator() != null)
        {
            LayerUtil.changeActivationAndTransferFunction(neuralNetwork.getLayerCalculator(), newLayer, transferFunctionType, activationType);

            // activate drop out
            if (this.dropoutRate > 0)
            {
                LayerCalculatorImpl lc = (LayerCalculatorImpl) neuralNetwork.getLayerCalculator();
                if (lc.getConnectionCalculator(newLayer) instanceof ConnectionCalculatorImpl)
                {
                    ConnectionCalculatorImpl cc = (ConnectionCalculatorImpl) lc.getConnectionCalculator(newLayer);
                    cc.addActivationFunction(OperationsFactory.noiseMask(this.dropoutRate, 0));
                }
            }
        }

        Conv2DConnection biasConnection = null;
        if (addBias)
        {
            // The bias layer is created inline (biasLayer = new Layer()) and
            // connected to newLayer with a 1x1 convolution per feature map.
            biasConnection = cf.conv2d(biasLayer = new Layer(), newLayer, convConnection.getOutputFeatureMapRows(), convConnection.getOutputFeatureMapColumns(), 1, 1, 1, featureMaps, 1, 1,
                    paddingRows, paddingColumns);
            if (neuralNetwork.getLayerCalculator() != null)
            {
                ((LayerCalculatorImpl) neuralNetwork.getLayerCalculator()).addConnectionCalculator(biasLayer, new ConstantConnectionCalculator());
            }
            biasLayer.setLayerDimension(new int[] { newLayer.getNeuronCount() });
            biasLayer.setName("bias_layer_to_" + newLayerName);

            if (biasWeightInitializer != null)
            {
                WeightInitializerFactory.initializeWeights(biasConnection, biasWeightInitializer);
            }
        }

        // add new layer to the network
        neuralNetwork.addLayer(newLayer);
        if (biasLayer != null)
        {
            neuralNetwork.addLayer(biasLayer);
        }

        // set hyperparameters (only those explicitly set; -1 means "unset")
        if (hyperparameters != null)
        {
            if (learningRate != -1)
            {
                hyperparameters.setLearningRate(convConnection, learningRate);
            }
            if (momentum != -1)
            {
                hyperparameters.setMomentum(convConnection, momentum);
            }
            if (l1weightDecay != -1)
            {
                hyperparameters.setL1WeightDecay(convConnection, l1weightDecay);
            }
            if (l2weightDecay != -1)
            {
                hyperparameters.setL2WeightDecay(convConnection, l2weightDecay);
            }

            // bias layer
            if (biasConnection != null)
            {
                if (biasLearningRate != -1)
                {
                    hyperparameters.setLearningRate(biasConnection, biasLearningRate);
                }
                if (biasMomentum != -1)
                {
                    hyperparameters.setMomentum(biasConnection, biasMomentum);
                }
                if (biasL1weightDecay != -1)
                {
                    hyperparameters.setL1WeightDecay(biasConnection, biasL1weightDecay);
                }
                if (biasL2weightDecay != -1)
                {
                    hyperparameters.setL2WeightDecay(biasConnection, biasL2weightDecay);
                }
            }
        }

        return newLayer;
    }

    /** Sets a square filter: both filter rows and columns become {@code size}. */
    public void setFilterSize(int size)
    {
        if (size <= 0)
        {
            throw new IllegalArgumentException("The filter size must be greater than 0!");
        }

        this.filterColumns = size;
        this.filterRows = size;
    }

    /** Sets symmetric padding; 0 (and negative values) are not rejected here. */
    public void setPaddingSize(int size)
    {
        // if (size <= 0)
        // {
        // throw new IllegalArgumentException("The padding size must be greater than 0!");
        // }
        // NOTE(review): validation is commented out, unlike setPaddingRows /
        // setPaddingColumns which reject negative values — confirm intent.

        this.paddingColumns = size;
        this.paddingRows = size;
    }

    /** Sets the same stride for rows and columns. */
    public void setStrideSize(int size)
    {
        if (size <= 0)
        {
            throw new IllegalArgumentException("The stride size must be greater than 0!");
        }

        this.strideColumns = size;
        this.strideRows = size;
    }

    public void setFeatureMaps(int featureMaps)
    {
        if (featureMaps <= 0)
        {
            throw new IllegalArgumentException("The number of featureMaps must be greater than 0!");
        }
        this.featureMaps = featureMaps;
    }

    public TransferFunctionType getTransferFunctionType()
    {
        return transferFunctionType;
    }

    public ConvolutionalLayerBuilder setTransferFunctionType(TransferFunctionType transferFunctionType)
    {
        if (transferFunctionType == null)
        {
            throw new IllegalArgumentException("transferFunctionType must be not null!");
        }

        this.transferFunctionType = transferFunctionType;
        return this;
    }

    public ActivationType getActivationType()
    {
        return activationType;
    }

    public ConvolutionalLayerBuilder setActivationType(ActivationType activationType)
    {
        if (activationType == null)
        {
            throw new IllegalArgumentException("activationType must be not null!");
        }
        this.activationType = activationType;
        return this;
    }

    @Override
    public void setLearningRate(float learningRate)
    {
        if (learningRate <= 0)
        {
            throw new IllegalArgumentException("The learning rate must be greater than 0!");
        }
        this.learningRate = learningRate;
    }

    @Override
    public void setMomentum(float momentum)
    {
        if (momentum < 0)
        {
            throw new IllegalArgumentException("The momentum must be equals or greater than 0!");
        }
        this.momentum = momentum;
    }

    @Override
    public void setL1weightDecay(float l1weightDecay)
    {
        if (l1weightDecay < 0)
        {
            throw new IllegalArgumentException("The l1weightDecay must be equals or greater than 0!");
        }
        this.l1weightDecay = l1weightDecay;
    }

    @Override
    public void setL2weightDecay(float l2weightDecay)
    {
        if (l2weightDecay < 0)
        {
            throw new IllegalArgumentException("The l2weightDecay must be equals or greater than 0!");
        }
        this.l2weightDecay = l2weightDecay;
    }

    @Override
    public void setDropoutRate(float dropoutRate)
    {
        if (dropoutRate < 0)
        {
            throw new IllegalArgumentException("The dropoutRate must be equals or greater than 0!");
        }
        this.dropoutRate = dropoutRate;
    }

    @Override
    public boolean isAddBias()
    {
        return this.addBias;
    }

    @Override
    public void setAddBias(boolean addBias)
    {
        this.addBias = addBias;
    }

    @Override
    public RandomInitializer getWeightInitializer()
    {
        return weightInitializer;
    }

    @Override
    public void setWeightInitializer(RandomInitializer weightInitializer)
    {
        this.weightInitializer = weightInitializer;
    }

    @Override
    public RandomInitializer getBiasWeightInitializer()
    {
        return biasWeightInitializer;
    }

    @Override
    public void setBiasWeightInitializer(RandomInitializer biasWeightInitializer)
    {
        this.biasWeightInitializer = biasWeightInitializer;
    }

    public float getBiasLearningRate()
    {
        return biasLearningRate;
    }

    public void setBiasLearningRate(float biasLearningRate)
    {
        if (biasLearningRate <= 0)
        {
            throw new IllegalArgumentException("The biasLearningRate must be greater than 0!");
        }
        this.biasLearningRate = biasLearningRate;
    }

    public float getBiasMomentum()
    {
        return biasMomentum;
    }

    public void setBiasMomentum(float biasMomentum)
    {
        if (biasMomentum < 0)
        {
            throw new IllegalArgumentException("The biasMomentum must be equals or greater than 0!");
        }
        this.biasMomentum = biasMomentum;
    }

    public float getBiasL1weightDecay()
    {
        return biasL1weightDecay;
    }

    public void setBiasL1weightDecay(float biasL1weightDecay)
    {
        if (biasL1weightDecay < 0)
        {
            throw new IllegalArgumentException("The biasL1weightDecay must be equals or greater than 0!");
        }
        this.biasL1weightDecay = biasL1weightDecay;
    }

    public float getBiasL2weightDecay()
    {
        return biasL2weightDecay;
    }

    public void setBiasL2weightDecay(float biasL2weightDecay)
    {
        if (biasL2weightDecay < 0)
        {
            throw new IllegalArgumentException("The biasL2weightDecay must be equals or greater than 0!");
        }
        this.biasL2weightDecay = biasL2weightDecay;
    }

    public void setStrideRows(int strideRows)
    {
        if (strideRows <= 0)
        {
            throw new IllegalArgumentException("The strideRows must be greater than 0!");
        }
        this.strideRows = strideRows;
    }

    public void setStrideColumns(int strideColumns)
    {
        if (strideColumns <= 0)
        {
            throw new IllegalArgumentException("The strideColumns must be greater than 0!");
        }
        this.strideColumns = strideColumns;
    }

    public void setPaddingRows(int paddingRows)
    {
        if (paddingRows < 0)
        {
            // NOTE(review): message typo — "greater othan r equals" should read
            // "greater than or equal to".
            throw new IllegalArgumentException("The paddingRows must be greater othan r equals 0!");
        }
        this.paddingRows = paddingRows;
    }

    public void setPaddingColumns(int paddingColumns)
    {
        if (paddingColumns < 0)
        {
            // NOTE(review): message says "greater than 0" but the check permits 0
            // — should read "greater than or equal to 0".
            throw new IllegalArgumentException("The paddingColumns must be greater than 0!");
        }
        this.paddingColumns = paddingColumns;
    }

    public void setFilterRows(int filterRows)
    {
        if (filterRows <= 0)
        {
throw new IllegalArgumentException("The filterRows must be greater than 0!"); } this.filterRows = filterRows; } public void setFilterColumns(int filterColumns) { if (filterColumns <= 0) { throw new IllegalArgumentException("The filterColumns must be greater than 0!"); } this.filterColumns = filterColumns; } @Override public String toString() { return "ConvolutionalLayerBuilder{" + "name=" + (getName() != null ? getName() : DEFAULT_LAYER_NAME) + ", inputLayer=" + getInputLayerName() + ", strideRows=" + strideRows + ", strideColumns=" + strideColumns + ", paddingRows=" + paddingRows + ", paddingColumns=" + paddingColumns + ", filterRows=" + filterRows + ", filterColumns=" + filterColumns + ", featureMaps=" + featureMaps + ", activationType=" + activationType + ", transferFunctionType=" + transferFunctionType + ", learningRate=" + learningRate + ", momentum=" + momentum + ", l1weightDecay=" + l1weightDecay + ", l2weightDecay=" + l2weightDecay + ", dropoutRate=" + dropoutRate + ", addBias=" + addBias + ", biasLearningRate=" + biasLearningRate + ", biasMomentum=" + biasMomentum + ", biasL1weightDecay=" + biasL1weightDecay + ", biasL2weightDecay=" + biasL2weightDecay + ", weightInitializer=" + weightInitializer + ", biasWeightInitializer=" + biasWeightInitializer + '}'; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.management.internal.cli.commands; import static org.apache.geode.distributed.ConfigurationProperties.STATISTIC_SAMPLING_ENABLED; import org.apache.commons.lang.StringUtils; import org.apache.geode.SystemFailure; import org.apache.geode.cache.execute.FunctionInvocationTargetException; import org.apache.geode.cache.execute.ResultCollector; import org.apache.geode.distributed.DistributedMember; import org.apache.geode.internal.cache.xmlcache.CacheXml; import org.apache.geode.internal.logging.LogService; import org.apache.geode.internal.logging.log4j.LogLevel; import org.apache.geode.management.cli.CliMetaData; import org.apache.geode.management.cli.ConverterHint; import org.apache.geode.management.cli.Result; import org.apache.geode.management.internal.cli.AbstractCliAroundInterceptor; import org.apache.geode.management.internal.cli.CliUtil; import org.apache.geode.management.internal.cli.GfshParseResult; import org.apache.geode.management.internal.cli.domain.MemberConfigurationInfo; import org.apache.geode.management.internal.cli.functions.AlterRuntimeConfigFunction; import org.apache.geode.management.internal.cli.functions.CliFunctionResult; import 
org.apache.geode.management.internal.cli.functions.ExportConfigFunction;
import org.apache.geode.management.internal.cli.functions.GetMemberConfigInformationFunction;
import org.apache.geode.management.internal.cli.i18n.CliStrings;
import org.apache.geode.management.internal.cli.result.CommandResultException;
import org.apache.geode.management.internal.cli.result.CompositeResultData;
import org.apache.geode.management.internal.cli.result.CompositeResultData.SectionResultData;
import org.apache.geode.management.internal.cli.result.ErrorResultData;
import org.apache.geode.management.internal.cli.result.InfoResultData;
import org.apache.geode.management.internal.cli.result.ResultBuilder;
import org.apache.geode.management.internal.cli.result.TabularResultData;
import org.apache.geode.management.internal.cli.shell.Gfsh;
import org.apache.geode.management.internal.configuration.domain.XmlEntity;
import org.apache.geode.management.internal.security.ResourceOperation;
import org.apache.geode.security.ResourcePermission.Operation;
import org.apache.geode.security.ResourcePermission.Resource;
import org.apache.logging.log4j.Logger;
import org.springframework.shell.core.annotation.CliAvailabilityIndicator;
import org.springframework.shell.core.annotation.CliCommand;
import org.springframework.shell.core.annotation.CliOption;

import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.TreeSet;

/****
 * Gfsh commands for inspecting and altering member configuration:
 * {@code describe config}, {@code export config} and {@code alter runtime}.
 *
 * @since GemFire 7.0
 */
public class ConfigCommands implements GfshCommand {
  private final ExportConfigFunction exportConfigFunction = new ExportConfigFunction();
  private final GetMemberConfigInformationFunction getMemberConfigFunction =
      new GetMemberConfigInformationFunction();
  private final AlterRuntimeConfigFunction alterRunTimeConfigFunction =
      new AlterRuntimeConfigFunction();

  // final: a shared class-level logger constant must never be reassigned.
  private static final Logger logger = LogService.getLogger();

  /**
   * Describes the configuration of a single member: JVM arguments, GemFire properties (grouped by
   * where they were set), cache attributes and cache-server attributes.
   *
   * @param memberNameOrId name or id of the member to describe (mandatory)
   * @param hideDefaults whether properties still at their default values are omitted
   * @return a composite result on success, or an error result when the member is not found or the
   *         function invocation fails
   */
  @CliCommand(value = {CliStrings.DESCRIBE_CONFIG}, help = CliStrings.DESCRIBE_CONFIG__HELP)
  @CliMetaData(relatedTopic = {CliStrings.TOPIC_GEODE_CONFIG})
  @ResourceOperation(resource = Resource.CLUSTER, operation = Operation.READ)
  public Result describeConfig(
      @CliOption(key = CliStrings.MEMBER, optionContext = ConverterHint.ALL_MEMBER_IDNAME,
          help = CliStrings.DESCRIBE_CONFIG__MEMBER__HELP, mandatory = true) String memberNameOrId,
      @CliOption(key = CliStrings.DESCRIBE_CONFIG__HIDE__DEFAULTS,
          help = CliStrings.DESCRIBE_CONFIG__HIDE__DEFAULTS__HELP, unspecifiedDefaultValue = "true",
          specifiedDefaultValue = "true") boolean hideDefaults) {

    Result result = null;
    try {
      DistributedMember targetMember = null;

      if (memberNameOrId != null && !memberNameOrId.isEmpty()) {
        targetMember = CliUtil.getDistributedMemberByNameOrId(memberNameOrId);
      }

      if (targetMember != null) {
        // autoboxing (Boolean.valueOf) instead of the deprecated new Boolean(...) constructor
        ResultCollector<?, ?> rc =
            CliUtil.executeFunction(getMemberConfigFunction, hideDefaults, targetMember);
        ArrayList<?> output = (ArrayList<?>) rc.getResult();
        Object obj = output.get(0);

        // instanceof is false for null, so the explicit null check was redundant
        if (obj instanceof MemberConfigurationInfo) {
          MemberConfigurationInfo memberConfigInfo = (MemberConfigurationInfo) obj;

          CompositeResultData crd = ResultBuilder.createCompositeResultData();
          crd.setHeader(
              CliStrings.format(CliStrings.DESCRIBE_CONFIG__HEADER__TEXT, memberNameOrId));

          List<String> jvmArgsList = memberConfigInfo.getJvmInputArguments();
          TabularResultData jvmInputArgs = crd.addSection().addSection().addTable();
          for (String jvmArg : jvmArgsList) {
            jvmInputArgs.accumulate("JVM command line arguments", jvmArg);
          }

          addSection(crd, memberConfigInfo.getGfePropsSetUsingApi(),
              "GemFire properties defined using the API");
          addSection(crd, memberConfigInfo.getGfePropsRuntime(),
              "GemFire properties defined at the runtime");
          addSection(crd, memberConfigInfo.getGfePropsSetFromFile(),
              "GemFire properties defined with the property file");
          addSection(crd, memberConfigInfo.getGfePropsSetWithDefaults(),
              "GemFire properties using default values");
          addSection(crd, memberConfigInfo.getCacheAttributes(), "Cache attributes");

          List<Map<String, String>> cacheServerAttributesList =
              memberConfigInfo.getCacheServerAttributes();
          if (cacheServerAttributesList != null && !cacheServerAttributesList.isEmpty()) {
            SectionResultData cacheServerSection = crd.addSection();
            cacheServerSection.setHeader("Cache-server attributes");
            for (Map<String, String> cacheServerAttributes : cacheServerAttributesList) {
              addSubSection(cacheServerSection, cacheServerAttributes, "");
            }
          }
          result = ResultBuilder.buildResult(crd);
        }
      } else {
        ErrorResultData erd = ResultBuilder.createErrorResultData();
        erd.addLine(CliStrings.format(CliStrings.DESCRIBE_CONFIG__MEMBER__NOT__FOUND,
            new Object[] {memberNameOrId}));
        result = ResultBuilder.buildResult(erd);
      }
    } catch (FunctionInvocationTargetException e) {
      result = ResultBuilder.createGemFireErrorResult(CliStrings
          .format(CliStrings.COULD_NOT_EXECUTE_COMMAND_TRY_AGAIN, CliStrings.DESCRIBE_CONFIG));
    } catch (Exception e) {
      // Boundary catch: surface the message to the gfsh user instead of crashing the shell.
      // NOTE(review): e.getMessage() may be null for some exception types — consider e.toString().
      ErrorResultData erd = ResultBuilder.createErrorResultData();
      erd.addLine(e.getMessage());
      result = ResultBuilder.buildResult(erd);
    }
    return result;
  }

  /** Adds one sorted key/value section to the composite result, skipping empty maps. */
  private void addSection(CompositeResultData crd, Map<String, String> attrMap,
      String headerText) {
    if (attrMap != null && !attrMap.isEmpty()) {
      SectionResultData section = crd.addSection();
      section.setHeader(headerText);
      section.addSeparator('.');
      Set<String> attributes = new TreeSet<>(attrMap.keySet());
      for (String attribute : attributes) {
        String attributeValue = attrMap.get(attribute);
        section.addData(attribute, attributeValue);
      }
    }
  }

  /** Adds one sorted key/value sub-section under the given section, skipping empty maps. */
  private void addSubSection(SectionResultData section, Map<String, String> attrMap,
      String headerText) {
    if (!attrMap.isEmpty()) {
      SectionResultData subSection = section.addSection();
      Set<String> attributes = new TreeSet<>(attrMap.keySet());
      subSection.setHeader(headerText);
      for (String attribute : attributes) {
        String attributeValue = attrMap.get(attribute);
        subSection.addData(attribute, attributeValue);
      }
    }
  }

  /**
   * Export the cache configuration in XML format.
   *
   * @param member Member for which to write the configuration
   * @param group Group or groups for which to write the configuration
   * @param dir target directory (validated by the {@link Interceptor} before execution)
   * @return Results of the attempt to write the configuration
   */
  @CliCommand(value = {CliStrings.EXPORT_CONFIG}, help = CliStrings.EXPORT_CONFIG__HELP)
  @CliMetaData(
      interceptor = "org.apache.geode.management.internal.cli.commands.ConfigCommands$Interceptor",
      relatedTopic = {CliStrings.TOPIC_GEODE_CONFIG})
  @ResourceOperation(resource = Resource.CLUSTER, operation = Operation.READ)
  public Result exportConfig(
      @CliOption(key = {CliStrings.MEMBER, CliStrings.MEMBERS},
          optionContext = ConverterHint.ALL_MEMBER_IDNAME,
          help = CliStrings.EXPORT_CONFIG__MEMBER__HELP) String[] member,
      @CliOption(key = {CliStrings.GROUP, CliStrings.GROUPS},
          optionContext = ConverterHint.MEMBERGROUP,
          help = CliStrings.EXPORT_CONFIG__GROUP__HELP) String[] group,
      @CliOption(key = {CliStrings.EXPORT_CONFIG__DIR},
          help = CliStrings.EXPORT_CONFIG__DIR__HELP) String dir) {
    InfoResultData infoData = ResultBuilder.createInfoResultData();
    Set<DistributedMember> targetMembers = CliUtil.findMembers(group, member);

    if (targetMembers.isEmpty()) {
      return ResultBuilder.createUserErrorResult(CliStrings.NO_MEMBERS_FOUND_MESSAGE);
    }

    try {
      ResultCollector<?, ?> rc =
          CliUtil.executeFunction(this.exportConfigFunction, null, targetMembers);
      List<CliFunctionResult> results = CliFunctionResult.cleanResults((List<?>) rc.getResult());

      for (CliFunctionResult result : results) {
        if (result.getThrowable() != null) {
          infoData.addLine(CliStrings.format(CliStrings.EXPORT_CONFIG__MSG__EXCEPTION,
              result.getMemberIdOrName(), result.getThrowable()));
        } else if (result.isSuccessful()) {
          // Each member returns [cache-xml-content, properties-content]; stream both back
          // to the shell as downloadable files.
          String cacheFileName = result.getMemberIdOrName() + "-cache.xml";
          String propsFileName = result.getMemberIdOrName() + "-gf.properties";
          String[] fileContent = (String[]) result.getSerializables();
          infoData.addAsFile(cacheFileName, fileContent[0], "Downloading Cache XML file: {0}",
              false);
          infoData.addAsFile(propsFileName, fileContent[1], "Downloading properties file: {0}",
              false);
        }
      }
      return ResultBuilder.buildResult(infoData);
    } catch (VirtualMachineError e) {
      // VM errors must be propagated after recording the failure.
      SystemFailure.initiateFailure(e);
      throw e;
    } catch (Throwable th) {
      SystemFailure.checkFailure();
      th.printStackTrace(System.err);
      return ResultBuilder
          .createGemFireErrorResult(CliStrings.format(CliStrings.EXPORT_CONFIG__MSG__EXCEPTION,
              th.getClass().getName() + ": " + th.getMessage()));
    }
  }

  /**
   * Alters runtime-changeable configuration attributes (logging, statistics, lock and search
   * timeouts, ...) on the selected members, and persists cache-level changes to the shared
   * cluster configuration.
   *
   * @return an info result listing the members that were updated, or an error result when no
   *         relevant option was given or every member failed
   */
  @CliCommand(value = {CliStrings.ALTER_RUNTIME_CONFIG},
      help = CliStrings.ALTER_RUNTIME_CONFIG__HELP)
  @CliMetaData(relatedTopic = {CliStrings.TOPIC_GEODE_CONFIG},
      interceptor = "org.apache.geode.management.internal.cli.commands.ConfigCommands$AlterRuntimeInterceptor")
  @ResourceOperation(resource = Resource.CLUSTER, operation = Operation.MANAGE)
  public Result alterRuntimeConfig(
      @CliOption(key = {CliStrings.MEMBER, CliStrings.MEMBERS},
          optionContext = ConverterHint.ALL_MEMBER_IDNAME,
          help = CliStrings.ALTER_RUNTIME_CONFIG__MEMBER__HELP) String[] memberNameOrId,
      @CliOption(key = {CliStrings.GROUP, CliStrings.GROUPS},
          optionContext = ConverterHint.MEMBERGROUP,
          help = CliStrings.ALTER_RUNTIME_CONFIG__MEMBER__HELP) String[] group,
      @CliOption(key = {CliStrings.ALTER_RUNTIME_CONFIG__ARCHIVE__DISK__SPACE__LIMIT},
          help = CliStrings.ALTER_RUNTIME_CONFIG__ARCHIVE__DISK__SPACE__LIMIT__HELP) Integer archiveDiskSpaceLimit,
      @CliOption(key = {CliStrings.ALTER_RUNTIME_CONFIG__ARCHIVE__FILE__SIZE__LIMIT},
          help = CliStrings.ALTER_RUNTIME_CONFIG__ARCHIVE__FILE__SIZE__LIMIT__HELP) Integer archiveFileSizeLimit,
      @CliOption(key = {CliStrings.ALTER_RUNTIME_CONFIG__LOG__DISK__SPACE__LIMIT},
          help = CliStrings.ALTER_RUNTIME_CONFIG__LOG__DISK__SPACE__LIMIT__HELP) Integer logDiskSpaceLimit,
      @CliOption(key = {CliStrings.ALTER_RUNTIME_CONFIG__LOG__FILE__SIZE__LIMIT},
          help = CliStrings.ALTER_RUNTIME_CONFIG__LOG__FILE__SIZE__LIMIT__HELP) Integer logFileSizeLimit,
      @CliOption(key = {CliStrings.ALTER_RUNTIME_CONFIG__LOG__LEVEL},
          optionContext = ConverterHint.LOG_LEVEL,
          help = CliStrings.ALTER_RUNTIME_CONFIG__LOG__LEVEL__HELP) String logLevel,
      @CliOption(key = {CliStrings.ALTER_RUNTIME_CONFIG__STATISTIC__ARCHIVE__FILE},
          help = CliStrings.ALTER_RUNTIME_CONFIG__STATISTIC__ARCHIVE__FILE__HELP) String statisticArchiveFile,
      @CliOption(key = {CliStrings.ALTER_RUNTIME_CONFIG__STATISTIC__SAMPLE__RATE},
          help = CliStrings.ALTER_RUNTIME_CONFIG__STATISTIC__SAMPLE__RATE__HELP) Integer statisticSampleRate,
      @CliOption(key = {CliStrings.ALTER_RUNTIME_CONFIG__STATISTIC__SAMPLING__ENABLED},
          help = CliStrings.ALTER_RUNTIME_CONFIG__STATISTIC__SAMPLING__ENABLED__HELP) Boolean statisticSamplingEnabled,
      @CliOption(key = {CliStrings.ALTER_RUNTIME_CONFIG__COPY__ON__READ},
          specifiedDefaultValue = "false",
          help = CliStrings.ALTER_RUNTIME_CONFIG__COPY__ON__READ__HELP) Boolean setCopyOnRead,
      @CliOption(key = {CliStrings.ALTER_RUNTIME_CONFIG__LOCK__LEASE},
          help = CliStrings.ALTER_RUNTIME_CONFIG__LOCK__LEASE__HELP) Integer lockLease,
      @CliOption(key = {CliStrings.ALTER_RUNTIME_CONFIG__LOCK__TIMEOUT},
          help = CliStrings.ALTER_RUNTIME_CONFIG__LOCK__TIMEOUT__HELP) Integer lockTimeout,
      @CliOption(key = {CliStrings.ALTER_RUNTIME_CONFIG__MESSAGE__SYNC__INTERVAL},
          help = CliStrings.ALTER_RUNTIME_CONFIG__MESSAGE__SYNC__INTERVAL__HELP) Integer messageSyncInterval,
      @CliOption(key = {CliStrings.ALTER_RUNTIME_CONFIG__SEARCH__TIMEOUT},
          help = CliStrings.ALTER_RUNTIME_CONFIG__SEARCH__TIMEOUT__HELP) Integer searchTimeout) {

    // Distribution-config attributes vs. cache attributes are collected separately because
    // only the cache attributes are persisted into the cluster-configuration XML below.
    Map<String, String> runTimeDistributionConfigAttributes = new HashMap<>();
    Map<String, String> runTimeCacheAttributes = new HashMap<>();
    Set<DistributedMember> targetMembers = CliUtil.findMembers(group, memberNameOrId);

    if (targetMembers.isEmpty()) {
      return ResultBuilder.createUserErrorResult(CliStrings.NO_MEMBERS_FOUND_MESSAGE);
    }

    if (archiveDiskSpaceLimit != null) {
      runTimeDistributionConfigAttributes.put(
          CliStrings.ALTER_RUNTIME_CONFIG__ARCHIVE__DISK__SPACE__LIMIT,
          archiveDiskSpaceLimit.toString());
    }

    if (archiveFileSizeLimit != null) {
      runTimeDistributionConfigAttributes.put(
          CliStrings.ALTER_RUNTIME_CONFIG__ARCHIVE__FILE__SIZE__LIMIT,
          archiveFileSizeLimit.toString());
    }

    if (logDiskSpaceLimit != null) {
      runTimeDistributionConfigAttributes.put(
          CliStrings.ALTER_RUNTIME_CONFIG__LOG__DISK__SPACE__LIMIT, logDiskSpaceLimit.toString());
    }

    if (logFileSizeLimit != null) {
      runTimeDistributionConfigAttributes.put(
          CliStrings.ALTER_RUNTIME_CONFIG__LOG__FILE__SIZE__LIMIT, logFileSizeLimit.toString());
    }

    if (logLevel != null && !logLevel.isEmpty()) {
      runTimeDistributionConfigAttributes.put(CliStrings.ALTER_RUNTIME_CONFIG__LOG__LEVEL,
          logLevel);
    }

    if (statisticArchiveFile != null && !statisticArchiveFile.isEmpty()) {
      runTimeDistributionConfigAttributes
          .put(CliStrings.ALTER_RUNTIME_CONFIG__STATISTIC__ARCHIVE__FILE, statisticArchiveFile);
    }

    if (statisticSampleRate != null) {
      runTimeDistributionConfigAttributes.put(
          CliStrings.ALTER_RUNTIME_CONFIG__STATISTIC__SAMPLE__RATE, statisticSampleRate.toString());
    }

    if (statisticSamplingEnabled != null) {
      runTimeDistributionConfigAttributes.put(STATISTIC_SAMPLING_ENABLED,
          statisticSamplingEnabled.toString());
    }

    // Attributes that are set on the cache.
    if (setCopyOnRead != null) {
      runTimeCacheAttributes.put(CliStrings.ALTER_RUNTIME_CONFIG__COPY__ON__READ,
          setCopyOnRead.toString());
    }

    if (lockLease != null && lockLease > 0 && lockLease < Integer.MAX_VALUE) {
      runTimeCacheAttributes.put(CliStrings.ALTER_RUNTIME_CONFIG__LOCK__LEASE,
          lockLease.toString());
    }

    if (lockTimeout != null && lockTimeout > 0 && lockTimeout < Integer.MAX_VALUE) {
      runTimeCacheAttributes.put(CliStrings.ALTER_RUNTIME_CONFIG__LOCK__TIMEOUT,
          lockTimeout.toString());
    }

    if (messageSyncInterval != null && messageSyncInterval > 0
        && messageSyncInterval < Integer.MAX_VALUE) {
      runTimeCacheAttributes.put(CliStrings.ALTER_RUNTIME_CONFIG__MESSAGE__SYNC__INTERVAL,
          messageSyncInterval.toString());
    }

    if (searchTimeout != null && searchTimeout > 0 && searchTimeout < Integer.MAX_VALUE) {
      runTimeCacheAttributes.put(CliStrings.ALTER_RUNTIME_CONFIG__SEARCH__TIMEOUT,
          searchTimeout.toString());
    }

    if (runTimeDistributionConfigAttributes.isEmpty() && runTimeCacheAttributes.isEmpty()) {
      return ResultBuilder
          .createUserErrorResult(CliStrings.ALTER_RUNTIME_CONFIG__RELEVANT__OPTION__MESSAGE);
    }

    Map<String, String> allRunTimeAttributes = new HashMap<>();
    allRunTimeAttributes.putAll(runTimeDistributionConfigAttributes);
    allRunTimeAttributes.putAll(runTimeCacheAttributes);

    ResultCollector<?, ?> rc =
        CliUtil.executeFunction(alterRunTimeConfigFunction, allRunTimeAttributes, targetMembers);
    List<CliFunctionResult> results = CliFunctionResult.cleanResults((List<?>) rc.getResult());
    Set<String> successfulMembers = new TreeSet<>();
    Set<String> errorMessages = new TreeSet<>();

    for (CliFunctionResult result : results) {
      if (result.getThrowable() != null) {
        logger.info("Function failed: " + result.getThrowable());
        errorMessages.add(result.getThrowable().getMessage());
      } else {
        successfulMembers.add(result.getMemberIdOrName());
      }
    }

    // System.lineSeparator() instead of the older System.getProperty("line.separator")
    final String lineSeparator = System.lineSeparator();

    if (!successfulMembers.isEmpty()) {
      StringBuilder successMessageBuilder = new StringBuilder();
      successMessageBuilder.append(CliStrings.ALTER_RUNTIME_CONFIG__SUCCESS__MESSAGE);
      successMessageBuilder.append(lineSeparator);

      for (String member : successfulMembers) {
        successMessageBuilder.append(member);
        successMessageBuilder.append(lineSeparator);
      }

      Properties properties = new Properties();
      properties.putAll(runTimeDistributionConfigAttributes);

      Result result = ResultBuilder.createInfoResult(successMessageBuilder.toString());

      // Set the Cache attributes to be modified
      final XmlEntity xmlEntity = XmlEntity.builder().withType(CacheXml.CACHE)
          .withAttributes(runTimeCacheAttributes).build();
      persistClusterConfiguration(result,
          () -> getSharedConfiguration().modifyXmlAndProperties(properties, xmlEntity, group));
      return result;
    } else {
      StringBuilder errorMessageBuilder = new StringBuilder();
      errorMessageBuilder.append("Following errors occurred while altering runtime config");
      errorMessageBuilder.append(lineSeparator);

      for (String errorMessage : errorMessages) {
        errorMessageBuilder.append(errorMessage);
        errorMessageBuilder.append(lineSeparator);
      }
      return ResultBuilder.createUserErrorResult(errorMessageBuilder.toString());
    }
  }

  /** Validates the {@code log-level} option of {@code alter runtime} before execution. */
  public static class AlterRuntimeInterceptor extends AbstractCliAroundInterceptor {
    @Override
    public Result preExecution(GfshParseResult parseResult) {
      Map<String, String> arguments = parseResult.getParamValueStrings();
      // validate log level
      String logLevel = arguments.get("log-level");
      if (StringUtils.isNotBlank(logLevel) && (LogLevel.getLevel(logLevel) == null)) {
        return ResultBuilder.createUserErrorResult("Invalid log level: " + logLevel);
      }
      return ResultBuilder.createInfoResult("");
    }
  }

  /**
   * Interceptor used by gfsh to intercept execution of export config command at "shell":
   * validates/creates the target directory up front and saves the downloaded files afterwards.
   */
  public static class Interceptor extends AbstractCliAroundInterceptor {
    private String saveDirString;

    @Override
    public Result preExecution(GfshParseResult parseResult) {
      Map<String, String> paramValueMap = parseResult.getParamValueStrings();
      String dir = paramValueMap.get("dir");
      dir = (dir == null) ? null : dir.trim();

      File saveDirFile = new File(".");

      if (dir != null && !dir.isEmpty()) {
        saveDirFile = new File(dir);

        if (saveDirFile.exists()) {
          if (!saveDirFile.isDirectory()) {
            return ResultBuilder.createGemFireErrorResult(
                CliStrings.format(CliStrings.EXPORT_CONFIG__MSG__NOT_A_DIRECTORY, dir));
          }
        } else if (!saveDirFile.mkdirs()) {
          return ResultBuilder.createGemFireErrorResult(
              CliStrings.format(CliStrings.EXPORT_CONFIG__MSG__CANNOT_CREATE_DIR, dir));
        }
      }
      try {
        if (!saveDirFile.canWrite()) {
          return ResultBuilder.createGemFireErrorResult(CliStrings.format(
              CliStrings.EXPORT_CONFIG__MSG__NOT_WRITEABLE, saveDirFile.getCanonicalPath()));
        }
      } catch (IOException ioex) {
        return ResultBuilder.createGemFireErrorResult(
            CliStrings.format(CliStrings.EXPORT_CONFIG__MSG__NOT_WRITEABLE,
                saveDirFile.getName()));
      }

      saveDirString = saveDirFile.getAbsolutePath();
      return ResultBuilder.createInfoResult("OK");
    }

    @Override
    public Result postExecution(GfshParseResult parseResult, Result commandResult, Path tempFile) {
      if (commandResult.hasIncomingFiles()) {
        try {
          commandResult.saveIncomingFiles(saveDirString);
        } catch (IOException ioex) {
          Gfsh.getCurrentInstance().logSevere("Unable to export config", ioex);
        }
      }
      return commandResult;
    }
  }
}
/* * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.rest; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; import java.util.Collection; import java.util.TreeSet; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.util.Bytes; /** * Parses a path based row/column/timestamp specification into its component * elements. 
* <p> * */ @InterfaceAudience.Private public class RowSpec { public static final long DEFAULT_START_TIMESTAMP = 0; public static final long DEFAULT_END_TIMESTAMP = Long.MAX_VALUE; private byte[] row = HConstants.EMPTY_START_ROW; private byte[] endRow = null; private TreeSet<byte[]> columns = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR); private long startTime = DEFAULT_START_TIMESTAMP; private long endTime = DEFAULT_END_TIMESTAMP; private int maxVersions = HColumnDescriptor.DEFAULT_VERSIONS; private int maxValues = Integer.MAX_VALUE; public RowSpec(String path) throws IllegalArgumentException { int i = 0; while (path.charAt(i) == '/') { i++; } i = parseRowKeys(path, i); i = parseColumns(path, i); i = parseTimestamp(path, i); i = parseQueryParams(path, i); } private int parseRowKeys(final String path, int i) throws IllegalArgumentException { String startRow = null, endRow = null; try { StringBuilder sb = new StringBuilder(); char c; while (i < path.length() && (c = path.charAt(i)) != '/') { sb.append(c); i++; } i++; String row = startRow = sb.toString(); int idx = startRow.indexOf(','); if (idx != -1) { startRow = URLDecoder.decode(row.substring(0, idx), HConstants.UTF8_ENCODING); endRow = URLDecoder.decode(row.substring(idx + 1), HConstants.UTF8_ENCODING); } else { startRow = URLDecoder.decode(row, HConstants.UTF8_ENCODING); } } catch (IndexOutOfBoundsException e) { throw new IllegalArgumentException(e); } catch (UnsupportedEncodingException e) { throw new RuntimeException(e); } // HBase does not support wildcards on row keys so we will emulate a // suffix glob by synthesizing appropriate start and end row keys for // table scanning if (startRow.charAt(startRow.length() - 1) == '*') { if (endRow != null) throw new IllegalArgumentException("invalid path: start row "+ "specified with wildcard"); this.row = Bytes.toBytes(startRow.substring(0, startRow.lastIndexOf("*"))); this.endRow = new byte[this.row.length + 1]; System.arraycopy(this.row, 0, this.endRow, 0, 
this.row.length); this.endRow[this.row.length] = (byte)255; } else { this.row = Bytes.toBytes(startRow.toString()); if (endRow != null) { this.endRow = Bytes.toBytes(endRow.toString()); } } return i; } private int parseColumns(final String path, int i) throws IllegalArgumentException { if (i >= path.length()) { return i; } try { char c; StringBuilder column = new StringBuilder(); while (i < path.length() && (c = path.charAt(i)) != '/') { if (c == ',') { if (column.length() < 1) { throw new IllegalArgumentException("invalid path"); } String s = URLDecoder.decode(column.toString(), HConstants.UTF8_ENCODING); if (!s.contains(":")) { this.columns.add(Bytes.toBytes(s + ":")); } else { this.columns.add(Bytes.toBytes(s)); } column.setLength(0); i++; continue; } column.append(c); i++; } i++; // trailing list entry if (column.length() > 1) { String s = URLDecoder.decode(column.toString(), HConstants.UTF8_ENCODING); if (!s.contains(":")) { this.columns.add(Bytes.toBytes(s + ":")); } else { this.columns.add(Bytes.toBytes(s)); } } } catch (IndexOutOfBoundsException e) { throw new IllegalArgumentException(e); } catch (UnsupportedEncodingException e) { // shouldn't happen throw new RuntimeException(e); } return i; } private int parseTimestamp(final String path, int i) throws IllegalArgumentException { if (i >= path.length()) { return i; } long time0 = 0, time1 = 0; try { char c = 0; StringBuilder stamp = new StringBuilder(); while (i < path.length()) { c = path.charAt(i); if (c == '/' || c == ',') { break; } stamp.append(c); i++; } try { time0 = Long.valueOf(URLDecoder.decode(stamp.toString(), HConstants.UTF8_ENCODING)); } catch (NumberFormatException e) { throw new IllegalArgumentException(e); } if (c == ',') { stamp = new StringBuilder(); i++; while (i < path.length() && ((c = path.charAt(i)) != '/')) { stamp.append(c); i++; } try { time1 = Long.valueOf(URLDecoder.decode(stamp.toString(), HConstants.UTF8_ENCODING)); } catch (NumberFormatException e) { throw new 
IllegalArgumentException(e); } } if (c == '/') { i++; } } catch (IndexOutOfBoundsException e) { throw new IllegalArgumentException(e); } catch (UnsupportedEncodingException e) { // shouldn't happen throw new RuntimeException(e); } if (time1 != 0) { startTime = time0; endTime = time1; } else { endTime = time0; } return i; } private int parseQueryParams(final String path, int i) { if (i >= path.length()) { return i; } StringBuilder query = new StringBuilder(); try { query.append(URLDecoder.decode(path.substring(i), HConstants.UTF8_ENCODING)); } catch (UnsupportedEncodingException e) { // should not happen throw new RuntimeException(e); } i += query.length(); int j = 0; while (j < query.length()) { char c = query.charAt(j); if (c != '?' && c != '&') { break; } if (++j > query.length()) { throw new IllegalArgumentException("malformed query parameter"); } char what = query.charAt(j); if (++j > query.length()) { break; } c = query.charAt(j); if (c != '=') { throw new IllegalArgumentException("malformed query parameter"); } if (++j > query.length()) { break; } switch (what) { case 'm': { StringBuilder sb = new StringBuilder(); while (j <= query.length()) { c = query.charAt(i); if (c < '0' || c > '9') { j--; break; } sb.append(c); } maxVersions = Integer.valueOf(sb.toString()); } break; case 'n': { StringBuilder sb = new StringBuilder(); while (j <= query.length()) { c = query.charAt(i); if (c < '0' || c > '9') { j--; break; } sb.append(c); } maxValues = Integer.valueOf(sb.toString()); } break; default: throw new IllegalArgumentException("unknown parameter '" + c + "'"); } } return i; } public RowSpec(byte[] startRow, byte[] endRow, byte[][] columns, long startTime, long endTime, int maxVersions) { this.row = startRow; this.endRow = endRow; if (columns != null) { for (byte[] col: columns) { this.columns.add(col); } } this.startTime = startTime; this.endTime = endTime; this.maxVersions = maxVersions; } public RowSpec(byte[] startRow, byte[] endRow, Collection<byte[]> 
columns, long startTime, long endTime, int maxVersions) { this.row = startRow; this.endRow = endRow; if (columns != null) { this.columns.addAll(columns); } this.startTime = startTime; this.endTime = endTime; this.maxVersions = maxVersions; } public boolean isSingleRow() { return endRow == null; } public int getMaxVersions() { return maxVersions; } public void setMaxVersions(final int maxVersions) { this.maxVersions = maxVersions; } public int getMaxValues() { return maxValues; } public void setMaxValues(final int maxValues) { this.maxValues = maxValues; } public boolean hasColumns() { return !columns.isEmpty(); } public byte[] getRow() { return row; } public byte[] getStartRow() { return row; } public boolean hasEndRow() { return endRow != null; } public byte[] getEndRow() { return endRow; } public void addColumn(final byte[] column) { columns.add(column); } public byte[][] getColumns() { return columns.toArray(new byte[columns.size()][]); } public boolean hasTimestamp() { return (startTime == 0) && (endTime != Long.MAX_VALUE); } public long getTimestamp() { return endTime; } public long getStartTime() { return startTime; } public void setStartTime(final long startTime) { this.startTime = startTime; } public long getEndTime() { return endTime; } public void setEndTime(long endTime) { this.endTime = endTime; } public String toString() { StringBuilder result = new StringBuilder(); result.append("{startRow => '"); if (row != null) { result.append(Bytes.toString(row)); } result.append("', endRow => '"); if (endRow != null) { result.append(Bytes.toString(endRow)); } result.append("', columns => ["); for (byte[] col: columns) { result.append(" '"); result.append(Bytes.toString(col)); result.append("'"); } result.append(" ], startTime => "); result.append(Long.toString(startTime)); result.append(", endTime => "); result.append(Long.toString(endTime)); result.append(", maxVersions => "); result.append(Integer.toString(maxVersions)); result.append(", maxValues => "); 
result.append(Integer.toString(maxValues)); result.append("}"); return result.toString(); } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.drill.exec.store.hive; import com.google.common.base.Preconditions; import com.google.common.base.Strings; import io.netty.buffer.DrillBuf; import org.apache.drill.common.exceptions.DrillRuntimeException; import org.apache.drill.common.exceptions.ExecutionSetupException; import org.apache.drill.common.exceptions.UserException; import org.apache.drill.common.types.TypeProtos; import org.apache.drill.common.types.TypeProtos.DataMode; import org.apache.drill.common.types.TypeProtos.MajorType; import org.apache.drill.common.types.TypeProtos.MinorType; import org.apache.drill.exec.expr.holders.Decimal18Holder; import org.apache.drill.exec.expr.holders.Decimal28SparseHolder; import org.apache.drill.exec.expr.holders.Decimal38SparseHolder; import org.apache.drill.exec.expr.holders.Decimal9Holder; import org.apache.drill.exec.planner.physical.PlannerSettings; import org.apache.drill.exec.server.options.OptionManager; import org.apache.drill.exec.util.DecimalUtility; import org.apache.drill.exec.vector.NullableBigIntVector; import org.apache.drill.exec.vector.NullableBitVector; import org.apache.drill.exec.vector.NullableDateVector; import 
org.apache.drill.exec.vector.NullableDecimal18Vector; import org.apache.drill.exec.vector.NullableDecimal28SparseVector; import org.apache.drill.exec.vector.NullableDecimal38SparseVector; import org.apache.drill.exec.vector.NullableDecimal9Vector; import org.apache.drill.exec.vector.NullableFloat4Vector; import org.apache.drill.exec.vector.NullableFloat8Vector; import org.apache.drill.exec.vector.NullableIntVector; import org.apache.drill.exec.vector.NullableTimeStampVector; import org.apache.drill.exec.vector.NullableVarBinaryVector; import org.apache.drill.exec.vector.NullableVarCharVector; import org.apache.drill.exec.vector.ValueVector; import org.apache.drill.exec.work.ExecErrorConstants; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.MetaStoreUtils; import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.hive.metastore.api.StorageDescriptor; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler; import org.apache.hadoop.hive.ql.metadata.HiveUtils; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.HiveDecimalUtils; import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.mapred.InputFormat; import org.apache.hadoop.mapred.JobConf; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import java.math.BigDecimal; import java.sql.Date; import java.sql.Timestamp; import java.util.Map; import java.util.Properties; import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE; 
public class HiveUtilities { static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(HiveUtilities.class); /** Partition value is received in string format. Convert it into appropriate object based on the type. */ public static Object convertPartitionType(TypeInfo typeInfo, String value, final String defaultPartitionValue) { if (typeInfo.getCategory() != Category.PRIMITIVE) { // In Hive only primitive types are allowed as partition column types. throw new DrillRuntimeException("Non-Primitive types are not allowed as partition column type in Hive, " + "but received one: " + typeInfo.getCategory()); } if (defaultPartitionValue.equals(value)) { return null; } final PrimitiveCategory pCat = ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory(); try { switch (pCat) { case BINARY: return value.getBytes(); case BOOLEAN: return Boolean.parseBoolean(value); case DECIMAL: { DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo; return HiveDecimalUtils.enforcePrecisionScale(HiveDecimal.create(value), decimalTypeInfo.precision(), decimalTypeInfo.scale()); } case DOUBLE: return Double.parseDouble(value); case FLOAT: return Float.parseFloat(value); case BYTE: case SHORT: case INT: return Integer.parseInt(value); case LONG: return Long.parseLong(value); case STRING: case VARCHAR: return value.getBytes(); case TIMESTAMP: return Timestamp.valueOf(value); case DATE: return Date.valueOf(value); } } catch(final Exception e) { // In Hive, partition values that can't be converted from string are considered to be NULL. 
logger.trace("Failed to interpret '{}' value from partition value string '{}'", pCat, value); return null; } throwUnsupportedHiveDataTypeError(pCat.toString()); return null; } public static void populateVector(final ValueVector vector, final DrillBuf managedBuffer, final Object val, final int start, final int end) { TypeProtos.MinorType type = vector.getField().getType().getMinorType(); switch(type) { case VARBINARY: { NullableVarBinaryVector v = (NullableVarBinaryVector) vector; byte[] value = (byte[]) val; for (int i = start; i < end; i++) { v.getMutator().setSafe(i, value, 0, value.length); } break; } case BIT: { NullableBitVector v = (NullableBitVector) vector; Boolean value = (Boolean) val; for (int i = start; i < end; i++) { v.getMutator().set(i, value ? 1 : 0); } break; } case FLOAT8: { NullableFloat8Vector v = (NullableFloat8Vector) vector; double value = (double) val; for (int i = start; i < end; i++) { v.getMutator().setSafe(i, value); } break; } case FLOAT4: { NullableFloat4Vector v = (NullableFloat4Vector) vector; float value = (float) val; for (int i = start; i < end; i++) { v.getMutator().setSafe(i, value); } break; } case TINYINT: case SMALLINT: case INT: { NullableIntVector v = (NullableIntVector) vector; int value = (int) val; for (int i = start; i < end; i++) { v.getMutator().setSafe(i, value); } break; } case BIGINT: { NullableBigIntVector v = (NullableBigIntVector) vector; long value = (long) val; for (int i = start; i < end; i++) { v.getMutator().setSafe(i, value); } break; } case VARCHAR: { NullableVarCharVector v = (NullableVarCharVector) vector; byte[] value = (byte[]) val; for (int i = start; i < end; i++) { v.getMutator().setSafe(i, value, 0, value.length); } break; } case TIMESTAMP: { NullableTimeStampVector v = (NullableTimeStampVector) vector; DateTime ts = new DateTime(((Timestamp) val).getTime()).withZoneRetainFields(DateTimeZone.UTC); long value = ts.getMillis(); for (int i = start; i < end; i++) { v.getMutator().setSafe(i, value); } 
break; } case DATE: { NullableDateVector v = (NullableDateVector) vector; DateTime date = new DateTime(((Date)val).getTime()).withZoneRetainFields(DateTimeZone.UTC); long value = date.getMillis(); for (int i = start; i < end; i++) { v.getMutator().setSafe(i, value); } break; } case DECIMAL9: { final BigDecimal value = ((HiveDecimal)val).bigDecimalValue(); final NullableDecimal9Vector v = ((NullableDecimal9Vector) vector); final Decimal9Holder holder = new Decimal9Holder(); holder.scale = v.getField().getScale(); holder.precision = v.getField().getPrecision(); holder.value = DecimalUtility.getDecimal9FromBigDecimal(value, holder.scale, holder.precision); for (int i = start; i < end; i++) { v.getMutator().setSafe(i, holder); } break; } case DECIMAL18: { final BigDecimal value = ((HiveDecimal)val).bigDecimalValue(); final NullableDecimal18Vector v = ((NullableDecimal18Vector) vector); final Decimal18Holder holder = new Decimal18Holder(); holder.scale = v.getField().getScale(); holder.precision = v.getField().getPrecision(); holder.value = DecimalUtility.getDecimal18FromBigDecimal(value, holder.scale, holder.precision); for (int i = start; i < end; i++) { v.getMutator().setSafe(i, holder); } break; } case DECIMAL28SPARSE: { final int needSpace = Decimal28SparseHolder.nDecimalDigits * DecimalUtility.INTEGER_SIZE; Preconditions.checkArgument(managedBuffer.capacity() > needSpace, String.format("Not sufficient space in given managed buffer. 
Need %d bytes, buffer has %d bytes", needSpace, managedBuffer.capacity())); final BigDecimal value = ((HiveDecimal)val).bigDecimalValue(); final NullableDecimal28SparseVector v = ((NullableDecimal28SparseVector) vector); final Decimal28SparseHolder holder = new Decimal28SparseHolder(); holder.scale = v.getField().getScale(); holder.precision = v.getField().getPrecision(); holder.buffer = managedBuffer; holder.start = 0; DecimalUtility.getSparseFromBigDecimal(value, holder.buffer, 0, holder.scale, holder.precision, Decimal28SparseHolder.nDecimalDigits); for (int i = start; i < end; i++) { v.getMutator().setSafe(i, holder); } break; } case DECIMAL38SPARSE: { final int needSpace = Decimal38SparseHolder.nDecimalDigits * DecimalUtility.INTEGER_SIZE; Preconditions.checkArgument(managedBuffer.capacity() > needSpace, String.format("Not sufficient space in given managed buffer. Need %d bytes, buffer has %d bytes", needSpace, managedBuffer.capacity())); final BigDecimal value = ((HiveDecimal)val).bigDecimalValue(); final NullableDecimal38SparseVector v = ((NullableDecimal38SparseVector) vector); final Decimal38SparseHolder holder = new Decimal38SparseHolder(); holder.scale = v.getField().getScale(); holder.precision = v.getField().getPrecision(); holder.buffer = managedBuffer; holder.start = 0; DecimalUtility.getSparseFromBigDecimal(value, holder.buffer, 0, holder.scale, holder.precision, Decimal38SparseHolder.nDecimalDigits); for (int i = start; i < end; i++) { v.getMutator().setSafe(i, holder); } break; } } } public static MajorType getMajorTypeFromHiveTypeInfo(final TypeInfo typeInfo, final OptionManager options) { switch (typeInfo.getCategory()) { case PRIMITIVE: { PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo; MinorType minorType = HiveUtilities.getMinorTypeFromHivePrimitiveTypeInfo(primitiveTypeInfo, options); MajorType.Builder typeBuilder = MajorType.newBuilder().setMinorType(minorType) .setMode(DataMode.OPTIONAL); // Hive columns (both regular 
and partition) could have null values if (primitiveTypeInfo.getPrimitiveCategory() == PrimitiveCategory.DECIMAL) { DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfo; typeBuilder.setPrecision(decimalTypeInfo.precision()) .setScale(decimalTypeInfo.scale()).build(); } return typeBuilder.build(); } case LIST: case MAP: case STRUCT: case UNION: default: throwUnsupportedHiveDataTypeError(typeInfo.getCategory().toString()); } return null; } public static TypeProtos.MinorType getMinorTypeFromHivePrimitiveTypeInfo(PrimitiveTypeInfo primitiveTypeInfo, OptionManager options) { switch(primitiveTypeInfo.getPrimitiveCategory()) { case BINARY: return TypeProtos.MinorType.VARBINARY; case BOOLEAN: return TypeProtos.MinorType.BIT; case DECIMAL: { if (options.getOption(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY).bool_val == false) { throw UserException.unsupportedError() .message(ExecErrorConstants.DECIMAL_DISABLE_ERR_MSG) .build(logger); } DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfo; return DecimalUtility.getDecimalDataType(decimalTypeInfo.precision()); } case DOUBLE: return TypeProtos.MinorType.FLOAT8; case FLOAT: return TypeProtos.MinorType.FLOAT4; // TODO (DRILL-2470) // Byte and short (tinyint and smallint in SQL types) are currently read as integers // as these smaller integer types are not fully supported in Drill today. case SHORT: case BYTE: case INT: return TypeProtos.MinorType.INT; case LONG: return TypeProtos.MinorType.BIGINT; case STRING: case VARCHAR: return TypeProtos.MinorType.VARCHAR; case TIMESTAMP: return TypeProtos.MinorType.TIMESTAMP; case DATE: return TypeProtos.MinorType.DATE; } throwUnsupportedHiveDataTypeError(primitiveTypeInfo.getPrimitiveCategory().toString()); return null; } /** * Utility method which gets table or partition {@link InputFormat} class. First it * tries to get the class name from given StorageDescriptor object. 
If it doesn't contain it tries to get it from * StorageHandler class set in table properties. If not found throws an exception. * @param job {@link JobConf} instance needed incase the table is StorageHandler based table. * @param sd {@link StorageDescriptor} instance of currently reading partition or table (for non-partitioned tables). * @param table Table object * @throws Exception */ public static Class<? extends InputFormat> getInputFormatClass(final JobConf job, final StorageDescriptor sd, final Table table) throws Exception { final String inputFormatName = sd.getInputFormat(); if (Strings.isNullOrEmpty(inputFormatName)) { final String storageHandlerClass = table.getParameters().get(META_TABLE_STORAGE); if (Strings.isNullOrEmpty(storageHandlerClass)) { throw new ExecutionSetupException("Unable to get Hive table InputFormat class. There is neither " + "InputFormat class explicitly specified nor StorageHandler class"); } final HiveStorageHandler storageHandler = HiveUtils.getStorageHandler(job, storageHandlerClass); return storageHandler.getInputFormatClass(); } else { return (Class<? extends InputFormat>) Class.forName(inputFormatName); } } /** * Utility method which adds give configs to {@link JobConf} object. * * @param job {@link JobConf} instance. * @param properties New config properties * @param hiveConf HiveConf of Hive storage plugin */ public static void addConfToJob(final JobConf job, final Properties properties) { for (Object obj : properties.keySet()) { job.set((String) obj, (String) properties.get(obj)); } } /** * Wrapper around {@link MetaStoreUtils#getPartitionMetadata(Partition, Table)} which also adds parameters from table * to properties returned by {@link MetaStoreUtils#getPartitionMetadata(Partition, Table)}. 
* * @param partition * @param table * @return */ public static Properties getPartitionMetadata(final Partition partition, final Table table) { final Properties properties = MetaStoreUtils.getPartitionMetadata(partition, table); // SerDe expects properties from Table, but above call doesn't add Table properties. // Include Table properties in final list in order to not to break SerDes that depend on // Table properties. For example AvroSerDe gets the schema from properties (passed as second argument) for (Map.Entry<String, String> entry : table.getParameters().entrySet()) { if (entry.getKey() != null && entry.getKey() != null) { properties.put(entry.getKey(), entry.getValue()); } } return properties; } public static void throwUnsupportedHiveDataTypeError(String unsupportedType) { StringBuilder errMsg = new StringBuilder(); errMsg.append(String.format("Unsupported Hive data type %s. ", unsupportedType)); errMsg.append(System.getProperty("line.separator")); errMsg.append("Following Hive data types are supported in Drill for querying: "); errMsg.append( "BOOLEAN, TINYINT, SMALLINT, INT, BIGINT, FLOAT, DOUBLE, DATE, TIMESTAMP, BINARY, DECIMAL, STRING, and VARCHAR"); throw UserException.unsupportedError() .message(errMsg.toString()) .build(logger); } }
/* * Copyright (c) 2015, salesforce.com, inc. * All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, are permitted provided * that the following conditions are met: * * Redistributions of source code must retain the above copyright notice, this list of conditions and the * following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and * the following disclaimer in the documentation and/or other materials provided with the distribution. * * Neither the name of salesforce.com, inc. nor the names of its contributors may be used to endorse or * promote products derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A * PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
*/
package com.salesforce.dataloader.ui;

import java.io.File;
import java.util.List;

import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.BusyIndicator;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.*;

import com.salesforce.dataloader.config.Config;
import com.salesforce.dataloader.controller.Controller;
import com.salesforce.dataloader.dao.DataReader;
import com.salesforce.dataloader.exception.DataAccessObjectException;
import com.salesforce.dataloader.exception.DataAccessObjectInitializationException;
import com.salesforce.dataloader.util.DAORowUtil;
import com.sforce.ws.ConnectionException;

/**
 * Modal dialog that validates the selected data source (DAO) for an operation:
 * it describes the target entity, opens the configured data file, checks its
 * columns, and reports either the row count on success or an error message.
 */
public class DataSelectionDialog extends Dialog {
    private String message;
    // Result of the validation; returned from open().
    private boolean success;
    private Controller controller;
    private Button ok;
    private Label label;

    /**
     * InputDialog constructor
     *
     * @param parent the parent shell
     * @param controller the application controller used to describe the entity and open the DAO
     */
    public DataSelectionDialog(Shell parent, Controller controller) {
        this(parent, SWT.DIALOG_TRIM | SWT.APPLICATION_MODAL | SWT.RESIZE);
        this.controller = controller;
    }

    /**
     * InputDialog constructor
     *
     * @param parent the parent shell
     * @param style the SWT style bits
     */
    public DataSelectionDialog(Shell parent, int style) {
        // Let users override the default styles
        super(parent, style);
        setText(Labels.getString("DataSelectionDialog.title")); //$NON-NLS-1$
        setMessage(Labels.getString("DataSelectionDialog.message")); //$NON-NLS-1$
    }

    /**
     * Gets the message
     *
     * @return String
     */
    public String getMessage() {
        return message;
    }

    /**
     * Sets the message
     *
     * @param message the new message
     */
    public void setMessage(String message) {
        this.message = message;
    }

    /**
     * Opens the dialog, runs the data-source validation while showing a busy
     * cursor, and pumps the event loop until the dialog is closed.
     *
     * @return true if the data source was opened and validated successfully
     */
    public boolean open() {
        // Create the dialog window
        final Shell shell = new Shell(getParent(), getStyle());
        shell.setText(getText());
        shell.setImage(UIUtils.getImageRegistry().get("sfdc_icon")); //$NON-NLS-1$
        createContents(shell);
        shell.pack();
        shell.open();
        Display display = getParent().getDisplay();
        // NOTE: showWhile() invokes run() synchronously on the UI thread with a busy
        // cursor; the original wrapped this in an anonymous Thread that was never
        // started, which misleadingly suggested background execution. A plain
        // Runnable has identical behavior.
        BusyIndicator.showWhile(display, new Runnable() {
            @Override
            public void run() {
                try {
                    controller.setFieldTypes();
                    controller.setReferenceDescribes();
                    String daoPath = controller.getConfig().getString(Config.DAO_NAME);
                    File file = new File(daoPath);
                    // The configured file must exist and be readable before opening the DAO.
                    if (!file.exists() || !file.canRead()) {
                        success = false;
                        ok.setEnabled(true);
                        label.setText(Labels.getString("DataSelectionDialog.errorRead")); //$NON-NLS-1$
                        shell.setText(Labels.getString("DataSelectionDialog.titleError"));
                        return;
                    }
                    try {
                        controller.createDao();
                    } catch (DataAccessObjectInitializationException e) {
                        success = false;
                        ok.setEnabled(true);
                        label.setText(Labels.getString("DataSelectionDialog.errorRead")); //$NON-NLS-1$
                        shell.setText(Labels.getString("DataSelectionDialog.titleError"));
                        return;
                    }
                    DataReader dataReader = (DataReader)controller.getDao();

                    List header = null;
                    int totalRows = 0;
                    try {
                        dataReader.checkConnection();
                        dataReader.open();

                        String warning = DAORowUtil.validateColumns(dataReader);
                        if(warning != null && warning.length() != 0) {
                            int response = UIUtils.warningConfMessageBox(shell, warning + "\n"
                                    + Labels.getString("DataSelectionDialog.warningConf"));
                            // in case user doesn't want to continue, treat this as an error
                            if(response != SWT.YES) {
                                success = false;
                                ok.setEnabled(true);
                                label.setText(Labels.getString("DataSelectionDialog.errorCSVFormat")); //$NON-NLS-1$
                                shell.setText(Labels.getString("DataSelectionDialog.titleError"));
                                return;
                            }
                        }

                        totalRows = dataReader.getTotalRows();

                        // A file with no header columns is unusable.
                        if ((header = dataReader.getColumnNames())== null || header.size() == 0) {
                            success = false;
                            ok.setEnabled(true);
                            label.setText(Labels.getString("DataSelectionDialog.errorCSVFormat")); //$NON-NLS-1$
                            shell.setText(Labels.getString("DataSelectionDialog.titleError"));
                            return;
                        }
                    } catch (DataAccessObjectException e) {
                        success = false;
                        ok.setEnabled(true);
                        label.setText(Labels.getString("DataSelectionDialog.errorCSVFormat") + " "
                                + e.getMessage()); //$NON-NLS-1$
                        // Resize the label so the appended exception message is visible.
                        Point size = label.computeSize(SWT.DEFAULT, SWT.DEFAULT);
                        label.setSize(shell.getClientArea().width, size.y);
                        shell.setText(Labels.getString("DataSelectionDialog.titleError"));
                        shell.pack();
                        shell.redraw();
                        return;
                    } finally {
                        // Always release the reader; rows are re-read by the actual operation.
                        dataReader.close();
                    }
                    success = true;
                    ok.setEnabled(true);
                    label.setText(Labels.getFormattedString(
                            "DataSelectionDialog.initSuccess", String.valueOf(totalRows))); //$NON-NLS-1$
                    label.getParent().pack();
                } catch (ConnectionException ex) {
                    success = false;
                    ok.setEnabled(true);
                    label.setText(Labels.getString("DataSelectionDialog.errorEntity")); //$NON-NLS-1$
                    shell.setText(Labels.getString("DataSelectionDialog.titleError"));
                    return;
                }
            }
        });
        while (!shell.isDisposed()) {
            if (!display.readAndDispatch()) {
                display.sleep();
            }
        }
        // Return the success
        return success;
    }

    /**
     * Creates the dialog's contents
     *
     * @param shell the dialog window
     */
    private void createContents(final Shell shell) {
        GridLayout layout = new GridLayout(2, false);
        layout.verticalSpacing = 10;
        shell.setLayout(layout);

        label = new Label(shell, SWT.WRAP);
        label.setText(message);
        GridData labelData = new GridData();
        labelData.horizontalSpan = 2;
        labelData.widthHint = 400;
        label.setLayoutData(labelData);

        //the bottom separator
        Label labelSeparatorBottom = new Label(shell, SWT.SEPARATOR | SWT.HORIZONTAL);
        GridData sepData = new GridData(GridData.FILL_HORIZONTAL);
        sepData.horizontalSpan = 2;
        labelSeparatorBottom.setLayoutData(sepData);

        //ok cancel buttons
        new Label(shell, SWT.NONE);

        // OK stays disabled until the validation Runnable completes.
        ok = new Button(shell, SWT.PUSH);
        ok.setText(Labels.getString("UI.ok")); //$NON-NLS-1$
        ok.addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent event) {
                shell.close();
            }
        });
        GridData buttonData = new GridData(GridData.HORIZONTAL_ALIGN_END);
        buttonData.widthHint = 75;
        ok.setLayoutData(buttonData);
        ok.setEnabled(false);

        shell.setDefaultButton(ok);
    }
}
/*
 * Copyright 2019 ThoughtWorks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.thoughtworks.go.server.presentation.models;

import com.thoughtworks.go.config.PipelineConfig;
import com.thoughtworks.go.domain.BaseCollection;
import com.thoughtworks.go.domain.CommentRenderer;
import com.thoughtworks.go.domain.PipelinePauseInfo;
import com.thoughtworks.go.domain.StageIdentifier;
import com.thoughtworks.go.presentation.pipelinehistory.PipelineInstanceModel;
import com.thoughtworks.go.presentation.pipelinehistory.PipelineInstanceModels;
import com.thoughtworks.go.presentation.pipelinehistory.StageInstanceModel;
import com.thoughtworks.go.presentation.pipelinehistory.StageInstanceModels;
import com.thoughtworks.go.server.presentation.PipelineHistoryGroupingUtil;
import com.thoughtworks.go.server.util.Pagination;
import com.thoughtworks.go.util.TimeConverter;
import com.thoughtworks.go.util.json.JsonAware;

import java.util.*;

import static com.thoughtworks.go.config.CaseInsensitiveString.str;
import static com.thoughtworks.go.util.UrlUtil.encodeInUtf8;
import static java.lang.String.valueOf;

/**
 * Presentation model that serializes a pipeline's run history (grouped by config
 * revision), its pause state, and pagination info into the JSON map consumed by
 * the pipeline-history view. Key names and value formats are part of the UI
 * contract — do not change them without updating the consumer.
 */
public class PipelineHistoryJsonPresentationModel implements JsonAware {
    private final PipelinePauseInfo pipelinePauseInfo;
    private final PipelineConfig pipelineConfig;
    private final Pagination pagination;
    private TimeConverter timeConverter = new TimeConverter();
    // Whether the current user may trigger the pipeline manually.
    private boolean canForce;
    private final boolean hasForceBuildCause;
    // History instances grouped so that runs sharing a stage configuration render together.
    private PipelineHistoryGroups pipelineHistoryGroups;
    private final boolean hasBuildCauseInBuffer;
    private final boolean canPause;

    /**
     * Convenience constructor: groups the flat instance list before delegating
     * to the main constructor.
     */
    public PipelineHistoryJsonPresentationModel(PipelinePauseInfo pipelinePauseInfo,
                                                PipelineInstanceModels pipelineHistory,
                                                PipelineConfig pipelineConfig,
                                                Pagination pagination, boolean canForce,
                                                boolean hasForcedBuildCause,
                                                boolean hasBuildCauseInBuffer,
                                                boolean canPause) {
        this(pipelinePauseInfo,
                new PipelineHistoryGroupingUtil().createGroups(pipelineHistory),
                pipelineConfig,
                pagination, canForce, hasForcedBuildCause, hasBuildCauseInBuffer, canPause);
    }

    PipelineHistoryJsonPresentationModel(PipelinePauseInfo pipelinePauseInfo,
                                         PipelineHistoryGroups pipelineHistoryGroups,
                                         PipelineConfig pipelineConfig,
                                         Pagination pagination,
                                         boolean canForce,
                                         boolean hasForceBuildCause,
                                         boolean hasBuildCauseInBuffer,
                                         boolean canPause) {
        this.pipelinePauseInfo = pipelinePauseInfo;
        this.pipelineHistoryGroups = pipelineHistoryGroups;
        this.pipelineConfig = pipelineConfig;
        this.pagination = pagination;
        this.canForce = canForce;
        this.hasForceBuildCause = hasForceBuildCause;
        this.hasBuildCauseInBuffer = hasBuildCauseInBuffer;
        this.canPause = canPause;
        createGroupForCurrentConfigIfItHasChanged(null);
    }

    // Prepends an empty group for the current config when history is empty (and a run
    // could be pending) or when the config changed since the newest recorded run.
    // NOTE(review): the 'latest' parameter is never read and the only caller passes
    // null — it looks vestigial; confirm before removing.
    private void createGroupForCurrentConfigIfItHasChanged(Map<String, StageIdentifier> latest) {
        if (pipelineHistoryGroups.isEmpty()) {
            if (hasBuildCauseInBuffer || pipelineConfig.isFirstStageManualApproval()) {
                createGroupForCurrentConfig();
            }
            return;
        }
        if (hasPipelineConfigChanged()) {
            createGroupForCurrentConfig();
        }
    }

    // True when the newest history group no longer matches the current pipeline config.
    private boolean hasPipelineConfigChanged() {
        return !pipelineHistoryGroups.first().match(pipelineConfig);
    }

    // Inserts a group for the current config (with no run instances) at the top.
    private void createGroupForCurrentConfig() {
        PipelineInstanceGroupModel group = new PipelineInstanceGroupModel(
                new StageConfigurationModels(pipelineConfig));
        pipelineHistoryGroups.add(0, group);
    }

    /** Serializes the whole model: pipeline state flags, grouped history, pagination. */
    @Override
    public Map toJson() {
        Map<String, Object> json = new LinkedHashMap<>();
        String pipelineName = str(pipelineConfig.name());
        json.put("pipelineName", pipelineName);
        json.put("paused", valueOf(pipelinePauseInfo.isPaused()));
        json.put("pauseCause", pipelinePauseInfo.getPauseCause());
        json.put("pauseBy", pipelinePauseInfo.getPauseBy());
        json.put("canForce", valueOf(canForce));
        json.put("nextLabel", "");
        json.put("groups", groupAsJson());
        json.put("forcedBuild", valueOf(hasForceBuildCause));
        json.put("showForceBuildButton", valueOf(showForceBuildButton()));
        json.put("canPause", valueOf(canPause));
        json.putAll(pagination.toJsonMap());
        return json;
    }

    private boolean showForceBuildButton() {
        return hasBuildCauseInBuffer || pipelineConfig.isFirstStageManualApproval();
    }

    // One entry per history group: its stage config plus its run instances.
    private List groupAsJson() {
        List jsonList = new ArrayList();
        for (PipelineInstanceGroupModel group : pipelineHistoryGroups) {
            Map<String, Object> jsonMap = new LinkedHashMap<>();
            Map configJson = configAsJson(group.getStages());
            jsonMap.put("config", configJson);
            jsonMap.put("history", historyAsJson(group.getPipelineInstances()));
            jsonList.add(jsonMap);
        }
        return jsonList;
    }

    // Stage configuration summary: name plus auto/manual approval flag.
    private Map configAsJson(Iterable<StageConfigurationModel> stages) {
        List jsonList = new ArrayList();
        for (StageConfigurationModel stageInfo : stages) {
            Map<String, Object> jsonMap = new LinkedHashMap<>();
            jsonMap.put("name", stageInfo.getName());
            jsonMap.put("isAutoApproved", valueOf(stageInfo.isAutoApproved()));
            jsonList.add(jsonMap);
        }
        Map<String, Object> jsonMap = new LinkedHashMap<>();
        jsonMap.put("stages", jsonList);
        return jsonMap;
    }

    // One entry per pipeline run: identity, scheduling info, materials and stage runs.
    private List historyAsJson(BaseCollection<PipelineInstanceModel> pipelineHistory) {
        List json = new ArrayList();
        for (PipelineInstanceModel item : pipelineHistory) {
            Map<String, Object> jsonMap = new LinkedHashMap<>();
            jsonMap.put("pipelineId", item.getId());
            jsonMap.put("label", item.getLabel());
            jsonMap.put("counterOrLabel", item.getPipelineIdentifier().instanceIdentifier());
            jsonMap.put("scheduled_date", timeConverter.getHumanReadableStringWithTimeZone(item.getScheduledDate()));
            jsonMap.put("scheduled_timestamp", item.getScheduledDate() != null ? item.getScheduledDate().getTime() : null);
            jsonMap.put("buildCauseBy", item.getApprovedByForDisplay());
            jsonMap.put("modification_date", getModificationDate(item));
            jsonMap.put("materialRevisions", materialRevisionsJson(item));
            jsonMap.put("stages", stageHistoryAsJson(item, item.getStageHistory()));
            jsonMap.put("revision", item.getRevisionOfLatestModification());
            jsonMap.put("comment", item.getComment());
            json.add(jsonMap);
        }
        return json;
    }

    // Material revisions for one run, rendered without per-file details.
    private List materialRevisionsJson(PipelineInstanceModel item) {
        CommentRenderer commentRenderer = pipelineConfig.getCommentRenderer();
        MaterialRevisionsJsonBuilder jsonVisitor = new MaterialRevisionsJsonBuilder(commentRenderer);
        jsonVisitor.setIncludeModifiedFiles(false);
        item.getBuildCause().getMaterialRevisions().accept(jsonVisitor);
        return jsonVisitor.json();
    }

    // TODO #1234 - should not get latest modified date
    private TimeConverter.ConvertedTime getModificationDate(PipelineInstanceModel item) {
        Date mostRecentModificationDate = item.getBuildCause().getMaterialRevisions().getDateOfLatestModification();
        return timeConverter.getConvertedTime(mostRecentModificationDate);
    }

    // One entry per stage run: status, locator URL fragment, and rerun/cancel capability.
    private List stageHistoryAsJson(PipelineInstanceModel pipelineInstanceModel, StageInstanceModels stageHistory) {
        List json = new ArrayList();
        for (StageInstanceModel stageHistoryItem : stageHistory) {
            Map<String, Object> jsonMap = new LinkedHashMap<>();
            jsonMap.put("stageName", stageHistoryItem.getName());
            jsonMap.put("stageId", stageHistoryItem.getId());
            jsonMap.put("stageStatus", stageHistoryItem.getState().toString());
            StageIdentifier stageIdentifier = new StageIdentifier(pipelineInstanceModel.getPipelineIdentifier(),
                    stageHistoryItem.getName(), stageHistoryItem.getCounter());
            jsonMap.put("stageLocator", encodeInUtf8(stageIdentifier.stageLocator()));
            jsonMap.put("getCanRun", Boolean.toString(stageHistoryItem.getCanRun()));
            // Only expose the error message when the stage cannot be (re)run.
            if (!stageHistoryItem.getCanRun()) {
                jsonMap.put("errorMessage", stageHistoryItem.getErrorMessage());
            }
            jsonMap.put("getCanCancel", Boolean.toString(stageHistoryItem.getCanCancel()));
            jsonMap.put("scheduled", Boolean.toString(stageHistoryItem.isScheduled()));
            jsonMap.put("stageCounter", stageHistoryItem.getCounter());
            handleApproval(stageHistoryItem, jsonMap);
            json.add(jsonMap);
        }
        return json;
    }

    // A paused pipeline suppresses the approval prompt; otherwise show either the
    // pending-approval flag or who already approved the stage.
    private void handleApproval(StageInstanceModel stageHistoryItem, Map jsonMap) {
        if (stageHistoryItem.needsApproval() && !pipelinePauseInfo.isPaused()) {
            jsonMap.put("needsApproval", String.valueOf(true));
        } else if (stageHistoryItem.getApprovedBy() != null) {
            jsonMap.put("approvedBy", stageHistoryItem.getApprovedBy());
        }
    }
}
/*
 * Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you
 * may not use this file except in compliance with the License. You
 * may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License. See accompanying
 * LICENSE file.
 */
package tx;

import java.util.*;

import com.gemstone.gemfire.cache.*;
import com.gemstone.gemfire.distributed.DistributedMember;
import com.gemstone.gemfire.internal.cache.Token;

import hydra.*;
import util.*;

/**
 * A class that holds information to aid in entry validation
 * and supplies methods to get the expected behavior (based on
 * operation and whether or not the tx operation should be
 * visible in the calling thread) as well as the actual values.
 * A compare() method is also supplied to provide a check of
 * expected data values / actual data values.
 *
 * Each instance holds:
 *   - regionName
 *   - key
 *   - operation (for better logging when comparisons fail)
 *   - keyExists (boolean)
 *   - hasValue (boolean)
 *   - cacheValue (Object)
 *
 * In order to support the expected values on a per operation
 * basis, Maps holding templates of EntryValidators are established
 * at class initialization (one for the checks required if the
 * tx should be visible by the calling thread and another if the
 * tx should NOT be visible).
 *
 * A calling application will get the expected behavior by invoking
 * getExpected(Operation op, boolean isVisible):
 *   - keyExists (whether or not the key should exist for this operation)
 *   - hasValue (whether or not the key should contain a value for this operation)
 *   - cacheValue (what value we should expect in the VM; this can be DONT_CARE,
 *     OLDVAL or NEWVAL). The template provides these enumerated types and then
 *     uses them to fill in the appropriate value from the Operation when the
 *     expected EntryValidator is created.
 *
 * Applications also invoke the getActual(Operation op) method to get the
 * actual values from the VM's cache:
 *   keyExists  (uses regionName/key to check region.containsKey())
 *   hasValue   (uses regionName/key to check region.containsValueForKey())
 *   cacheValue (holds the value obtained via region.getEntry(key).getValue())
 *
 * Once both EntryValidator instances have been constructed, the application
 * can invoke the instance method <expected>.compare( <actual> ).
 * If the expected values/state are not found, a TestException is thrown.
 */
public class EntryValidator {

  // Template maps keyed by operation name, populated in the static
  // initializer below.  'visible' holds expectations when the tx ops
  // should be seen by the calling thread, 'notVisible' when they should not.
  private static Map visible = null;
  private static Map notVisible = null;

  // Sentinel values for cacheValue in the templates.  They are compared
  // by identity (==) in getExpected()/compare(), so these exact instances
  // must be used — do not replace with autoboxed/shared Integers.
  //
  // DONT_CARE: value doesn't need to be checked, e.g. if the tx is creating
  // a new entry but hasn't yet been committed: we don't expect to find the
  // key or for the key to have a value, and we don't care what that value
  // might be ...
  public static final Integer DONT_CARE = new Integer(-1);
  public static final Integer OLDVAL = new Integer(0);
  public static final Integer NEWVAL = new Integer(1);

  static {
    // NOTE(review): entries are keyed by Operation.ENTRY_* constants but
    // looked up via op.getOpName() in getExpected(); this relies on the
    // Operation.ENTRY_* constants being the same Strings returned by
    // getOpName() (compare() also uses them as Strings via
    // equalsIgnoreCase) — confirm against tx.Operation.
    visible= new HashMap();
    visible.put(Operation.ENTRY_CREATE, new EntryValidator(true, true, EntryValidator.NEWVAL));
    visible.put(Operation.ENTRY_UPDATE, new EntryValidator(true, true, EntryValidator.NEWVAL));
    visible.put(Operation.ENTRY_DESTROY, new EntryValidator(false, false, EntryValidator.NEWVAL));
    visible.put(Operation.ENTRY_LOCAL_DESTROY, new EntryValidator(false, false, EntryValidator.NEWVAL));
    visible.put(Operation.ENTRY_INVAL, new EntryValidator(true, false, EntryValidator.NEWVAL));
    visible.put(Operation.ENTRY_LOCAL_INVAL, new EntryValidator(true, false, EntryValidator.NEWVAL));
    visible.put(Operation.ENTRY_GET_NEW_KEY, new EntryValidator(true, true, EntryValidator.NEWVAL));
    visible.put(Operation.ENTRY_GET_EXIST_KEY, new EntryValidator(true, true, EntryValidator.NEWVAL));
    visible.put(Operation.ENTRY_GET_PREV_KEY, new EntryValidator(true, true, EntryValidator.NEWVAL));

    notVisible = new HashMap();
    notVisible.put(Operation.ENTRY_CREATE, new EntryValidator(false, false, EntryValidator.OLDVAL));
    notVisible.put(Operation.ENTRY_UPDATE, new EntryValidator(true, true, EntryValidator.OLDVAL));
    notVisible.put(Operation.ENTRY_DESTROY, new EntryValidator(true, true, EntryValidator.OLDVAL));
    notVisible.put(Operation.ENTRY_LOCAL_DESTROY, new EntryValidator(true, true, EntryValidator.OLDVAL));
    notVisible.put(Operation.ENTRY_INVAL, new EntryValidator( true, true, EntryValidator.OLDVAL));
    notVisible.put(Operation.ENTRY_LOCAL_INVAL, new EntryValidator(true, true, EntryValidator.OLDVAL));
    notVisible.put(Operation.ENTRY_GET_NEW_KEY, new EntryValidator(false, false, EntryValidator.OLDVAL));
    notVisible.put(Operation.ENTRY_GET_EXIST_KEY, new EntryValidator(true, true, EntryValidator.OLDVAL));
    notVisible.put(Operation.ENTRY_GET_PREV_KEY, new EntryValidator(true, true, EntryValidator.OLDVAL));
  }

  // private fields
  private String regionName = null;
  private String key = null;
  private Operation op = null;       // operation being validated (for logging)
  private boolean keyExists;         // expected/actual region.containsKey() result
  private boolean hasValue;          // expected/actual region.containsValueForKey() result
  private Object cacheValue;         // expected/actual entry value (or a sentinel above)

  /**
   * Constructor that only records the operation; the remaining fields are
   * filled in afterwards via setters (see getExpected()).
   */
  public EntryValidator(Operation op) {
    this.op = op;
  }

  /** Constructor to create a EntryValidator
   *  used internally by this class to create templates based
   *  on expected values maps. (See static initializer above)
   *
   *  @param keyExists boolean (true => expect region.containsKey() == true)
   *  @param hasValue boolean (true => expect region.containsValueForKey() == true)
   *  @param cacheValue The expected value of this entry when retrieved from cache
   */
  public EntryValidator(boolean keyExists, boolean hasValue, Object cacheValue) {
    this.keyExists = keyExists;
    this.hasValue = hasValue;
    this.cacheValue = cacheValue;
  }

  /** Constructor to create a EntryValidator
   *
   *  @param op The operation involved (used for logging on comparison failure).
   *  @param regionName The name of the region involved in the operation.
   *  @param key The key in the region involved in the operation.
   *  @param keyExists boolean (true => expect region.containsKey() == true)
   *  @param hasValue boolean (true => expect region.containsValueForKey() == true)
   *  @param cacheValue The expected value of this entry when retrieved from cache
   */
  public EntryValidator(Operation op, String regionName, String key, boolean keyExists, boolean hasValue, Object cacheValue) {
    this.op = op;
    this.regionName = regionName;
    this.key = key;
    this.keyExists = keyExists;
    this.hasValue = hasValue;
    this.cacheValue = cacheValue;
  }

  // Simple accessors/mutators.
  public String getRegionName() {
    return this.regionName;
  }

  public void setRegionName(String regionName) {
    this.regionName = regionName;
  }

  public Object getKey() {
    return this.key;
  }

  public void setKey(String key) {
    this.key = key;
  }

  public void setKeyExists(boolean b) {
    this.keyExists = b;
  }

  public void setHasValue(boolean b) {
    this.hasValue = b;
  }

  public void setCacheValue(Object value) {
    this.cacheValue = value;
  }

  public Operation getOperation() {
    return this.op;
  }

  public void setOperation(Operation op) {
    this.op = op;
  }

  /**
   * Build the expected entry state for the given operation.
   *
   * Looks up the template for op's name in the 'visible' or 'notVisible'
   * map, then substitutes the operation's old/new value for the OLDVAL /
   * NEWVAL sentinels (DONT_CARE is left in place and skipped by compare()).
   *
   * @param op the operation whose effects are being validated
   * @param isVisible true if the tx changes should be visible to the
   *        calling thread
   * @return a fully-populated EntryValidator describing expected state
   * @throws TestException if the template holds an unknown sentinel
   */
  public static EntryValidator getExpected(Operation op, boolean isVisible) {
    String regionName = op.getRegionName();
    String key = (String)op.getKey();
    String opName = op.getOpName();
    Object oldVal = op.getOldValue();
    Object newVal = op.getNewValue();

    Log.getLogWriter().info("EntryValidator.getExpected(" + op.toString() + " isVisible = " + isVisible + ")");

    // Get Map based on whether changes will be visible to calling thread
    Map aMap = (isVisible) ? visible : notVisible;

    // Get template for expected values and fill in the cacheValue with
    // old or expected value (or DONT_CARE).  Sentinels are compared by
    // identity, which is why the static DONT_CARE/OLDVAL/NEWVAL instances
    // must be the ones stored in the templates.
    EntryValidator template = (EntryValidator)aMap.get(opName);
    EntryValidator expected = new EntryValidator(op);
    expected.setRegionName( regionName );
    expected.setKey( key );
    expected.keyExists = template.keyExists;
    expected.hasValue = template.hasValue;
    expected.cacheValue = template.cacheValue;
    if (expected.cacheValue == EntryValidator.DONT_CARE) {
      // do nothing
    } else if (expected.cacheValue == EntryValidator.OLDVAL) {
      expected.setCacheValue(oldVal);
    } else if (expected.cacheValue == EntryValidator.NEWVAL) {
      expected.setCacheValue(newVal);
    } else {
      throw new TestException("EntryValidator: invalid value <" + expected.cacheValue + "> given for cacheValue");
    }

    // If expected.cacheValue is INVALID or LOCAL_INVALID, set hasValue = false
    if ((expected.cacheValue == null) ||
        (expected.cacheValue.toString().equals("INVALID")) ||
        (expected.cacheValue.toString().equals("LOCAL_INVALID"))) {
      expected.hasValue = false;
    }

    Log.getLogWriter().info("getExpected returns " + expected.toString());
    return expected;
  }

  /*
   * Get the actual values -- for a transactional thread, the underlying
   * implementation for region apis (like containsKey(), containsValueForKey()
   * and getValueInVM() will operate on transactional state.
   * For non-transactional threads, these same region apis will operate on
   * committed state.
   *
   * getActual() also verifies the operation of region collections (entries())
   * and region.getEntry() and ensures they equal what we returned from
   * getValueInVM().
   */
  public static EntryValidator getActual(Operation op) {
    String regionName = op.getRegionName();
    String key = (String)op.getKey();
    String opName = op.getOpName();
    Object oldVal = op.getOldValue();
    Object newVal = op.getNewValue();

    Log.getLogWriter().info("EntryValidator.getActual(" + op.toString() + ")");

    Region aRegion = CacheUtil.getCache().getRegion( regionName );

    Object actualValue = DONT_CARE;
    boolean keyExists;
    boolean hasValue;

    // protect ourselves from destroyed region access
    if (aRegion != null) {
      keyExists = aRegion.containsKey( key );
      hasValue = aRegion.containsValueForKey( key );
      actualValue = null;
      Region.Entry entry = aRegion.getEntry(key);
      if (entry != null) {
        actualValue = entry.getValue();
        if (actualValue == null) {   // containsKey null => Token$Invalid
          actualValue = Token.INVALID;
        }
      }

      // validate collection iterators in sync
      // todo@lhughes - turn back on once invalidate doesn't cause
      // problems with region collections & iteration (BUG 31894)
      /* validateIterators(aRegion, key, actualValue); */

      // test really works off ValueHolder.modVal ...
      if (actualValue instanceof BaseValueHolder) {
        actualValue = ((BaseValueHolder)actualValue).modVal;
      } else {
        Log.getLogWriter().info("WARNING: actual value retrieved from cache is not a ValueHolder, is " + actualValue);
      }

      // If actualValue is INVALID or LOCAL_INVALID set hasValue = false
      if (actualValue != null) {
        if ((actualValue.toString().equals("INVALID")) ||
            (actualValue.toString().equals("LOCAL_INVALID"))) {
          hasValue = false;
        }
      }
    } else {
      // protect ourselves from accessing a destroyed region
      keyExists = false;
      hasValue = false;
      actualValue = null;
    }

    EntryValidator actualValues = new EntryValidator(op, regionName, key, keyExists, hasValue, actualValue);
    Log.getLogWriter().info("EntryValidator.getActual returning = " + actualValues.toString());
    return actualValues;
  }

  /**
   * Compare two validator controls ('this' holds the expected state, vc the
   * actual state); throw a TestException if not a match.
   **/
  public void compare(EntryValidator vc) {
    if (this.keyExists != vc.keyExists) {
      if (!vc.keyExists && vc.getOperation().getOpName().equalsIgnoreCase(Operation.ENTRY_GET_PREV_KEY)) {
        // In this case, we don't know if the entry had been invalidated or
        // destroyed before we tried to re-use the key (for the notVisible
        // case).  Even in the 'visible' case, we wouldn't know if another
        // Cache has the value & returned it!  In any case, return ... we can't
        // check anything more if the key doesn't even exist -- just return.
        return;
      }
      throw new TestException ("EntryValidator comparison failure: expected " + this.toString() + " but actualValues = " + vc.toString());
    }

    // If an entry was previously invalidated and we are now
    // doing a 'get' -- hasValue will be false.  In this case
    // we actually don't care about hasValue, so we need to
    // skip this test (and possibly throwing an unwanted exception)
    if (!getInvalidatedEntry()) {
      if (this.hasValue != vc.hasValue) {
        throw new TestException ("EntryValidator comparison failure: expected " + this.toString() + " but actualValues = " + vc.toString());
      }
    }

    // DONT_CARE is intentionally compared by identity with the sentinel.
    if (this.cacheValue != EntryValidator.DONT_CARE) {
      if (this.cacheValue != null) {
        if (!this.cacheValue.equals(vc.cacheValue)) {
          throw new TestException ("EntryValidator comparison failure: expected " + this.toString() + " but actualValues = " + vc.toString());
        }
      } else {
        // expected cacheValue is null; actual must also be null
        if (vc.cacheValue != null) {
          throw new TestException ("EntryValidator comparison failure: expected " + this.toString() + " but actualValues = " + vc.toString());
        }
      }
    }
  }

  /**
   * Returns true when this validator describes a 'get' operation whose
   * old value was the INVALID token, i.e. a get on a previously
   * invalidated entry — compare() then skips the hasValue check.
   */
  private boolean getInvalidatedEntry() {
    if (this.op.getOpName().toLowerCase().startsWith("entry-get")) {
      Object oldVal = this.op.getOldValue();
      if ((oldVal != null) && (oldVal.toString().equals("INVALID"))) {
        Log.getLogWriter().info("FOUND GET ON INVALIDATED ENTRY");
        return true;
      }
    }
    return false;
  }

  // Disabled pending BUG 31894 (see the call site in getActual()).
  //private static void validateIterators(Region aRegion, Object key, Object value) {
  //
  //  boolean found = false;
  //  boolean entryValuesCorrect = false;
  //
  //  Set entries = aRegion.entries(false);
  //  for (Iterator it=entries.iterator(); it.hasNext(); ) {
  //    Region.Entry entry = (Region.Entry)it.next();
  //    if ((entry.getKey().equals(key)) && (entry.getValue().equals(value))) {
  //      found = true;
  //      break;
  //    }
  //  }
  //  // Note the key may not be found if scope = local & is a remote vm
  //  // and we've done a create entry
  //  if (!found && value==null) {
  //    found = true;
  //  }
  //
  //  Region.Entry entry = aRegion.getEntry(key);
  //  if (entry != null) {
  //    if ((entry.getKey().equals(key)) && (entry.getValue().equals(value)) ||
  //       (value == null && entry.getKey().equals(key) && entry.isDestroyed())) {
  //      entryValuesCorrect = true;
  //    }
  //  } else {
  //    if (value == null) {
  //      entryValuesCorrect = true;
  //    }
  //  }
  //
  //  if (!found || !entryValuesCorrect) {
  //    throw new TestException("EntryValidator: possible iterator problem, found = " + found + " entryValueCorrect = " + entryValuesCorrect + "for key/value pair (" + key + ", " + value);
  //  }
  //}

  /** Return a string description of the EntryValidator */
  public String toString() {
    String aStr = "op: " + op + " regionName: " + regionName + " key:" + key + ", keyExists: " + keyExists + ", hasValue: " + hasValue + ", cacheValue: " + cacheValue;
    return aStr;
  }
}
// Copyright (C) 2015 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.gerrit.server.git;

import static com.google.common.base.Preconditions.checkState;
import static org.eclipse.jgit.revwalk.RevFlag.UNINTERESTING;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.MultimapBuilder;
import com.google.common.collect.Multimaps;
import com.google.common.collect.SetMultimap;
import com.google.common.collect.Sets;
import com.google.common.collect.SortedSetMultimap;
import com.google.common.flogger.FluentLogger;
import com.google.gerrit.reviewdb.client.PatchSet;
import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.server.PatchSetUtil;
import com.google.gerrit.server.change.RevisionResource;
import com.google.gerrit.server.notedb.ChangeNotes;
import java.util.ArrayDeque;
import java.util.Collection;
import java.util.Deque;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.revwalk.RevCommit;

/**
 * Helper for assigning groups to commits during {@code ReceiveCommits}.
 *
 * <p>For each commit encountered along a walk between the branch tip and the tip of the push, the
 * group of a commit is defined as follows:
 *
 * <ul>
 *   <li>If the commit is an existing patch set of a change, the group is read from the group field
 *       in the corresponding {@link PatchSet} record.
 *   <li>If all of a commit's parents are merged into the branch, then its group is its own SHA-1.
 *   <li>If the commit has a single parent that is not yet merged into the branch, then its group is
 *       the same as the parent's group.
 *   <li>For a merge commit, choose a parent and use that parent's group. If one of the parents has
 *       a group from a patch set, use that group, otherwise, use the group from the first parent.
 *       In addition to setting this merge commit's group, use the chosen group for all commits that
 *       would otherwise use a group from the parents that were not chosen.
 *   <li>If a merge commit has multiple parents whose group comes from separate patch sets,
 *       concatenate the groups from those parents together. This indicates two side branches were
 *       pushed separately, followed by the merge.
 * </ul>
 *
 * <p>Callers must call {@link #visit(RevCommit)} on all commits between the current branch tip and
 * the tip of a push, in reverse topo order (parents before children). Once all commits have been
 * visited, call {@link #getGroups()} for the result.
 */
public class GroupCollector {
  private static final FluentLogger logger = FluentLogger.forEnclosingClass();

  /** Returns the default group list for a commit: just its own SHA-1. */
  public static List<String> getDefaultGroups(ObjectId commit) {
    return ImmutableList.of(commit.name());
  }

  /** Returns the groups for the revision referenced by the resource. */
  public static List<String> getGroups(RevisionResource rsrc) {
    if (rsrc.getEdit().isPresent()) {
      // Groups for an edit are just the base revision's groups, since they have
      // the same parent.
      return rsrc.getEdit().get().getBasePatchSet().getGroups();
    }
    return rsrc.getPatchSet().getGroups();
  }

  /** Abstraction over looking up the stored group list for a patch set. */
  private interface Lookup {
    List<String> lookup(PatchSet.Id psId);
  }

  // Existing patch sets in the destination project, keyed by commit SHA-1.
  private final ListMultimap<ObjectId, PatchSet.Id> patchSetsBySha;
  // Group(s) assigned so far to each visited commit.
  private final ListMultimap<ObjectId, String> groups;
  // Maps a group chosen away at a merge to the group(s) that replaced it;
  // resolved transitively in resolveGroups().
  private final SetMultimap<String, String> groupAliases;
  private final Lookup groupLookup;
  // Set once getGroups() runs; visit() afterwards is a programming error.
  private boolean done;

  public static GroupCollector create(
      ListMultimap<ObjectId, Ref> changeRefsById,
      PatchSetUtil psUtil,
      ChangeNotes.Factory notesFactory,
      Project.NameKey project) {
    return new GroupCollector(
        transformRefs(changeRefsById),
        psId -> {
          // TODO(dborowitz): Reuse open repository from caller.
          ChangeNotes notes = notesFactory.createChecked(project, psId.getParentKey());
          PatchSet ps = psUtil.get(notes, psId);
          return ps != null ? ps.getGroups() : null;
        });
  }

  private GroupCollector(ListMultimap<ObjectId, PatchSet.Id> patchSetsBySha, Lookup groupLookup) {
    this.patchSetsBySha = patchSetsBySha;
    this.groupLookup = groupLookup;
    groups = MultimapBuilder.hashKeys().arrayListValues().build();
    groupAliases = MultimapBuilder.hashKeys().hashSetValues().build();
  }

  /** Lazily converts change refs to their patch-set IDs, keyed by SHA-1. */
  private static ListMultimap<ObjectId, PatchSet.Id> transformRefs(
      ListMultimap<ObjectId, Ref> refs) {
    return Multimaps.transformValues(refs, r -> PatchSet.Id.fromRef(r.getName()));
  }

  @VisibleForTesting
  GroupCollector(
      ListMultimap<ObjectId, PatchSet.Id> patchSetsBySha,
      ListMultimap<PatchSet.Id, String> groupLookup) {
    this(
        patchSetsBySha,
        psId -> {
          List<String> groups = groupLookup.get(psId);
          return !groups.isEmpty() ? groups : null;
        });
  }

  /**
   * Assigns a group to {@code c} based on its parents' groups, per the rules
   * in the class javadoc. Must be called in reverse topo order (parents
   * before children); parents already merged into the branch carry the
   * UNINTERESTING flag and are ignored.
   */
  public void visit(RevCommit c) {
    checkState(!done, "visit() called after getGroups()");
    Set<RevCommit> interestingParents = getInterestingParents(c);

    if (interestingParents.isEmpty()) {
      // All parents are uninteresting: treat this commit as the root of a new
      // group of related changes.
      groups.put(c, c.name());
      return;
    } else if (interestingParents.size() == 1) {
      // Only one parent is new in this push. If it is the only parent, just use
      // that parent's group. If there are multiple parents, perhaps this commit
      // is a merge of a side branch. This commit belongs in that parent's group
      // in that case.
      groups.putAll(c, groups.get(interestingParents.iterator().next()));
      return;
    }

    // Multiple parents, merging at least two branches containing new commits in
    // this push.
    Set<String> thisCommitGroups = new TreeSet<>();
    Set<String> parentGroupsNewInThisPush =
        Sets.newLinkedHashSetWithExpectedSize(interestingParents.size());
    for (RevCommit p : interestingParents) {
      Collection<String> parentGroups = groups.get(p);
      if (parentGroups.isEmpty()) {
        throw new IllegalStateException(
            String.format("no group assigned to parent %s of commit %s", p.name(), c.name()));
      }

      for (String parentGroup : parentGroups) {
        if (isGroupFromExistingPatchSet(p, parentGroup)) {
          // This parent's group is from an existing patch set, i.e. the parent
          // not new in this push. Use this group for the commit.
          thisCommitGroups.add(parentGroup);
        } else {
          // This parent's group is new in this push.
          parentGroupsNewInThisPush.add(parentGroup);
        }
      }
    }

    Iterable<String> toAlias;
    if (thisCommitGroups.isEmpty()) {
      // All parent groups were new in this push. Pick the first one and alias
      // other parents' groups to this first parent.
      String firstParentGroup = parentGroupsNewInThisPush.iterator().next();
      thisCommitGroups = ImmutableSet.of(firstParentGroup);
      toAlias = Iterables.skip(parentGroupsNewInThisPush, 1);
    } else {
      // For each parent group that was new in this push, alias it to the actual
      // computed group(s) for this commit.
      toAlias = parentGroupsNewInThisPush;
    }
    groups.putAll(c, thisCommitGroups);
    for (String pg : toAlias) {
      groupAliases.putAll(pg, thisCommitGroups);
    }
  }

  /**
   * Finalizes and returns the group assignment for every visited commit,
   * with all aliases resolved. After this call, visit() may no longer be
   * used on this instance.
   */
  public SortedSetMultimap<ObjectId, String> getGroups() {
    done = true;
    SortedSetMultimap<ObjectId, String> result =
        MultimapBuilder.hashKeys(groups.keySet().size()).treeSetValues().build();
    for (Map.Entry<ObjectId, Collection<String>> e : groups.asMap().entrySet()) {
      ObjectId id = e.getKey();
      // copy() detaches the key from any RevWalk-owned RevCommit instance.
      result.putAll(id.copy(), resolveGroups(id, e.getValue()));
    }
    return result;
  }

  /** Returns the parents of {@code commit} that are not already merged. */
  private Set<RevCommit> getInterestingParents(RevCommit commit) {
    Set<RevCommit> result = Sets.newLinkedHashSetWithExpectedSize(commit.getParentCount());
    for (RevCommit p : commit.getParents()) {
      if (!p.has(UNINTERESTING)) {
        result.add(p);
      }
    }
    return result;
  }

  /** True if {@code group} names a commit that is an existing patch set. */
  private boolean isGroupFromExistingPatchSet(RevCommit commit, String group) {
    ObjectId id = parseGroup(commit, group);
    return id != null && patchSetsBySha.containsKey(id);
  }

  // NOTE(review): the local Set 'done' below shadows the boolean field
  // 'done'; both are intentional but easily confused when reading.
  private Set<String> resolveGroups(ObjectId forCommit, Collection<String> candidates) {
    Set<String> actual = Sets.newTreeSet();
    Set<String> done = Sets.newHashSetWithExpectedSize(candidates.size());
    Set<String> seen = Sets.newHashSetWithExpectedSize(candidates.size());
    Deque<String> todo = new ArrayDeque<>(candidates);
    // BFS through all aliases to find groups that are not aliased to anything
    // else.
    while (!todo.isEmpty()) {
      String g = todo.removeFirst();
      if (!seen.add(g)) {
        continue;
      }
      Set<String> aliases = groupAliases.get(g);
      if (aliases.isEmpty()) {
        if (!done.contains(g)) {
          Iterables.addAll(actual, resolveGroup(forCommit, g));
          done.add(g);
        }
      } else {
        todo.addAll(aliases);
      }
    }
    return actual;
  }

  /** Parses {@code group} as a SHA-1; logs and returns null if malformed. */
  private ObjectId parseGroup(ObjectId forCommit, String group) {
    try {
      return ObjectId.fromString(group);
    } catch (IllegalArgumentException e) {
      // Shouldn't happen; some sort of corruption or manual tinkering?
      logger.atWarning().log("group for commit %s is not a SHA-1: %s", forCommit.name(), group);
      return null;
    }
  }

  /**
   * Resolves a single group name: if it identifies an existing patch set,
   * return that patch set's stored groups; otherwise return the group
   * itself as a singleton.
   */
  private Iterable<String> resolveGroup(ObjectId forCommit, String group) {
    ObjectId id = parseGroup(forCommit, group);
    if (id != null) {
      PatchSet.Id psId = Iterables.getFirst(patchSetsBySha.get(id), null);
      if (psId != null) {
        List<String> groups = groupLookup.lookup(psId);
        // Group for existing patch set may be missing, e.g. if group has not
        // been migrated yet.
        if (groups != null && !groups.isEmpty()) {
          return groups;
        }
      }
    }
    return ImmutableList.of(group);
  }
}
/* * Copyright (c) 1996, 2004, Oracle and/or its affiliates. All rights reserved. * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms. * * * * * * * * * * * * * * * * * * * * */ package java.rmi.server; import java.io.*; import java.util.*; /** * <code>LogStream</code> provides a mechanism for logging errors that are * of possible interest to those monitoring a system. * * @author Ann Wollrath (lots of code stolen from Ken Arnold) * @since JDK1.1 * @deprecated no replacement */ @Deprecated public class LogStream extends PrintStream { /** table mapping known log names to log stream objects */ private static Hashtable known = new Hashtable(5); /** default output stream for new logs */ private static PrintStream defaultStream = System.err; /** log name for this log */ private String name; /** stream where output of this log is sent to */ private OutputStream logOut; /** string writer for writing message prefixes to log stream */ private OutputStreamWriter logWriter; /** string buffer used for constructing log message prefixes */ private StringBuffer buffer = new StringBuffer(); /** stream used for buffering lines */ private ByteArrayOutputStream bufOut; /** * Create a new LogStream object. Since this only constructor is * private, users must have a LogStream created through the "log" * method. * @param name string identifying messages from this log * @out output stream that log messages will be sent to * @since JDK1.1 * @deprecated no replacement */ @Deprecated private LogStream(String name, OutputStream out) { super(new ByteArrayOutputStream()); bufOut = (ByteArrayOutputStream) super.out; this.name = name; setOutputStream(out); } /** * Return the LogStream identified by the given name. If * a log corresponding to "name" does not exist, a log using * the default stream is created. 
* @param name name identifying the desired LogStream * @return log associated with given name * @since JDK1.1 * @deprecated no replacement */ @Deprecated public static LogStream log(String name) { LogStream stream; synchronized (known) { stream = (LogStream)known.get(name); if (stream == null) { stream = new LogStream(name, defaultStream); } known.put(name, stream); } return stream; } /** * Return the current default stream for new logs. * @return default log stream * @see #setDefaultStream * @since JDK1.1 * @deprecated no replacement */ @Deprecated public static synchronized PrintStream getDefaultStream() { return defaultStream; } /** * Set the default stream for new logs. * @param newDefault new default log stream * @see #getDefaultStream * @since JDK1.1 * @deprecated no replacement */ @Deprecated public static synchronized void setDefaultStream(PrintStream newDefault) { defaultStream = newDefault; } /** * Return the current stream to which output from this log is sent. * @return output stream for this log * @see #setOutputStream * @since JDK1.1 * @deprecated no replacement */ @Deprecated public synchronized OutputStream getOutputStream() { return logOut; } /** * Set the stream to which output from this log is sent. * @param out new output stream for this log * @see #getOutputStream * @since JDK1.1 * @deprecated no replacement */ @Deprecated public synchronized void setOutputStream(OutputStream out) { logOut = out; // Maintain an OutputStreamWriter with default CharToByteConvertor // (just like new PrintStream) for writing log message prefixes. logWriter = new OutputStreamWriter(logOut); } /** * Write a byte of data to the stream. If it is not a newline, then * the byte is appended to the internal buffer. If it is a newline, * then the currently buffered line is sent to the log's output * stream, prefixed with the appropriate logging information. 
* @since JDK1.1 * @deprecated no replacement */ @Deprecated public void write(int b) { if (b == '\n') { // synchronize on "this" first to avoid potential deadlock synchronized (this) { synchronized (logOut) { // construct prefix for log messages: buffer.setLength(0);; buffer.append( // date/time stamp... (new Date()).toString()); buffer.append(':'); buffer.append(name); // ...log name... buffer.append(':'); buffer.append(Thread.currentThread().getName()); buffer.append(':'); // ...and thread name try { // write prefix through to underlying byte stream logWriter.write(buffer.toString()); logWriter.flush(); // finally, write the already converted bytes of // the log message bufOut.writeTo(logOut); logOut.write(b); logOut.flush(); } catch (IOException e) { setError(); } finally { bufOut.reset(); } } } } else super.write(b); } /** * Write a subarray of bytes. Pass each through write byte method. * @since JDK1.1 * @deprecated no replacement */ @Deprecated public void write(byte b[], int off, int len) { if (len < 0) throw new ArrayIndexOutOfBoundsException(len); for (int i = 0; i < len; ++ i) write(b[off + i]); } /** * Return log name as string representation. * @return log name * @since JDK1.1 * @deprecated no replacement */ @Deprecated public String toString() { return name; } /** log level constant (no logging). */ public static final int SILENT = 0; /** log level constant (brief logging). */ public static final int BRIEF = 10; /** log level constant (verbose logging). */ public static final int VERBOSE = 20; /** * Convert a string name of a logging level to its internal * integer representation. 
* @param s name of logging level (e.g., 'SILENT', 'BRIEF', 'VERBOSE') * @return corresponding integer log level * @since JDK1.1 * @deprecated no replacement */ @Deprecated public static int parseLevel(String s) { if ((s == null) || (s.length() < 1)) return -1; try { return Integer.parseInt(s); } catch (NumberFormatException e) { } if (s.length() < 1) return -1; if ("SILENT".startsWith(s.toUpperCase())) return SILENT; else if ("BRIEF".startsWith(s.toUpperCase())) return BRIEF; else if ("VERBOSE".startsWith(s.toUpperCase())) return VERBOSE; return -1; } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.service; import java.io.IOException; import java.net.InetAddress; import java.util.*; import java.util.concurrent.*; import java.lang.management.ManagementFactory; import java.lang.management.RuntimeMXBean; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.cassandra.concurrent.ScheduledExecutors; import org.apache.cassandra.concurrent.Stage; import org.apache.cassandra.concurrent.StageManager; import org.apache.cassandra.config.CFMetaData; import org.apache.cassandra.config.Schema; import org.apache.cassandra.config.ViewDefinition; import org.apache.cassandra.cql3.functions.UDAggregate; import org.apache.cassandra.cql3.functions.UDFunction; import org.apache.cassandra.db.*; import org.apache.cassandra.db.marshal.UserType; import org.apache.cassandra.exceptions.AlreadyExistsException; import org.apache.cassandra.exceptions.ConfigurationException; import org.apache.cassandra.gms.*; import org.apache.cassandra.io.IVersionedSerializer; import org.apache.cassandra.io.util.DataInputPlus; import org.apache.cassandra.io.util.DataOutputPlus; import org.apache.cassandra.net.MessageOut; import org.apache.cassandra.net.MessagingService; import 
org.apache.cassandra.schema.KeyspaceMetadata; import org.apache.cassandra.schema.SchemaKeyspace; import org.apache.cassandra.utils.FBUtilities; import org.apache.cassandra.utils.WrappedRunnable; public class MigrationManager { private static final Logger logger = LoggerFactory.getLogger(MigrationManager.class); public static final MigrationManager instance = new MigrationManager(); private static final RuntimeMXBean runtimeMXBean = ManagementFactory.getRuntimeMXBean(); public static final int MIGRATION_DELAY_IN_MS = 60000; private static final int MIGRATION_TASK_WAIT_IN_SECONDS = Integer.parseInt(System.getProperty("cassandra.migration_task_wait_in_seconds", "1")); private final List<MigrationListener> listeners = new CopyOnWriteArrayList<>(); private MigrationManager() {} public void register(MigrationListener listener) { listeners.add(listener); } public void unregister(MigrationListener listener) { listeners.remove(listener); } public static void scheduleSchemaPull(InetAddress endpoint, EndpointState state) { VersionedValue value = state.getApplicationState(ApplicationState.SCHEMA); if (!endpoint.equals(FBUtilities.getBroadcastAddress()) && value != null) maybeScheduleSchemaPull(UUID.fromString(value.value), endpoint); } /** * If versions differ this node sends request with local migration list to the endpoint * and expecting to receive a list of migrations to apply locally. 
*/ private static void maybeScheduleSchemaPull(final UUID theirVersion, final InetAddress endpoint) { if ((Schema.instance.getVersion() != null && Schema.instance.getVersion().equals(theirVersion)) || !shouldPullSchemaFrom(endpoint)) { logger.debug("Not pulling schema because versions match or shouldPullSchemaFrom returned false"); return; } if (Schema.emptyVersion.equals(Schema.instance.getVersion()) || runtimeMXBean.getUptime() < MIGRATION_DELAY_IN_MS) { // If we think we may be bootstrapping or have recently started, submit MigrationTask immediately logger.debug("Submitting migration task for {}", endpoint); submitMigrationTask(endpoint); } else { // Include a delay to make sure we have a chance to apply any changes being // pushed out simultaneously. See CASSANDRA-5025 Runnable runnable = () -> { // grab the latest version of the schema since it may have changed again since the initial scheduling EndpointState epState = Gossiper.instance.getEndpointStateForEndpoint(endpoint); if (epState == null) { logger.debug("epState vanished for {}, not submitting migration task", endpoint); return; } VersionedValue value = epState.getApplicationState(ApplicationState.SCHEMA); UUID currentVersion = UUID.fromString(value.value); if (Schema.instance.getVersion().equals(currentVersion)) { logger.debug("not submitting migration task for {} because our versions match", endpoint); return; } logger.debug("submitting migration task for {}", endpoint); submitMigrationTask(endpoint); }; ScheduledExecutors.optionalTasks.schedule(runnable, MIGRATION_DELAY_IN_MS, TimeUnit.MILLISECONDS); } } private static Future<?> submitMigrationTask(InetAddress endpoint) { /* * Do not de-ref the future because that causes distributed deadlock (CASSANDRA-3832) because we are * running in the gossip stage. 
*/ return StageManager.getStage(Stage.MIGRATION).submit(new MigrationTask(endpoint)); } public static boolean shouldPullSchemaFrom(InetAddress endpoint) { /* * Don't request schema from nodes with a differnt or unknonw major version (may have incompatible schema) * Don't request schema from fat clients */ return MessagingService.instance().knowsVersion(endpoint) && MessagingService.instance().getRawVersion(endpoint) == MessagingService.current_version && !Gossiper.instance.isGossipOnlyMember(endpoint); } public static boolean isReadyForBootstrap() { return MigrationTask.getInflightTasks().isEmpty(); } public static void waitUntilReadyForBootstrap() { CountDownLatch completionLatch; while ((completionLatch = MigrationTask.getInflightTasks().poll()) != null) { try { if (!completionLatch.await(MIGRATION_TASK_WAIT_IN_SECONDS, TimeUnit.SECONDS)) logger.error("Migration task failed to complete"); } catch (InterruptedException e) { Thread.currentThread().interrupt(); logger.error("Migration task was interrupted"); } } } public void notifyCreateKeyspace(KeyspaceMetadata ksm) { for (MigrationListener listener : listeners) listener.onCreateKeyspace(ksm.name); } public void notifyCreateColumnFamily(CFMetaData cfm) { for (MigrationListener listener : listeners) listener.onCreateColumnFamily(cfm.ksName, cfm.cfName); } public void notifyCreateView(ViewDefinition view) { for (MigrationListener listener : listeners) listener.onCreateView(view.ksName, view.viewName); } public void notifyCreateUserType(UserType ut) { for (MigrationListener listener : listeners) listener.onCreateUserType(ut.keyspace, ut.getNameAsString()); } public void notifyCreateFunction(UDFunction udf) { for (MigrationListener listener : listeners) listener.onCreateFunction(udf.name().keyspace, udf.name().name, udf.argTypes()); } public void notifyCreateAggregate(UDAggregate udf) { for (MigrationListener listener : listeners) listener.onCreateAggregate(udf.name().keyspace, udf.name().name, udf.argTypes()); } 
/** Notifies registered listeners that a keyspace definition was updated. */
public void notifyUpdateKeyspace(KeyspaceMetadata ksm)
{
    for (MigrationListener listener : listeners)
        listener.onUpdateKeyspace(ksm.name);
}

/** Notifies registered listeners that a table definition was updated. */
public void notifyUpdateColumnFamily(CFMetaData cfm, boolean columnsDidChange)
{
    for (MigrationListener listener : listeners)
        listener.onUpdateColumnFamily(cfm.ksName, cfm.cfName, columnsDidChange);
}

/** Notifies registered listeners that a materialized view definition was updated. */
public void notifyUpdateView(ViewDefinition view, boolean columnsDidChange)
{
    for (MigrationListener listener : listeners)
        listener.onUpdateView(view.ksName, view.viewName, columnsDidChange);
}

/** Notifies registered listeners that a user-defined type was updated, then refreshes dependent UDFs. */
public void notifyUpdateUserType(UserType ut)
{
    for (MigrationListener listener : listeners)
        listener.onUpdateUserType(ut.keyspace, ut.getNameAsString());

    // FIXME: remove when we get rid of AbstractType in metadata. Doesn't really belong anywhere.
    Schema.instance.getKSMetaData(ut.keyspace).functions.udfs().forEach(f -> f.userTypeUpdated(ut.keyspace, ut.getNameAsString()));
}

/** Notifies registered listeners that a user-defined (scalar) function was updated. */
public void notifyUpdateFunction(UDFunction udf)
{
    for (MigrationListener listener : listeners)
        listener.onUpdateFunction(udf.name().keyspace, udf.name().name, udf.argTypes());
}

/** Notifies registered listeners that a user-defined aggregate was updated. */
public void notifyUpdateAggregate(UDAggregate udf)
{
    for (MigrationListener listener : listeners)
        listener.onUpdateAggregate(udf.name().keyspace, udf.name().name, udf.argTypes());
}

/** Notifies registered listeners that a keyspace was dropped. */
public void notifyDropKeyspace(KeyspaceMetadata ksm)
{
    for (MigrationListener listener : listeners)
        listener.onDropKeyspace(ksm.name);
}

/** Notifies registered listeners that a table was dropped. */
public void notifyDropColumnFamily(CFMetaData cfm)
{
    for (MigrationListener listener : listeners)
        listener.onDropColumnFamily(cfm.ksName, cfm.cfName);
}

/** Notifies registered listeners that a materialized view was dropped. */
public void notifyDropView(ViewDefinition view)
{
    for (MigrationListener listener : listeners)
        listener.onDropView(view.ksName, view.viewName);
}

/** Notifies registered listeners that a user-defined type was dropped. */
public void notifyDropUserType(UserType ut)
{
    for (MigrationListener listener : listeners)
        listener.onDropUserType(ut.keyspace, ut.getNameAsString());
}

/** Notifies registered listeners that a user-defined (scalar) function was dropped. */
public void notifyDropFunction(UDFunction udf)
{
    for (MigrationListener listener : listeners)
        listener.onDropFunction(udf.name().keyspace, udf.name().name, udf.argTypes());
}

/** Notifies registered listeners that a user-defined aggregate was dropped. */
public void notifyDropAggregate(UDAggregate udf)
{
    for (MigrationListener listener : listeners)
        listener.onDropAggregate(udf.name().keyspace, udf.name().name, udf.argTypes());
}

/** Announces a new keyspace cluster-wide (convenience overload: not local-only). */
public static void announceNewKeyspace(KeyspaceMetadata ksm) throws ConfigurationException
{
    announceNewKeyspace(ksm, false);
}

/** Announces a new keyspace using the current microsecond timestamp. */
public static void announceNewKeyspace(KeyspaceMetadata ksm, boolean announceLocally) throws ConfigurationException
{
    announceNewKeyspace(ksm, FBUtilities.timestampMicros(), announceLocally);
}

/**
 * Validates and announces a new keyspace.
 *
 * @param ksm            the keyspace metadata to create
 * @param timestamp      mutation timestamp (microseconds)
 * @param announceLocally if true, apply only locally instead of pushing to the cluster
 * @throws AlreadyExistsException if a keyspace with the same name already exists
 */
public static void announceNewKeyspace(KeyspaceMetadata ksm, long timestamp, boolean announceLocally) throws ConfigurationException
{
    ksm.validate();

    if (Schema.instance.getKSMetaData(ksm.name) != null)
        throw new AlreadyExistsException(ksm.name);

    logger.info(String.format("Create new Keyspace: %s", ksm));
    announce(SchemaKeyspace.makeCreateKeyspaceMutation(ksm, timestamp), announceLocally);
}

/** Announces a new table cluster-wide (convenience overload: not local-only). */
public static void announceNewColumnFamily(CFMetaData cfm) throws ConfigurationException
{
    announceNewColumnFamily(cfm, false);
}

/** Announces a new table, throwing if a table/view of the same name already exists. */
public static void announceNewColumnFamily(CFMetaData cfm, boolean announceLocally) throws ConfigurationException
{
    announceNewColumnFamily(cfm, announceLocally, true);
}

/**
 * Announces the table even if the definition is already know locally.
 * This should generally be avoided but is used internally when we want to force the most up to date version of
 * a system table schema (Note that we don't know if the schema we force _is_ the most recent version or not, we
 * just rely on idempotency to basically ignore that announce if it's not. That's why we can't use announceUpdateColumnFamily,
 * it would for instance delete new columns if this is not called with the most up-to-date version)
 *
 * Note that this is only safe for system tables where we know the cfId is fixed and will be the same whatever version
 * of the definition is used.
 */
public static void forceAnnounceNewColumnFamily(CFMetaData cfm) throws ConfigurationException
{
    announceNewColumnFamily(cfm, false, false);
}

/**
 * Shared implementation for the public announceNewColumnFamily variants.
 *
 * @param throwOnDuplicate when false (forced announce), an existing table/view of the same name is tolerated
 */
private static void announceNewColumnFamily(CFMetaData cfm, boolean announceLocally, boolean throwOnDuplicate) throws ConfigurationException
{
    cfm.validate();

    KeyspaceMetadata ksm = Schema.instance.getKSMetaData(cfm.ksName);
    if (ksm == null)
        throw new ConfigurationException(String.format("Cannot add table '%s' to non existing keyspace '%s'.", cfm.cfName, cfm.ksName));
    // If we have a table or a view which has the same name, we can't add a new one
    else if (throwOnDuplicate && ksm.getTableOrViewNullable(cfm.cfName) != null)
        throw new AlreadyExistsException(cfm.ksName, cfm.cfName);

    logger.info(String.format("Create new table: %s", cfm));
    announce(SchemaKeyspace.makeCreateTableMutation(ksm, cfm, FBUtilities.timestampMicros()), announceLocally);
}

/**
 * Validates and announces a new materialized view.
 *
 * @throws AlreadyExistsException if a table or view of the same name already exists in the keyspace
 */
public static void announceNewView(ViewDefinition view, boolean announceLocally) throws ConfigurationException
{
    view.metadata.validate();

    KeyspaceMetadata ksm = Schema.instance.getKSMetaData(view.ksName);
    if (ksm == null)
        throw new ConfigurationException(String.format("Cannot add table '%s' to non existing keyspace '%s'.", view.viewName, view.ksName));
    else if (ksm.getTableOrViewNullable(view.viewName) != null)
        throw new AlreadyExistsException(view.ksName, view.viewName);

    logger.info(String.format("Create new view: %s", view));
    announce(SchemaKeyspace.makeCreateViewMutation(ksm, view, FBUtilities.timestampMicros()), announceLocally);
}

/** Announces a new user-defined type. Also used for type updates (schema merge is idempotent). */
public static void announceNewType(UserType newType, boolean announceLocally)
{
    KeyspaceMetadata ksm = Schema.instance.getKSMetaData(newType.keyspace);
    announce(SchemaKeyspace.makeCreateTypeMutation(ksm, newType, FBUtilities.timestampMicros()), announceLocally);
}

/** Announces a new user-defined (scalar) function. */
public static void announceNewFunction(UDFunction udf, boolean announceLocally)
{
    logger.info(String.format("Create scalar function '%s'", udf.name()));
    KeyspaceMetadata ksm = Schema.instance.getKSMetaData(udf.name().keyspace);
    announce(SchemaKeyspace.makeCreateFunctionMutation(ksm, udf, FBUtilities.timestampMicros()), announceLocally);
}

/** Announces a new user-defined aggregate. */
public static void announceNewAggregate(UDAggregate udf, boolean announceLocally)
{
    logger.info(String.format("Create aggregate function '%s'", udf.name()));
    KeyspaceMetadata ksm = Schema.instance.getKSMetaData(udf.name().keyspace);
    announce(SchemaKeyspace.makeCreateAggregateMutation(ksm, udf, FBUtilities.timestampMicros()), announceLocally);
}

/** Announces an update to an existing keyspace cluster-wide (convenience overload: not local-only). */
public static void announceKeyspaceUpdate(KeyspaceMetadata ksm) throws ConfigurationException
{
    announceKeyspaceUpdate(ksm, false);
}

/**
 * Validates and announces an update to an existing keyspace.
 *
 * @throws ConfigurationException if the keyspace does not exist
 */
public static void announceKeyspaceUpdate(KeyspaceMetadata ksm, boolean announceLocally) throws ConfigurationException
{
    ksm.validate();

    KeyspaceMetadata oldKsm = Schema.instance.getKSMetaData(ksm.name);
    if (oldKsm == null)
        throw new ConfigurationException(String.format("Cannot update non existing keyspace '%s'.", ksm.name));

    logger.info(String.format("Update Keyspace '%s' From %s To %s", ksm.name, oldKsm, ksm));
    // Only the keyspace params are mutated here (name + params overload), not its tables.
    announce(SchemaKeyspace.makeCreateKeyspaceMutation(ksm.name, ksm.params, FBUtilities.timestampMicros()), announceLocally);
}

/** Announces an update to an existing table cluster-wide (convenience overload: not local-only). */
public static void announceColumnFamilyUpdate(CFMetaData cfm, boolean fromThrift) throws ConfigurationException
{
    announceColumnFamilyUpdate(cfm, fromThrift, false);
}

/**
 * Validates (including compatibility with the existing definition) and announces a table update.
 *
 * @param fromThrift whether the update originates from the Thrift API
 * @throws ConfigurationException if the table does not exist or the update is incompatible
 */
public static void announceColumnFamilyUpdate(CFMetaData cfm, boolean fromThrift, boolean announceLocally) throws ConfigurationException
{
    cfm.validate();

    CFMetaData oldCfm = Schema.instance.getCFMetaData(cfm.ksName, cfm.cfName);
    if (oldCfm == null)
        throw new ConfigurationException(String.format("Cannot update non existing table '%s' in keyspace '%s'.", cfm.cfName, cfm.ksName));
    KeyspaceMetadata ksm = Schema.instance.getKSMetaData(cfm.ksName);

    oldCfm.validateCompatibility(cfm);

    logger.info(String.format("Update table '%s/%s' From %s To %s", cfm.ksName, cfm.cfName, oldCfm, cfm));
    announce(SchemaKeyspace.makeUpdateTableMutation(ksm, oldCfm, cfm, FBUtilities.timestampMicros(), fromThrift), announceLocally);
}

/**
 * Validates (including compatibility with the existing definition) and announces a view update.
 *
 * @throws ConfigurationException if the view does not exist or the update is incompatible
 */
public static void announceViewUpdate(ViewDefinition view, boolean announceLocally) throws ConfigurationException
{
    view.metadata.validate();

    ViewDefinition oldView = Schema.instance.getView(view.ksName, view.viewName);
    if (oldView == null)
        throw new ConfigurationException(String.format("Cannot update non existing materialized view '%s' in keyspace '%s'.", view.viewName, view.ksName));
    KeyspaceMetadata ksm = Schema.instance.getKSMetaData(view.ksName);

    oldView.metadata.validateCompatibility(view.metadata);

    logger.info(String.format("Update view '%s/%s' From %s To %s", view.ksName, view.viewName, oldView, view));
    announce(SchemaKeyspace.makeUpdateViewMutation(ksm, oldView, view, FBUtilities.timestampMicros()), announceLocally);
}

/** Announces a type update; delegates to announceNewType, which is idempotent on merge. */
public static void announceTypeUpdate(UserType updatedType, boolean announceLocally)
{
    logger.info(String.format("Update type '%s.%s' to %s", updatedType.keyspace, updatedType.getNameAsString(), updatedType));
    announceNewType(updatedType, announceLocally);
}

/** Announces a keyspace drop cluster-wide (convenience overload: not local-only). */
public static void announceKeyspaceDrop(String ksName) throws ConfigurationException
{
    announceKeyspaceDrop(ksName, false);
}

/**
 * Announces a keyspace drop.
 *
 * @throws ConfigurationException if the keyspace does not exist
 */
public static void announceKeyspaceDrop(String ksName, boolean announceLocally) throws ConfigurationException
{
    KeyspaceMetadata oldKsm = Schema.instance.getKSMetaData(ksName);
    if (oldKsm == null)
        throw new ConfigurationException(String.format("Cannot drop non existing keyspace '%s'.", ksName));

    logger.info(String.format("Drop Keyspace '%s'", oldKsm.name));
    announce(SchemaKeyspace.makeDropKeyspaceMutation(oldKsm, FBUtilities.timestampMicros()), announceLocally);
}

/** Announces a table drop cluster-wide (convenience overload: not local-only). */
public static void announceColumnFamilyDrop(String ksName, String cfName) throws ConfigurationException
{
    announceColumnFamilyDrop(ksName, cfName, false);
}

/**
 * Announces a table drop.
 *
 * @throws ConfigurationException if the table does not exist
 */
public static void announceColumnFamilyDrop(String ksName, String cfName, boolean announceLocally) throws ConfigurationException
{
    CFMetaData oldCfm = Schema.instance.getCFMetaData(ksName, cfName);
    if (oldCfm == null)
        throw new ConfigurationException(String.format("Cannot drop non existing table '%s' in keyspace '%s'.", cfName, ksName));
    KeyspaceMetadata ksm = Schema.instance.getKSMetaData(ksName);

    logger.info(String.format("Drop table '%s/%s'", oldCfm.ksName, oldCfm.cfName));
    announce(SchemaKeyspace.makeDropTableMutation(ksm, oldCfm, FBUtilities.timestampMicros()), announceLocally);
}

/**
 * Announces a materialized view drop.
 *
 * @throws ConfigurationException if the view does not exist
 */
public static void announceViewDrop(String ksName, String viewName, boolean announceLocally) throws ConfigurationException
{
    ViewDefinition view = Schema.instance.getView(ksName, viewName);
    if (view == null)
        throw new ConfigurationException(String.format("Cannot drop non existing materialized view '%s' in keyspace '%s'.", viewName, ksName));
    KeyspaceMetadata ksm = Schema.instance.getKSMetaData(ksName);

    // NOTE(review): message says "Drop table" although this drops a view — looks like a copy-paste
    // from announceColumnFamilyDrop; confirm before changing the log text.
    logger.info(String.format("Drop table '%s/%s'", view.ksName, view.viewName));
    announce(SchemaKeyspace.makeDropViewMutation(ksm, view, FBUtilities.timestampMicros()), announceLocally);
}

/** Announces a user-defined type drop cluster-wide (convenience overload: not local-only). */
public static void announceTypeDrop(UserType droppedType)
{
    announceTypeDrop(droppedType, false);
}

/** Announces a user-defined type drop. */
public static void announceTypeDrop(UserType droppedType, boolean announceLocally)
{
    KeyspaceMetadata ksm = Schema.instance.getKSMetaData(droppedType.keyspace);
    announce(SchemaKeyspace.dropTypeFromSchemaMutation(ksm, droppedType, FBUtilities.timestampMicros()), announceLocally);
}

/** Announces the drop of one overload of a user-defined (scalar) function. */
public static void announceFunctionDrop(UDFunction udf, boolean announceLocally)
{
    logger.info(String.format("Drop scalar function overload '%s' args '%s'", udf.name(), udf.argTypes()));
    KeyspaceMetadata ksm = Schema.instance.getKSMetaData(udf.name().keyspace);
    announce(SchemaKeyspace.makeDropFunctionMutation(ksm, udf, FBUtilities.timestampMicros()), announceLocally);
}

/** Announces the drop of one overload of a user-defined aggregate. */
public static void announceAggregateDrop(UDAggregate udf, boolean announceLocally)
{
    logger.info(String.format("Drop aggregate function overload '%s' args '%s'", udf.name(), udf.argTypes()));
    KeyspaceMetadata ksm = Schema.instance.getKSMetaData(udf.name().keyspace);
    announce(SchemaKeyspace.makeDropAggregateMutation(ksm, udf, FBUtilities.timestampMicros()), announceLocally);
}

/**
 * actively announce a new version to active hosts via rpc
 * @param schema The schema mutation to be applied
 */
private static void announce(Mutation schema, boolean announceLocally)
{
    if (announceLocally)
        SchemaKeyspace.mergeSchema(Collections.singletonList(schema));
    else
        // Cluster-wide announce: block until the local merge (and push-out) completes.
        FBUtilities.waitOnFuture(announce(Collections.singletonList(schema)));
}

/** Sends the given schema mutations to one endpoint as a one-way DEFINITIONS_UPDATE message. */
private static void pushSchemaMutation(InetAddress endpoint, Collection<Mutation> schema)
{
    MessageOut<Collection<Mutation>> msg = new MessageOut<>(MessagingService.Verb.DEFINITIONS_UPDATE,
                                                           schema,
                                                           MigrationsSerializer.instance);
    MessagingService.instance().sendOneWay(msg, endpoint);
}

// Returns a future on the local application of the schema
private static Future<?> announce(final Collection<Mutation> schema)
{
    Future<?> f = StageManager.getStage(Stage.MIGRATION).submit(new WrappedRunnable()
    {
        protected void runMayThrow() throws ConfigurationException
        {
            SchemaKeyspace.mergeSchemaAndAnnounceVersion(schema);
        }
    });

    for (InetAddress endpoint : Gossiper.instance.getLiveMembers())
    {
        // only push schema to nodes with known and equal versions
        if (!endpoint.equals(FBUtilities.getBroadcastAddress()) &&
            MessagingService.instance().knowsVersion(endpoint) &&
            MessagingService.instance().getRawVersion(endpoint) == MessagingService.current_version)
            pushSchemaMutation(endpoint, schema);
    }

    return f;
}

/**
 * Announce my version passively over gossip.
 * Used to notify nodes as they arrive in the cluster.
 *
 * @param version The schema version to announce
 */
public static void passiveAnnounce(UUID version)
{
    Gossiper.instance.addLocalApplicationState(ApplicationState.SCHEMA, StorageService.instance.valueFactory.schema(version));
    logger.debug("Gossiping my schema version {}", version);
}

/**
 * Clear all locally stored schema information and reset schema to initial state.
 * Called by user (via JMX) who wants to get rid of schema disagreement.
 */
public static void resetLocalSchema()
{
    logger.info("Starting local schema reset...");

    logger.debug("Truncating schema tables...");
    SchemaKeyspace.truncate();

    logger.debug("Clearing local schema keyspace definitions...");
    Schema.instance.clear();

    Set<InetAddress> liveEndpoints = Gossiper.instance.getLiveMembers();
    liveEndpoints.remove(FBUtilities.getBroadcastAddress());

    // force migration if there are nodes around
    for (InetAddress node : liveEndpoints)
    {
        if (shouldPullSchemaFrom(node))
        {
            logger.debug("Requesting schema from {}", node);
            // Blocking here is fine: we are on the JMX caller's thread, not the gossip stage.
            FBUtilities.waitOnFuture(submitMigrationTask(node));
            break;
        }
    }

    logger.info("Local schema reset is complete.");
}

/**
 * Versioned serializer for batches of schema mutations exchanged via DEFINITIONS_UPDATE messages.
 * Wire format: an int count followed by that many serialized {@link Mutation}s.
 */
public static class MigrationsSerializer implements IVersionedSerializer<Collection<Mutation>>
{
    public static MigrationsSerializer instance = new MigrationsSerializer();

    public void serialize(Collection<Mutation> schema, DataOutputPlus out, int version) throws IOException
    {
        out.writeInt(schema.size());
        for (Mutation mutation : schema)
            Mutation.serializer.serialize(mutation, out, version);
    }

    public Collection<Mutation> deserialize(DataInputPlus in, int version) throws IOException
    {
        int count = in.readInt();
        Collection<Mutation> schema = new ArrayList<>(count);

        for (int i = 0; i < count; i++)
            schema.add(Mutation.serializer.deserialize(in, version));

        return schema;
    }

    public long serializedSize(Collection<Mutation> schema, int version)
    {
        int size = TypeSizes.sizeof(schema.size());
        for (Mutation mutation : schema)
            size += Mutation.serializer.serializedSize(mutation, version);
        return size;
    }
}
}
// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * ReportQuery.java * * This file was auto-generated from WSDL * by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter. */ package com.google.api.ads.admanager.axis.v202111; /** * A {@code ReportQuery} object allows you to specify the selection * criteria for * generating a report. Only reports with at least one {@link * Column} are supported. */ public class ReportQuery implements java.io.Serializable { /* The list of break-down types being requested in the report. * The generated report will contain the dimensions in the same order * as requested. * This field is required. */ private com.google.api.ads.admanager.axis.v202111.Dimension[] dimensions; /* The ad unit view for the report. Defaults to {@link AdUnitView#TOP_LEVEL}. */ private com.google.api.ads.admanager.axis.v202111.ReportQueryAdUnitView adUnitView; /* The list of trafficking statistics and revenue information * being requested * in the report. The generated report will contain the * columns in the same * order as requested. This field is required. */ private com.google.api.ads.admanager.axis.v202111.Column[] columns; /* The list of break-down attributes being requested in this report. * Some * {@link DimensionAttribute} values can only be used * with certain * {@link Dimension} values that must be included in * the {@link #dimensions} * attribute. 
The generated report will contain the attributes * in the same * order as requested. */ private com.google.api.ads.admanager.axis.v202111.DimensionAttribute[] dimensionAttributes; /* The list of {@link CustomField#id} being requested in this * report. * To add a {@link CustomField} to the report, you must * include * its corresponding {@link Dimension}, determined by * the {@link CustomField#entityType}, * as a {@link #dimensions dimension}. * * <table> * <tr> * <th scope="col" colspan="2">{@link CustomFieldEntityType#entityType}</th> * </tr> * <tr> * <td>{@link CustomFieldEntityType#LINE_ITEM}</td><td>{@link * Dimension#LINE_ITEM_NAME}</td> * </tr> * <tr> * <td>{@link CustomFieldEntityType#ORDER}</td><td>{@link * Dimension#ORDER_NAME}</td> * </tr> * <tr> * <td>{@link CustomFieldEntityType#CREATIVE}</td><td>{@link * Dimension#CREATIVE_NAME}</td> * </tr> * </table> */ private long[] customFieldIds; /* The list of content CMS metadata key {@link CmsMetadataKey#id * IDs} * being requested in this report. Each of these IDs * must have been defined in the * {@link CmsMetadataKey CMS metadata key}. This will * include * dimensions in the form of {@code CMS_METADATA_KEY[id]_ID} * and * {@code CMS_METADATA_KEY[id]_VALUE} where where {@code * ID} is the ID of * the {@link CmsMetadataValue#id CMS metadata value} * and {@code VALUE} is the * {@link CmsMetadataValue#valueName name}. * <p> * To add {@link CmsMetadataKey#id IDs}, you must include * {@link Dimension#CMS_METADATA} * in {@link #dimensions}, and specify a non-empty list * of content CMS metadata key IDs. * The order of content CMS metadata columns in the report * correspond to the place of * {@link Dimension#CMS_METADATA} in {@link #dimensions}. 
* For example, if {@link #dimensions} contains the following dimensions * in the order: * {@link Dimension#ADVERTISER_NAME}, * {@link Dimension#CMS_METADATA} and {@link Dimension#COUNTRY_NAME}, * and * {@link #cmsMetadataKeyIds} contains the following * IDs in the * order: 1001 and 1002. The order of dimensions in the * report will be: * Dimension.ADVERTISER_NAME, * Dimension.CMS_METADATA_KEY[1001]_VALUE, * Dimension.CMS_METADATA_KEY[1002]_VALUE, * Dimension.COUNTRY_NAME, * Dimension.ADVERTISER_ID, * Dimension.CMS_METADATA_KEY[1001]_ID, * Dimension.CMS_METADATA_KEY[1002]_ID, * Dimension.COUNTRY_CRITERIA_ID */ private long[] cmsMetadataKeyIds; /* The list of custom dimension custom targeting key {@link CustomTargetingKey#id * IDs} being * requested in this report. This will include dimensions * in the form of * {@code TOP_LEVEL_DIMENSION_KEY[id]_ID} and {@code * TOP_LEVEL_DIMENSION_KEY[id]_VALUE} where * {@code ID} is the ID of the {@link CustomTargetingValue#id * custom targeting value} and * {@code VALUE} is the {@link CustomTargetingValue#name * name}. * * <p>To add {@link CustomTargetingKey#id IDs}, you must * include * {@link Dimension#CUSTOM_DIMENSION} in {@link #dimensions}, * and specify a non-empty list of * custom targeting key IDs. The order of cusotm dimension * columns in the report correspond to the * place of {@link Dimension#CUSTOM_DIMENSION} in {@link * #dimensions}. For example, if * {@link #dimensions} contains the following dimensions * in the order: * {@link Dimension#ADVERTISER_NAME}, {@link Dimension#CUSTOM_DIMENSION} * and * {@link Dimension#COUNTRY_NAME}, and {@link #customCriteriaCustomTargetingKeyIds} * contains the * following IDs in the order: 1001 and 1002. 
The order * of dimensions in the report will be: * Dimension.ADVERTISER_NAME, Dimension.TOP_LEVEL_DIMENSION_KEY[1001]_VALUE, * Dimension.TOP_LEVEL_DIMENSION_KEY[1002]_VALUE, Dimension.COUNTRY_NAME, * Dimension.ADVERTISER_ID, * Dimension.TOP_LEVEL_DIMENSION_KEY[1001]_ID, Dimension.TOP_LEVEL_DIMENSION_KEY[1002]_ID, * Dimension.COUNTRY_CRITERIA_ID. */ private long[] customDimensionKeyIds; /* The start date from which the reporting information is gathered. * The * {@code ReportQuery#dateRangeType} field must be set * to * {@link DateRangeType#CUSTOM_DATE} in order to use * this. */ private com.google.api.ads.admanager.axis.v202111.Date startDate; /* The end date upto which the reporting information is gathered. * The * {@code ReportQuery#dateRangeType} field must be set * to * {@link DateRangeType#CUSTOM_DATE} in order to use * this. */ private com.google.api.ads.admanager.axis.v202111.Date endDate; /* The period of time for which the reporting data is being generated. * In * order to define custom time periods, set this to * {@link DateRangeType#CUSTOM_DATE}. If set to {@link * DateRangeType#CUSTOM_DATE}, then * {@link ReportQuery#startDate} and {@link ReportQuery#endDate} * will be used. */ private com.google.api.ads.admanager.axis.v202111.DateRangeType dateRangeType; /* Specifies a filter to use for reporting on data. This filter * will be used * in conjunction (joined with an AND statement) with * the date range selected * through {@link #dateRangeType}, {@link #startDate}, * and {@link #endDate}. 
* * The syntax currently allowed for {@link Statement#query} * is<br> * <code> [WHERE <condition> {AND <condition> ...}]<code><br> * <p><condition></code><br> * &nbsp;&nbsp;&nbsp;&nbsp; <code>:= <property> = <value></code><br> * <code><condition></code><br> * &nbsp;&nbsp;&nbsp;&nbsp; <code>:= <property> = <bind * variable></code><br> * <code><condition> := <property> IN <list></code><br> * <code><bind variable> := :<name></code><br> * </code> where property is the enumeration name of * a {@link Dimension} that * can be filtered. * <p> * For example, the statement "WHERE LINE_ITEM_ID IN * (34344, 23235)" can be used * to generate a report for a specific set of line items * <p> * Filtering on IDs is highly recommended over filtering * on names, especially * for geographical entities. When filtering on names, * matching is case * sensitive. */ private com.google.api.ads.admanager.axis.v202111.Statement statement; /* The currency for Ad Exchange revenue metrics. This field is * only valid for Ad Exchange metrics, * and an exception will be thrown if this field is used * with non-Ad Exchange metrics. Defaults to * the network currency if left {@code null}. The supported * currency codes can be found in * <a href="https://support.google.com/adxseller/answer/6019533"> * this Help Center article.</a> */ private java.lang.String adxReportCurrency; /* Gets the {@link TimeZoneType} for this report, which determines * the time zone used for the * report's date range. Defaults to {@link TimeZoneType.PUBLISHER}. 
*/ private com.google.api.ads.admanager.axis.v202111.TimeZoneType timeZoneType; public ReportQuery() { } public ReportQuery( com.google.api.ads.admanager.axis.v202111.Dimension[] dimensions, com.google.api.ads.admanager.axis.v202111.ReportQueryAdUnitView adUnitView, com.google.api.ads.admanager.axis.v202111.Column[] columns, com.google.api.ads.admanager.axis.v202111.DimensionAttribute[] dimensionAttributes, long[] customFieldIds, long[] cmsMetadataKeyIds, long[] customDimensionKeyIds, com.google.api.ads.admanager.axis.v202111.Date startDate, com.google.api.ads.admanager.axis.v202111.Date endDate, com.google.api.ads.admanager.axis.v202111.DateRangeType dateRangeType, com.google.api.ads.admanager.axis.v202111.Statement statement, java.lang.String adxReportCurrency, com.google.api.ads.admanager.axis.v202111.TimeZoneType timeZoneType) { this.dimensions = dimensions; this.adUnitView = adUnitView; this.columns = columns; this.dimensionAttributes = dimensionAttributes; this.customFieldIds = customFieldIds; this.cmsMetadataKeyIds = cmsMetadataKeyIds; this.customDimensionKeyIds = customDimensionKeyIds; this.startDate = startDate; this.endDate = endDate; this.dateRangeType = dateRangeType; this.statement = statement; this.adxReportCurrency = adxReportCurrency; this.timeZoneType = timeZoneType; } @Override public String toString() { return com.google.common.base.MoreObjects.toStringHelper(this.getClass()) .omitNullValues() .add("adUnitView", getAdUnitView()) .add("adxReportCurrency", getAdxReportCurrency()) .add("cmsMetadataKeyIds", getCmsMetadataKeyIds()) .add("columns", getColumns()) .add("customDimensionKeyIds", getCustomDimensionKeyIds()) .add("customFieldIds", getCustomFieldIds()) .add("dateRangeType", getDateRangeType()) .add("dimensionAttributes", getDimensionAttributes()) .add("dimensions", getDimensions()) .add("endDate", getEndDate()) .add("startDate", getStartDate()) .add("statement", getStatement()) .add("timeZoneType", getTimeZoneType()) .toString(); } /** * Gets 
the dimensions value for this ReportQuery. * * @return dimensions * The list of break-down types being requested in the report. * The generated report will contain the dimensions in the same order * as requested. * This field is required. */ public com.google.api.ads.admanager.axis.v202111.Dimension[] getDimensions() { return dimensions; } /** * Sets the dimensions value for this ReportQuery. * * @param dimensions * The list of break-down types being requested in the report. * The generated report will contain the dimensions in the same order * as requested. * This field is required. */ public void setDimensions(com.google.api.ads.admanager.axis.v202111.Dimension[] dimensions) { this.dimensions = dimensions; } public com.google.api.ads.admanager.axis.v202111.Dimension getDimensions(int i) { return this.dimensions[i]; } public void setDimensions(int i, com.google.api.ads.admanager.axis.v202111.Dimension _value) { this.dimensions[i] = _value; } /** * Gets the adUnitView value for this ReportQuery. * * @return adUnitView * The ad unit view for the report. Defaults to {@link AdUnitView#TOP_LEVEL}. */ public com.google.api.ads.admanager.axis.v202111.ReportQueryAdUnitView getAdUnitView() { return adUnitView; } /** * Sets the adUnitView value for this ReportQuery. * * @param adUnitView * The ad unit view for the report. Defaults to {@link AdUnitView#TOP_LEVEL}. */ public void setAdUnitView(com.google.api.ads.admanager.axis.v202111.ReportQueryAdUnitView adUnitView) { this.adUnitView = adUnitView; } /** * Gets the columns value for this ReportQuery. * * @return columns * The list of trafficking statistics and revenue information * being requested * in the report. The generated report will contain the * columns in the same * order as requested. This field is required. */ public com.google.api.ads.admanager.axis.v202111.Column[] getColumns() { return columns; } /** * Sets the columns value for this ReportQuery. 
* * @param columns * The list of trafficking statistics and revenue information * being requested * in the report. The generated report will contain the * columns in the same * order as requested. This field is required. */ public void setColumns(com.google.api.ads.admanager.axis.v202111.Column[] columns) { this.columns = columns; } public com.google.api.ads.admanager.axis.v202111.Column getColumns(int i) { return this.columns[i]; } public void setColumns(int i, com.google.api.ads.admanager.axis.v202111.Column _value) { this.columns[i] = _value; } /** * Gets the dimensionAttributes value for this ReportQuery. * * @return dimensionAttributes * The list of break-down attributes being requested in this report. * Some * {@link DimensionAttribute} values can only be used * with certain * {@link Dimension} values that must be included in * the {@link #dimensions} * attribute. The generated report will contain the attributes * in the same * order as requested. */ public com.google.api.ads.admanager.axis.v202111.DimensionAttribute[] getDimensionAttributes() { return dimensionAttributes; } /** * Sets the dimensionAttributes value for this ReportQuery. * * @param dimensionAttributes * The list of break-down attributes being requested in this report. * Some * {@link DimensionAttribute} values can only be used * with certain * {@link Dimension} values that must be included in * the {@link #dimensions} * attribute. The generated report will contain the attributes * in the same * order as requested. 
*/
    public void setDimensionAttributes(com.google.api.ads.admanager.axis.v202111.DimensionAttribute[] dimensionAttributes) {
        this.dimensionAttributes = dimensionAttributes;
    }

    /** Returns the dimension attribute at index {@code i}. */
    public com.google.api.ads.admanager.axis.v202111.DimensionAttribute getDimensionAttributes(int i) {
        return this.dimensionAttributes[i];
    }

    /** Sets the dimension attribute at index {@code i}. */
    public void setDimensionAttributes(int i, com.google.api.ads.admanager.axis.v202111.DimensionAttribute _value) {
        this.dimensionAttributes[i] = _value;
    }


    /**
     * Gets the customFieldIds value for this ReportQuery.
     *
     * @return customFieldIds
     *           The list of {@link CustomField#id} values being requested in this report.
     *           To add a {@link CustomField} to the report, you must include its
     *           corresponding {@link Dimension}, determined by the
     *           {@link CustomField#entityType}, as a {@link #dimensions dimension}:
     *           {@link CustomFieldEntityType#LINE_ITEM} requires
     *           {@link Dimension#LINE_ITEM_NAME}, {@link CustomFieldEntityType#ORDER} requires
     *           {@link Dimension#ORDER_NAME}, and {@link CustomFieldEntityType#CREATIVE}
     *           requires {@link Dimension#CREATIVE_NAME}.
     */
    public long[] getCustomFieldIds() {
        return customFieldIds;
    }


    /**
     * Sets the customFieldIds value for this ReportQuery.
     *
     * @param customFieldIds
     *           The list of {@link CustomField#id} values being requested in this report.
     *           To add a {@link CustomField} to the report, you must include its
     *           corresponding {@link Dimension}, determined by the
     *           {@link CustomField#entityType}, as a {@link #dimensions dimension}.
     */
    public void setCustomFieldIds(long[] customFieldIds) {
        this.customFieldIds = customFieldIds;
    }

    /** Returns the custom field ID at index {@code i}. */
    public long getCustomFieldIds(int i) {
        return this.customFieldIds[i];
    }

    /** Sets the custom field ID at index {@code i}. */
    public void setCustomFieldIds(int i, long _value) {
        this.customFieldIds[i] = _value;
    }


    /**
     * Gets the cmsMetadataKeyIds value for this ReportQuery.
     *
     * @return cmsMetadataKeyIds
     *           The list of content CMS metadata key {@link CmsMetadataKey#id IDs} being
     *           requested in this report. Each ID must have been defined in the
     *           {@link CmsMetadataKey CMS metadata key}. This will include dimensions in
     *           the form {@code CMS_METADATA_KEY[id]_ID} and
     *           {@code CMS_METADATA_KEY[id]_VALUE}, where {@code ID} is the
     *           {@link CmsMetadataValue#id CMS metadata value ID} and {@code VALUE} is the
     *           {@link CmsMetadataValue#valueName name}.
     *           <p>
     *           To add IDs, you must include {@link Dimension#CMS_METADATA} in
     *           {@link #dimensions} and specify a non-empty list of content CMS metadata
     *           key IDs. The content CMS metadata columns appear in the report at the
     *           position of {@link Dimension#CMS_METADATA} within {@link #dimensions},
     *           in the order the key IDs were given.
     */
    public long[] getCmsMetadataKeyIds() {
        return cmsMetadataKeyIds;
    }


    /**
     * Sets the cmsMetadataKeyIds value for this ReportQuery.
     *
     * @param cmsMetadataKeyIds
     *           The list of content CMS metadata key {@link CmsMetadataKey#id IDs} being
     *           requested in this report. Requires {@link Dimension#CMS_METADATA} in
     *           {@link #dimensions}; see {@link #getCmsMetadataKeyIds()} for column
     *           naming and ordering.
     */
    public void setCmsMetadataKeyIds(long[] cmsMetadataKeyIds) {
        this.cmsMetadataKeyIds = cmsMetadataKeyIds;
    }

    /** Returns the content CMS metadata key ID at index {@code i}. */
    public long getCmsMetadataKeyIds(int i) {
        return this.cmsMetadataKeyIds[i];
    }

    /** Sets the content CMS metadata key ID at index {@code i}. */
    public void setCmsMetadataKeyIds(int i, long _value) {
        this.cmsMetadataKeyIds[i] = _value;
    }


    /**
     * Gets the customDimensionKeyIds value for this ReportQuery.
     *
     * @return customDimensionKeyIds
     *           The list of custom dimension custom targeting key
     *           {@link CustomTargetingKey#id IDs} being requested in this report. This
     *           will include dimensions in the form
     *           {@code TOP_LEVEL_DIMENSION_KEY[id]_ID} and
     *           {@code TOP_LEVEL_DIMENSION_KEY[id]_VALUE}, where {@code ID} is the
     *           {@link CustomTargetingValue#id custom targeting value ID} and
     *           {@code VALUE} is the {@link CustomTargetingValue#name name}.
     *           <p>
     *           To add IDs, you must include {@link Dimension#CUSTOM_DIMENSION} in
     *           {@link #dimensions} and specify a non-empty list of custom targeting key
     *           IDs. The custom dimension columns appear in the report at the position of
     *           {@link Dimension#CUSTOM_DIMENSION} within {@link #dimensions}, in the
     *           order the key IDs were given.
     */
    public long[] getCustomDimensionKeyIds() {
        return customDimensionKeyIds;
    }


    /**
     * Sets the customDimensionKeyIds value for this ReportQuery.
     *
     * @param customDimensionKeyIds
     *           The list of custom dimension custom targeting key
     *           {@link CustomTargetingKey#id IDs} being requested in this report.
     *           Requires {@link Dimension#CUSTOM_DIMENSION} in {@link #dimensions}; see
     *           {@link #getCustomDimensionKeyIds()} for column naming and ordering.
     */
    public void setCustomDimensionKeyIds(long[] customDimensionKeyIds) {
        this.customDimensionKeyIds = customDimensionKeyIds;
    }

    /** Returns the custom dimension targeting key ID at index {@code i}. */
    public long getCustomDimensionKeyIds(int i) {
        return this.customDimensionKeyIds[i];
    }

    /** Sets the custom dimension targeting key ID at index {@code i}. */
    public void setCustomDimensionKeyIds(int i, long _value) {
        this.customDimensionKeyIds[i] = _value;
    }


    /**
     * Gets the startDate value for this ReportQuery.
     *
     * @return startDate
     *           The start date from which the reporting information is gathered. The
     *           {@code ReportQuery#dateRangeType} field must be set to
     *           {@link DateRangeType#CUSTOM_DATE} in order to use this.
     */
    public com.google.api.ads.admanager.axis.v202111.Date getStartDate() {
        return startDate;
    }


    /**
     * Sets the startDate value for this ReportQuery.
     *
     * @param startDate
     *           The start date from which the reporting information is gathered. The
     *           {@code ReportQuery#dateRangeType} field must be set to
     *           {@link DateRangeType#CUSTOM_DATE} in order to use this.
     */
    public void setStartDate(com.google.api.ads.admanager.axis.v202111.Date startDate) {
        this.startDate = startDate;
    }


    /**
     * Gets the endDate value for this ReportQuery.
     *
     * @return endDate
     *           The end date up to which the reporting information is gathered. The
     *           {@code ReportQuery#dateRangeType} field must be set to
     *           {@link DateRangeType#CUSTOM_DATE} in order to use this.
     */
    public com.google.api.ads.admanager.axis.v202111.Date getEndDate() {
        return endDate;
    }


    /**
     * Sets the endDate value for this ReportQuery.
     *
     * @param endDate
     *           The end date up to which the reporting information is gathered. The
     *           {@code ReportQuery#dateRangeType} field must be set to
     *           {@link DateRangeType#CUSTOM_DATE} in order to use this.
     */
    public void setEndDate(com.google.api.ads.admanager.axis.v202111.Date endDate) {
        this.endDate = endDate;
    }


    /**
     * Gets the dateRangeType value for this ReportQuery.
     *
     * @return dateRangeType
     *           The period of time for which the reporting data is being generated. In
     *           order to define custom time periods, set this to
     *           {@link DateRangeType#CUSTOM_DATE}; then {@link ReportQuery#startDate}
     *           and {@link ReportQuery#endDate} will be used.
     */
    public com.google.api.ads.admanager.axis.v202111.DateRangeType getDateRangeType() {
        return dateRangeType;
    }


    /**
     * Sets the dateRangeType value for this ReportQuery.
     *
     * @param dateRangeType
     *           The period of time for which the reporting data is being generated. In
     *           order to define custom time periods, set this to
     *           {@link DateRangeType#CUSTOM_DATE}; then {@link ReportQuery#startDate}
     *           and {@link ReportQuery#endDate} will be used.
     */
    public void setDateRangeType(com.google.api.ads.admanager.axis.v202111.DateRangeType dateRangeType) {
        this.dateRangeType = dateRangeType;
    }


    /**
     * Gets the statement value for this ReportQuery.
     *
     * @return statement
     *           Specifies a filter to use for reporting on data. This filter is used in
     *           conjunction (joined with an AND statement) with the date range selected
     *           through {@link #dateRangeType}, {@link #startDate}, and {@link #endDate}.
     *           The allowed {@link Statement#query} syntax is
     *           {@code [WHERE <condition> {AND <condition> ...}]}, where each condition
     *           is {@code <property> = <value>}, {@code <property> = :<bind variable>},
     *           or {@code <property> IN <list>}, and property is the enumeration name of
     *           a filterable {@link Dimension}. For example, the statement
     *           "WHERE LINE_ITEM_ID IN (34344, 23235)" can be used to generate a report
     *           for a specific set of line items.
     *           <p>
     *           Filtering on IDs is highly recommended over filtering on names,
     *           especially for geographical entities. When filtering on names, matching
     *           is case sensitive.
     */
    public com.google.api.ads.admanager.axis.v202111.Statement getStatement() {
        return statement;
    }


    /**
     * Sets the statement value for this ReportQuery.
     *
     * @param statement
     *           Specifies a filter to use for reporting on data, joined with AND to the
     *           selected date range. See {@link #getStatement()} for the allowed query
     *           syntax.
     */
    public void setStatement(com.google.api.ads.admanager.axis.v202111.Statement statement) {
        this.statement = statement;
    }


    /**
     * Gets the adxReportCurrency value for this ReportQuery.
     *
     * @return adxReportCurrency
     *           The currency for Ad Exchange revenue metrics. This field is only valid
     *           for Ad Exchange metrics, and an exception will be thrown if this field
     *           is used with non-Ad Exchange metrics. Defaults to the network currency
     *           if left {@code null}.
     */
    public java.lang.String getAdxReportCurrency() {
        return adxReportCurrency;
    }


    /**
     * Sets the adxReportCurrency value for this ReportQuery.
     *
     * @param adxReportCurrency
     *           The currency for Ad Exchange revenue metrics. Only valid for Ad Exchange
     *           metrics; defaults to the network currency if left {@code null}.
     */
    public void setAdxReportCurrency(java.lang.String adxReportCurrency) {
        this.adxReportCurrency = adxReportCurrency;
    }


    /**
     * Gets the timeZoneType value for this ReportQuery.
     *
     * @return timeZoneType
     *           The {@link TimeZoneType} for this report, which determines the time zone
     *           used for the report's date range. Defaults to
     *           {@code TimeZoneType.PUBLISHER}.
     */
    public com.google.api.ads.admanager.axis.v202111.TimeZoneType getTimeZoneType() {
        return timeZoneType;
    }


    /**
     * Sets the timeZoneType value for this ReportQuery.
     *
     * @param timeZoneType
     *           The {@link TimeZoneType} for this report, which determines the time zone
     *           used for the report's date range. Defaults to
     *           {@code TimeZoneType.PUBLISHER}.
     */
    public void setTimeZoneType(com.google.api.ads.admanager.axis.v202111.TimeZoneType timeZoneType) {
        this.timeZoneType = timeZoneType;
    }

    // Re-entrancy guard: while an equals() comparison against some object is in
    // progress, holds that object so cyclic object graphs terminate.
    private java.lang.Object __equalsCalc = null;

    /**
     * Deep, field-by-field equality, using {@code Arrays.equals} for array fields.
     * Uses {@code __equalsCalc} to break cycles in self-referential object graphs
     * (standard Axis-generated pattern).
     */
    @Override
    public synchronized boolean equals(java.lang.Object obj) {
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof ReportQuery)) return false;
        ReportQuery other = (ReportQuery) obj;
        if (this == obj) return true;
        if (__equalsCalc != null) {
            return (__equalsCalc == obj);
        }
        __equalsCalc = obj;
        boolean _equals;
        _equals = true &&
            ((this.dimensions==null && other.getDimensions()==null) ||
             (this.dimensions!=null &&
              java.util.Arrays.equals(this.dimensions, other.getDimensions()))) &&
            ((this.adUnitView==null && other.getAdUnitView()==null) ||
             (this.adUnitView!=null &&
              this.adUnitView.equals(other.getAdUnitView()))) &&
            ((this.columns==null && other.getColumns()==null) ||
             (this.columns!=null &&
              java.util.Arrays.equals(this.columns, other.getColumns()))) &&
            ((this.dimensionAttributes==null && other.getDimensionAttributes()==null) ||
             (this.dimensionAttributes!=null &&
              java.util.Arrays.equals(this.dimensionAttributes, other.getDimensionAttributes()))) &&
            ((this.customFieldIds==null && other.getCustomFieldIds()==null) ||
             (this.customFieldIds!=null &&
              java.util.Arrays.equals(this.customFieldIds, other.getCustomFieldIds()))) &&
            ((this.cmsMetadataKeyIds==null && other.getCmsMetadataKeyIds()==null) ||
             (this.cmsMetadataKeyIds!=null &&
              java.util.Arrays.equals(this.cmsMetadataKeyIds, other.getCmsMetadataKeyIds()))) &&
            ((this.customDimensionKeyIds==null && other.getCustomDimensionKeyIds()==null) ||
             (this.customDimensionKeyIds!=null &&
              java.util.Arrays.equals(this.customDimensionKeyIds, other.getCustomDimensionKeyIds()))) &&
            ((this.startDate==null && other.getStartDate()==null) ||
             (this.startDate!=null &&
              this.startDate.equals(other.getStartDate()))) &&
            ((this.endDate==null && other.getEndDate()==null) ||
             (this.endDate!=null &&
              this.endDate.equals(other.getEndDate()))) &&
            ((this.dateRangeType==null && other.getDateRangeType()==null) ||
             (this.dateRangeType!=null &&
              this.dateRangeType.equals(other.getDateRangeType()))) &&
            ((this.statement==null && other.getStatement()==null) ||
             (this.statement!=null &&
              this.statement.equals(other.getStatement()))) &&
            ((this.adxReportCurrency==null && other.getAdxReportCurrency()==null) ||
             (this.adxReportCurrency!=null &&
              this.adxReportCurrency.equals(other.getAdxReportCurrency()))) &&
            ((this.timeZoneType==null && other.getTimeZoneType()==null) ||
             (this.timeZoneType!=null &&
              this.timeZoneType.equals(other.getTimeZoneType())));
        __equalsCalc = null;
        return _equals;
    }

    // Re-entrancy guard for hashCode() on cyclic object graphs.
    private boolean __hashCodeCalc = false;

    /**
     * Hash code combining all fields; array elements are folded in individually
     * (skipping nested arrays), matching the Axis-generated contract with equals().
     * Returns 0 for re-entrant calls so cyclic graphs terminate.
     */
    @Override
    public synchronized int hashCode() {
        if (__hashCodeCalc) {
            return 0;
        }
        __hashCodeCalc = true;
        int _hashCode = 1;
        if (getDimensions() != null) {
            for (int i=0;
                 i<java.lang.reflect.Array.getLength(getDimensions());
                 i++) {
                java.lang.Object obj = java.lang.reflect.Array.get(getDimensions(), i);
                if (obj != null &&
                    !obj.getClass().isArray()) {
                    _hashCode += obj.hashCode();
                }
            }
        }
        if (getAdUnitView() != null) {
            _hashCode += getAdUnitView().hashCode();
        }
        if (getColumns() != null) {
            for (int i=0;
                 i<java.lang.reflect.Array.getLength(getColumns());
                 i++) {
                java.lang.Object obj = java.lang.reflect.Array.get(getColumns(), i);
                if (obj != null &&
                    !obj.getClass().isArray()) {
                    _hashCode += obj.hashCode();
                }
            }
        }
        if (getDimensionAttributes() != null) {
            for (int i=0;
                 i<java.lang.reflect.Array.getLength(getDimensionAttributes());
                 i++) {
                java.lang.Object obj = java.lang.reflect.Array.get(getDimensionAttributes(), i);
                if (obj != null &&
                    !obj.getClass().isArray()) {
                    _hashCode += obj.hashCode();
                }
            }
        }
        if (getCustomFieldIds() != null) {
            for (int i=0;
                 i<java.lang.reflect.Array.getLength(getCustomFieldIds());
                 i++) {
                java.lang.Object obj = java.lang.reflect.Array.get(getCustomFieldIds(), i);
                if (obj != null &&
                    !obj.getClass().isArray()) {
                    _hashCode += obj.hashCode();
                }
            }
        }
        if (getCmsMetadataKeyIds() != null) {
            for (int i=0;
                 i<java.lang.reflect.Array.getLength(getCmsMetadataKeyIds());
                 i++) {
                java.lang.Object obj = java.lang.reflect.Array.get(getCmsMetadataKeyIds(), i);
                if (obj != null &&
                    !obj.getClass().isArray()) {
                    _hashCode += obj.hashCode();
                }
            }
        }
        if (getCustomDimensionKeyIds() != null) {
            for (int i=0;
                 i<java.lang.reflect.Array.getLength(getCustomDimensionKeyIds());
                 i++) {
                java.lang.Object obj = java.lang.reflect.Array.get(getCustomDimensionKeyIds(), i);
                if (obj != null &&
                    !obj.getClass().isArray()) {
                    _hashCode += obj.hashCode();
                }
            }
        }
        if (getStartDate() != null) {
            _hashCode += getStartDate().hashCode();
        }
        if (getEndDate() != null) {
            _hashCode += getEndDate().hashCode();
        }
        if (getDateRangeType() != null) {
            _hashCode += getDateRangeType().hashCode();
        }
        if (getStatement() != null) {
            _hashCode += getStatement().hashCode();
        }
        if (getAdxReportCurrency() != null) {
            _hashCode += getAdxReportCurrency().hashCode();
        }
        if (getTimeZoneType() != null) {
            _hashCode += getTimeZoneType().hashCode();
        }
        __hashCodeCalc = false;
        return _hashCode;
    }

    // Type metadata used by Axis to (de)serialize this bean to/from SOAP XML.
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(ReportQuery.class, true);

    /** Namespace shared by every element of this type. */
    private static final java.lang.String PUBLISHER_NS =
        "https://www.google.com/apis/ads/publisher/v202111";

    /**
     * Registers one element descriptor on {@link #typeDesc}. The XML element name
     * always matches the bean field name; all fields are optional (minOccurs=0)
     * and non-nillable.
     *
     * @param fieldName           bean field / XML element name
     * @param xmlType             schema type of the element
     * @param maxOccursUnbounded  true for repeated (array) elements
     */
    private static void addFieldDesc(java.lang.String fieldName,
            javax.xml.namespace.QName xmlType, boolean maxOccursUnbounded) {
        org.apache.axis.description.ElementDesc elemField =
            new org.apache.axis.description.ElementDesc();
        elemField.setFieldName(fieldName);
        elemField.setXmlName(new javax.xml.namespace.QName(PUBLISHER_NS, fieldName));
        elemField.setXmlType(xmlType);
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        elemField.setMaxOccursUnbounded(maxOccursUnbounded);
        typeDesc.addFieldDesc(elemField);
    }

    static {
        final java.lang.String xsd = "http://www.w3.org/2001/XMLSchema";
        typeDesc.setXmlType(new javax.xml.namespace.QName(PUBLISHER_NS, "ReportQuery"));
        addFieldDesc("dimensions",
            new javax.xml.namespace.QName(PUBLISHER_NS, "Dimension"), true);
        addFieldDesc("adUnitView",
            new javax.xml.namespace.QName(PUBLISHER_NS, "ReportQuery.AdUnitView"), false);
        addFieldDesc("columns",
            new javax.xml.namespace.QName(PUBLISHER_NS, "Column"), true);
        addFieldDesc("dimensionAttributes",
            new javax.xml.namespace.QName(PUBLISHER_NS, "DimensionAttribute"), true);
        addFieldDesc("customFieldIds",
            new javax.xml.namespace.QName(xsd, "long"), true);
        addFieldDesc("cmsMetadataKeyIds",
            new javax.xml.namespace.QName(xsd, "long"), true);
        addFieldDesc("customDimensionKeyIds",
            new javax.xml.namespace.QName(xsd, "long"), true);
        addFieldDesc("startDate",
            new javax.xml.namespace.QName(PUBLISHER_NS, "Date"), false);
        addFieldDesc("endDate",
            new javax.xml.namespace.QName(PUBLISHER_NS, "Date"), false);
        addFieldDesc("dateRangeType",
            new javax.xml.namespace.QName(PUBLISHER_NS, "DateRangeType"), false);
        addFieldDesc("statement",
            new javax.xml.namespace.QName(PUBLISHER_NS, "Statement"), false);
        addFieldDesc("adxReportCurrency",
            new javax.xml.namespace.QName(xsd, "string"), false);
        addFieldDesc("timeZoneType",
            new javax.xml.namespace.QName(PUBLISHER_NS, "TimeZoneType"), false);
    }

    /**
     * Return type metadata object
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }

    /**
     * Get Custom Serializer
     */
    public static org.apache.axis.encoding.Serializer getSerializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new  org.apache.axis.encoding.ser.BeanSerializer(
            _javaType, _xmlType, typeDesc);
    }

    /**
     * Get Custom Deserializer
     */
    public static org.apache.axis.encoding.Deserializer getDeserializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new  org.apache.axis.encoding.ser.BeanDeserializer(
            _javaType, _xmlType, typeDesc);
    }

}
/** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.IOException; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.List; import java.util.Map; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.coordination.ZkSplitLogWorkerCoordination; import org.apache.hadoop.hbase.master.HMaster; import org.apache.hadoop.hbase.master.LoadBalancer; import 
org.apache.hadoop.hbase.master.balancer.SimpleLoadBalancer;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.zookeeper.EmptyWatcher;
import org.apache.hadoop.hbase.zookeeper.ZKConfig;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.ZooDefs;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.ZooKeeper.States;
import org.apache.zookeeper.data.ACL;
import org.apache.zookeeper.data.Stat;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;

/**
 * Tests HBase behavior around its ZooKeeper integration, running against
 * mini DFS/ZK/HBase clusters managed by {@link HBaseTestingUtility}.
 */
@Category({MiscTests.class, LargeTests.class})
public class TestZooKeeper {
  private static final Log LOG = LogFactory.getLog(TestZooKeeper.class);

  // Shared mini-cluster harness; DFS + ZK live for the whole class,
  // the HBase cluster is restarted per test (see setUp/after).
  private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();

  /**
   * Starts the DFS and ZK mini clusters once for the whole class, then tunes
   * the shared configuration before any HBase cluster is started.
   * @throws java.lang.Exception
   */
  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    // Test we can first start the ZK cluster by itself
    Configuration conf = TEST_UTIL.getConfiguration();
    TEST_UTIL.startMiniDFSCluster(2);
    TEST_UTIL.startMiniZKCluster();
    conf.setBoolean("dfs.support.append", true);
    // Very short ZK session timeout so session-expiration tests run quickly.
    conf.setInt(HConstants.ZK_SESSION_TIMEOUT, 1000);
    // Install the test balancer (defined elsewhere in this file) on the master.
    conf.setClass(HConstants.HBASE_MASTER_LOADBALANCER_CLASS, MockLoadBalancer.class,
        LoadBalancer.class);
  }

  /**
   * Shuts down all mini clusters started by this class.
   * @throws java.lang.Exception
   */
  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
  }

  /**
   * Starts a fresh mini HBase cluster (2 masters, 2 regionservers) before
   * each test; DFS and ZK are reused from the class-level setup.
   * @throws java.lang.Exception
   */
  @Before
  public void setUp() throws Exception {
    TEST_UTIL.startMiniHBaseCluster(2, 2);
  }

  @After
public void after() throws Exception { try { // Some regionserver could fail to delete its znode. // So shutdown could hang. Let's kill them all instead. TEST_UTIL.getHBaseCluster().killAll(); // Still need to clean things up TEST_UTIL.shutdownMiniHBaseCluster(); } finally { TEST_UTIL.getTestFileSystem().delete(FSUtils.getRootDir(TEST_UTIL.getConfiguration()), true); ZKUtil.deleteNodeRecursively(TEST_UTIL.getZooKeeperWatcher(), "/hbase"); } } private ZooKeeperWatcher getZooKeeperWatcher(Connection c) throws NoSuchMethodException, InvocationTargetException, IllegalAccessException { Method getterZK = c.getClass().getDeclaredMethod("getKeepAliveZooKeeperWatcher"); getterZK.setAccessible(true); return (ZooKeeperWatcher) getterZK.invoke(c); } /** * See HBASE-1232 and http://wiki.apache.org/hadoop/ZooKeeper/FAQ#4. * @throws IOException * @throws InterruptedException */ // fails frequently, disabled for now, see HBASE-6406 //@Test public void testClientSessionExpired() throws Exception { Configuration c = new Configuration(TEST_UTIL.getConfiguration()); // We don't want to share the connection as we will check its state c.set(HConstants.HBASE_CLIENT_INSTANCE_ID, "1111"); Connection connection = ConnectionFactory.createConnection(c); ZooKeeperWatcher connectionZK = getZooKeeperWatcher(connection); LOG.info("ZooKeeperWatcher= 0x"+ Integer.toHexString( connectionZK.hashCode())); LOG.info("getRecoverableZooKeeper= 0x"+ Integer.toHexString( connectionZK.getRecoverableZooKeeper().hashCode())); LOG.info("session="+Long.toHexString( connectionZK.getRecoverableZooKeeper().getSessionId())); TEST_UTIL.expireSession(connectionZK); LOG.info("Before using zkw state=" + connectionZK.getRecoverableZooKeeper().getState()); // provoke session expiration by doing something with ZK try { connectionZK.getRecoverableZooKeeper().getZooKeeper().exists( "/1/1", false); } catch (KeeperException ignored) { } // Check that the old ZK connection is closed, means we did expire States state = 
connectionZK.getRecoverableZooKeeper().getState(); LOG.info("After using zkw state=" + state); LOG.info("session="+Long.toHexString( connectionZK.getRecoverableZooKeeper().getSessionId())); // It's asynchronous, so we may have to wait a little... final long limit1 = System.currentTimeMillis() + 3000; while (System.currentTimeMillis() < limit1 && state != States.CLOSED){ state = connectionZK.getRecoverableZooKeeper().getState(); } LOG.info("After using zkw loop=" + state); LOG.info("ZooKeeper should have timed out"); LOG.info("session="+Long.toHexString( connectionZK.getRecoverableZooKeeper().getSessionId())); // It's surprising but sometimes we can still be in connected state. // As it's known (even if not understood) we don't make the the test fail // for this reason.) // Assert.assertTrue("state=" + state, state == States.CLOSED); // Check that the client recovered ZooKeeperWatcher newConnectionZK = getZooKeeperWatcher(connection); States state2 = newConnectionZK.getRecoverableZooKeeper().getState(); LOG.info("After new get state=" +state2); // As it's an asynchronous event we may got the same ZKW, if it's not // yet invalidated. Hence this loop. 
final long limit2 = System.currentTimeMillis() + 3000; while (System.currentTimeMillis() < limit2 && state2 != States.CONNECTED && state2 != States.CONNECTING) { newConnectionZK = getZooKeeperWatcher(connection); state2 = newConnectionZK.getRecoverableZooKeeper().getState(); } LOG.info("After new get state loop=" + state2); Assert.assertTrue( state2 == States.CONNECTED || state2 == States.CONNECTING); connection.close(); } @Test (timeout = 120000) public void testRegionServerSessionExpired() throws Exception { LOG.info("Starting testRegionServerSessionExpired"); TEST_UTIL.expireRegionServerSession(0); testSanity("testRegionServerSessionExpired"); } @Test(timeout = 300000) public void testMasterSessionExpired() throws Exception { LOG.info("Starting testMasterSessionExpired"); TEST_UTIL.expireMasterSession(); testSanity("testMasterSessionExpired"); } /** * Master recovery when the znode already exists. Internally, this * test differs from {@link #testMasterSessionExpired} because here * the master znode will exist in ZK. 
*/ @Test(timeout = 300000) public void testMasterZKSessionRecoveryFailure() throws Exception { LOG.info("Starting testMasterZKSessionRecoveryFailure"); MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster(); HMaster m = cluster.getMaster(); m.abort("Test recovery from zk session expired", new KeeperException.SessionExpiredException()); assertTrue(m.isStopped()); // Master doesn't recover any more testSanity("testMasterZKSessionRecoveryFailure"); } /** * Make sure we can use the cluster * @throws Exception */ private void testSanity(final String testName) throws Exception{ String tableName = testName + "_" + System.currentTimeMillis(); HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(tableName)); HColumnDescriptor family = new HColumnDescriptor("fam"); desc.addFamily(family); LOG.info("Creating table " + tableName); Admin admin = TEST_UTIL.getHBaseAdmin(); try { admin.createTable(desc); } finally { admin.close(); } Table table = TEST_UTIL.getConnection().getTable(desc.getTableName()); Put put = new Put(Bytes.toBytes("testrow")); put.add(Bytes.toBytes("fam"), Bytes.toBytes("col"), Bytes.toBytes("testdata")); LOG.info("Putting table " + tableName); table.put(put); table.close(); } @Test public void testMultipleZK() throws IOException, NoSuchMethodException, InvocationTargetException, IllegalAccessException { Table localMeta = TEST_UTIL.getConnection().getTable(TableName.META_TABLE_NAME); Configuration otherConf = new Configuration(TEST_UTIL.getConfiguration()); otherConf.set(HConstants.ZOOKEEPER_QUORUM, "127.0.0.1"); Connection connection = ConnectionFactory.createConnection(otherConf); Table ipMeta = connection.getTable(TableName.META_TABLE_NAME); // dummy, just to open the connection final byte [] row = new byte [] {'r'}; localMeta.exists(new Get(row)); ipMeta.exists(new Get(row)); // make sure they aren't the same ZooKeeperWatcher z1 = getZooKeeperWatcher(ConnectionFactory.createConnection(localMeta.getConfiguration())); ZooKeeperWatcher z2 = 
getZooKeeperWatcher(ConnectionFactory.createConnection(otherConf)); assertFalse(z1 == z2); assertFalse(z1.getQuorum().equals(z2.getQuorum())); localMeta.close(); ipMeta.close(); connection.close(); } /** * Create a znode with data * @throws Exception */ @Test public void testCreateWithParents() throws Exception { ZooKeeperWatcher zkw = new ZooKeeperWatcher(new Configuration(TEST_UTIL.getConfiguration()), TestZooKeeper.class.getName(), null); byte[] expectedData = new byte[] { 1, 2, 3 }; ZKUtil.createWithParents(zkw, "/l1/l2/l3/l4/testCreateWithParents", expectedData); byte[] data = ZKUtil.getData(zkw, "/l1/l2/l3/l4/testCreateWithParents"); assertTrue(Bytes.equals(expectedData, data)); ZKUtil.deleteNodeRecursively(zkw, "/l1"); ZKUtil.createWithParents(zkw, "/testCreateWithParents", expectedData); data = ZKUtil.getData(zkw, "/testCreateWithParents"); assertTrue(Bytes.equals(expectedData, data)); ZKUtil.deleteNodeRecursively(zkw, "/testCreateWithParents"); } /** * Create a bunch of znodes in a hierarchy, try deleting one that has childs (it will fail), then * delete it recursively, then delete the last znode * @throws Exception */ @Test public void testZNodeDeletes() throws Exception { ZooKeeperWatcher zkw = new ZooKeeperWatcher( new Configuration(TEST_UTIL.getConfiguration()), TestZooKeeper.class.getName(), null); ZKUtil.createWithParents(zkw, "/l1/l2/l3/l4"); try { ZKUtil.deleteNode(zkw, "/l1/l2"); fail("We should not be able to delete if znode has childs"); } catch (KeeperException ex) { assertNotNull(ZKUtil.getDataNoWatch(zkw, "/l1/l2/l3/l4", null)); } ZKUtil.deleteNodeRecursively(zkw, "/l1/l2"); // make sure it really is deleted assertNull(ZKUtil.getDataNoWatch(zkw, "/l1/l2/l3/l4", null)); // do the same delete again and make sure it doesn't crash ZKUtil.deleteNodeRecursively(zkw, "/l1/l2"); ZKUtil.deleteNode(zkw, "/l1"); assertNull(ZKUtil.getDataNoWatch(zkw, "/l1/l2", null)); } @Test public void testClusterKey() throws Exception { testKey("server", 2181, 
"hbase"); testKey("server1,server2,server3", 2181, "hbase"); try { ZKUtil.transformClusterKey("2181:hbase"); } catch (IOException ex) { // OK } } @Test public void testClusterKeyWithMultiplePorts() throws Exception { // server has different port than the default port testKey("server1:2182", 2181, "hbase", true); // multiple servers have their own port testKey("server1:2182,server2:2183,server3:2184", 2181, "hbase", true); // one server has no specified port, should use default port testKey("server1:2182,server2,server3:2184", 2181, "hbase", true); // the last server has no specified port, should use default port testKey("server1:2182,server2:2183,server3", 2181, "hbase", true); // multiple servers have no specified port, should use default port for those servers testKey("server1:2182,server2,server3:2184,server4", 2181, "hbase", true); // same server, different ports testKey("server1:2182,server1:2183,server1", 2181, "hbase", true); // mix of same server/different port and different server testKey("server1:2182,server2:2183,server1", 2181, "hbase", true); } private void testKey(String ensemble, int port, String znode) throws IOException { testKey(ensemble, port, znode, false); // not support multiple client ports } private void testKey(String ensemble, int port, String znode, Boolean multiplePortSupport) throws IOException { Configuration conf = new Configuration(); String key = ensemble+":"+port+":"+znode; String ensemble2 = null; ZKUtil.ZKClusterKey zkClusterKey = ZKUtil.transformClusterKey(key); if (multiplePortSupport) { ensemble2 = ZKUtil.standardizeQuorumServerString(ensemble, Integer.toString(port)); assertEquals(ensemble2, zkClusterKey.quorumString); } else { assertEquals(ensemble, zkClusterKey.quorumString); } assertEquals(port, zkClusterKey.clientPort); assertEquals(znode, zkClusterKey.znodeParent); ZKUtil.applyClusterKeyToConf(conf, key); assertEquals(zkClusterKey.quorumString, conf.get(HConstants.ZOOKEEPER_QUORUM)); assertEquals(zkClusterKey.clientPort, 
conf.getInt(HConstants.ZOOKEEPER_CLIENT_PORT, -1)); assertEquals(zkClusterKey.znodeParent, conf.get(HConstants.ZOOKEEPER_ZNODE_PARENT)); String reconstructedKey = ZKUtil.getZooKeeperClusterKey(conf); if (multiplePortSupport) { String key2 = ensemble2 + ":" + port + ":" + znode; assertEquals(key2, reconstructedKey); } else { assertEquals(key, reconstructedKey); } } /** * A test for HBASE-3238 * @throws IOException A connection attempt to zk failed * @throws InterruptedException One of the non ZKUtil actions was interrupted * @throws KeeperException Any of the zookeeper connections had a * KeeperException */ @Test public void testCreateSilentIsReallySilent() throws InterruptedException, KeeperException, IOException { Configuration c = TEST_UTIL.getConfiguration(); String aclZnode = "/aclRoot"; String quorumServers = ZKConfig.getZKQuorumServersString(c); int sessionTimeout = 5 * 1000; // 5 seconds ZooKeeper zk = new ZooKeeper(quorumServers, sessionTimeout, EmptyWatcher.instance); zk.addAuthInfo("digest", "hbase:rox".getBytes()); // Assumes the root of the ZooKeeper space is writable as it creates a node // wherever the cluster home is defined. ZooKeeperWatcher zk2 = new ZooKeeperWatcher(TEST_UTIL.getConfiguration(), "testCreateSilentIsReallySilent", null); // Save the previous ACL Stat s = null; List<ACL> oldACL = null; while (true) { try { s = new Stat(); oldACL = zk.getACL("/", s); break; } catch (KeeperException e) { switch (e.code()) { case CONNECTIONLOSS: case SESSIONEXPIRED: case OPERATIONTIMEOUT: LOG.warn("Possibly transient ZooKeeper exception", e); Threads.sleep(100); break; default: throw e; } } } // I set this acl after the attempted creation of the cluster home node. // Add retries in case of retryable zk exceptions. 
while (true) { try { zk.setACL("/", ZooDefs.Ids.CREATOR_ALL_ACL, -1); break; } catch (KeeperException e) { switch (e.code()) { case CONNECTIONLOSS: case SESSIONEXPIRED: case OPERATIONTIMEOUT: LOG.warn("Possibly transient ZooKeeper exception: " + e); Threads.sleep(100); break; default: throw e; } } } while (true) { try { zk.create(aclZnode, null, ZooDefs.Ids.CREATOR_ALL_ACL, CreateMode.PERSISTENT); break; } catch (KeeperException e) { switch (e.code()) { case CONNECTIONLOSS: case SESSIONEXPIRED: case OPERATIONTIMEOUT: LOG.warn("Possibly transient ZooKeeper exception: " + e); Threads.sleep(100); break; default: throw e; } } } zk.close(); ZKUtil.createAndFailSilent(zk2, aclZnode); // Restore the ACL ZooKeeper zk3 = new ZooKeeper(quorumServers, sessionTimeout, EmptyWatcher.instance); zk3.addAuthInfo("digest", "hbase:rox".getBytes()); try { zk3.setACL("/", oldACL, -1); } finally { zk3.close(); } } /** * Test should not fail with NPE when getChildDataAndWatchForNewChildren * invoked with wrongNode */ @Test @SuppressWarnings("deprecation") public void testGetChildDataAndWatchForNewChildrenShouldNotThrowNPE() throws Exception { ZooKeeperWatcher zkw = new ZooKeeperWatcher(TEST_UTIL.getConfiguration(), "testGetChildDataAndWatchForNewChildrenShouldNotThrowNPE", null); ZKUtil.getChildDataAndWatchForNewChildren(zkw, "/wrongNode"); } /** * Tests that the master does not call retainAssignment after recovery from expired zookeeper * session. Without the HBASE-6046 fix master always tries to assign all the user regions by * calling retainAssignment. */ @Test(timeout = 300000) public void testRegionAssignmentAfterMasterRecoveryDueToZKExpiry() throws Exception { MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster(); cluster.startRegionServer(); cluster.waitForActiveAndReadyMaster(10000); HMaster m = cluster.getMaster(); final ZooKeeperWatcher zkw = m.getZooKeeper(); // now the cluster is up. So assign some regions. 
try (Admin admin = TEST_UTIL.getHBaseAdmin()) { byte[][] SPLIT_KEYS = new byte[][] { Bytes.toBytes("a"), Bytes.toBytes("b"), Bytes.toBytes("c"), Bytes.toBytes("d"), Bytes.toBytes("e"), Bytes.toBytes("f"), Bytes.toBytes("g"), Bytes.toBytes("h"), Bytes.toBytes("i"), Bytes.toBytes("j") }; String tableName = "testRegionAssignmentAfterMasterRecoveryDueToZKExpiry"; HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(tableName)); htd.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY)); admin.createTable(htd, SPLIT_KEYS); TEST_UTIL.waitUntilNoRegionsInTransition(60000); m.getZooKeeper().close(); MockLoadBalancer.retainAssignCalled = false; final int expectedNumOfListeners = countPermanentListeners(zkw); m.abort("Test recovery from zk session expired", new KeeperException.SessionExpiredException()); assertTrue(m.isStopped()); // Master doesn't recover any more // The recovered master should not call retainAssignment, as it is not a // clean startup. assertFalse("Retain assignment should not be called", MockLoadBalancer.retainAssignCalled); // number of listeners should be same as the value before master aborted // wait for new master is initialized cluster.waitForActiveAndReadyMaster(120000); final HMaster newMaster = cluster.getMasterThread().getMaster(); assertEquals(expectedNumOfListeners, countPermanentListeners(newMaster.getZooKeeper())); } } /** * Count listeners in zkw excluding listeners, that belongs to workers or other * temporary processes. */ private int countPermanentListeners(ZooKeeperWatcher watcher) { return countListeners(watcher, ZkSplitLogWorkerCoordination.class); } /** * Count listeners in zkw excluding provided classes */ private int countListeners(ZooKeeperWatcher watcher, Class<?>... 
exclude) { int cnt = 0; for (Object o : watcher.getListeners()) { boolean skip = false; for (Class<?> aClass : exclude) { if (aClass.isAssignableFrom(o.getClass())) { skip = true; break; } } if (!skip) { cnt += 1; } } return cnt; } /** * Tests whether the logs are split when master recovers from a expired zookeeper session and an * RS goes down. */ @Test(timeout = 300000) public void testLogSplittingAfterMasterRecoveryDueToZKExpiry() throws Exception { MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster(); cluster.startRegionServer(); HMaster m = cluster.getMaster(); // now the cluster is up. So assign some regions. Admin admin = TEST_UTIL.getHBaseAdmin(); Table table = null; try { byte[][] SPLIT_KEYS = new byte[][] { Bytes.toBytes("1"), Bytes.toBytes("2"), Bytes.toBytes("3"), Bytes.toBytes("4"), Bytes.toBytes("5") }; String tableName = "testLogSplittingAfterMasterRecoveryDueToZKExpiry"; HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(tableName)); HColumnDescriptor hcd = new HColumnDescriptor("col"); htd.addFamily(hcd); admin.createTable(htd, SPLIT_KEYS); TEST_UTIL.waitUntilNoRegionsInTransition(60000); table = TEST_UTIL.getConnection().getTable(htd.getTableName()); Put p; int numberOfPuts; for (numberOfPuts = 0; numberOfPuts < 6; numberOfPuts++) { p = new Put(Bytes.toBytes(numberOfPuts)); p.add(Bytes.toBytes("col"), Bytes.toBytes("ql"), Bytes.toBytes("value" + numberOfPuts)); table.put(p); } m.getZooKeeper().close(); m.abort("Test recovery from zk session expired", new KeeperException.SessionExpiredException()); assertTrue(m.isStopped()); // Master doesn't recover any more cluster.getRegionServer(0).abort("Aborting"); // Without patch for HBASE-6046 this test case will always timeout // with patch the test case should pass. 
Scan scan = new Scan(); int numberOfRows = 0; ResultScanner scanner = table.getScanner(scan); Result[] result = scanner.next(1); while (result != null && result.length > 0) { numberOfRows++; result = scanner.next(1); } assertEquals("Number of rows should be equal to number of puts.", numberOfPuts, numberOfRows); } finally { if (table != null) table.close(); admin.close(); } } static class MockLoadBalancer extends SimpleLoadBalancer { static boolean retainAssignCalled = false; @Override public Map<ServerName, List<HRegionInfo>> retainAssignment( Map<HRegionInfo, ServerName> regions, List<ServerName> servers) { retainAssignCalled = true; return super.retainAssignment(regions, servers); } } }
/*
 * Copyright 2010 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.gradle.api.plugins;

import org.gradle.api.Action;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.Task;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.artifacts.ConfigurationContainer;
import org.gradle.api.artifacts.Dependency;
import org.gradle.api.file.FileCollection;
import org.gradle.api.internal.artifacts.publish.ArchivePublishArtifact;
import org.gradle.api.internal.component.BuildableJavaComponent;
import org.gradle.api.internal.component.ComponentRegistry;
import org.gradle.api.internal.java.JavaLibrary;
import org.gradle.api.internal.plugins.DefaultArtifactPublicationSet;
import org.gradle.api.internal.project.ProjectInternal;
import org.gradle.api.tasks.SourceSet;
import org.gradle.api.tasks.bundling.Jar;
import org.gradle.api.tasks.javadoc.Javadoc;
import org.gradle.api.tasks.testing.Test;

import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.concurrent.Callable;

/**
 * <p>A {@link Plugin} which compiles and tests Java source, and assembles it into a JAR file.</p>
 */
public class JavaPlugin implements Plugin<ProjectInternal> {
    // Task names contributed by this plugin.
    public static final String PROCESS_RESOURCES_TASK_NAME = "processResources";
    public static final String CLASSES_TASK_NAME = "classes";
    public static final String COMPILE_JAVA_TASK_NAME = "compileJava";
    public static final String PROCESS_TEST_RESOURCES_TASK_NAME = "processTestResources";
    public static final String TEST_CLASSES_TASK_NAME = "testClasses";
    public static final String COMPILE_TEST_JAVA_TASK_NAME = "compileTestJava";
    public static final String TEST_TASK_NAME = "test";
    public static final String JAR_TASK_NAME = "jar";
    public static final String JAVADOC_TASK_NAME = "javadoc";

    // Dependency-configuration names contributed by this plugin.
    public static final String COMPILE_CONFIGURATION_NAME = "compile";
    public static final String COMPILE_ONLY_CONFIGURATION_NAME = "compileOnly";
    public static final String RUNTIME_CONFIGURATION_NAME = "runtime";
    public static final String COMPILE_CLASSPATH_CONFIGURATION_NAME = "compileClasspath";
    public static final String TEST_COMPILE_CONFIGURATION_NAME = "testCompile";
    public static final String TEST_COMPILE_ONLY_CONFIGURATION_NAME = "testCompileOnly";
    public static final String TEST_RUNTIME_CONFIGURATION_NAME = "testRuntime";
    public static final String TEST_COMPILE_CLASSPATH_CONFIGURATION_NAME = "testCompileClasspath";

    /**
     * Applies the plugin: JavaBasePlugin first (it contributes the source-set
     * and configuration machinery the steps below rely on), then wires source
     * sets, configurations, javadoc, test, archives and cross-project build
     * dependencies. The call order here is significant.
     */
    public void apply(ProjectInternal project) {
        project.getPluginManager().apply(JavaBasePlugin.class);

        JavaPluginConvention javaConvention = project.getConvention().getPlugin(JavaPluginConvention.class);
        // Registers the main component so e.g. `gradle build` knows what to build.
        project.getServices().get(ComponentRegistry.class).setMainComponent(new BuildableJavaComponentImpl(javaConvention));

        configureSourceSets(javaConvention);
        configureConfigurations(project);

        configureJavaDoc(javaConvention);
        configureTest(project, javaConvention);
        configureArchivesAndComponent(project, javaConvention);
        configureBuild(project);
    }

    /**
     * Creates the 'main' and 'test' source sets and points the test source
     * set's compile/runtime classpaths at main's output plus the test
     * configurations (created by JavaBasePlugin's source-set hooks).
     */
    private void configureSourceSets(final JavaPluginConvention pluginConvention) {
        final Project project = pluginConvention.getProject();

        SourceSet main = pluginConvention.getSourceSets().create(SourceSet.MAIN_SOURCE_SET_NAME);

        SourceSet test = pluginConvention.getSourceSets().create(SourceSet.TEST_SOURCE_SET_NAME);
        test.setCompileClasspath(project.files(main.getOutput(), project.getConfigurations().getByName(TEST_COMPILE_CLASSPATH_CONFIGURATION_NAME)));
        test.setRuntimeClasspath(project.files(test.getOutput(), main.getOutput(), project.getConfigurations().getByName(TEST_RUNTIME_CONFIGURATION_NAME)));
    }

    /**
     * Adds the 'javadoc' task for the main source set, and makes it depend on
     * the javadoc tasks of projects this project's 'compile' configuration
     * depends on.
     */
    private void configureJavaDoc(final JavaPluginConvention pluginConvention) {
        Project project = pluginConvention.getProject();

        SourceSet mainSourceSet = pluginConvention.getSourceSets().getByName(SourceSet.MAIN_SOURCE_SET_NAME);
        Javadoc javadoc = project.getTasks().create(JAVADOC_TASK_NAME, Javadoc.class);
        javadoc.setDescription("Generates Javadoc API documentation for the main source code.");
        javadoc.setGroup(JavaBasePlugin.DOCUMENTATION_GROUP);
        javadoc.setClasspath(mainSourceSet.getOutput().plus(mainSourceSet.getCompileClasspath()));
        javadoc.setSource(mainSourceSet.getAllJava());
        addDependsOnTaskInOtherProjects(javadoc, true, JAVADOC_TASK_NAME, COMPILE_CONFIGURATION_NAME);
    }

    /**
     * Adds the 'jar' task for main classes, publishes its artifact on the
     * 'runtime' configuration and registers the java library component.
     */
    private void configureArchivesAndComponent(final Project project, final JavaPluginConvention pluginConvention) {
        Jar jar = project.getTasks().create(JAR_TASK_NAME, Jar.class);
        jar.setDescription("Assembles a jar archive containing the main classes.");
        jar.setGroup(BasePlugin.BUILD_GROUP);
        jar.from(pluginConvention.getSourceSets().getByName(SourceSet.MAIN_SOURCE_SET_NAME).getOutput());

        ArchivePublishArtifact jarArtifact = new ArchivePublishArtifact(jar);
        Configuration runtimeConfiguration = project.getConfigurations().getByName(RUNTIME_CONFIGURATION_NAME);

        runtimeConfiguration.getArtifacts().add(jarArtifact);
        project.getExtensions().getByType(DefaultArtifactPublicationSet.class).addCandidate(jarArtifact);
        project.getComponents().add(new JavaLibrary(jarArtifact, runtimeConfiguration.getAllDependencies()));
    }

    /**
     * Wires buildNeeded/buildDependents to the corresponding tasks in
     * upstream (true) and downstream (false) projects respectively, resolved
     * through the 'testRuntime' configuration.
     */
    private void configureBuild(Project project) {
        addDependsOnTaskInOtherProjects(project.getTasks().getByName(JavaBasePlugin.BUILD_NEEDED_TASK_NAME), true,
                JavaBasePlugin.BUILD_NEEDED_TASK_NAME, TEST_RUNTIME_CONFIGURATION_NAME);
        addDependsOnTaskInOtherProjects(project.getTasks().getByName(JavaBasePlugin.BUILD_DEPENDENTS_TASK_NAME), false,
                JavaBasePlugin.BUILD_DEPENDENTS_TASK_NAME, TEST_RUNTIME_CONFIGURATION_NAME);
    }

    /**
     * Applies convention mappings to every {@link Test} task (lazily resolving
     * test classes dir, classpath and source dirs from the 'test' source set),
     * then creates the 'test' task itself and hooks it into 'check'.
     */
    private void configureTest(final Project project, final JavaPluginConvention pluginConvention) {
        project.getTasks().withType(Test.class, new Action<Test>() {
            public void execute(final Test test) {
                // Convention mappings are evaluated lazily, so the source set
                // can still be reconfigured after this plugin is applied.
                test.getConventionMapping().map("testClassesDir", new Callable<Object>() {
                    public Object call() throws Exception {
                        return pluginConvention.getSourceSets().getByName(SourceSet.TEST_SOURCE_SET_NAME).getOutput().getClassesDir();
                    }
                });
                test.getConventionMapping().map("classpath", new Callable<Object>() {
                    public Object call() throws Exception {
                        return pluginConvention.getSourceSets().getByName(SourceSet.TEST_SOURCE_SET_NAME).getRuntimeClasspath();
                    }
                });
                test.getConventionMapping().map("testSrcDirs", new Callable<Object>() {
                    public Object call() throws Exception {
                        return new ArrayList<File>(pluginConvention.getSourceSets().getByName(SourceSet.TEST_SOURCE_SET_NAME)
                                .getJava().getSrcDirs());
                    }
                });
            }
        });
        Test test = project.getTasks().create(TEST_TASK_NAME, Test.class);
        project.getTasks().getByName(JavaBasePlugin.CHECK_TASK_NAME).dependsOn(test);
        test.setDescription("Runs the unit tests.");
        test.setGroup(JavaBasePlugin.VERIFICATION_GROUP);
    }

    /**
     * Establishes the configuration hierarchy: testCompile extends compile,
     * testRuntime extends runtime and testCompile, and 'default' extends
     * runtime.
     */
    void configureConfigurations(Project project) {
        ConfigurationContainer configurations = project.getConfigurations();

        Configuration compileConfiguration = configurations.getByName(COMPILE_CONFIGURATION_NAME);
        Configuration runtimeConfiguration = configurations.getByName(RUNTIME_CONFIGURATION_NAME);
        Configuration compileTestsConfiguration = configurations.getByName(TEST_COMPILE_CONFIGURATION_NAME);

        compileTestsConfiguration.extendsFrom(compileConfiguration);
        configurations.getByName(TEST_RUNTIME_CONFIGURATION_NAME).extendsFrom(runtimeConfiguration, compileTestsConfiguration);
        configurations.getByName(Dependency.DEFAULT_CONFIGURATION).extendsFrom(runtimeConfiguration);
    }

    /**
     * Adds a dependency on tasks with the specified name in other projects. The other projects are determined from
     * project lib dependencies using the specified configuration name. These may be projects this project depends on or
     * projects that depend on this project based on the useDependOn argument.
     *
     * @param task Task to add dependencies to
     * @param useDependedOn if true, add tasks from projects this project depends on, otherwise use projects that depend
     * on this one.
     * @param otherProjectTaskName name of task in other projects
     * @param configurationName name of configuration to use to find the other projects
     */
    private void addDependsOnTaskInOtherProjects(final Task task, boolean useDependedOn, String otherProjectTaskName,
                                                 String configurationName) {
        Project project = task.getProject();
        final Configuration configuration = project.getConfigurations().getByName(configurationName);
        task.dependsOn(configuration.getTaskDependencyFromProjectDependency(useDependedOn, otherProjectTaskName));
    }

    /**
     * Describes the project's main Java component to Gradle's component
     * registry: which tasks build/rebuild it and what its runtime classpath is
     * (minus the Gradle API itself).
     */
    private static class BuildableJavaComponentImpl implements BuildableJavaComponent {
        private final JavaPluginConvention convention;

        public BuildableJavaComponentImpl(JavaPluginConvention convention) {
            this.convention = convention;
        }

        public Collection<String> getRebuildTasks() {
            return Arrays.asList(BasePlugin.CLEAN_TASK_NAME, JavaBasePlugin.BUILD_TASK_NAME);
        }

        public Collection<String> getBuildTasks() {
            return Arrays.asList(JavaBasePlugin.BUILD_TASK_NAME);
        }

        public FileCollection getRuntimeClasspath() {
            FileCollection runtimeClasspath = convention.getSourceSets().getByName(SourceSet.MAIN_SOURCE_SET_NAME).getRuntimeClasspath();
            ProjectInternal project = convention.getProject();
            // Exclude the Gradle API and local Groovy from the component's
            // runtime classpath via a detached (side-band) configuration.
            FileCollection gradleApi = project.getConfigurations().detachedConfiguration(project.getDependencies().gradleApi(), project.getDependencies().localGroovy());
            return runtimeClasspath.minus(gradleApi);
        }

        public Configuration getCompileDependencies() {
            return convention.getProject().getConfigurations().getByName(JavaPlugin.COMPILE_CONFIGURATION_NAME);
        }
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jena.fuseki.server; import static java.lang.String.format; import static org.apache.jena.fuseki.server.DataServiceStatus.*; import static org.apache.jena.tdb.sys.TDBInternal.isTDB1; import static org.apache.jena.tdb2.sys.TDBInternal.isTDB2; import java.util.*; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; import java.util.function.Consumer; import org.apache.jena.ext.com.google.common.collect.ArrayListMultimap; import org.apache.jena.ext.com.google.common.collect.ListMultimap; import org.apache.jena.fuseki.Fuseki; import org.apache.jena.fuseki.FusekiException; import org.apache.jena.fuseki.auth.AuthPolicy; import org.apache.jena.fuseki.build.FusekiConfig; import org.apache.jena.fuseki.servlets.ActionService; import org.apache.jena.query.TxnType; import org.apache.jena.riot.out.NodeFmtLib; import org.apache.jena.sparql.core.DatasetGraph; import org.apache.jena.sparql.core.DatasetGraphWrapper; public class DataService { // Not final - it null'ed if closed to release the dataset state. 
private DatasetGraph dataset; private final Map<String, EndpointSet> endpoints; private final ListMultimap<Operation, Endpoint> operationsMap; // Dataset-level authorization policy. private final AuthPolicy authPolicy; /** * Record which {@link DataAccessPoint DataAccessPoints} this {@code DataService} is * associated with. This is mainly for checking and development. * Usually, one {@code DataService} is associated with one {@link DataAccessPoint}. */ private List<DataAccessPoint> dataAccessPoints = new ArrayList<>(1); private volatile DataServiceStatus state = UNINITIALIZED; // DataService-level counters. private final CounterSet counters = new CounterSet(); private final AtomicBoolean offlineInProgress = new AtomicBoolean(false); private final AtomicBoolean acceptingRequests = new AtomicBoolean(true); /** Builder for a new DataService. */ public static Builder newBuilder() { return new Builder(); } /** Builder for a new DataService, with a dataset. */ public static Builder newBuilder(DatasetGraph dsg) { return newBuilder().dataset(dsg); } /** Return a new builder, populated by an existing DatasetService */ public static Builder newBuilder(DataService dSrv) { return new Builder(dSrv.dataset, dSrv.endpoints, dSrv.operationsMap, dSrv.authPolicy); } /** Create a {@code DataService} for the given dataset. 
*/ private DataService(DatasetGraph dataset, Map<String, EndpointSet> endpoints, ListMultimap<Operation, Endpoint> operationsMap, AuthPolicy authPolicy) { this.dataset = dataset; this.endpoints = Map.copyOf(endpoints); this.operationsMap = ArrayListMultimap.create(operationsMap); this.authPolicy = authPolicy; counters.add(CounterName.Requests); counters.add(CounterName.RequestsGood); counters.add(CounterName.RequestsBad); } /*package*/ void noteDataAccessPoint(DataAccessPoint dap) { this.dataAccessPoints.add(dap); } private String label() { StringJoiner sj = new StringJoiner(", ", "[", "]"); dataAccessPoints.stream() .map(DataAccessPoint::getName) .filter(x->!x.isEmpty()) .forEach(sj::add); return sj.toString(); } public DatasetGraph getDataset() { return dataset; } // // Convenience // // public void addEndpoint(Operation operation) { // addEndpoint(operation, null, null); // } // // public void addEndpoint(Operation operation, AuthPolicy authPolicy) { // addEndpoint(operation, null, authPolicy); // } // // public void addEndpoint(Operation operation, String endpointName) { // addEndpoint(operation, endpointName, null); // } // // public void addEndpoint(Operation operation, String endpointName, AuthPolicy authPolicy) { // Endpoint endpoint = Endpoint.create(operation, endpointName, authPolicy); // addEndpoint(endpoint); // } // // public void addEndpoint(Endpoint endpoint) { // addEndpoint$(endpoint); // } // // private void addEndpoint$(Endpoint endpoint) { // EndpointSet eps = endpoints.computeIfAbsent(endpoint.getName(), (k)->new EndpointSet(k)); // eps.put(endpoint); // // Cleaner not to have duplicates. But nice to have a (short) list that keeps the create order. // if ( ! 
operationsMap.containsEntry(endpoint.getOperation(), endpoint) ) // operationsMap.put(endpoint.getOperation(), endpoint); // } // // private void xremoveEndpoint$(Endpoint endpoint) { // EndpointSet eps = endpoints.get(endpoint.getName()); // if ( eps == null ) // return; // eps.remove(endpoint); // operationsMap.remove(endpoint.getOperation(), endpoint); // } // // public void setAuthPolicy(AuthPolicy authPolicy) { this.authPolicy = authPolicy; } /** Return the {@linkplain EndpointSet} for the operations for named use. */ public EndpointSet getEndpointSet(String endpointName) { return endpoints.get(endpointName); } /** Return the {@linkplain EndpointSet} for the operations for unnamed use. */ public EndpointSet getEndpointSet() { return endpoints.get(""); } /** Return a collection of all endpoints for this {@linkplain DataService}. */ public Collection<Endpoint> getEndpoints() { // A copy :-( Set<Endpoint> x = new HashSet<>(); endpoints.forEach((k,eps)->{ eps.forEach((op,ep)->x.add(ep)); }); return x; } /** Execute an action for each {@link Endpoint}. */ public void forEachEndpoint(Consumer<Endpoint> action) { endpoints.forEach((k,eps)->{ eps.forEach((op,ep)->action.accept(ep)); }); Set<Endpoint> x = new HashSet<>(); endpoints.forEach((k,eps)->{ eps.forEach((op,ep)->x.add(ep)); }); } public List<Endpoint> getEndpoints(Operation operation) { List<Endpoint> x = operationsMap.get(operation); return x; } /** Return the operations available here. * @see #getEndpoints(Operation) to get the endpoint list */ public Collection<Operation> getOperations() { return operationsMap.keySet(); } /** Return the operations available here. 
* @see #getEndpoints(Operation) to get the endpoint list */ public boolean hasOperation(Operation operation) { return operationsMap.keySet().contains(operation); } public boolean allowUpdate() { return true; } public void goOffline() { offlineInProgress.set(true); acceptingRequests.set(false); state = OFFLINE; } /** Set any {@link ActionService} processors that are currently unset. */ public void setEndpointProcessors(OperationRegistry operationRegistry) { // Make sure the processor is set for each endpoint. forEachEndpoint(ep->{ if ( ep.getProcessor() == null ) ep.setProcessor(operationRegistry.findHandler(ep.getOperation())); }); } private void ensureEnpointProcessors() { forEachEndpoint(ep->{ if ( ep.getProcessor() == null ) { String x = NodeFmtLib.strNT(ep.getOperation().getId()); Fuseki.configLog.warn("No processor for operation "+x); } }); } public void goActive() { ensureEnpointProcessors(); offlineInProgress.set(false); acceptingRequests.set(true); state = ACTIVE; } // Due to concurrency, call isAcceptingRequests(). 
// public boolean isActive() { // return state != DatasetStatus.ACTIVE; // } public boolean isAcceptingRequests() { return acceptingRequests.get(); } //@Override public CounterSet getCounters() { return counters; } //@Override public long getRequests() { return counters.value(CounterName.Requests); } //@Override public long getRequestsGood() { return counters.value(CounterName.RequestsGood); } //@Override public long getRequestsBad() { return counters.value(CounterName.RequestsBad); } /** Counter of active transactions */ public AtomicLong activeTxn = new AtomicLong(0); /** Cumulative counter of transactions */ public AtomicLong totalTxn = new AtomicLong(0); public void startTxn(TxnType mode) { check(DataServiceStatus.ACTIVE); activeTxn.getAndIncrement(); totalTxn.getAndIncrement(); } private void check(DataServiceStatus status) { if ( state != status ) { String msg = format("DataService %s: Expected=%s, Actual=%s", label(), status, state); throw new FusekiException(msg); } } public void finishTxn() { activeTxn.decrementAndGet(); } /** Shutdown and never use again. */ public synchronized void shutdown() { if ( state == CLOSING ) return; expel(dataset); dataset = null; state = CLOSED; } private static void expel(DatasetGraph database) { // This should not be necessary. // When created by assembler, "closeIndexOnClose" should be set true. // so this happen automatically (otherwise we need either reflection // or make jena-text a dependency). // // Close the in-JVM objects for Lucene index and databases. // if ( database instanceof DatasetGraphText ) { // DatasetGraphText dbtext = (DatasetGraphText)database; // database = dbtext.getBase(); // dbtext.getTextIndex().close(); // } // Find possible TDB1, TDB2. DatasetGraph base = findTDB(database); database.close(); boolean isTDB1 = isTDB1(base); boolean isTDB2 = isTDB2(base); if ( isTDB1 || isTDB2 ) { // JENA-1586: Remove database from the process. 
if ( isTDB1 ) org.apache.jena.tdb.sys.TDBInternal.expel(base); if ( isTDB2 ) org.apache.jena.tdb2.sys.TDBInternal.expel(base); } } /** Unwrap until a TDB database is encountered */ private static DatasetGraph findTDB(DatasetGraph dsg) { DatasetGraph dsgw = dsg; while (dsgw instanceof DatasetGraphWrapper) { if ( isTDB1(dsgw) ) return dsgw; if ( isTDB2(dsgw) ) return dsgw; dsgw = ((DatasetGraphWrapper)dsgw).getWrapped(); } return dsgw; } /** Returning null implies no authorization control */ public AuthPolicy authPolicy() { return authPolicy; } public static class Builder { private DatasetGraph dataset = null; private Map<String, EndpointSet> endpoints = new HashMap<>(); private ListMultimap<Operation, Endpoint> operationsMap = ArrayListMultimap.create(); // Dataset-level authorization policy. private AuthPolicy authPolicy = null; private Builder() {} private Builder(DatasetGraph dataset, Map<String, EndpointSet> endpoints, ListMultimap<Operation, Endpoint> operationsMap,AuthPolicy authPolicy) { this(); this.dataset = dataset; this.endpoints.putAll(endpoints); this.operationsMap.putAll(operationsMap); this.authPolicy = authPolicy; } public Builder dataset(DatasetGraph dsg) { this.dataset = dsg; return this; } public DatasetGraph dataset() { return this.dataset; } public Builder withStdServices(boolean withUpdate) { FusekiConfig.populateStdServices(this, withUpdate); return this; } // For now, don't provide ... 
// public DatasetGraph dataset() { return this.dataset; } // // public AuthPolicy authPolicy() { return this.authPolicy; } public Builder addEndpoint(Operation operation) { return addEndpoint(operation, null, null); } public Builder addEndpoint(Operation operation, AuthPolicy authPolicy) { return addEndpoint(operation, null, authPolicy); } public Builder addEndpoint(Operation operation, String endpointName) { return addEndpoint(operation, endpointName, null); } public Builder addEndpoint(Operation operation, String endpointName, AuthPolicy authPolicy) { Endpoint endpoint = Endpoint.create(operation, endpointName, authPolicy); return addEndpoint(endpoint); } public Builder addEndpoint(Endpoint endpoint) { return addEndpoint$(endpoint); } private Builder addEndpoint$(Endpoint endpoint) { EndpointSet eps = endpoints.computeIfAbsent(endpoint.getName(), (k)->new EndpointSet(k)); eps.put(endpoint); // Cleaner not to have duplicates. But nice to have a (short) list that keeps the create order. if ( ! operationsMap.containsEntry(endpoint.getOperation(), endpoint) ) operationsMap.put(endpoint.getOperation(), endpoint); return this; } private void removeEndpoint$(Endpoint endpoint) { EndpointSet eps = endpoints.get(endpoint.getName()); if ( eps == null ) return; eps.remove(endpoint); operationsMap.remove(endpoint.getOperation(), endpoint); } public Builder setAuthPolicy(AuthPolicy authPolicy) { this.authPolicy = authPolicy; return this; } public DataService build() { return new DataService(dataset, endpoints, operationsMap, authPolicy); } } }
/* */ package com.googlecode.objectify.test; import com.googlecode.objectify.Key; import com.googlecode.objectify.annotation.Cache; import com.googlecode.objectify.annotation.Entity; import com.googlecode.objectify.annotation.Id; import com.googlecode.objectify.test.entity.Trivial; import com.googlecode.objectify.test.util.TestBase; import org.testng.annotations.Test; import java.util.logging.Logger; import static com.googlecode.objectify.test.util.TestObjectifyService.fact; import static com.googlecode.objectify.test.util.TestObjectifyService.ofy; /** * Tests of basic entity manipulation. * * @author Jeff Schnitzer <jeff@infohazard.org> */ public class EntityTests extends TestBase { /** */ @SuppressWarnings("unused") private static Logger log = Logger.getLogger(EntityTests.class.getName()); /** * A fruit. * * @author Scott Hernandez */ @Entity @Cache static abstract class Fruit { @Id Long id; String color; String taste; /** Default constructor must always exist */ protected Fruit() {} /** Constructor*/ protected Fruit(String color, String taste) { this.color = color; this.taste = taste; } public String getColor() { return this.color; } public String getTaste() { return this.taste; } } /** * A fruit, an apple. 
* * @author Scott Hernandez */ @Entity @Cache static class Apple extends Fruit { public static final String COLOR = "red"; public static final String TASTE = "sweet"; private String size; /** Default constructor must always exist */ public Apple() {} /** Constructor*/ public Apple(String color, String taste) { super(color,taste); this.size = "small"; } public String getSize() { return this.size; } } /** */ @Test public void testApple() throws Exception { fact().register(Apple.class); Apple a = new Apple(Apple.COLOR, Apple.TASTE); Key<Apple> aKey = ofy().save().entity(a).now(); Apple a2 = ofy().load().key(aKey).now(); assert a2.getColor().equals(a.getColor()) : "Colors were different after stored/retrieved"; assert a2.getSize().equals(a.getSize()) : "Sizes were different after stored/retrieved"; assert a2.getTaste().equals(a.getTaste()) : "Tastes were different after stored/retrieved"; } /** * A banana fruit. * * @author Scott Hernandez */ @Entity @Cache static class Banana extends Fruit { public static final String COLOR = "yellow"; public static final String TASTE = "sweet"; private String shape; /** Default constructor must always exist */ public Banana() {} /** Constructor*/ public Banana(String color, String taste) { super(color,taste); this.shape = "like a banana"; } public String getShape() { return this.shape; } } /** */ @Test public void testBanana() throws Exception { fact().register(Banana.class); Banana b = new Banana(Banana.COLOR, Banana.TASTE); Key<Banana> bKey = ofy().save().entity(b).now(); Banana b2 = ofy().load().key(bKey).now(); assert b2.getColor().equals(b.getColor()) : "Colors were different after stored/retrieved"; assert b2.getShape().equals(b.getShape()) : "Shapes were different after stored/retrieved"; assert b2.getTaste().equals(b.getTaste()) : "Tastes were different after stored/retrieved"; } /** * A holder of a <T>hing. 
* * @author Scott Hernandez */ @Entity @Cache static abstract class Holder<T> { @Id Long id; T thing; /** Default constructor must always exist */ protected Holder() {} protected Holder(T t) {this.thing = t;} public T getThing() { return this.thing; } public void setThing(T t) { this.thing = t; } } /** * A holder of a string. * * @author Scott Hernandez */ @Entity @Cache static class HolderOfString extends Holder<String> { /** Default constructor must always exist */ public HolderOfString() {} public HolderOfString(String s) {super(s);} public void setMyThing(String s) { this.thing = s; } public String getMyThing() { return this.thing; } } /** */ @Test public void testStringHolder() throws Exception { fact().register(HolderOfString.class); String s = "my secret"; HolderOfString hos = new HolderOfString(s); Key<HolderOfString> hosKey = ofy().save().entity(hos).now(); HolderOfString hos2 = ofy().load().key(hosKey).now(); assert hos.getThing().equals(hos2.getMyThing()) : "Strings were different after stored/retrieved"; assert hos.getThing().getClass().equals(hos2.getMyThing().getClass()) : "Classes were differnt"; } /** * A holder of a string, and a Long. 
* * @author Scott Hernandez */ @Entity @Cache static class HolderOfStringAndLong extends HolderOfString { protected Long myPrecious; /** Default constructor must always exist */ public HolderOfStringAndLong() {} public HolderOfStringAndLong(String s, Long l) {super(s); this.myPrecious = l; } public Long getMyPrecious() { return this.myPrecious; } } /** */ @Test public void testStringHolderWithALong() throws Exception { fact().register(HolderOfStringAndLong.class); String s = "my secret"; HolderOfStringAndLong hosal = new HolderOfStringAndLong(s,2L); Key<HolderOfStringAndLong> hosKey = ofy().save().entity(hosal).now(); HolderOfStringAndLong hosal2 = ofy().load().key(hosKey).now(); assert hosal.getMyPrecious().equals(hosal2.getMyPrecious()) : "Longs were different after stored/retrieved"; assert hosal.getThing().equals(hosal2.getMyThing()) : "Strings were different after stored/retrieved"; assert hosal.getThing().getClass().equals(hosal2.getMyThing().getClass()) : "Classes were differnt"; } /** */ @Test public void testToPojoAndBack() throws Exception { fact().register(Trivial.class); Trivial triv = new Trivial(123L, "blah", 456); com.google.appengine.api.datastore.Entity ent = ofy().save().toEntity(triv); assert ent.getKey().getId() == 123L; assert ent.getProperty("someString").equals("blah"); assert ent.getProperty("someNumber").equals(456L); Trivial converted = ofy().load().fromEntity(ent); assert converted.getId().equals(triv.getId()); assert converted.getSomeString().equals(triv.getSomeString()); assert converted.getSomeNumber() == triv.getSomeNumber(); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jclouds.snia.cdmi.v1.options;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.Charset;
import java.util.Map;

import com.google.common.base.Charsets;
import com.google.common.io.CharStreams;
import com.google.common.io.Files;

/**
 * CreateDataObjectOptions options supported in the REST API for the CREATE CDMI Data Object
 * operation.
 */
public class CreateDataObjectOptions extends CreateCDMIObjectOptions {

   public CreateDataObjectOptions() {
      // A data object always carries a "value" property; default to empty.
      jsonObjectBody.addProperty("value", "");
   }

   /**
    * Create CDMI data object with metadata
    *
    * @param metadata key/value pairs to attach to the object
    * @return CreateDataObjectOptions
    */
   public CreateDataObjectOptions metadata(Map<String, String> metadata) {
      super.metadata(metadata);
      return this;
   }

   /**
    * Create CDMI data object with mimetype
    *
    * @param mimetype mime type of the object's value
    * @return CreateDataObjectOptions
    */
   public CreateDataObjectOptions mimetype(String mimetype) {
      jsonObjectBody.addProperty("mimetype", mimetype);
      this.payload = jsonObjectBody.toString();
      return this;
   }

   /**
    * Create CDMI data object with value equal to empty string
    *
    * @return CreateDataObjectOptions
    */
   public CreateDataObjectOptions value() {
      this.payload = jsonObjectBody.toString();
      return this;
   }

   /**
    * Create CDMI data object with String value
    *
    * @param value
    *           String value; {@code null} is stored as the empty string
    * @return CreateDataObjectOptions
    */
   public CreateDataObjectOptions value(String value) {
      jsonObjectBody.addProperty("value", (value == null) ? "" : value);
      this.payload = jsonObjectBody.toString();
      return this;
   }

   /**
    * Create CDMI data object with byte array value
    *
    * @param value
    *           byte array value; decoded as UTF-8 into a String value
    * @return CreateDataObjectOptions
    */
   public CreateDataObjectOptions value(byte[] value) throws IOException {
      // Bug fix: the previous implementation used DataInputStream.readUTF(),
      // which expects a 2-byte length prefix and "modified UTF-8" encoding -
      // arbitrary byte arrays were corrupted or caused UTFDataFormatException.
      // Decode the raw bytes as UTF-8 instead. ("throws IOException" is kept
      // so existing callers' catch blocks still compile.)
      jsonObjectBody.addProperty("value", (value == null) ? "" : new String(value, Charsets.UTF_8));
      this.payload = jsonObjectBody.toString();
      return this;
   }

   /**
    * Create CDMI data object with file value
    *
    * @param value
    *           File converted to a String value with charset UTF_8
    * @return CreateDataObjectOptions
    */
   public CreateDataObjectOptions value(File value) throws IOException {
      jsonObjectBody.addProperty("value", (value == null) ? "" : Files.toString(value, Charsets.UTF_8));
      this.payload = jsonObjectBody.toString();
      return this;
   }

   /**
    * Create CDMI data object with file value
    *
    * @param value
    *           File converted to a String value
    * @param charset
    *           character set of file
    * @return CreateDataObjectOptions
    */
   public CreateDataObjectOptions value(File value, Charset charset) throws IOException {
      jsonObjectBody.addProperty("value", (value == null) ? "" : Files.toString(value, charset));
      this.payload = jsonObjectBody.toString();
      return this;
   }

   /**
    * Create CDMI data object with InputStream value
    *
    * @param value
    *           InputStream converted to a String value with charset UTF_8
    * @return CreateDataObjectOptions
    */
   public CreateDataObjectOptions value(InputStream value) throws IOException {
      jsonObjectBody.addProperty("value",
            (value == null) ? "" : CharStreams.toString(new InputStreamReader(value, Charsets.UTF_8)));
      this.payload = jsonObjectBody.toString();
      return this;
   }

   /**
    * Create CDMI data object with InputStream value
    *
    * @param value
    *           InputStream converted to a String value
    * @param charset
    *           character set of input stream
    * @return CreateDataObjectOptions
    */
   public CreateDataObjectOptions value(InputStream value, Charset charset) throws IOException {
      jsonObjectBody.addProperty("value",
            (value == null) ? "" : CharStreams.toString(new InputStreamReader(value, charset)));
      this.payload = jsonObjectBody.toString();
      return this;
   }

   /** Static factory entry points mirroring the instance methods. */
   public static class Builder {
      public static CreateDataObjectOptions metadata(Map<String, String> metadata) {
         CreateDataObjectOptions options = new CreateDataObjectOptions();
         return options.metadata(metadata);
      }

      public static CreateDataObjectOptions mimetype(String mimetype) {
         CreateDataObjectOptions options = new CreateDataObjectOptions();
         return options.mimetype(mimetype);
      }

      public static CreateDataObjectOptions value() {
         CreateDataObjectOptions options = new CreateDataObjectOptions();
         return options.value();
      }

      public static CreateDataObjectOptions value(String value) {
         CreateDataObjectOptions options = new CreateDataObjectOptions();
         return options.value(value);
      }

      public static CreateDataObjectOptions value(byte[] value) throws IOException {
         CreateDataObjectOptions options = new CreateDataObjectOptions();
         return options.value(value);
      }

      public static CreateDataObjectOptions value(File value) throws IOException {
         CreateDataObjectOptions options = new CreateDataObjectOptions();
         return options.value(value);
      }

      public static CreateDataObjectOptions value(File value, Charset charset) throws IOException {
         CreateDataObjectOptions options = new CreateDataObjectOptions();
         return options.value(value, charset);
      }

      public static CreateDataObjectOptions value(InputStream value) throws IOException {
         CreateDataObjectOptions options = new CreateDataObjectOptions();
         return options.value(value);
      }

      public static CreateDataObjectOptions value(InputStream value, Charset charset) throws IOException {
         CreateDataObjectOptions options = new CreateDataObjectOptions();
         return options.value(value, charset);
      }
   }
}
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. package com.azure.cosmos.implementation.directconnectivity; import com.azure.cosmos.implementation.BadRequestException; import com.azure.cosmos.BridgeInternal; import com.azure.cosmos.implementation.ConflictException; import com.azure.cosmos.CosmosException; import com.azure.cosmos.implementation.ConnectionPolicy; import com.azure.cosmos.implementation.ForbiddenException; import com.azure.cosmos.implementation.GoneException; import com.azure.cosmos.implementation.InternalServerErrorException; import com.azure.cosmos.implementation.InvalidPartitionException; import com.azure.cosmos.implementation.LockedException; import com.azure.cosmos.implementation.MethodNotAllowedException; import com.azure.cosmos.implementation.NotFoundException; import com.azure.cosmos.implementation.PartitionIsMigratingException; import com.azure.cosmos.implementation.PartitionKeyRangeGoneException; import com.azure.cosmos.implementation.PartitionKeyRangeIsSplittingException; import com.azure.cosmos.implementation.PreconditionFailedException; import com.azure.cosmos.implementation.RequestEntityTooLargeException; import com.azure.cosmos.implementation.RequestRateTooLargeException; import com.azure.cosmos.implementation.RequestTimeoutException; import com.azure.cosmos.implementation.RetryWithException; import com.azure.cosmos.implementation.ServiceUnavailableException; import com.azure.cosmos.implementation.UnauthorizedException; import com.azure.cosmos.implementation.Configs; import com.azure.cosmos.implementation.HttpConstants; import com.azure.cosmos.implementation.Integers; import com.azure.cosmos.implementation.Lists; import com.azure.cosmos.implementation.Longs; import com.azure.cosmos.implementation.MutableVolatile; import com.azure.cosmos.implementation.OperationType; import com.azure.cosmos.implementation.PathsHelper; import com.azure.cosmos.implementation.RMResources; import 
com.azure.cosmos.implementation.ResourceType; import com.azure.cosmos.implementation.RuntimeConstants; import com.azure.cosmos.implementation.RxDocumentServiceRequest; import com.azure.cosmos.implementation.Strings; import com.azure.cosmos.implementation.UserAgentContainer; import com.azure.cosmos.implementation.Utils; import com.azure.cosmos.implementation.apachecommons.lang.StringUtils; import com.azure.cosmos.implementation.http.HttpClient; import com.azure.cosmos.implementation.http.HttpClientConfig; import com.azure.cosmos.implementation.http.HttpHeaders; import com.azure.cosmos.implementation.http.HttpRequest; import com.azure.cosmos.implementation.http.HttpResponse; import io.netty.handler.codec.http.HttpMethod; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import reactor.core.publisher.Mono; import java.net.URI; import java.time.Duration; import java.time.Instant; import java.util.HashMap; import java.util.List; import java.util.Map; import static com.azure.cosmos.implementation.Utils.trimBeginningAndEndingSlashes; /* * The following code only support Document Write without any error handling support. 
*/ public class HttpTransportClient extends TransportClient { private final Logger logger = LoggerFactory.getLogger(HttpTransportClient.class); private final HttpClient httpClient; private final Map<String, String> defaultHeaders; private final Configs configs; HttpClient createHttpClient(ConnectionPolicy connectionPolicy) { // TODO: use one instance of SSL context everywhere HttpClientConfig httpClientConfig = new HttpClientConfig(this.configs); httpClientConfig.withRequestTimeout(connectionPolicy.getRequestTimeout()); httpClientConfig.withPoolSize(configs.getDirectHttpsMaxConnectionLimit()); return HttpClient.createFixed(httpClientConfig); } public HttpTransportClient(Configs configs, ConnectionPolicy connectionPolicy, UserAgentContainer userAgent) { this.configs = configs; this.httpClient = createHttpClient(connectionPolicy); this.defaultHeaders = new HashMap<>(); // Set requested API version header for version enforcement. this.defaultHeaders.put(HttpConstants.HttpHeaders.VERSION, HttpConstants.Versions.CURRENT_VERSION); this.defaultHeaders.put(HttpConstants.HttpHeaders.CACHE_CONTROL, HttpConstants.HeaderValues.NO_CACHE); if (userAgent == null) { userAgent = new UserAgentContainer(); } this.defaultHeaders.put(HttpConstants.HttpHeaders.USER_AGENT, userAgent.getUserAgent()); this.defaultHeaders.put(HttpConstants.HttpHeaders.ACCEPT, RuntimeConstants.MediaTypes.JSON); } @Override public void close() { httpClient.shutdown(); } public Mono<StoreResponse> invokeStoreAsync( Uri physicalAddressUri, RxDocumentServiceRequest request) { try { URI physicalAddress = physicalAddressUri.getURI(); ResourceOperation resourceOperation = new ResourceOperation(request.getOperationType(), request.getResourceType()); // uuid correlation manager String activityId = request.getActivityId().toString(); if (resourceOperation.operationType == OperationType.Recreate) { Map<String, String> errorResponseHeaders = new HashMap<>(); 
errorResponseHeaders.put(HttpConstants.HttpHeaders.REQUEST_VALIDATION_FAILURE, "1"); logger.error("Received Recreate request on Http client"); throw new InternalServerErrorException(RMResources.InternalServerError, null, errorResponseHeaders, null); } HttpRequest httpRequest = prepareHttpMessage(activityId, physicalAddressUri, resourceOperation, request); MutableVolatile<Instant> sendTimeUtc = new MutableVolatile<>(); Duration responseTimeout = Duration.ofSeconds(Configs.getHttpResponseTimeoutInSeconds()); if (OperationType.QueryPlan.equals(request.getOperationType())) { responseTimeout = Duration.ofSeconds(Configs.getQueryPlanResponseTimeoutInSeconds()); } else if (request.isAddressRefresh()) { responseTimeout = Duration.ofSeconds(Configs.getAddressRefreshResponseTimeoutInSeconds()); } Mono<HttpResponse> httpResponseMono = this.httpClient .send(httpRequest, responseTimeout) .doOnSubscribe(subscription -> { sendTimeUtc.v = Instant.now(); this.beforeRequest( activityId, httpRequest.uri(), request.getResourceType(), httpRequest.headers()); }) .onErrorResume(t -> { Exception exception = Utils.as(t, Exception.class); if (exception == null) { logger.error("critical failure", t); t.printStackTrace(); assert false : "critical failure"; return Mono.error(t); } //Trace.CorrelationManager.ActivityId = activityId; if (WebExceptionUtility.isWebExceptionRetriable(exception)) { logger.debug("Received retriable exception {} " + "sending the request to {}, will re-resolve the address " + "send time UTC: {}", exception, physicalAddress, sendTimeUtc); GoneException goneException = new GoneException( String.format( RMResources.ExceptionMessage, RMResources.Gone), exception, null, physicalAddress); return Mono.error(goneException); } else if (request.isReadOnlyRequest()) { logger.trace("Received exception {} on readonly request" + "sending the request to {}, will reresolve the address " + "send time UTC: {}", exception, physicalAddress, sendTimeUtc); GoneException goneException = new 
GoneException( String.format( RMResources.ExceptionMessage, RMResources.Gone), exception, null, physicalAddress); return Mono.error(goneException); } else { // We can't throw a GoneException here because it will cause retry and we don't // know if the request failed before or after the message got sent to the server. // So in order to avoid duplicating the request we will not retry. // TODO: a possible solution for this is to add the ability to send a request to the server // to check if the previous request was received or not and act accordingly. ServiceUnavailableException serviceUnavailableException = new ServiceUnavailableException( exception.getMessage(), exception, null, physicalAddress.toString()); serviceUnavailableException.getResponseHeaders().put(HttpConstants.HttpHeaders.REQUEST_VALIDATION_FAILURE, "1"); serviceUnavailableException.getResponseHeaders().put(HttpConstants.HttpHeaders.WRITE_REQUEST_TRIGGER_ADDRESS_REFRESH, "1"); return Mono.error(serviceUnavailableException); }}) .doOnSuccess(httpClientResponse -> { Instant receivedTimeUtc = Instant.now(); double durationInMilliSeconds = (receivedTimeUtc.toEpochMilli() - sendTimeUtc.v.toEpochMilli()); this.afterRequest( activityId, httpClientResponse.statusCode(), durationInMilliSeconds, httpClientResponse.headers()); }) .doOnError(e -> { Instant receivedTimeUtc = Instant.now(); double durationInMilliSeconds = (receivedTimeUtc.toEpochMilli() - sendTimeUtc.v.toEpochMilli()); this.afterRequest( activityId, 0, durationInMilliSeconds, null); }); return httpResponseMono.flatMap(rsp -> processHttpResponse(request.getResourceAddress(), httpRequest, activityId, rsp, physicalAddress)); } catch (Exception e) { return Mono.error(e); } } private void beforeRequest(String activityId, URI uri, ResourceType resourceType, HttpHeaders requestHeaders) { // TODO: perf counters // https://msdata.visualstudio.com/CosmosDB/_workitems/edit/258624 } private void afterRequest(String activityId, int statusCode, double 
durationInMilliSeconds, HttpHeaders responseHeaders) { // TODO: perf counters // https://msdata.visualstudio.com/CosmosDB/_workitems/edit/258624 } private static void addHeader(HttpHeaders requestHeaders, String headerName, RxDocumentServiceRequest request) { String headerValue = request.getHeaders().get(headerName); if (!Strings.isNullOrEmpty(headerValue)) { requestHeaders.set(headerName, headerValue); } } private static void addHeader(HttpHeaders requestHeaders, String headerName, String headerValue) { if (!Strings.isNullOrEmpty(headerValue)) { requestHeaders.set(headerName, headerValue); } } private String getMatch(RxDocumentServiceRequest request, ResourceOperation resourceOperation) { switch (resourceOperation.operationType) { case Delete: case ExecuteJavaScript: case Replace: case Update: case Upsert: return request.getHeaders().get(HttpConstants.HttpHeaders.IF_MATCH); case Read: case ReadFeed: return request.getHeaders().get(HttpConstants.HttpHeaders.IF_NONE_MATCH); default: return null; } } private HttpRequest prepareHttpMessage( String activityId, Uri physicalAddress, ResourceOperation resourceOperation, RxDocumentServiceRequest request) throws Exception { HttpRequest httpRequestMessage; String requestUri; HttpMethod method; // The StreamContent created below will own and dispose its underlying stream, but we may need to reuse the stream on the // RxDocumentServiceRequest for future requests. Hence we need to clone without incurring copy cost, so that when // HttpRequestMessage -> StreamContent -> MemoryStream all get disposed, the original stream will be left open. 
switch (resourceOperation.operationType) {
    // Create POSTs to the parent feed (collection-level) URI with a body.
    case Create:
        requestUri = getResourceFeedUri(resourceOperation.resourceType, physicalAddress.getURIAsString(), request);
        method = HttpMethod.POST;
        assert request.getContentAsByteArrayFlux() != null;
        httpRequestMessage = new HttpRequest(method, requestUri, physicalAddress.getURI().getPort());
        httpRequestMessage.withBody(request.getContentAsByteArrayFlux());
        break;

    // Stored-procedure execution POSTs to the resource's own entry URI.
    case ExecuteJavaScript:
        requestUri = getResourceEntryUri(resourceOperation.resourceType, physicalAddress.getURIAsString(), request);
        method = HttpMethod.POST;
        assert request.getContentAsByteArrayFlux() != null;
        httpRequestMessage = new HttpRequest(method, requestUri, physicalAddress.getURI().getPort());
        httpRequestMessage.withBody(request.getContentAsByteArrayFlux());
        break;

    case Delete:
        requestUri = getResourceEntryUri(resourceOperation.resourceType, physicalAddress.getURIAsString(), request);
        method = HttpMethod.DELETE;
        httpRequestMessage = new HttpRequest(method, requestUri, physicalAddress.getURI().getPort());
        break;

    case Read:
        requestUri = getResourceEntryUri(resourceOperation.resourceType, physicalAddress.getURIAsString(), request);
        method = HttpMethod.GET;
        httpRequestMessage = new HttpRequest(method, requestUri, physicalAddress.getURI().getPort());
        break;

    case ReadFeed:
        requestUri = getResourceFeedUri(resourceOperation.resourceType, physicalAddress.getURIAsString(), request);
        method = HttpMethod.GET;
        httpRequestMessage = new HttpRequest(method, requestUri, physicalAddress.getURI().getPort());
        break;

    case Replace:
        requestUri = getResourceEntryUri(resourceOperation.resourceType, physicalAddress.getURIAsString(), request);
        method = HttpMethod.PUT;
        assert request.getContentAsByteArrayFlux() != null;
        httpRequestMessage = new HttpRequest(method, requestUri, physicalAddress.getURI().getPort());
        httpRequestMessage.withBody(request.getContentAsByteArrayFlux());
        break;

    case Update:
        requestUri = getResourceEntryUri(resourceOperation.resourceType, physicalAddress.getURIAsString(), request);
        // PATCH is not one of the predefined HttpMethod constants, so it is built by name.
        method = new HttpMethod("PATCH");
        assert request.getContentAsByteArrayFlux() != null;
        httpRequestMessage = new HttpRequest(method, requestUri, physicalAddress.getURI().getPort());
        httpRequestMessage.withBody(request.getContentAsByteArrayFlux());
        break;

    // Queries POST the query body to the feed URI; the caller-supplied Content-Type is
    // forwarded explicitly (the only case that does so here).
    case Query:
    case SqlQuery:
        requestUri = getResourceFeedUri(resourceOperation.resourceType, physicalAddress.getURIAsString(), request);
        method = HttpMethod.POST;
        assert request.getContentAsByteArrayFlux() != null;
        httpRequestMessage = new HttpRequest(method, requestUri, physicalAddress.getURI().getPort());
        httpRequestMessage.withBody(request.getContentAsByteArrayFlux());
        HttpTransportClient.addHeader(httpRequestMessage.headers(), HttpConstants.HttpHeaders.CONTENT_TYPE, request);
        break;

    case Upsert:
        requestUri = getResourceFeedUri(resourceOperation.resourceType, physicalAddress.getURIAsString(), request);
        method = HttpMethod.POST;
        assert request.getContentAsByteArrayFlux() != null;
        httpRequestMessage = new HttpRequest(method, requestUri, physicalAddress.getURI().getPort());
        httpRequestMessage.withBody(request.getContentAsByteArrayFlux());
        break;

    case Head:
        requestUri = getResourceEntryUri(resourceOperation.resourceType, physicalAddress.getURIAsString(), request);
        method = HttpMethod.HEAD;
        httpRequestMessage = new HttpRequest(method, requestUri, physicalAddress.getURI().getPort());
        break;

    case HeadFeed:
        requestUri = getResourceFeedUri(resourceOperation.resourceType, physicalAddress.getURIAsString(), request);
        method = HttpMethod.HEAD;
        httpRequestMessage = new HttpRequest(method, requestUri, physicalAddress.getURI().getPort());
        break;

    default:
        assert false : "Unsupported operation type";
        throw new IllegalStateException();
}

Map<String, String> documentServiceRequestHeaders = request.getHeaders();
HttpHeaders httpRequestHeaders = httpRequestMessage.headers();

// add default headers
for (Map.Entry<String, String> entry : defaultHeaders.entrySet()) {
    HttpTransportClient.addHeader(httpRequestHeaders, entry.getKey(), entry.getValue());
}

// Per-request headers forwarded from the service request; addHeader skips absent values.
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.VERSION, request);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.USER_AGENT, request);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.PAGE_SIZE, request);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.PRE_TRIGGER_INCLUDE, request);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.PRE_TRIGGER_EXCLUDE, request);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.POST_TRIGGER_INCLUDE, request);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.POST_TRIGGER_EXCLUDE, request);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.AUTHORIZATION, request);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.INDEXING_DIRECTIVE, request);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.MIGRATE_COLLECTION_DIRECTIVE, request);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.CONSISTENCY_LEVEL, request);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.SESSION_TOKEN, request);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.PREFER, request);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.RESOURCE_TOKEN_EXPIRY, request);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.ENABLE_SCAN_IN_QUERY, request);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.EMIT_VERBOSE_TRACES_IN_QUERY, request);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.CAN_CHARGE, request);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.CAN_THROTTLE, request);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.ENABLE_LOW_PRECISION_ORDER_BY, request);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.ENABLE_LOGGING, request);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.IS_READ_ONLY_SCRIPT, request);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.CONTENT_SERIALIZATION_FORMAT, request);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.CONTINUATION, request.getContinuation());
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.ACTIVITY_ID, activityId);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.PARTITION_KEY, request);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.PARTITION_KEY_RANGE_ID, request);

String dateHeader = HttpUtils.getDateHeader(documentServiceRequestHeaders);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.X_DATE, dateHeader);
// "Match" resolves to the If-Match or If-None-Match value per operation (see getMatch).
HttpTransportClient.addHeader(httpRequestHeaders, "Match", this.getMatch(request, resourceOperation));
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.IF_MODIFIED_SINCE, request);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.A_IM, request);
// Only RID-routed (non name-based) requests carry the backend resource id.
if (!request.getIsNameBased()) {
    HttpTransportClient.addHeader(httpRequestHeaders, WFConstants.BackendHeaders.RESOURCE_ID, request.getResourceId());
}
HttpTransportClient.addHeader(httpRequestHeaders, WFConstants.BackendHeaders.ENTITY_ID, request.entityId);

String fanoutRequestHeader = request.getHeaders().get(WFConstants.BackendHeaders.IS_FANOUT_REQUEST);
HttpTransportClient.addHeader(httpRequestMessage.headers(), WFConstants.BackendHeaders.IS_FANOUT_REQUEST, fanoutRequestHeader);

// Collection requests additionally forward partition/service placement indexes.
if (request.getResourceType() == ResourceType.DocumentCollection) {
    HttpTransportClient.addHeader(httpRequestHeaders, WFConstants.BackendHeaders.COLLECTION_PARTITION_INDEX, documentServiceRequestHeaders.get(WFConstants.BackendHeaders.COLLECTION_PARTITION_INDEX));
    HttpTransportClient.addHeader(httpRequestHeaders, WFConstants.BackendHeaders.COLLECTION_SERVICE_INDEX, documentServiceRequestHeaders.get(WFConstants.BackendHeaders.COLLECTION_SERVICE_INDEX));
}

// The bind-replica directive travels with its full key set when present.
if (documentServiceRequestHeaders.get(WFConstants.BackendHeaders.BIND_REPLICA_DIRECTIVE) != null) {
    HttpTransportClient.addHeader(httpRequestHeaders, WFConstants.BackendHeaders.BIND_REPLICA_DIRECTIVE, documentServiceRequestHeaders.get(WFConstants.BackendHeaders.BIND_REPLICA_DIRECTIVE));
    HttpTransportClient.addHeader(httpRequestHeaders, WFConstants.BackendHeaders.PRIMARY_MASTER_KEY, documentServiceRequestHeaders.get(WFConstants.BackendHeaders.PRIMARY_MASTER_KEY));
    HttpTransportClient.addHeader(httpRequestHeaders, WFConstants.BackendHeaders.SECONDARY_MASTER_KEY, documentServiceRequestHeaders.get(WFConstants.BackendHeaders.SECONDARY_MASTER_KEY));
    HttpTransportClient.addHeader(httpRequestHeaders, WFConstants.BackendHeaders.PRIMARY_READONLY_KEY, documentServiceRequestHeaders.get(WFConstants.BackendHeaders.PRIMARY_READONLY_KEY));
    HttpTransportClient.addHeader(httpRequestHeaders, WFConstants.BackendHeaders.SECONDARY_READONLY_KEY, documentServiceRequestHeaders.get(WFConstants.BackendHeaders.SECONDARY_READONLY_KEY));
}

if (documentServiceRequestHeaders.get(HttpConstants.HttpHeaders.CAN_OFFER_REPLACE_COMPLETE) != null) {
    HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.CAN_OFFER_REPLACE_COMPLETE, documentServiceRequestHeaders.get(HttpConstants.HttpHeaders.CAN_OFFER_REPLACE_COMPLETE));
}

//Query
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.IS_QUERY, request);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.QUERY, request);

// Upsert
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.IS_UPSERT, request);

// SupportSpatialLegacyCoordinates
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.SUPPORT_SPATIAL_LEGACY_COORDINATES, request);
HttpTransportClient.addHeader(httpRequestHeaders, WFConstants.BackendHeaders.PARTITION_COUNT, request);
HttpTransportClient.addHeader(httpRequestHeaders, WFConstants.BackendHeaders.COLLECTION_RID, request);

// Filter by schema
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.FILTER_BY_SCHEMA_RESOURCE_ID, request);

// UsePolygonsSmallerThanAHemisphere
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.USE_POLYGONS_SMALLER_THAN_AHEMISPHERE, request);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.GATEWAY_SIGNATURE, request);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.POPULATE_QUOTA_INFO, request);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.POPULATE_QUERY_METRICS, request);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.FORCE_QUERY_SCAN, request);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.RESPONSE_CONTINUATION_TOKEN_LIMIT_IN_KB, request);
HttpTransportClient.addHeader(httpRequestHeaders, WFConstants.BackendHeaders.REMOTE_STORAGE_TYPE, request);
HttpTransportClient.addHeader(httpRequestHeaders, WFConstants.BackendHeaders.SHARE_THROUGHPUT, request);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.POPULATE_PARTITION_STATISTICS, request);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.POPULATE_COLLECTION_THROUGHPUT_INFO, request);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.REMAINING_TIME_IN_MS_ON_CLIENT_REQUEST, request);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.CLIENT_RETRY_ATTEMPT_COUNT, request);

// target lsn for head requests.
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.TARGET_LSN, request);
HttpTransportClient.addHeader(httpRequestHeaders, HttpConstants.HttpHeaders.TARGET_GLOBAL_COMMITTED_LSN, request);
HttpTransportClient.addHeader(httpRequestHeaders, WFConstants.BackendHeaders.FEDERATION_ID_FOR_AUTH, request);
HttpTransportClient.addHeader(httpRequestHeaders, WFConstants.BackendHeaders.FANOUT_OPERATION_STATE, request);
HttpTransportClient.addHeader(httpRequestHeaders, WFConstants.BackendHeaders.ALLOW_TENTATIVE_WRITES, request);
HttpTransportClient.addHeader(httpRequestHeaders, CustomHeaders.HttpHeaders.EXCLUDE_SYSTEM_PROPERTIES, request);
return httpRequestMessage;
}

/**
 * Maps a resource type to its feed (collection-level) URI on the given replica address.
 * Database feeds are rooted at the address itself, so no request is needed for that case.
 */
static String getResourceFeedUri(ResourceType resourceType, String physicalAddress, RxDocumentServiceRequest request) throws Exception {
    switch (resourceType) {
        case Attachment:
            return getAttachmentFeedUri(physicalAddress, request);
        case DocumentCollection:
            return getCollectionFeedUri(physicalAddress, request);
        case Conflict:
            return getConflictFeedUri(physicalAddress, request);
        case Database:
            return getDatabaseFeedUri(physicalAddress);
        case Document:
            return getDocumentFeedUri(physicalAddress, request);
        case Permission:
            return getPermissionFeedUri(physicalAddress, request);
        case StoredProcedure:
            return getStoredProcedureFeedUri(physicalAddress, request);
        case Trigger:
            return getTriggerFeedUri(physicalAddress, request);
        case User:
            return getUserFeedUri(physicalAddress, request);
        case UserDefinedFunction:
            return getUserDefinedFunctionFeedUri(physicalAddress, request);
        case Schema:
            return getSchemaFeedUri(physicalAddress, request);
        case Offer:
            return getOfferFeedUri(physicalAddress, request);
        // Other types: Replica, Module, ModuleCommand, Record, UserDefinedType not applicable to SDK.
        default:
            assert false : "Unexpected resource type: " + resourceType;
            // NOTE(review): this feed-URI lookup throws NotFoundException for unknown types,
            // while getResourceEntryUri throws IllegalStateException — confirm whether the
            // asymmetry is intentional.
            throw new NotFoundException();
    }
}

/**
 * Maps a resource type to the entry (single-resource) URI on the given replica address.
 */
private static String getResourceEntryUri(ResourceType resourceType, String physicalAddress, RxDocumentServiceRequest request) throws Exception {
    switch (resourceType) {
        case Attachment:
            return getAttachmentEntryUri(physicalAddress, request);
        case DocumentCollection:
            return getCollectionEntryUri(physicalAddress, request);
        case Conflict:
            return getConflictEntryUri(physicalAddress, request);
        case Database:
            return getDatabaseEntryUri(physicalAddress, request);
        case Document:
            return getDocumentEntryUri(physicalAddress, request);
        case Permission:
            return getPermissionEntryUri(physicalAddress, request);
        case StoredProcedure:
            return getStoredProcedureEntryUri(physicalAddress, request);
        case Trigger:
            return getTriggerEntryUri(physicalAddress, request);
        case User:
            return getUserEntryUri(physicalAddress, request);
        case UserDefinedFunction:
            return getUserDefinedFunctionEntryUri(physicalAddress, request);
        case Schema:
            return getSchemaEntryUri(physicalAddress, request);
        case Offer:
            return getOfferEntryUri(physicalAddress, request);
        // Other types: Replica, Module, ModuleCommand, Record, UserDefinedType not applicable to SDK.
default:
            assert false: "Unexpected resource type: " + resourceType;
            throw new IllegalStateException();
    }
}

/**
 * Joins the replica base address and the URL-encoded resource path with exactly one '/'.
 * NOTE(review): assumes baseAddress is non-empty — charAt(length - 1) would throw on "".
 */
static String createURI(String baseAddress, String resourcePath) {
    if (baseAddress.charAt(baseAddress.length() - 1) == '/') {
        return baseAddress + HttpUtils.urlEncode(trimBeginningAndEndingSlashes(resourcePath));
    } else {
        return baseAddress + '/' + HttpUtils.urlEncode(trimBeginningAndEndingSlashes(resourcePath));
    }
}

// The root feed is the bare replica address itself.
static String getRootFeedUri(String baseAddress) {
    return baseAddress;
}

// Each helper below delegates to PathsHelper.generatePath; the trailing boolean selects
// a feed path (true) versus a single-entry path (false) for the resource type.
private static String getDatabaseFeedUri(String baseAddress) {
    return createURI(baseAddress, PathsHelper.generatePath(ResourceType.Database, StringUtils.EMPTY, true));
}

private static String getDatabaseEntryUri(String baseAddress, RxDocumentServiceRequest request) {
    return createURI(baseAddress, PathsHelper.generatePath(ResourceType.Database, request, false));
}

private static String getCollectionFeedUri(String baseAddress, RxDocumentServiceRequest request) {
    return createURI(baseAddress, PathsHelper.generatePath(ResourceType.DocumentCollection, request, true));
}

private static String getStoredProcedureFeedUri(String baseAddress, RxDocumentServiceRequest request) {
    return createURI(baseAddress, PathsHelper.generatePath(ResourceType.StoredProcedure, request, true));
}

private static String getTriggerFeedUri(String baseAddress, RxDocumentServiceRequest request) {
    return createURI(baseAddress, PathsHelper.generatePath(ResourceType.Trigger, request, true));
}

private static String getUserDefinedFunctionFeedUri(String baseAddress, RxDocumentServiceRequest request) {
    return createURI(baseAddress, PathsHelper.generatePath(ResourceType.UserDefinedFunction, request, true));
}

private static String getCollectionEntryUri(String baseAddress, RxDocumentServiceRequest request) {
    return createURI(baseAddress, PathsHelper.generatePath(ResourceType.DocumentCollection, request, false));
}

private static String getStoredProcedureEntryUri(String baseAddress, RxDocumentServiceRequest request) {
    return createURI(baseAddress, PathsHelper.generatePath(ResourceType.StoredProcedure, request, false));
}

private static String getTriggerEntryUri(String baseAddress, RxDocumentServiceRequest request) {
    return createURI(baseAddress, PathsHelper.generatePath(ResourceType.Trigger, request, false));
}

private static String getUserDefinedFunctionEntryUri(String baseAddress, RxDocumentServiceRequest request) {
    return createURI(baseAddress, PathsHelper.generatePath(ResourceType.UserDefinedFunction, request, false));
}

private static String getDocumentFeedUri(String baseAddress, RxDocumentServiceRequest request) {
    return createURI(baseAddress, PathsHelper.generatePath(ResourceType.Document, request, true));
}

private static String getDocumentEntryUri(String baseAddress, RxDocumentServiceRequest request) {
    return createURI(baseAddress, PathsHelper.generatePath(ResourceType.Document, request, false));
}

private static String getConflictFeedUri(String baseAddress, RxDocumentServiceRequest request) {
    return createURI(baseAddress, PathsHelper.generatePath(ResourceType.Conflict, request, true));
}

private static String getConflictEntryUri(String baseAddress, RxDocumentServiceRequest request) {
    return createURI(baseAddress, PathsHelper.generatePath(ResourceType.Conflict, request, false));
}

private static String getAttachmentFeedUri(String baseAddress, RxDocumentServiceRequest request) {
    return createURI(baseAddress, PathsHelper.generatePath(ResourceType.Attachment, request, true));
}

private static String getAttachmentEntryUri(String baseAddress, RxDocumentServiceRequest request) {
    return createURI(baseAddress, PathsHelper.generatePath(ResourceType.Attachment, request, false));
}

private static String getUserFeedUri(String baseAddress, RxDocumentServiceRequest request) {
    return createURI(baseAddress, PathsHelper.generatePath(ResourceType.User, request, true));
}

private static String getUserEntryUri(String baseAddress, RxDocumentServiceRequest request) {
    return createURI(baseAddress, PathsHelper.generatePath(ResourceType.User, request, false));
}

private static String getPermissionFeedUri(String baseAddress, RxDocumentServiceRequest request) {
    return createURI(baseAddress, PathsHelper.generatePath(ResourceType.Permission, request, true));
}

private static String getPermissionEntryUri(String baseAddress, RxDocumentServiceRequest request) {
    return createURI(baseAddress, PathsHelper.generatePath(ResourceType.Permission, request, false));
}

private static String getOfferFeedUri(String baseAddress, RxDocumentServiceRequest request) {
    return createURI(baseAddress, PathsHelper.generatePath(ResourceType.Offer, request, true));
}

private static String getSchemaFeedUri(String baseAddress, RxDocumentServiceRequest request) {
    return createURI(baseAddress, PathsHelper.generatePath(ResourceType.Schema, request, true));
}

private static String getSchemaEntryUri(String baseAddress, RxDocumentServiceRequest request) {
    return createURI(baseAddress, PathsHelper.generatePath(ResourceType.Schema, request, false));
}

private static String getOfferEntryUri(String baseAddress, RxDocumentServiceRequest request) {
    return createURI(baseAddress, PathsHelper.generatePath(ResourceType.Offer, request, false));
}

/**
 * Linear lookup of a header value over parallel name/value arrays; null when absent.
 * NOTE(review): match semantics follow Strings.areEqual — confirm case sensitivity.
 */
static String getHeader(String[] names, String[] values, String name) {
    for (int idx = 0; idx < names.length; idx++) {
        if (Strings.areEqual(names[idx], name)) {
            return values[idx];
        }
    }
    return null;
}

/**
 * Converts the raw HTTP response into a {@link StoreResponse}, or into the appropriate
 * error Mono. A null response is treated as an invalid backend response (500) and is
 * flagged with the request-validation-failure header.
 */
private Mono<StoreResponse> processHttpResponse(String resourceAddress, HttpRequest httpRequest, String activityId, HttpResponse response, URI physicalAddress) {
    if (response == null) {
        InternalServerErrorException exception =
            new InternalServerErrorException(
                String.format(
                    RMResources.ExceptionMessage,
                    RMResources.InvalidBackendResponse),
                null,
                physicalAddress);
        exception.getResponseHeaders().put(HttpConstants.HttpHeaders.ACTIVITY_ID,
            activityId);
        exception.getResponseHeaders().put(HttpConstants.HttpHeaders.REQUEST_VALIDATION_FAILURE, "1");
return Mono.error(exception);
    }

    // If the status code is < 300 or 304 NotModified (we treat not modified as success)
    // then it means that it's a success code and shouldn't throw.
    if (response.statusCode() < HttpConstants.StatusCodes.MINIMUM_STATUSCODE_AS_ERROR_GATEWAY
        || response.statusCode() == HttpConstants.StatusCodes.NOT_MODIFIED) {
        return ResponseUtils.toStoreResponse(response, httpRequest);
    } else {
        return this.createErrorResponseFromHttpResponse(resourceAddress, activityId, httpRequest, response);
    }
}

/**
 * Maps an error HTTP response to the matching CosmosException subtype, carrying over
 * the backend LSN, partition key range id, resource address and request headers so
 * upstream retry policies can act on them.
 */
private Mono<StoreResponse> createErrorResponseFromHttpResponse(String resourceAddress, String activityId,
                                                                HttpRequest request,
                                                                HttpResponse response) {
    int statusCode = response.statusCode();
    Mono<String> errorMessageObs = ErrorUtils.getErrorResponseAsync(response, request);

    return errorMessageObs.flatMap(
        errorMessage -> {
            // Extract the backend LSN (first value wins) when the header is present.
            long responseLSN = -1;
            List<String> lsnValues = null;
            String[] headerValues = response.headers().values(WFConstants.BackendHeaders.LSN);
            if (headerValues != null) {
                lsnValues = com.azure.cosmos.implementation.guava25.collect.Lists.newArrayList(headerValues);
            }

            if (lsnValues != null) {
                String temp = lsnValues.isEmpty() ? null : lsnValues.get(0);
                responseLSN = Longs.tryParse(temp, responseLSN);
            }

            String responsePartitionKeyRangeId = null;
            List<String> partitionKeyRangeIdValues = null;
            headerValues = response.headers().values(WFConstants.BackendHeaders.PARTITION_KEY_RANGE_ID);
            if (headerValues != null) {
                partitionKeyRangeIdValues = com.azure.cosmos.implementation.guava25.collect.Lists.newArrayList(headerValues);
            }
            if (partitionKeyRangeIdValues != null) {
                responsePartitionKeyRangeId = Lists.firstOrDefault(partitionKeyRangeIdValues, null);
            }

            CosmosException exception;

            // One branch per backend status code; each builds the typed exception with the
            // localized message (falling back to the raw errorMessage when available).
            switch (statusCode) {
                case HttpConstants.StatusCodes.UNAUTHORIZED:
                    exception = new UnauthorizedException(
                        String.format(
                            RMResources.ExceptionMessage,
                            Strings.isNullOrEmpty(errorMessage) ? RMResources.Unauthorized : errorMessage),
                        response.headers(),
                        request.uri());
                    break;

                case HttpConstants.StatusCodes.FORBIDDEN:
                    exception = new ForbiddenException(
                        String.format(
                            RMResources.ExceptionMessage,
                            Strings.isNullOrEmpty(errorMessage) ? RMResources.Forbidden : errorMessage),
                        response.headers(),
                        request.uri());
                    break;

                case HttpConstants.StatusCodes.NOTFOUND:
                    // HTTP.SYS returns NotFound (404) if the URI
                    // is not registered. This is really an indication that
                    // the replica which registered the URI is not
                    // available at the server. We detect this case by
                    // the presence of Content-Type header in the response
                    // and map it to HTTP Gone (410), which is the more
                    // appropriate response for this case.
                    if (response.body() != null
                        && response.headers() != null
                        && response.headers().value(HttpConstants.HttpHeaders.CONTENT_TYPE) != null
                        && !Strings.isNullOrEmpty(response.headers().value(HttpConstants.HttpHeaders.CONTENT_TYPE))
                        && Strings.containsIgnoreCase(response.headers().value(HttpConstants.HttpHeaders.CONTENT_TYPE),
                            RuntimeConstants.MediaTypes.TEXT_HTML)) {
                        // Have the request URL in the exception message for debugging purposes.
                        exception = new GoneException(
                            String.format(
                                RMResources.ExceptionMessage,
                                RMResources.Gone),
                            request.uri().toString());
                        exception.getResponseHeaders().put(HttpConstants.HttpHeaders.ACTIVITY_ID,
                            activityId);

                        break;
                    } else {
                        exception = new NotFoundException(
                            String.format(
                                RMResources.ExceptionMessage,
                                Strings.isNullOrEmpty(errorMessage) ? RMResources.NotFound : errorMessage),
                            response.headers(),
                            request.uri());
                        break;
                    }

                case HttpConstants.StatusCodes.BADREQUEST:
                    exception = new BadRequestException(
                        String.format(
                            RMResources.ExceptionMessage,
                            Strings.isNullOrEmpty(errorMessage) ? RMResources.BadRequest : errorMessage),
                        response.headers(),
                        request.uri());
                    break;

                case HttpConstants.StatusCodes.METHOD_NOT_ALLOWED:
                    exception = new MethodNotAllowedException(
                        String.format(
                            RMResources.ExceptionMessage,
                            Strings.isNullOrEmpty(errorMessage) ? RMResources.MethodNotAllowed : errorMessage),
                        null,
                        response.headers(),
                        request.uri().toString());
                    break;

                case HttpConstants.StatusCodes.GONE: {
                    // TODO: update perf counter
                    // https://msdata.visualstudio.com/CosmosDB/_workitems/edit/258624
                    ErrorUtils.logGoneException(request.uri(), activityId);

                    // The sub-status refines 410 into the specific partition-topology
                    // condition; an unparseable sub-status is an invalid backend response.
                    Integer nSubStatus = 0;
                    String valueSubStatus = response.headers().value(WFConstants.BackendHeaders.SUB_STATUS);
                    if (!Strings.isNullOrEmpty(valueSubStatus)) {
                        if ((nSubStatus = Integers.tryParse(valueSubStatus)) == null) {
                            exception = new InternalServerErrorException(
                                String.format(
                                    RMResources.ExceptionMessage,
                                    RMResources.InvalidBackendResponse),
                                response.headers(),
                                request.uri());
                            break;
                        }
                    }

                    if (nSubStatus == HttpConstants.SubStatusCodes.NAME_CACHE_IS_STALE) {
                        exception = new InvalidPartitionException(
                            String.format(
                                RMResources.ExceptionMessage,
                                Strings.isNullOrEmpty(errorMessage) ? RMResources.Gone : errorMessage),
                            response.headers(),
                            request.uri().toString());
                        break;
                    } else if (nSubStatus == HttpConstants.SubStatusCodes.PARTITION_KEY_RANGE_GONE) {
                        exception = new PartitionKeyRangeGoneException(
                            String.format(
                                RMResources.ExceptionMessage,
                                Strings.isNullOrEmpty(errorMessage) ? RMResources.Gone : errorMessage),
                            response.headers(),
                            request.uri().toString());
                        break;
                    } else if (nSubStatus == HttpConstants.SubStatusCodes.COMPLETING_SPLIT) {
                        exception = new PartitionKeyRangeIsSplittingException(
                            String.format(
                                RMResources.ExceptionMessage,
                                Strings.isNullOrEmpty(errorMessage) ? RMResources.Gone : errorMessage),
                            response.headers(),
                            request.uri().toString());
                        break;
                    } else if (nSubStatus == HttpConstants.SubStatusCodes.COMPLETING_PARTITION_MIGRATION) {
                        exception = new PartitionIsMigratingException(
                            String.format(
                                RMResources.ExceptionMessage,
                                Strings.isNullOrEmpty(errorMessage) ? RMResources.Gone : errorMessage),
                            response.headers(),
                            request.uri().toString());
                        break;
                    } else {
                        // Have the request URL in the exception message for debugging purposes.
                        exception = new GoneException(
                            String.format(
                                RMResources.ExceptionMessage,
                                RMResources.Gone),
                            response.headers(),
                            request.uri());

                        exception.getResponseHeaders().put(HttpConstants.HttpHeaders.ACTIVITY_ID,
                            activityId);
                        break;
                    }
                }

                case HttpConstants.StatusCodes.CONFLICT:
                    exception = new ConflictException(
                        String.format(
                            RMResources.ExceptionMessage,
                            Strings.isNullOrEmpty(errorMessage) ? RMResources.EntityAlreadyExists : errorMessage),
                        response.headers(),
                        request.uri().toString());
                    break;

                case HttpConstants.StatusCodes.PRECONDITION_FAILED:
                    exception = new PreconditionFailedException(
                        String.format(
                            RMResources.ExceptionMessage,
                            Strings.isNullOrEmpty(errorMessage) ? RMResources.PreconditionFailed : errorMessage),
                        response.headers(),
                        request.uri().toString());
                    break;

                case HttpConstants.StatusCodes.REQUEST_ENTITY_TOO_LARGE:
                    exception = new RequestEntityTooLargeException(
                        String.format(
                            RMResources.ExceptionMessage,
                            String.format(
                                RMResources.RequestEntityTooLarge,
                                HttpConstants.HttpHeaders.PAGE_SIZE)),
                        response.headers(),
                        request.uri().toString());
                    break;

                case HttpConstants.StatusCodes.LOCKED:
                    exception = new LockedException(
                        String.format(
                            RMResources.ExceptionMessage,
                            Strings.isNullOrEmpty(errorMessage) ? RMResources.Locked : errorMessage),
                        response.headers(),
                        request.uri().toString());
                    break;

                case HttpConstants.StatusCodes.SERVICE_UNAVAILABLE:
                    exception = new ServiceUnavailableException(errorMessage, response.headers(), request.uri());
                    break;

                case HttpConstants.StatusCodes.REQUEST_TIMEOUT:
                    exception = new RequestTimeoutException(
                        String.format(
                            RMResources.ExceptionMessage,
                            Strings.isNullOrEmpty(errorMessage) ? RMResources.RequestTimeout : errorMessage),
                        response.headers(),
                        request.uri());
                    break;

                case HttpConstants.StatusCodes.RETRY_WITH:
                    exception = new RetryWithException(
                        String.format(
                            RMResources.ExceptionMessage,
                            Strings.isNullOrEmpty(errorMessage) ? RMResources.RetryWith : errorMessage),
                        response.headers(),
                        request.uri());
                    break;

                case HttpConstants.StatusCodes.TOO_MANY_REQUESTS:
                    exception = new RequestRateTooLargeException(
                        String.format(
                            RMResources.ExceptionMessage,
                            Strings.isNullOrEmpty(errorMessage) ? RMResources.TooManyRequests : errorMessage),
                        response.headers(),
                        request.uri());

                    // Propagate the backend's retry-after hint when it was supplied.
                    List<String> values = null;
                    headerValues = response.headers().values(HttpConstants.HttpHeaders.RETRY_AFTER_IN_MILLISECONDS);
                    if (headerValues != null) {
                        values = com.azure.cosmos.implementation.guava25.collect.Lists.newArrayList(headerValues);
                    }
                    if (values == null || values.isEmpty()) {
                        logger.warn("RequestRateTooLargeException being thrown without RetryAfter.");
                    } else {
                        exception.getResponseHeaders().put(HttpConstants.HttpHeaders.RETRY_AFTER_IN_MILLISECONDS, values.get(0));
                    }

                    break;

                case HttpConstants.StatusCodes.INTERNAL_SERVER_ERROR:
                    exception = new InternalServerErrorException(
                        String.format(
                            RMResources.ExceptionMessage,
                            Strings.isNullOrEmpty(errorMessage) ? RMResources.InternalServerError : errorMessage),
                        response.headers(),
                        request.uri());
                    break;

                default:
                    logger.error("Unrecognized status code {} returned by backend. ActivityId {}", statusCode, activityId);
                    ErrorUtils.logException(request.uri(), activityId);
                    exception = new InternalServerErrorException(
                        String.format(
                            RMResources.ExceptionMessage,
                            RMResources.InvalidBackendResponse),
                        response.headers(),
                        request.uri());
                    break;
            }

            // Attach the diagnostics shared by all error types before surfacing the error.
            BridgeInternal.setLSN(exception, responseLSN);
            BridgeInternal.setPartitionKeyRangeId(exception, responsePartitionKeyRangeId);
            BridgeInternal.setResourceAddress(exception, resourceAddress);
            BridgeInternal.setRequestHeaders(exception, HttpUtils.asMap(request.headers()));

            return Mono.error(exception);
        }
    );
}
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.validate;

import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.query.MoreLikeThisQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
import org.hamcrest.Matcher;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.ISODateTimeFormat;

import java.io.IOException;
import java.nio.charset.StandardCharsets;

import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;

/**
 * Integration tests for the {@code _validate/query} API: checks that syntactically
 * valid/invalid queries are reported as such, and that {@code explain}/{@code rewrite}
 * produce the expected Lucene-level query explanations.
 */
@ClusterScope(randomDynamicTemplates = false, scope = Scope.SUITE)
public class SimpleValidateQueryIT extends ESIntegTestCase {

    // Basic valid/invalid verdicts without explain: malformed wrapper bytes and
    // unparsable query_string syntax are invalid; queries on unmapped fields are
    // still considered valid (the field simply matches nothing).
    public void testSimpleValidateQuery() throws Exception {
        createIndex("test");
        ensureGreen();
        client().admin().indices().preparePutMapping("test").setType("type1")
                .setSource(XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
                        .startObject("foo").field("type", "text").endObject()
                        .startObject("bar").field("type", "integer").endObject()
                        .endObject().endObject().endObject())
                .execute().actionGet();
        refresh();

        assertThat(client().admin().indices().prepareValidateQuery("test").setQuery(QueryBuilders.wrapperQuery("foo".getBytes(StandardCharsets.UTF_8))).execute().actionGet().isValid(), equalTo(false));
        assertThat(client().admin().indices().prepareValidateQuery("test").setQuery(QueryBuilders.queryStringQuery("_id:1")).execute().actionGet().isValid(), equalTo(true));
        assertThat(client().admin().indices().prepareValidateQuery("test").setQuery(QueryBuilders.queryStringQuery("_i:d:1")).execute().actionGet().isValid(), equalTo(false));
        assertThat(client().admin().indices().prepareValidateQuery("test").setQuery(QueryBuilders.queryStringQuery("foo:1")).execute().actionGet().isValid(), equalTo(true));
        assertThat(client().admin().indices().prepareValidateQuery("test").setQuery(QueryBuilders.queryStringQuery("bar:hey")).execute().actionGet().isValid(), equalTo(false));
        assertThat(client().admin().indices().prepareValidateQuery("test").setQuery(QueryBuilders.queryStringQuery("nonexistent:hello")).execute().actionGet().isValid(), equalTo(true));
        assertThat(client().admin().indices().prepareValidateQuery("test").setQuery(QueryBuilders.queryStringQuery("foo:1 AND")).execute().actionGet().isValid(), equalTo(false));
    }

    // Same verdicts with explain=true, issued from every node's client so the
    // explanation is consistent regardless of which node coordinates the request.
    public void testExplainValidateQueryTwoNodes() throws IOException {
        createIndex("test");
        ensureGreen();
        client().admin().indices().preparePutMapping("test").setType("type1")
                .setSource(XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
                        .startObject("foo").field("type", "text").endObject()
                        .startObject("bar").field("type", "integer").endObject()
                        .startObject("baz").field("type", "text").field("analyzer", "snowball").endObject()
                        .startObject("pin").startObject("properties").startObject("location").field("type", "geo_point").endObject().endObject().endObject()
                        .endObject().endObject().endObject())
                .execute().actionGet();
        refresh();

        for (Client client : internalCluster().getClients()) {
            ValidateQueryResponse response = client.admin().indices().prepareValidateQuery("test")
                    .setQuery(QueryBuilders.wrapperQuery("foo".getBytes(StandardCharsets.UTF_8)))
                    .setExplain(true)
                    .execute().actionGet();
            assertThat(response.isValid(), equalTo(false));
            assertThat(response.getQueryExplanation().size(), equalTo(1));
            assertThat(response.getQueryExplanation().get(0).getError(), containsString("Failed to derive xcontent"));
            assertThat(response.getQueryExplanation().get(0).getExplanation(), nullValue());
        }

        for (Client client : internalCluster().getClients()) {
            ValidateQueryResponse response = client.admin().indices().prepareValidateQuery("test")
                    .setQuery(QueryBuilders.queryStringQuery("foo"))
                    .setExplain(true)
                    .execute().actionGet();
            assertThat(response.isValid(), equalTo(true));
            assertThat(response.getQueryExplanation().size(), equalTo(1));
            assertThat(response.getQueryExplanation().get(0).getExplanation(), equalTo("_all:foo"));
            assertThat(response.getQueryExplanation().get(0).getError(), nullValue());
        }
    }

    // Issue #3629: with rewrite=true, date-math bounds in a query_string range must be
    // resolved to concrete epoch-millis in the explanation.
    public void testExplainDateRangeInQueryString() {
        assertAcked(prepareCreate("test").setSettings(Settings.settingsBuilder()
                .put(indexSettings())
                .put("index.number_of_shards", 1)));

        String aMonthAgo = ISODateTimeFormat.yearMonthDay().print(new DateTime(DateTimeZone.UTC).minusMonths(1));
        String aMonthFromNow = ISODateTimeFormat.yearMonthDay().print(new DateTime(DateTimeZone.UTC).plusMonths(1));

        client().prepareIndex("test", "type", "1").setSource("past", aMonthAgo, "future", aMonthFromNow).get();

        refresh();

        ValidateQueryResponse response = client().admin().indices().prepareValidateQuery()
                .setQuery(queryStringQuery("past:[now-2M/d TO now/d]")).setRewrite(true).get();

        assertNoFailures(response);
        assertThat(response.getQueryExplanation().size(), equalTo(1));
        assertThat(response.getQueryExplanation().get(0).getError(), nullValue());
        // Expected bounds: now-2M/d = start of day two months ago; now/d upper bound is
        // the last millisecond before tomorrow's start of day (inclusive range).
        DateTime twoMonthsAgo = new DateTime(DateTimeZone.UTC).minusMonths(2).withTimeAtStartOfDay();
        DateTime now = new DateTime(DateTimeZone.UTC).plusDays(1).withTimeAtStartOfDay().minusMillis(1);
        assertThat(response.getQueryExplanation().get(0).getExplanation(),
                equalTo("past:[" + twoMonthsAgo.getMillis() + " TO " + now.getMillis() + "]"));
        assertThat(response.isValid(), equalTo(true));
    }

    // Validating with no indices present must fail with IndexNotFoundException.
    public void testValidateEmptyCluster() {
        try {
            client().admin().indices().prepareValidateQuery().get();
            fail("Expected IndexNotFoundException");
        } catch (IndexNotFoundException e) {
            assertThat(e.getMessage(), is("no such index"));
        }
    }

    // No query at all defaults to match_all, which explains as "*:*".
    public void testExplainNoQuery() {
        createIndex("test");
        ensureGreen();

        ValidateQueryResponse validateQueryResponse = client().admin().indices().prepareValidateQuery().setExplain(true).get();
        assertThat(validateQueryResponse.isValid(), equalTo(true));
        assertThat(validateQueryResponse.getQueryExplanation().size(), equalTo(1));
        assertThat(validateQueryResponse.getQueryExplanation().get(0).getIndex(), equalTo("test"));
        assertThat(validateQueryResponse.getQueryExplanation().get(0).getExplanation(), equalTo("*:*"));
    }

    // Validating through a filtered alias must fold the alias filter into the explanation.
    public void testExplainFilteredAlias() {
        assertAcked(prepareCreate("test")
                .addMapping("test", "field", "type=text")
                .addAlias(new Alias("alias").filter(QueryBuilders.termQuery("field", "value1"))));
        ensureGreen();

        ValidateQueryResponse validateQueryResponse = client().admin().indices().prepareValidateQuery("alias")
                .setQuery(QueryBuilders.matchAllQuery()).setExplain(true).get();
        assertThat(validateQueryResponse.isValid(), equalTo(true));
        assertThat(validateQueryResponse.getQueryExplanation().size(), equalTo(1));
        assertThat(validateQueryResponse.getQueryExplanation().get(0).getIndex(), equalTo("test"));
        assertThat(validateQueryResponse.getQueryExplanation().get(0).getExplanation(), containsString("field:value1"));
    }

    // match_phrase_prefix explanations, including synonym-stacked token positions
    // produced by the custom "syns" analyzer (one <-> two).
    public void testExplainMatchPhrasePrefix() {
        assertAcked(prepareCreate("test").setSettings(
                Settings.settingsBuilder().put(indexSettings())
                        .put("index.analysis.filter.syns.type", "synonym")
                        .putArray("index.analysis.filter.syns.synonyms", "one,two")
                        .put("index.analysis.analyzer.syns.tokenizer", "standard")
                        .putArray("index.analysis.analyzer.syns.filter", "syns")
                ).addMapping("test", "field","type=text,analyzer=syns"));
        ensureGreen();

        ValidateQueryResponse validateQueryResponse = client().admin().indices().prepareValidateQuery("test")
                .setQuery(QueryBuilders.matchPhrasePrefixQuery("field", "foo")).setExplain(true).get();
        assertThat(validateQueryResponse.isValid(), equalTo(true));
        assertThat(validateQueryResponse.getQueryExplanation().size(), equalTo(1));
        assertThat(validateQueryResponse.getQueryExplanation().get(0).getExplanation(), containsString("field:\"foo*\""));

        validateQueryResponse = client().admin().indices().prepareValidateQuery("test")
                .setQuery(QueryBuilders.matchPhrasePrefixQuery("field", "foo bar")).setExplain(true).get();
        assertThat(validateQueryResponse.isValid(), equalTo(true));
        assertThat(validateQueryResponse.getQueryExplanation().size(), equalTo(1));
        assertThat(validateQueryResponse.getQueryExplanation().get(0).getExplanation(), containsString("field:\"foo bar*\""));

        // Stacked tokens
        validateQueryResponse = client().admin().indices().prepareValidateQuery("test")
                .setQuery(QueryBuilders.matchPhrasePrefixQuery("field", "one bar")).setExplain(true).get();
        assertThat(validateQueryResponse.isValid(), equalTo(true));
        assertThat(validateQueryResponse.getQueryExplanation().size(), equalTo(1));
        assertThat(validateQueryResponse.getQueryExplanation().get(0).getExplanation(), containsString("field:\"(one two) bar*\""));

        validateQueryResponse = client().admin().indices().prepareValidateQuery("test")
                .setQuery(QueryBuilders.matchPhrasePrefixQuery("field", "foo one")).setExplain(true).get();
        assertThat(validateQueryResponse.isValid(), equalTo(true));
        assertThat(validateQueryResponse.getQueryExplanation().size(), equalTo(1));
        assertThat(validateQueryResponse.getQueryExplanation().get(0).getExplanation(), containsString("field:\"foo (one* two*)\""));
    }

    // rewrite=true must surface the post-rewrite (concrete-term) form of multi-term
    // queries: prefix, common-terms, cutoff-frequency match, fuzzy and more-like-this.
    @SuppressWarnings("deprecation") // fuzzy queries will be removed in 4.0
    public void testExplainWithRewriteValidateQuery() throws Exception {
        client().admin().indices().prepareCreate("test")
                .addMapping("type1", "field", "type=text,analyzer=whitespace")
                .setSettings(SETTING_NUMBER_OF_SHARDS, 1).get();
        client().prepareIndex("test", "type1", "1").setSource("field", "quick lazy huge brown pidgin").get();
        client().prepareIndex("test", "type1", "2").setSource("field", "the quick brown fox").get();
        client().prepareIndex("test", "type1", "3").setSource("field", "the quick lazy huge brown fox jumps over the tree").get();
        client().prepareIndex("test", "type1", "4").setSource("field", "the lazy dog quacks like a duck").get();
        refresh();

        // prefix queries
        assertExplanation(QueryBuilders.matchPhrasePrefixQuery("field", "qu"),
                containsString("field:quick"), true);
        assertExplanation(QueryBuilders.matchPhrasePrefixQuery("field", "ju"),
                containsString("field:jumps"), true);

        // common terms queries
        assertExplanation(QueryBuilders.commonTermsQuery("field", "huge brown pidgin").cutoffFrequency(1),
                containsString("+field:pidgin (field:huge field:brown)"), true);
        assertExplanation(QueryBuilders.commonTermsQuery("field", "the brown").analyzer("stop"),
                containsString("field:brown"), true);

        // match queries with cutoff frequency
        assertExplanation(QueryBuilders.matchQuery("field", "huge brown pidgin").cutoffFrequency(1),
                containsString("+field:pidgin (field:huge field:brown)"), true);
        assertExplanation(QueryBuilders.matchQuery("field", "the brown").analyzer("stop"),
                containsString("field:brown"), true);

        // fuzzy queries
        assertExplanation(QueryBuilders.fuzzyQuery("field", "the").fuzziness(Fuzziness.fromEdits(2)),
                containsString("field:the (field:tree)^0.3333333"), true);
        assertExplanation(QueryBuilders.fuzzyQuery("field", "jump"),
                containsString("(field:jumps)^0.75"), true);

        // more like this queries
        assertExplanation(QueryBuilders.moreLikeThisQuery(new String[] { "field" }, null, MoreLikeThisQueryBuilder.ids("1"))
                        .include(true).minTermFreq(1).minDocFreq(1).maxQueryTerms(2),
                containsString("field:huge field:pidgin"), true);
        assertExplanation(QueryBuilders.moreLikeThisQuery(new String[] { "field" }, new String[] {"the huge pidgin"}, null)
                        .minTermFreq(1).minDocFreq(1).maxQueryTerms(2),
                containsString("field:huge field:pidgin"), true);
    }

    // Extra top-level properties before the "query" element must make the body invalid.
    public void testIrrelevantPropertiesBeforeQuery() throws IOException {
        createIndex("test");
        ensureGreen();
        refresh();

        assertThat(client().admin().indices().prepareValidateQuery("test").setQuery(QueryBuilders.wrapperQuery(new BytesArray("{\"foo\": \"bar\", \"query\": {\"term\" : { \"user\" : \"kimchy\" }}}"))).get().isValid(), equalTo(false));
    }

    // Extra top-level properties after the "query" element must make the body invalid.
    public void testIrrelevantPropertiesAfterQuery() throws IOException {
        createIndex("test");
        ensureGreen();
        refresh();

        assertThat(client().admin().indices().prepareValidateQuery("test").setQuery(QueryBuilders.wrapperQuery(new BytesArray("{\"query\": {\"term\" : { \"user\" : \"kimchy\" }}, \"foo\": \"bar\"}"))).get().isValid(), equalTo(false));
    }

    // Validates queryBuilder against index "test"/type "type1" with explain on, and
    // asserts the query is valid, error-free, and its explanation matches `matcher`.
    private static void assertExplanation(QueryBuilder queryBuilder, Matcher<String> matcher, boolean withRewrite) {
        ValidateQueryResponse response = client().admin().indices().prepareValidateQuery("test")
                .setTypes("type1")
                .setQuery(queryBuilder)
                .setExplain(true)
                .setRewrite(withRewrite)
                .execute().actionGet();
        assertThat(response.getQueryExplanation().size(), equalTo(1));
        assertThat(response.getQueryExplanation().get(0).getError(), nullValue());
        assertThat(response.getQueryExplanation().get(0).getExplanation(), matcher);
        assertThat(response.isValid(), equalTo(true));
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.beam.sdk.io.contextualtextio;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.ReadableByteChannel;
import java.nio.channels.SeekableByteChannel;
import java.util.NoSuchElementException;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.io.FileBasedSource;
import org.apache.beam.sdk.io.fs.EmptyMatchTreatment;
import org.apache.beam.sdk.io.fs.MatchResult;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.ValueProvider;
import org.apache.beam.sdk.schemas.SchemaCoder;
import org.apache.beam.sdk.values.Row;
import org.apache.beam.vendor.grpc.v1p36p0.com.google.protobuf.ByteString;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Implementation detail of {@link ContextualTextIO.Read}.
 *
 * <p>A {@link FileBasedSource} which can decode records delimited by newline characters.
 *
 * <p>This source splits the data into records using {@code UTF-8} {@code \n}, {@code \r}, or {@code
 * \r\n} as the delimiter. This source is not strict and supports decoding the last record even if
 * it is not delimited. Finally, no records are decoded if the stream is empty.
 *
 * <p>This source supports reading from any arbitrary byte position within the stream. If the
 * starting position is not {@code 0}, then bytes are skipped until the first delimiter is found
 * representing the beginning of the first record to be decoded.
 */
@VisibleForTesting
@SuppressWarnings({
  "nullness" // TODO(https://issues.apache.org/jira/browse/BEAM-10402)
})
class ContextualTextIOSource extends FileBasedSource<Row> {
  // Custom record delimiter bytes; null means the default \n / \r / \r\n handling.
  byte[] delimiter;

  private static final Logger LOG = LoggerFactory.getLogger(ContextualTextIOSource.class);

  // Used to Override isSplittable
  private boolean hasMultilineCSVRecords;

  @Override
  protected boolean isSplittable() throws Exception {
    if (hasMultilineCSVRecords) {
      // When Having Multiline CSV Records,
      // Splitting the file may cause a split to be within a record,
      // Disabling split prevents this from happening
      return false;
    }
    return super.isSplittable();
  }

  /**
   * Creates the top-level (unsplit) source for the given file pattern.
   *
   * @param fileSpec file pattern to read
   * @param emptyMatchTreatment how to treat a pattern matching no files
   * @param delimiter custom delimiter bytes, or null for default newline handling
   * @param hasMultilineCSVRecords true if quoted records may span newlines (disables splitting)
   */
  ContextualTextIOSource(
      ValueProvider<String> fileSpec,
      EmptyMatchTreatment emptyMatchTreatment,
      byte[] delimiter,
      boolean hasMultilineCSVRecords) {
    super(fileSpec, emptyMatchTreatment, 1L);
    this.delimiter = delimiter;
    this.hasMultilineCSVRecords = hasMultilineCSVRecords;
  }

  // Sub-range constructor used by createForSubrangeOfFile during splitting.
  private ContextualTextIOSource(
      MatchResult.Metadata metadata,
      long start,
      long end,
      byte[] delimiter,
      boolean hasMultilineCSVRecords) {
    super(metadata, 1L, start, end);
    this.delimiter = delimiter;
    this.hasMultilineCSVRecords = hasMultilineCSVRecords;
  }

  @Override
  protected FileBasedSource<Row> createForSubrangeOfFile(
      MatchResult.Metadata metadata, long start, long end) {
    return new ContextualTextIOSource(metadata, start, end, delimiter, hasMultilineCSVRecords);
  }

  @Override
  protected FileBasedReader<Row> createSingleFileReader(PipelineOptions options) {
    return new MultiLineTextBasedReader(this, delimiter, hasMultilineCSVRecords);
  }

  @Override
  public Coder<Row> getOutputCoder() {
    // Rows conform to the schema declared by RecordWithMetadata.
    return SchemaCoder.of(RecordWithMetadata.getSchema());
  }

  /**
   * A {@link FileBasedReader FileBasedReader} which can decode records delimited by delimiter
   * characters.
   *
   * <p>See {@link ContextualTextIOSource} for further details.
   */
  @VisibleForTesting
  static class MultiLineTextBasedReader extends FileBasedReader<Row> {
    public static final int READ_BUFFER_SIZE = 8192;
    private static final ByteString UTF8_BOM =
        ByteString.copyFrom(new byte[] {(byte) 0xEF, (byte) 0xBB, (byte) 0xBF});
    private final ByteBuffer readBuffer = ByteBuffer.allocate(READ_BUFFER_SIZE);
    // Accumulated, not-yet-consumed bytes from the channel.
    private ByteString buffer;
    // Delimiter bounds within `buffer`, maintained by findDelimiterBounds().
    private int startOfDelimiterInBuffer;
    private int endOfDelimiterInBuffer;
    private long startOfRecord;
    private volatile long startOfNextRecord;
    private volatile boolean eof;
    private volatile boolean elementIsPresent;
    private @Nullable Row currentValue;
    private @Nullable ReadableByteChannel inChannel;
    private byte @Nullable [] delimiter;

    // Add to override the isSplittable
    private boolean hasMultilineCSVRecords;

    private long startingOffset;
    // Number of records decoded so far by this reader; used as the per-range record number.
    private long totalRecordCount;

    private MultiLineTextBasedReader(
        ContextualTextIOSource source, byte[] delimiter, boolean hasMultilineCSVRecords) {
      super(source);
      buffer = ByteString.EMPTY;
      this.delimiter = delimiter;
      this.hasMultilineCSVRecords = hasMultilineCSVRecords;
      startingOffset = getCurrentSource().getStartOffset(); // Start offset;
    }

    @Override
    protected long getCurrentOffset() throws NoSuchElementException {
      if (!elementIsPresent) {
        throw new NoSuchElementException();
      }
      return startOfRecord;
    }

    @Override
    public long getSplitPointsRemaining() {
      if (isStarted() && startOfNextRecord >= getCurrentSource().getEndOffset()) {
        // The next record (if any) starts past this range's end: at most one split point left.
        return isDone() ? 0 : 1;
      }
      return super.getSplitPointsRemaining();
    }

    @Override
    public Row getCurrent() throws NoSuchElementException {
      if (!elementIsPresent) {
        throw new NoSuchElementException();
      }
      return currentValue;
    }

    @Override
    protected void startReading(ReadableByteChannel channel) throws IOException {
      this.inChannel = channel;
      // If the first offset is greater than zero, we need to skip bytes until we see our
      // first delimiter.
      long startOffset = getCurrentSource().getStartOffset();
      if (startOffset > 0) {
        Preconditions.checkState(
            channel instanceof SeekableByteChannel,
            "%s only supports reading from a SeekableByteChannel when given a start offset"
                + " greater than 0.",
            ContextualTextIOSource.class.getSimpleName());
        long requiredPosition = startOffset - 1;
        if (delimiter != null && startOffset >= delimiter.length) {
          // we need to move back the offset of at worse delimiter.size to be sure to see
          // all the bytes of the delimiter in the call to findDelimiterBounds() below
          requiredPosition = startOffset - delimiter.length;
        }
        ((SeekableByteChannel) channel).position(requiredPosition);
        findDelimiterBounds();
        // Drop everything up to and including the first delimiter; the next record
        // begins immediately after it.
        buffer = buffer.substring(endOfDelimiterInBuffer);
        startOfNextRecord = requiredPosition + endOfDelimiterInBuffer;
        endOfDelimiterInBuffer = 0;
        startOfDelimiterInBuffer = 0;
      }
    }

    /**
     * Locates the start position and end position of the next delimiter. Will consume the channel
     * till either EOF or the delimiter bounds are found.
     *
     * <p>If {@link ContextualTextIOSource#hasMultilineCSVRecords} is set then the behaviour will
     * change from the standard read seen in {@link org.apache.beam.sdk.io.TextIO}. The assumption
     * when {@link ContextualTextIOSource#hasMultilineCSVRecords} is set is that the file is being
     * read with a single thread.
     *
     * <p>This fills the buffer and updates the positions as follows:
     *
     * <pre>{@code
     * ------------------------------------------------------
     * | element bytes | delimiter bytes | unconsumed bytes |
     * ------------------------------------------------------
     * 0            start of          end of              buffer
     *              delimiter         delimiter           size
     *              in buffer         in buffer
     * }</pre>
     */
    private void findDelimiterBounds() throws IOException {
      int bytePositionInBuffer = 0;
      // Tracks whether we are outside a double-quoted section; a delimiter only
      // terminates a record when quotes are balanced (multiline-CSV mode only).
      boolean doubleQuoteClosed = true;

      while (true) {
        if (!tryToEnsureNumberOfBytesInBuffer(bytePositionInBuffer + 1)) {
          // EOF before any delimiter: treat the remaining bytes as the last record.
          startOfDelimiterInBuffer = endOfDelimiterInBuffer = bytePositionInBuffer;
          break;
        }

        byte currentByte = buffer.byteAt(bytePositionInBuffer);
        if (hasMultilineCSVRecords) {
          // Check if we are inside an open Quote
          if (currentByte == '"') {
            doubleQuoteClosed = !doubleQuoteClosed;
          }
        } else {
          doubleQuoteClosed = true;
        }

        if (delimiter == null) {
          // default delimiter
          if (currentByte == '\n') {
            startOfDelimiterInBuffer = bytePositionInBuffer;
            endOfDelimiterInBuffer = startOfDelimiterInBuffer + 1;
            if (doubleQuoteClosed) {
              break;
            }
          } else if (currentByte == '\r') {
            startOfDelimiterInBuffer = bytePositionInBuffer;
            endOfDelimiterInBuffer = startOfDelimiterInBuffer + 1;
            if (tryToEnsureNumberOfBytesInBuffer(bytePositionInBuffer + 2)) {
              currentByte = buffer.byteAt(bytePositionInBuffer + 1);
              if (currentByte == '\n') {
                // \r\n counts as a single two-byte delimiter.
                endOfDelimiterInBuffer += 1;
              }
            }
            if (doubleQuoteClosed) {
              break;
            }
          }
        } else {
          // when the user defines a delimiter
          int i = 0;
          startOfDelimiterInBuffer = endOfDelimiterInBuffer = bytePositionInBuffer;
          while ((i < delimiter.length) && (currentByte == delimiter[i])) {
            // read next byte;
            i++;
            if (tryToEnsureNumberOfBytesInBuffer(bytePositionInBuffer + i + 1)) {
              currentByte = buffer.byteAt(bytePositionInBuffer + i);
            } else {
              // corner case: delimiter truncate at the end of file
              startOfDelimiterInBuffer = endOfDelimiterInBuffer = bytePositionInBuffer;
              break;
            }
          }
          if (i == delimiter.length) {
            endOfDelimiterInBuffer = bytePositionInBuffer + i;
            if (doubleQuoteClosed) {
              break;
            }
          }
        }
        bytePositionInBuffer += 1;
      }
    }

    @Override
    protected boolean readNextRecord() throws IOException {
      startOfRecord = startOfNextRecord;

      findDelimiterBounds();

      // If we have reached EOF file and consumed all of the buffer then we know
      // that there are no more records.
      if (eof && buffer.isEmpty()) {
        elementIsPresent = false;
        return false;
      }

      decodeCurrentElement();
      startOfNextRecord = startOfRecord + endOfDelimiterInBuffer;
      return true;
    }

    /**
     * Decodes the current element updating the buffer to only contain the unconsumed bytes.
     *
     * <p>This invalidates the currently stored {@code startOfDelimiterInBuffer} and {@code
     * endOfDelimiterInBuffer}.
     */
    private void decodeCurrentElement() throws IOException {
      ByteString dataToDecode = buffer.substring(0, startOfDelimiterInBuffer);

      // If present, the UTF8 Byte Order Mark (BOM) will be removed.
      if (startOfRecord == 0 && dataToDecode.startsWith(UTF8_BOM)) {
        dataToDecode = dataToDecode.substring(UTF8_BOM.size());
      }

      // The line num is:
      long recordUniqueNum = totalRecordCount++;
      // The single filename can be found as:
      // fileName.substring(fileName.lastIndexOf('/') + 1);
      currentValue =
          Row.withSchema(RecordWithMetadata.getSchema())
              .withFieldValue(RecordWithMetadata.RECORD_NUM_IN_OFFSET, recordUniqueNum)
              .withFieldValue(RecordWithMetadata.RANGE_OFFSET, startingOffset)
              .withFieldValue(RecordWithMetadata.RECORD_OFFSET, startOfRecord)
              .withFieldValue(
                  RecordWithMetadata.RESOURCE_ID,
                  getCurrentSource().getSingleFileMetadata().resourceId())
              .withFieldValue(RecordWithMetadata.VALUE, dataToDecode.toStringUtf8())
              .build();
      elementIsPresent = true;
      buffer = buffer.substring(endOfDelimiterInBuffer);
    }

    /** Returns false if we were unable to ensure the minimum capacity by consuming the channel. */
    private boolean tryToEnsureNumberOfBytesInBuffer(int minCapacity) throws IOException {
      // While we aren't at EOF or haven't fulfilled the minimum buffer capacity,
      // attempt to read more bytes.
      while (buffer.size() <= minCapacity && !eof) {
        eof = inChannel.read(readBuffer) == -1;
        readBuffer.flip();
        buffer = buffer.concat(ByteString.copyFrom(readBuffer));
        readBuffer.clear();
      }
      // Return true if we were able to honor the minimum buffer capacity request
      return buffer.size() >= minCapacity;
    }
  }
}
/* * Copyright 2013 Red Hat Inc. and/or its affiliates and other contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.switchyard.transform.ootb.map; import org.jboss.logging.Logger; import org.switchyard.common.xml.QNameUtil; import org.switchyard.transform.BaseTransformer; import org.switchyard.transform.Transformer; import org.switchyard.transform.internal.TransformMessages; import javax.xml.namespace.QName; import java.beans.BeanInfo; import java.beans.IntrospectionException; import java.beans.Introspector; import java.beans.PropertyDescriptor; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; /** * Transformer that takes a Map graph and from it, builds a Java object * graph. * * @param <F> From Type * @param <T> To Type. 
* * @author <a href="mailto:tom.fennelly@gmail.com">tom.fennelly@gmail.com</a> */ public class FromMapToJava<F, T> extends BaseTransformer<Map, Object> { private static Logger _logger = Logger.getLogger(FromMapToJava.class); private GraphBuilder _graphBuilder; private static Map<Class, Class> primitives = new HashMap<Class, Class>(); { primitives.put(Integer.TYPE, Integer.class); primitives.put(Long.TYPE, Long.class); primitives.put(Double.TYPE, Double.class); primitives.put(Float.TYPE, Float.class); primitives.put(Boolean.TYPE, Boolean.class); primitives.put(Character.TYPE, Character.class); primitives.put(Byte.TYPE, Byte.class); primitives.put(Short.TYPE, Short.class); } @Override public Transformer setTo(QName toType) { super.setTo(toType); if (!QNameUtil.isJavaMessageType(toType)) { throw TransformMessages.MESSAGES.invalidToTypeNotJavaObject(toType.toString()); } Class<?> javaType = QNameUtil.toJavaMessageType(toType); if (javaType == null) { throw TransformMessages.MESSAGES.invalidToTypeClassNotFound(toType.toString()); } _graphBuilder = new ComplexTypeBuilder(javaType, null, null); return this; } @Override public Object transform(Map from) { return _graphBuilder.build(from); } private abstract class GraphBuilder { private Class _javaType; private String _parentPropertyName; private GraphBuilder _parentNode; private Method _parentSetterMethod; private BeanInfo _beanInfo; private GraphBuilder(Class javaType, GraphBuilder parentBuilder, Method parentSetterMethod) { this._javaType = javaType; this._parentNode = parentBuilder; this._parentSetterMethod = parentSetterMethod; try { _beanInfo = Introspector.getBeanInfo(_javaType); } catch (IntrospectionException e) { throw TransformMessages.MESSAGES.failedToExtractBeanInfo(_javaType.getName(), e); } } public Class getJavaType() { return _javaType; } public String getParentPropertyName() { return _parentPropertyName; } public void setParentPropertyName(String parentPropertyName) { this._parentPropertyName = 
parentPropertyName; }

        // Builds a value for this node of the object graph from the raw input value.
        abstract Object build(Object value);

        /**
         * Sets a property on the given bean instance through its setter method.
         * If the value is not directly assignable, primitive parameter types are
         * mapped to their wrapper types and a String-argument constructor of the
         * parameter type is tried as a fallback coercion.
         */
        protected void setPropertyValue(Object instance, Object propertyVal, Method parentSetterMethod) throws IllegalAccessException, InvocationTargetException {
            Class<?> paramType = parentSetterMethod.getParameterTypes()[0];
            if (paramType.isInstance(propertyVal)) {
                // Directly assignable - invoke the setter as-is.
                parentSetterMethod.invoke(instance, propertyVal);
                return;
            }
            if (paramType.isPrimitive()) {
                // Switch to the wrapper type so a String-arg constructor can be looked up.
                paramType = primitives.get(paramType);
            }
            // Try building from a String ...
            try {
                Constructor<?> stringConstructor = paramType.getConstructor(String.class);
                parentSetterMethod.invoke(instance, stringConstructor.newInstance(propertyVal.toString()));
                return;
            } catch (Exception e) {
                // Best-effort coercion: failure is intentionally swallowed (logged at
                // debug) so a single unmappable property does not abort the build.
                if (_logger.isDebugEnabled()) {
                    _logger.debug("Unable to set property '" + _parentPropertyName + "' on bean instance of type '" + instance.getClass().getName() + "'.", e);
                }
            }
        }

        // Creates a child builder for the named property, wiring in the property's
        // setter and recording the property name for error reporting.
        protected GraphBuilder newBuilder(String propertyName, Class mapType) {
            Method propertySetter = getSetter(propertyName);
            Class propertyType = propertySetter.getParameterTypes()[0];
            GraphBuilder builder = newBuilder(mapType, propertyType, propertySetter);
            builder.setParentPropertyName(propertyName);
            return builder;
        }

        /**
         * Selects a concrete builder implementation based on the shape of the input:
         * a Collection input mapped onto a Collection property uses a
         * CollectionTypeBuilder (resolving the entry type from the setter's generic
         * signature), a Map input uses a ComplexTypeBuilder, anything else a
         * SimpleTypeBuilder.
         */
        protected GraphBuilder newBuilder(Class mapType, Class javaType, Method parentSetter) {
            GraphBuilder nodeBuilder = null;
            if (Collection.class.isAssignableFrom(mapType)) {
                Type[] generics = parentSetter.getGenericParameterTypes();
                if (Collection.class.isAssignableFrom(javaType)) {
                    if (generics != null && generics.length == 1 && generics[0] != null) {
                        ParameterizedType genericType = (ParameterizedType) generics[0];
                        Type collectionEntryType = genericType.getActualTypeArguments()[0];
                        nodeBuilder = new CollectionTypeBuilder(javaType, (Class<?>) collectionEntryType, this, parentSetter);
                    }
                }
                // NOTE(review): a Collection input whose target property is not a
                // generic Collection falls through with a null builder - confirm
                // callers guard against that case.
            } else {
                if (Map.class.isAssignableFrom(mapType)) {
                    nodeBuilder = new ComplexTypeBuilder(javaType, this, parentSetter);
                } else {
                    nodeBuilder = new SimpleTypeBuilder(javaType, this, parentSetter);
                }
            }
            return nodeBuilder;
        }

        // Looks up the write method (setter) for the named property via the cached
        // BeanInfo; fails fast when the target type has no such setter.
        private Method getSetter(String propertyName) {
            Method setterMethod = null;
            for (PropertyDescriptor propertyDesc : _beanInfo.getPropertyDescriptors()) {
                if (propertyDesc.getName().equals(propertyName)) {
                    setterMethod = propertyDesc.getWriteMethod();
                    break;
                }
            }
            if (setterMethod == null) {
                throw TransformMessages.MESSAGES.noSetterMethodForProperty(propertyName, _javaType.getName());
            }
            return setterMethod;
        }

        // Instantiates the node's Java type, defaulting the Collection/List
        // interfaces to ArrayList since interfaces cannot be instantiated directly.
        protected Object newInstance() {
            if (_javaType == Collection.class) {
                return new ArrayList();
            }
            if (_javaType == List.class) {
                return new ArrayList();
            }
            try {
                return _javaType.newInstance();
            } catch (Exception e) {
                throw TransformMessages.MESSAGES.unableToCreateInstance(_javaType.getName(), e);
            }
        }
    }

    /**
     * Leaf builder: the raw value is used as-is.
     */
    private final class SimpleTypeBuilder extends GraphBuilder {

        private SimpleTypeBuilder(Class javaType, GraphBuilder parentBuilder, Method parentSetterMethod) {
            super(javaType, parentBuilder, parentSetterMethod);
        }

        @Override
        Object build(Object value) {
            return value;
        }
    }

    /**
     * Builder for bean-like nodes: a Map input is turned into a new instance of
     * the node's type with each map entry applied through the matching property
     * setter. Child builders are cached per property name for reuse.
     */
    private final class ComplexTypeBuilder extends GraphBuilder {

        // Per-property child builders; ConcurrentHashMap because a builder tree
        // may be reused across concurrent invocations - TODO confirm.
        private Map<String, GraphBuilder> _childNodes = new ConcurrentHashMap<String, GraphBuilder>();

        private ComplexTypeBuilder(Class javaType, GraphBuilder parentBuilder, Method parentSetterMethod) {
            super(javaType, parentBuilder, parentSetterMethod);
        }

        @Override
        Object build(Object value) {
            Object instance = newInstance();
            if (value instanceof Map && !((Map) value).isEmpty()) {
                Set properties = ((Map) value).entrySet();
                Iterator propertyIterator = properties.iterator();
                while (propertyIterator.hasNext()) {
                    Map.Entry entry = (Map.Entry) propertyIterator.next();
                    String propertyName = (String) entry.getKey();
                    Object propertyValue = entry.getValue();
                    GraphBuilder nodeBuilder = _childNodes.get(propertyName);
                    if (nodeBuilder == null) {
                        // Lazily create and cache the child builder, keyed off the
                        // runtime type of the first value seen for this property.
                        // NOTE(review): propertyValue.getClass() NPEs on a null map
                        // value - confirm inputs never carry nulls.
                        nodeBuilder = newBuilder(propertyName, propertyValue.getClass());
                        _childNodes.put(propertyName, nodeBuilder);
                    }
                    Object propertyVal = nodeBuilder.build(propertyValue);
                    try {
                        setPropertyValue(instance, propertyVal, nodeBuilder._parentSetterMethod);
                    } catch (Exception e) {
                        throw TransformMessages.MESSAGES.errorInvokingSetter(nodeBuilder._parentSetterMethod.getName(), getJavaType().getName(), e);
                    }
                }
            }
            return instance;
        }
    }

    /**
     * Builder for Collection nodes: builds each entry through a lazily created
     * entry builder and accumulates the results into a new collection instance.
     */
    private final class CollectionTypeBuilder extends GraphBuilder {

        private volatile GraphBuilder _collectionEntryBuilder;
        private Class<?> _collectionEntryType;

        private CollectionTypeBuilder(Class<?> collectionType, Class<?> collectionEntryType, GraphBuilder parentBuilder, Method parentSetterMethod) {
            super(collectionType, parentBuilder, parentSetterMethod);
            this._collectionEntryType = collectionEntryType;
        }

        @Override
        Object build(Object value) {
            // Synchronized so the lazily initialized entry builder is created only
            // once even when this builder is invoked concurrently.
            synchronized (this) {
                Collection instance = (Collection) newInstance();
                if (value instanceof Collection) {
                    for (Object arrayEntry : (Collection)value) {
                        if (_collectionEntryBuilder == null) {
                            _collectionEntryBuilder = newBuilder(arrayEntry.getClass(), _collectionEntryType, null);
                        }
                        instance.add(_collectionEntryBuilder.build(arrayEntry));
                    }
                }
                return instance;
            }
        }
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.discovery.zen;

import org.elasticsearch.ElasticsearchTimeoutException;
import org.elasticsearch.cluster.*;
import org.elasticsearch.cluster.block.ClusterBlocks;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.RoutingService;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.cluster.service.InternalClusterService;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.discovery.DiscoverySettings;
import org.elasticsearch.discovery.zen.membership.MembershipAction;

import java.util.*;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;

/**
 * This class processes incoming join requests (passed via {@link ZenDiscovery}). Incoming nodes
 * are directly added to the cluster state or are accumulated during master election.
*/
public class NodeJoinController extends AbstractComponent {

    final ClusterService clusterService;
    final RoutingService routingService;
    final DiscoverySettings discoverySettings;

    // True while incoming joins are being accumulated (during master election)
    // instead of being processed immediately.
    final AtomicBoolean accumulateJoins = new AtomicBoolean(false);

    // this is set while trying to become a master
    final AtomicReference<ElectionContext> electionContext = new AtomicReference<>();

    // Joining nodes mapped to the callbacks to notify once their join is
    // processed. All access is guarded by synchronized (pendingJoinRequests).
    protected final Map<DiscoveryNode, List<MembershipAction.JoinCallback>> pendingJoinRequests = new HashMap<>();

    public NodeJoinController(ClusterService clusterService, RoutingService routingService, DiscoverySettings discoverySettings, Settings settings) {
        super(settings);
        this.clusterService = clusterService;
        this.routingService = routingService;
        this.discoverySettings = discoverySettings;
    }

    /**
     * waits for enough incoming joins from master eligible nodes to complete the master election
     * <p>
     * You must start accumulating joins before calling this method. See {@link #startAccumulatingJoins()}
     * <p>
     * The method will return once the local node has been elected as master or some failure/timeout has happened.
     * The exact outcome is communicated via the callback parameter, which is guaranteed to be called.
     *
     * @param requiredMasterJoins the number of joins from master eligible needed to complete the election
     * @param timeValue           how long to wait before failing. a timeout is communicated via the callback's onFailure method.
     * @param callback            the result of the election (success or failure) will be communicated by calling methods on this
     *                            object
     **/
    public void waitToBeElectedAsMaster(int requiredMasterJoins, TimeValue timeValue, final ElectionCallback callback) {
        assert accumulateJoins.get() : "waitToBeElectedAsMaster is called we are not accumulating joins";

        final CountDownLatch done = new CountDownLatch(1);
        // The context's onClose both clears the shared electionContext slot and
        // releases the latch below, whichever way the election ends.
        final ElectionContext newContext = new ElectionContext(callback, requiredMasterJoins, clusterService) {
            @Override
            void onClose() {
                if (electionContext.compareAndSet(this, null)) {
                    stopAccumulatingJoins("election closed");
                } else {
                    assert false : "failed to remove current election context";
                }
                done.countDown();
            }
        };

        if (electionContext.compareAndSet(null, newContext) == false) {
            // should never happen, but be conservative
            failContext(newContext, new IllegalStateException("double waiting for election"));
            return;
        }
        try {
            // check what we have so far..
            checkPendingJoinsAndElectIfNeeded();

            try {
                if (done.await(timeValue.millis(), TimeUnit.MILLISECONDS)) {
                    // callback handles everything
                    return;
                }
            } catch (InterruptedException e) {
                // Deliberately ignored: a timeout/interrupt falls through to the
                // timeout failure path below.
            }
            if (logger.isTraceEnabled()) {
                final int pendingNodes;
                synchronized (pendingJoinRequests) {
                    pendingNodes = pendingJoinRequests.size();
                }
                logger.trace("timed out waiting to be elected. waited [{}]. pending node joins [{}]", timeValue, pendingNodes);
            }
            // callback will clear the context, if it's active
            failContext(newContext, new ElasticsearchTimeoutException("timed out waiting to be elected"));
        } catch (Throwable t) {
            logger.error("unexpected failure while waiting for incoming joins", t);
            failContext(newContext, "unexpected failure while waiting for pending joins", t);
        }
    }

    // Convenience overload: fails the context using the throwable's message as reason.
    private void failContext(final ElectionContext context, final Throwable throwable) {
        failContext(context, throwable.getMessage(), throwable);
    }

    /** utility method to fail the given election context under the cluster state thread */
    private void failContext(final ElectionContext context, final String reason, final Throwable throwable) {
        clusterService.submitStateUpdateTask("zen-disco-join(failure [" + reason + "])", new ClusterStateUpdateTask(Priority.IMMEDIATE) {

            @Override
            public boolean runOnlyOnMaster() {
                return false;
            }

            @Override
            public ClusterState execute(ClusterState currentState) throws Exception {
                // The failure callback must run on the cluster state thread; the
                // state itself is left untouched.
                context.onFailure(throwable);
                return currentState;
            }

            @Override
            public void onFailure(String source, Throwable updateFailure) {
                logger.warn("unexpected error while trying to fail election context due to [{}]. original exception [{}]", updateFailure, reason, throwable);
                context.onFailure(updateFailure);
            }
        });
    }

    /**
     * Accumulates any future incoming join request. Pending join requests will be processed in the final steps of becoming a
     * master or when {@link #stopAccumulatingJoins(String)} is called.
     */
    public void startAccumulatingJoins() {
        logger.trace("starting to accumulate joins");
        boolean b = accumulateJoins.getAndSet(true);
        assert b == false : "double startAccumulatingJoins() calls";
        assert electionContext.get() == null : "startAccumulatingJoins() called, but there is an ongoing election context";
    }

    /** Stopped accumulating joins. All pending joins will be processed. Future joins will be processed immediately */
    public void stopAccumulatingJoins(String reason) {
        logger.trace("stopping join accumulation ([{}])", reason);
        assert electionContext.get() == null : "stopAccumulatingJoins() called, but there is an ongoing election context";
        boolean b = accumulateJoins.getAndSet(false);
        assert b : "stopAccumulatingJoins() called but not accumulating";
        synchronized (pendingJoinRequests) {
            if (pendingJoinRequests.size() > 0) {
                processJoins("pending joins after accumulation stop [" + reason + "]");
            }
        }
    }

    /**
     * processes or queues an incoming join request.
     * <p>
     * Note: doesn't do any validation. This should have been done before.
     */
    public void handleJoinRequest(final DiscoveryNode node, final MembershipAction.JoinCallback callback) {
        synchronized (pendingJoinRequests) {
            List<MembershipAction.JoinCallback> nodeCallbacks = pendingJoinRequests.get(node);
            if (nodeCallbacks == null) {
                nodeCallbacks = new ArrayList<>();
                pendingJoinRequests.put(node, nodeCallbacks);
            }
            nodeCallbacks.add(callback);
        }
        if (accumulateJoins.get() == false) {
            // Not in an election: apply the join right away.
            processJoins("join from node[" + node + "]");
        } else {
            // In an election: the new join may be the one that completes it.
            checkPendingJoinsAndElectIfNeeded();
        }
    }

    /**
     * checks if there is an on going request to become master and if it has enough pending joins. If so, the node will
     * become master via a ClusterState update task.
     */
    private void checkPendingJoinsAndElectIfNeeded() {
        assert accumulateJoins.get() : "election check requested but we are not accumulating joins";
        final ElectionContext context = electionContext.get();
        if (context == null) {
            return;
        }

        // Count only master-eligible nodes towards the election quorum.
        int pendingMasterJoins = 0;
        synchronized (pendingJoinRequests) {
            for (DiscoveryNode node : pendingJoinRequests.keySet()) {
                if (node.isMasterNode()) {
                    pendingMasterJoins++;
                }
            }
        }
        if (pendingMasterJoins < context.requiredMasterJoins) {
            if (context.pendingSetAsMasterTask.get() == false) {
                logger.trace("not enough joins for election. Got [{}], required [{}]", pendingMasterJoins, context.requiredMasterJoins);
            }
            return;
        }
        // getAndSet guarantees the become-master task is submitted exactly once
        // even when several join threads pass the quorum check concurrently.
        if (context.pendingSetAsMasterTask.getAndSet(true)) {
            logger.trace("elected as master task already submitted, ignoring...");
            return;
        }

        final String source = "zen-disco-join(elected_as_master, [" + pendingMasterJoins + "] joins received)";
        clusterService.submitStateUpdateTask(source, new ProcessJoinsTask(Priority.IMMEDIATE) {
            @Override
            public ClusterState execute(ClusterState currentState) {
                // Take into account the previous known nodes, if they happen not to be available
                // then fault detection will remove these nodes.

                if (currentState.nodes().masterNode() != null) {
                    // TODO can we tie break here? we don't have a remote master cluster state version to decide on
                    logger.trace("join thread elected local node as master, but there is already a master in place: {}", currentState.nodes().masterNode());
                    throw new NotMasterException("Node [" + clusterService.localNode() + "] not master for join request");
                }

                DiscoveryNodes.Builder builder = new DiscoveryNodes.Builder(currentState.nodes()).masterNodeId(currentState.nodes().localNode().id());
                // update the fact that we are the master...
                ClusterBlocks clusterBlocks = ClusterBlocks.builder().blocks(currentState.blocks()).removeGlobalBlock(discoverySettings.getNoMasterBlock()).build();
                currentState = ClusterState.builder(currentState).nodes(builder).blocks(clusterBlocks).build();

                // reroute now to remove any dead nodes (master may have stepped down when they left and didn't update the routing table)
                RoutingAllocation.Result result = routingService.getAllocationService().reroute(currentState, "nodes joined");
                if (result.changed()) {
                    currentState = ClusterState.builder(currentState).routingResult(result).build();
                }

                // Add the incoming join requests.
                // Note: we only do this now (after the reroute) to avoid assigning shards to these nodes.
                return super.execute(currentState);
            }

            @Override
            public boolean runOnlyOnMaster() {
                return false;
            }

            @Override
            public void onFailure(String source, Throwable t) {
                super.onFailure(source, t);
                context.onFailure(t);
            }

            @Override
            public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
                super.clusterStateProcessed(source, oldState, newState);
                context.onElectedAsMaster(newState);
            }
        });
    }

    /** process all pending joins */
    private void processJoins(String reason) {
        clusterService.submitStateUpdateTask("zen-disco-join(" + reason + ")", new ProcessJoinsTask(Priority.URGENT));
    }


    public interface ElectionCallback {
        /**
         * called when the local node is successfully elected as master
         * Guaranteed to be called on the cluster state update thread
         **/
        void onElectedAsMaster(ClusterState state);

        /**
         * called when the local node failed to be elected as master
         * Guaranteed to be called on the cluster state update thread
         **/
        void onFailure(Throwable t);
    }

    // Tracks one in-flight election; the `closed` flag guarantees exactly one of
    // onElectedAsMaster/onFailure reaches the wrapped callback, and onClose runs
    // before the callback in either case.
    static abstract class ElectionContext implements ElectionCallback {
        private final ElectionCallback callback;
        private final int requiredMasterJoins;
        private final ClusterService clusterService;

        /** set to true after enough joins have been seen and a cluster update task is submitted to become master */
        final AtomicBoolean pendingSetAsMasterTask = new AtomicBoolean();
        final AtomicBoolean closed = new AtomicBoolean();

        ElectionContext(ElectionCallback callback, int requiredMasterJoins, ClusterService clusterService) {
            this.callback = callback;
            this.requiredMasterJoins = requiredMasterJoins;
            this.clusterService = clusterService;
        }

        // Invoked exactly once when the context is closed (success or failure).
        abstract void onClose();

        @Override
        public void onElectedAsMaster(ClusterState state) {
            assert pendingSetAsMasterTask.get() : "onElectedAsMaster called but pendingSetAsMasterTask is not set";
            assertClusterStateThread();
            assert state.nodes().localNodeMaster() : "onElectedAsMaster called but local node is not master";
            if (closed.compareAndSet(false, true)) {
                try {
                    onClose();
                } finally {
                    callback.onElectedAsMaster(state);
                }
            }
        }

        @Override
        public void onFailure(Throwable t) {
            assertClusterStateThread();
            if (closed.compareAndSet(false, true)) {
                try {
                    onClose();
                } finally {
                    callback.onFailure(t);
                }
            }
        }

        private void assertClusterStateThread() {
            assert clusterService instanceof InternalClusterService == false || ((InternalClusterService) clusterService).assertClusterStateThread();
        }
    }

    /**
     * Processes any pending joins via a ClusterState update task.
     * Note: this task automatically fails (and fails all pending joins) if the current node is not marked as master
     */
    class ProcessJoinsTask extends ClusterStateUpdateTask {

        public ProcessJoinsTask(Priority priority) {
            super(priority);
        }

        // Callbacks drained from pendingJoinRequests by this task; answered in
        // clusterStateProcessed (success) or innerOnFailure (failure).
        private final List<MembershipAction.JoinCallback> joinCallbacksToRespondTo = new ArrayList<>();
        private boolean nodeAdded = false;

        @Override
        public ClusterState execute(ClusterState currentState) {
            DiscoveryNodes.Builder nodesBuilder;
            synchronized (pendingJoinRequests) {
                if (pendingJoinRequests.isEmpty()) {
                    return currentState;
                }

                nodesBuilder = DiscoveryNodes.builder(currentState.nodes());
                Iterator<Map.Entry<DiscoveryNode, List<MembershipAction.JoinCallback>>> iterator = pendingJoinRequests.entrySet().iterator();
                while (iterator.hasNext()) {
                    Map.Entry<DiscoveryNode, List<MembershipAction.JoinCallback>> entry = iterator.next();
                    final DiscoveryNode node = entry.getKey();
                    joinCallbacksToRespondTo.addAll(entry.getValue());
                    iterator.remove();
                    if (currentState.nodes().nodeExists(node.id())) {
                        logger.debug("received a join request for an existing node [{}]", node);
                    } else {
                        nodeAdded = true;
                        nodesBuilder.put(node);
                        // A node that restarted re-joins with a new id but the same
                        // address; drop the stale entry in favor of the new one.
                        for (DiscoveryNode existingNode : currentState.nodes()) {
                            if (node.address().equals(existingNode.address())) {
                                nodesBuilder.remove(existingNode.id());
                                logger.warn("received join request from node [{}], but found existing node {} with same address, removing existing node", node, existingNode);
                            }
                        }
                    }
                }
            }

            // we must return a new cluster state instance to force publishing. This is important
            // for the joining node to finalize it's join and set us as a master
            final ClusterState.Builder newState = ClusterState.builder(currentState);
            if (nodeAdded) {
                newState.nodes(nodesBuilder);
            }

            return newState.build();
        }

        @Override
        public void onNoLongerMaster(String source) {
            // we are rejected, so drain all pending task (execute never run)
            synchronized (pendingJoinRequests) {
                Iterator<Map.Entry<DiscoveryNode, List<MembershipAction.JoinCallback>>> iterator = pendingJoinRequests.entrySet().iterator();
                while (iterator.hasNext()) {
                    Map.Entry<DiscoveryNode, List<MembershipAction.JoinCallback>> entry = iterator.next();
                    joinCallbacksToRespondTo.addAll(entry.getValue());
                    iterator.remove();
                }
            }
            Exception e = new NotMasterException("Node [" + clusterService.localNode() + "] not master for join request");
            innerOnFailure(e);
        }

        // Notifies every drained callback of the failure; per-callback errors are
        // logged so one misbehaving callback cannot starve the rest.
        void innerOnFailure(Throwable t) {
            for (MembershipAction.JoinCallback callback : joinCallbacksToRespondTo) {
                try {
                    callback.onFailure(t);
                } catch (Exception e) {
                    logger.error("error during task failure", e);
                }
            }
        }

        @Override
        public void onFailure(String source, Throwable t) {
            logger.error("unexpected failure during [{}]", t, source);
            innerOnFailure(t);
        }

        @Override
        public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
            if (nodeAdded) {
                // we reroute not in the same cluster state update since in certain areas we rely on
                // the node to be in the cluster state (sampled from ClusterService#state) to be there, also
                // shard transitions need to better be handled in such cases
                routingService.reroute("post_node_add");
            }
            for (MembershipAction.JoinCallback callback : joinCallbacksToRespondTo) {
                try {
                    callback.onSuccess();
                } catch (Exception e) {
                    logger.error("unexpected error during [{}]", e, source);
                }
            }
        }
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by http://code.google.com/p/protostuff/ ... DO NOT EDIT!
// Generated from protobuf

package org.apache.drill.exec.proto.beans;

import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;

import com.dyuproject.protostuff.GraphIOUtil;
import com.dyuproject.protostuff.Input;
import com.dyuproject.protostuff.Message;
import com.dyuproject.protostuff.Output;
import com.dyuproject.protostuff.Schema;

/**
 * Protostuff-generated bean for the RpcFailure protobuf message. The class is
 * its own {@link Schema}, so it both carries the field values and knows how to
 * serialize/deserialize itself. Generated code - do not hand-edit.
 */
public final class RpcFailure implements Externalizable, Message<RpcFailure>, Schema<RpcFailure> {

    public static Schema<RpcFailure> getSchema() {
        return DEFAULT_INSTANCE;
    }

    public static RpcFailure getDefaultInstance() {
        return DEFAULT_INSTANCE;
    }

    static final RpcFailure DEFAULT_INSTANCE = new RpcFailure();

    private long errorId;
    private int errorCode;
    private String shortError;
    private String longError;

    public RpcFailure() {
    }

    // getters and setters

    // errorId

    public long getErrorId() {
        return errorId;
    }

    // Fluent setter: returns this for chaining.
    public RpcFailure setErrorId(long errorId) {
        this.errorId = errorId;
        return this;
    }

    // errorCode

    public int getErrorCode() {
        return errorCode;
    }

    public RpcFailure setErrorCode(int errorCode) {
        this.errorCode = errorCode;
        return this;
    }

    // shortError

    public String getShortError() {
        return shortError;
    }

    public RpcFailure setShortError(String shortError) {
        this.shortError = shortError;
        return this;
    }

    // longError

    public String getLongError() {
        return longError;
    }

    public RpcFailure setLongError(String longError) {
        this.longError = longError;
        return this;
    }

    // java serialization

    public void readExternal(ObjectInput in) throws IOException {
        GraphIOUtil.mergeDelimitedFrom(in, this, this);
    }

    public void writeExternal(ObjectOutput out) throws IOException {
        GraphIOUtil.writeDelimitedTo(out, this, this);
    }

    // message method

    public Schema<RpcFailure> cachedSchema() {
        return DEFAULT_INSTANCE;
    }

    // schema methods

    public RpcFailure newMessage() {
        return new RpcFailure();
    }

    public Class<RpcFailure> typeClass() {
        return RpcFailure.class;
    }

    public String messageName() {
        return RpcFailure.class.getSimpleName();
    }

    public String messageFullName() {
        return RpcFailure.class.getName();
    }

    public boolean isInitialized(RpcFailure message) {
        return true;
    }

    // Reads fields by tag number until the 0 end-of-message marker.
    public void mergeFrom(Input input, RpcFailure message) throws IOException {
        for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this)) {
            switch(number) {
                case 0:
                    return;
                case 1:
                    message.errorId = input.readInt64();
                    break;
                case 2:
                    message.errorCode = input.readInt32();
                    break;
                case 3:
                    message.shortError = input.readString();
                    break;
                case 4:
                    message.longError = input.readString();
                    break;
                default:
                    input.handleUnknownField(number, this);
            }
        }
    }

    // Writes only non-default field values (proto default-skipping semantics).
    public void writeTo(Output output, RpcFailure message) throws IOException {
        if(message.errorId != 0)
            output.writeInt64(1, message.errorId, false);

        if(message.errorCode != 0)
            output.writeInt32(2, message.errorCode, false);

        if(message.shortError != null)
            output.writeString(3, message.shortError, false);

        if(message.longError != null)
            output.writeString(4, message.longError, false);
    }

    public String getFieldName(int number) {
        switch(number) {
            case 1: return "errorId";
            case 2: return "errorCode";
            case 3: return "shortError";
            case 4: return "longError";
            default: return null;
        }
    }

    public int getFieldNumber(String name) {
        final Integer number = __fieldMap.get(name);
        return number == null ? 0 : number.intValue();
    }

    // Reverse lookup: field name -> tag number.
    private static final java.util.HashMap<String,Integer> __fieldMap = new java.util.HashMap<String,Integer>();
    static {
        __fieldMap.put("errorId", 1);
        __fieldMap.put("errorCode", 2);
        __fieldMap.put("shortError", 3);
        __fieldMap.put("longError", 4);
    }
}
/*
 * Copyright 2017 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.cloud.bigtable.beam;

import java.io.IOException;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.TimeUnit;
import java.util.Map.Entry;

import org.apache.beam.sdk.options.ValueProvider;
import org.apache.beam.sdk.options.ValueProvider.StaticValueProvider;
import org.apache.beam.sdk.transforms.display.DisplayData;
import org.apache.hadoop.conf.Configuration;

import com.google.bigtable.repackaged.com.google.cloud.bigtable.config.BigtableOptions;
import com.google.bigtable.repackaged.com.google.common.base.Preconditions;
import com.google.bigtable.repackaged.com.google.common.collect.ImmutableMap;
import com.google.cloud.bigtable.hbase.BigtableOptionsFactory;

/**
 * This class defines configuration that a Cloud Bigtable client needs to connect to a Cloud
 * Bigtable instance.
 */
public class CloudBigtableConfiguration implements Serializable {

  private static final long serialVersionUID = 1655181275627002133L;

  // TODO(kevinsi): Rename to RUNTIME_VARIABLE_UNAVAILABLE?
  // Placeholder display value used when a runtime-provided parameter cannot be
  // read at pipeline construction time.
  public static final String VALUE_UNAVAILABLE = "Unavailable during pipeline construction";

  /**
   * Builds a {@link CloudBigtableConfiguration}.
   */
  public static class Builder {
    protected ValueProvider<String> projectId;
    protected ValueProvider<String> instanceId;
    // Additional connection settings keyed by BigtableOptionsFactory key names.
    protected Map<String, ValueProvider<String>> additionalConfiguration = new HashMap<>();

    public Builder() {
    }

    // Seeds this builder from an existing configuration map; the project and
    // instance IDs are pulled out of the map into their dedicated fields.
    protected void copyFrom(Map<String, ValueProvider<String>> configuration) {
      this.additionalConfiguration.putAll(configuration);

      this.projectId = this.additionalConfiguration.remove(BigtableOptionsFactory.PROJECT_ID_KEY);
      this.instanceId = this.additionalConfiguration.remove(BigtableOptionsFactory.INSTANCE_ID_KEY);
    }

    /**
     * Specifies the project ID for the Cloud Bigtable instance.
     * @param projectId The project ID for the instance.
     * @return The {@link CloudBigtableConfiguration.Builder} for chaining convenience.
     */
    public Builder withProjectId(String projectId) {
      return withProjectId(StaticValueProvider.of(projectId));
    }

    /**
     * Specifies the project ID for the Cloud Bigtable instance.
     * @param projectId The project ID for the instance.
     * @return The {@link CloudBigtableConfiguration.Builder} for chaining convenience.
     */
    Builder withProjectId(ValueProvider<String> projectId) {
      this.projectId = projectId;
      return this;
    }

    /**
     * Specifies the Cloud Bigtable instanceId.
     * @param instanceId The Cloud Bigtable instanceId.
     * @return The {@link CloudBigtableConfiguration.Builder} for chaining convenience.
     */
    public Builder withInstanceId(String instanceId) {
      return withInstanceId(StaticValueProvider.of(instanceId));
    }

    /**
     * Specifies the Cloud Bigtable instanceId.
     * @param instanceId The Cloud Bigtable instanceId.
     * @return The {@link CloudBigtableConfiguration.Builder} for chaining convenience.
     */
    Builder withInstanceId(ValueProvider<String> instanceId) {
      this.instanceId = instanceId;
      return this;
    }

    /**
     * Specifies the AppProfile to use.
     */
    public Builder withAppProfileId(String appProfileId) {
      return withAppProfileId(StaticValueProvider.of(appProfileId));
    }

    /**
     * Specifies the AppProfile to use.
     */
    Builder withAppProfileId(ValueProvider<String> appProfileId) {
      return withConfiguration(BigtableOptionsFactory.APP_PROFILE_ID_KEY, appProfileId);
    }

    /**
     * Adds additional connection configuration.
     * {@link BigtableOptionsFactory#fromConfiguration(Configuration)} for more information about
     * configuration options.
     * @return The {@link CloudBigtableConfiguration.Builder} for chaining convenience.
     */
    public Builder withConfiguration(String key, String value) {
      return withConfiguration(key, StaticValueProvider.of(value));
    }

    /**
     * Adds additional connection configuration.
     * {@link BigtableOptionsFactory#fromConfiguration(Configuration)} for more information about
     * configuration options.
     * @return The {@link CloudBigtableConfiguration.Builder} for chaining convenience.
     */
    Builder withConfiguration(String key, ValueProvider<String> value) {
      Preconditions.checkArgument(value != null, "Value cannot be null");
      this.additionalConfiguration.put(key, value);
      return this;
    }

    /**
     * Builds the {@link CloudBigtableConfiguration}.
     *
     * @return The new {@link CloudBigtableConfiguration}.
     */
    public CloudBigtableConfiguration build() {
      // Keeping the legacy constructor for backwards compatibility.
      // Choose the new one if instance is specified.
      return new CloudBigtableConfiguration(
          projectId, instanceId, additionalConfiguration);
    }
  }

  // Not final due to serialization of CloudBigtableScanConfiguration.
  // Holds every setting, including the project/instance IDs, keyed by
  // BigtableOptionsFactory key names.
  private Map<String, ValueProvider<String>> configuration;

  // Used for serialization of CloudBigtableScanConfiguration.
  CloudBigtableConfiguration() {
  }

  /**
   * Creates a {@link CloudBigtableConfiguration} using the specified project ID and instance ID.
   *
   * @param projectId The project ID for the instance.
   * @param instanceId The instance ID.
   * @param additionalConfiguration A {@link Map} with additional connection configuration. See
   *          {@link BigtableOptionsFactory#fromConfiguration(Configuration)} for more information
   *          about configuration options.
*/
  protected CloudBigtableConfiguration(
      ValueProvider<String> projectId,
      ValueProvider<String> instanceId,
      Map<String, ValueProvider<String>> additionalConfiguration) {
    this.configuration = new HashMap<>(additionalConfiguration);
    setValue(BigtableOptionsFactory.PROJECT_ID_KEY, projectId, "Project ID");
    setValue(BigtableOptionsFactory.INSTANCE_ID_KEY, instanceId, "Instance ID");
  }

  // Inserts a required value into the configuration map, rejecting duplicates
  // and nulls with a descriptive error.
  private void setValue(String key, ValueProvider<String> value, String type) {
    Preconditions.checkArgument(!configuration.containsKey(key), "%s was set twice", key);
    Preconditions.checkArgument(value != null, "%s must be set.", type);
    configuration.put(key, value);
  }

  /**
   * Gets the project ID for the Cloud Bigtable instance.
   * @return The project ID for the instance.
   */
  public String getProjectId() {
    return configuration.get(BigtableOptionsFactory.PROJECT_ID_KEY).get();
  }

  /**
   * Gets the Cloud Bigtable instance id.
   * @return The Cloud Bigtable instance id.
   */
  public String getInstanceId() {
    return configuration.get(BigtableOptionsFactory.INSTANCE_ID_KEY).get();
  }

  /**
   * Get the Cloud Bigtable App Profile id.
   */
  public String getAppProfileId() {
    // NOTE(review): NPEs when no app profile was configured - confirm callers
    // only invoke this after withAppProfileId().
    return configuration.get(BigtableOptionsFactory.APP_PROFILE_ID_KEY).get();
  }

  /**
   * Converts the {@link CloudBigtableConfiguration} to a {@link BigtableOptions} object.
   * @return The {@link BigtableOptions} object.
   */
  public BigtableOptions toBigtableOptions() throws IOException {
    return BigtableOptionsFactory.fromConfiguration(toHBaseConfig());
  }

  /**
   * Converts the {@link CloudBigtableConfiguration} to an HBase {@link Configuration}.
   * @return The {@link Configuration}.
   */
  public Configuration toHBaseConfig() {
    Configuration config = new Configuration(false);
    config.set(BigtableOptionsFactory.BIGTABLE_USE_CACHED_DATA_CHANNEL_POOL, "true");

    // Beam should use a different endpoint for data operations than online traffic.
    config.set(BigtableOptionsFactory.BIGTABLE_HOST_KEY,
        BigtableOptions.BIGTABLE_BATCH_DATA_HOST_DEFAULT);

    config.set(BigtableOptionsFactory.INITIAL_ELAPSED_BACKOFF_MILLIS_KEY,
        String.valueOf(TimeUnit.SECONDS.toMillis(5)));

    config.set(BigtableOptionsFactory.MAX_ELAPSED_BACKOFF_MILLIS_KEY,
        String.valueOf(TimeUnit.MINUTES.toMillis(5)));

    // This setting can potentially decrease performance for large scale writes. However, this
    // setting prevents problems that occur when streaming Sources, such as PubSub, are used.
    // To override this behavior, call:
    //    Builder.withConfiguration(BigtableOptionsFactory.BIGTABLE_ASYNC_MUTATOR_COUNT_KEY,
    //                              BigtableOptions.BIGTABLE_ASYNC_MUTATOR_COUNT_DEFAULT);
    config.set(BigtableOptionsFactory.BIGTABLE_ASYNC_MUTATOR_COUNT_KEY, "0");

    for (Entry<String, ValueProvider<String>> entry : configuration.entrySet()) {
      // If the value from ValueProvider is null, the value was not provided at runtime.
      if (entry.getValue().get() != null) {
        config.set(entry.getKey(), entry.getValue().get());
      }
    }
    setUserAgent(config);
    return config;
  }

  // Tags outgoing requests with a Beam-specific user agent, appending any
  // custom agent the user configured.
  private void setUserAgent(Configuration config) {
    String beamUserAgent = "HBaseBeam";
    if (configuration.containsKey(BigtableOptionsFactory.CUSTOM_USER_AGENT_KEY)) {
      beamUserAgent += "," + configuration.get(BigtableOptionsFactory.CUSTOM_USER_AGENT_KEY);
    }
    config.set(BigtableOptionsFactory.CUSTOM_USER_AGENT_KEY, beamUserAgent);
  }

  /**
   * Creates a new {@link Builder} object containing the existing configuration.
   * @return A new {@link Builder}.
   */
  public Builder toBuilder() {
    Builder builder = new Builder();
    copyConfig(builder);
    return builder;
  }

  /**
   * Gets an immutable copy of the configuration map.
   */
  protected ImmutableMap<String, ValueProvider<String>> getConfiguration() {
    return ImmutableMap.copyOf(configuration);
  }

  /**
   * Compares this configuration with the specified object.
   *
   * @param obj The object to compare this configuration against.
   * @return {@code true} if the given object has the same configuration, {@code false} otherwise.
   */
  // NOTE(review): equals is overridden without a matching hashCode override,
  // which breaks the equals/hashCode contract for hash-based collections -
  // flagging rather than changing since callers may rely on identity hashing.
  @Override
  public boolean equals(Object obj) {
    if (obj == null || obj.getClass() != this.getClass()) {
      return false;
    }
    CloudBigtableConfiguration other = (CloudBigtableConfiguration) obj;
    // Compare keys first, then the resolved value of each provider.
    if (!Objects.equals(configuration.keySet(), other.configuration.keySet())) {
      return false;
    }
    for (String key : configuration.keySet()) {
      if (!Objects.equals(configuration.get(key).get(), other.configuration.get(key).get())) {
        return false;
      }
    }
    return true;
  }

  // Copies this configuration into the given builder (used by toBuilder()).
  public void copyConfig(Builder builder) {
    builder.copyFrom(configuration);
  }

  /**
   * Checks if the parameters are accessible. Runtime parameters are not accessible at pipeline
   * construction time.
   */
  protected boolean areParametersAccessible() {
    return configuration.get(BigtableOptionsFactory.PROJECT_ID_KEY).isAccessible();
  }

  /**
   * @param <T> parameter The runtime parameter.
   * @return the String value of runtime parameter if the parameter is accessible, returns
   *         "Unavailable during pipeline construction" otherwise for debugging purpose.
*/
  protected static <T> String getDisplayValue(ValueProvider<T> parameter) {
    if (parameter.isAccessible()) {
      return String.valueOf(parameter.get());
    }
    return VALUE_UNAVAILABLE;
  }

  /**
   * Adds this configuration to the pipeline's {@link DisplayData}: the project and instance IDs
   * with friendly labels, followed by every remaining configuration entry keyed (and labeled) by
   * its configuration key.
   *
   * @param builder The {@link DisplayData.Builder} to populate.
   */
  public void populateDisplayData(DisplayData.Builder builder) {
    builder.add(
        DisplayData.item(
            "projectId", getDisplayValue(configuration.get(BigtableOptionsFactory.PROJECT_ID_KEY)))
        .withLabel("Project ID"));
    builder.add(
        DisplayData.item(
            "instanceId", getDisplayValue(configuration.get(BigtableOptionsFactory.INSTANCE_ID_KEY)))
        .withLabel("Instance ID"));

    // Display the remaining entries without repeating the project/instance IDs
    // that were already added with friendly labels above.
    Map<String, ValueProvider<String>> hashMap =
        new HashMap<String, ValueProvider<String>>(configuration);
    hashMap.remove(BigtableOptionsFactory.PROJECT_ID_KEY);
    hashMap.remove(BigtableOptionsFactory.INSTANCE_ID_KEY);
    // BUG FIX: iterate over the filtered copy (hashMap), not the full
    // configuration map. The original looped over `configuration`, which
    // re-emitted the projectId/instanceId entries and left `hashMap` unused.
    for (Entry<String, ValueProvider<String>> entry : hashMap.entrySet()) {
      builder.add(
          DisplayData.item(entry.getKey(), getDisplayValue(entry.getValue()))
              .withLabel(entry.getKey()));
    }
  }

  /**
   * Fails with an {@link IllegalArgumentException} when the given value is null or empty;
   * {@code name} identifies the offending setting in the error message.
   */
  protected static void checkNotNullOrEmpty(String value, String name) {
    Preconditions.checkArgument(
        value != null && !value.isEmpty(), "A " + name + " must be set to configure Bigtable properly.");
  }

  /**
   * Validates that the required project and instance IDs are present and non-empty.
   * Skipped when the parameters are runtime-provided and not yet accessible.
   */
  public void validate() {
    if (areParametersAccessible()) {
      checkNotNullOrEmpty(getProjectId(), "projectId");
      checkNotNullOrEmpty(getInstanceId(), "instanceId");
    }
  }
}
// Copyright (c) 2017, Baidu.com, Inc. All Rights Reserved // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package com.baidu.palo.backup; import com.baidu.palo.alter.RollupHandler; import com.baidu.palo.analysis.AddPartitionClause; import com.baidu.palo.analysis.AddRollupClause; import com.baidu.palo.analysis.AlterClause; import com.baidu.palo.analysis.AlterTableStmt; import com.baidu.palo.analysis.CreateTableStmt; import com.baidu.palo.analysis.LabelName; import com.baidu.palo.catalog.Catalog; import com.baidu.palo.catalog.Database; import com.baidu.palo.catalog.MaterializedIndex; import com.baidu.palo.catalog.OlapTable; import com.baidu.palo.catalog.OlapTable.OlapTableState; import com.baidu.palo.catalog.Partition; import com.baidu.palo.catalog.PartitionInfo; import com.baidu.palo.catalog.PartitionType; import com.baidu.palo.catalog.RangePartitionInfo; import com.baidu.palo.catalog.Replica; import com.baidu.palo.catalog.Table; import com.baidu.palo.catalog.Tablet; import com.baidu.palo.catalog.TabletInvertedIndex; import com.baidu.palo.catalog.TabletMeta; import com.baidu.palo.catalog.Table.TableType; import com.baidu.palo.common.AnalysisException; import com.baidu.palo.common.DdlException; import com.baidu.palo.common.Pair; import com.baidu.palo.common.io.Text; import com.baidu.palo.common.util.TimeUtils; import com.baidu.palo.common.util.Util; import com.baidu.palo.task.AgentBatchTask; import com.baidu.palo.task.AgentTask; import 
com.baidu.palo.task.AgentTaskExecutor; import com.baidu.palo.task.AgentTaskQueue; import com.baidu.palo.task.RestoreTask; import com.baidu.palo.thrift.TTaskType; import com.google.common.base.Preconditions; import com.google.common.base.Strings; import com.google.common.collect.HashBasedTable; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import java.io.DataInput; import java.io.DataOutput; import java.io.File; import java.io.IOException; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ExecutionException; public class RestoreJob extends AbstractBackupJob { private static final Logger LOG = LogManager.getLogger(RestoreJob.class); public enum RestoreJobState { PENDING, RESTORE_META, DOWNLOAD, DOWNLOADING, FINISHED, CANCELLED } private RestoreJobState state; private Map<String, Set<String>> tableToPartitionNames; private Map<String, String> tableRenameMap; private Map<String, CreateTableStmt> tableToCreateTableStmt; private Map<String, AlterTableStmt> tableToRollupStmt; private com.google.common.collect.Table<String, String, AlterTableStmt> tableToPartitionStmts; private Map<String, Boolean> tableToReplace; private Map<String, Table> restoredTables; // tableid - partition name - partition private com.google.common.collect.Table<Long, String, Partition> restoredPartitions; private long metaRestoredTime; private long downloadFinishedTime; public RestoreJob() { super(); } public RestoreJob(long jobId, long dbId, LabelName labelName, String restorePath, Map<String, String> remoteProperties, Map<String, Set<String>> tableToPartitionNames, Map<String, String> tableRenameMap) { super(jobId, dbId, labelName, restorePath, remoteProperties); state = RestoreJobState.PENDING; this.tableToPartitionNames = tableToPartitionNames; this.tableRenameMap = 
tableRenameMap; this.tableToCreateTableStmt = Maps.newHashMap(); this.tableToRollupStmt = Maps.newHashMap(); this.tableToPartitionStmts = HashBasedTable.create(); this.tableToReplace = Maps.newHashMap(); this.restoredTables = Maps.newHashMap(); this.restoredPartitions = HashBasedTable.create(); this.metaRestoredTime = -1L; this.downloadFinishedTime = -1L; } public void setState(RestoreJobState state) { this.state = state; } public RestoreJobState getState() { return state; } public long getMetaRestoredTime() { return metaRestoredTime; } public long getDownloadFinishedTime() { return downloadFinishedTime; } public Map<String, Set<String>> getTableToPartitionNames() { return tableToPartitionNames; } @Override public List<Comparable> getJobInfo() { List<Comparable> jobInfo = Lists.newArrayList(); jobInfo.add(jobId); jobInfo.add(getLabel()); jobInfo.add(state.name()); jobInfo.add(TimeUtils.longToTimeString(createTime)); jobInfo.add(TimeUtils.longToTimeString(metaRestoredTime)); jobInfo.add(TimeUtils.longToTimeString(downloadFinishedTime)); jobInfo.add(TimeUtils.longToTimeString(finishedTime)); jobInfo.add(errMsg); jobInfo.add(remotePath); jobInfo.add(getLeftTasksNum()); return jobInfo; } @Override public void runOnce() { LOG.debug("begin to run restore job: {}, state: {}", jobId, state.name()); try { switch (state) { case PENDING: downloadBackupMeta(); break; case RESTORE_META: restoreMeta(); break; case DOWNLOAD: download(); break; case DOWNLOADING: waitDownload(); break; default: break; } } catch (Exception e) { errMsg = Strings.nullToEmpty(e.getMessage()); LOG.warn("failed to restore: [" + errMsg + "], job[" + jobId + "]", e); state = RestoreJobState.CANCELLED; } if (state == RestoreJobState.FINISHED || state == RestoreJobState.CANCELLED) { end(Catalog.getInstance(), false); } } private void downloadBackupMeta() throws DdlException, IOException, AnalysisException, InterruptedException, ExecutionException { Catalog catalog = Catalog.getInstance(); Database db = 
catalog.getDb(dbId); if (db == null) { throw new DdlException("Database[" + getDbName() + "] does not exist"); } if (pathBuilder == null) { pathBuilder = PathBuilder.createPathBuilder(getLocalDirName()); } if (commandBuilder == null) { String remotePropFilePath = pathBuilder.remoteProperties(); commandBuilder = CommandBuilder.create(remotePropFilePath, remoteProperties); } if (future == null) { // 1. download manifest LOG.info("begin to submit download backup meta. job: {}", jobId); MetaDownloadTask task = new MetaDownloadTask(jobId, getDbName(), getLabel(), getLocalDirName(), remotePath, pathBuilder, commandBuilder, tableToPartitionNames, tableToCreateTableStmt, tableToRollupStmt, tableToPartitionStmts, tableToReplace, tableRenameMap); future = Catalog.getInstance().getBackupHandler().getAsynchronousCmdExecutor().submit(task); } else { boolean finished = checkFuture("download backup meta"); if (!finished) { return; } future = null; state = RestoreJobState.RESTORE_META; } } private void restoreMeta() throws DdlException { Catalog catalog = Catalog.getInstance(); Database db = catalog.getDb(dbId); if (db == null) { throw new DdlException("Database[" + getDbName() + "] does not exist"); } for (Map.Entry<String, CreateTableStmt> entry : tableToCreateTableStmt.entrySet()) { String newTableName = entry.getKey(); CreateTableStmt createTableStmt = entry.getValue(); Boolean replace = tableToReplace.get(newTableName); if (replace) { // 1. create table Table restoredTable = catalog.createTable(createTableStmt, true); restoredTables.put(newTableName, restoredTable); if (restoredTable.getType() != TableType.OLAP) { continue; } OlapTable restoredOlapTable = (OlapTable) restoredTable; // 2. 
create rollup RollupHandler rollupHandler = catalog.getRollupHandler(); AlterTableStmt rollupStmt = tableToRollupStmt.get(newTableName); if (rollupStmt != null) { // check if new table name conflicts with rollup index name for (AlterClause clause : rollupStmt.getOps()) { Preconditions.checkState(clause instanceof AddRollupClause); String rollupName = ((AddRollupClause) clause).getRollupName(); if (rollupName.equals(newTableName)) { throw new DdlException("New table name[" + newTableName + "] conflicts with rollup index name"); } } rollupHandler.process(rollupStmt.getOps(), db, restoredOlapTable, true); } // 3. create partition Map<String, AlterTableStmt> partitionStmts = tableToPartitionStmts.row(newTableName); if (partitionStmts.isEmpty()) { continue; } RangePartitionInfo rangePartitionInfo = (RangePartitionInfo) restoredOlapTable.getPartitionInfo(); for (Map.Entry<String, AlterTableStmt> entry2 : partitionStmts.entrySet()) { AlterTableStmt stmt = entry2.getValue(); AddPartitionClause clause = (AddPartitionClause) stmt.getOps().get(0); Pair<Long, Partition> res = catalog.addPartition(db, newTableName, restoredOlapTable, clause, true); Partition partition = res.second; rangePartitionInfo.handleNewSinglePartitionDesc(clause.getSingeRangePartitionDesc(), partition.getId()); restoredOlapTable.addPartition(partition); } } else { Map<String, AlterTableStmt> partitionStmts = tableToPartitionStmts.row(newTableName); for (Map.Entry<String, AlterTableStmt> entry2 : partitionStmts.entrySet()) { AlterTableStmt stmt = entry2.getValue(); Pair<Long, Partition> res = catalog.addPartition(db, newTableName, null, (AddPartitionClause) stmt.getOps().get(0), true); long tableId = res.first; Partition partition = res.second; restoredPartitions.put(tableId, partition.getName(), partition); } } } metaRestoredTime = System.currentTimeMillis(); state = RestoreJobState.DOWNLOAD; LOG.info("finished restore tables. 
job[{}]", jobId); } private void download() { for (Map.Entry<String, Table> entry : restoredTables.entrySet()) { String newTableName = entry.getKey(); String tableName = tableRenameMap.get(newTableName); Table table = entry.getValue(); if (table.getType() != TableType.OLAP) { continue; } AgentBatchTask batchTask = new AgentBatchTask(); OlapTable olapTable = (OlapTable) table; long tableId = olapTable.getId(); for (Partition partition : olapTable.getPartitions()) { String partitionName = partition.getName(); if (olapTable.getPartitionInfo().getType() == PartitionType.UNPARTITIONED) { // single partition table partitionName = tableName; } long partitionId = partition.getId(); for (MaterializedIndex index : partition.getMaterializedIndices()) { long indexId = index.getId(); String indexName = olapTable.getIndexNameById(index.getId()); if (indexName.equals(newTableName)) { // base index indexName = tableName; } List<Long> orderedBackupedTabletIdList = getRestoredTabletInfo(tableName, partitionName, indexName); int schemaHash = olapTable.getSchemaHashByIndexId(index.getId()); List<Tablet> tablets = index.getTablets(); for (int i = 0; i < tablets.size(); i++) { Tablet tablet = tablets.get(i); Long backupedTabletId = orderedBackupedTabletIdList.get(i); String remoteFilePath = PathBuilder.createPath(remotePath, getDbName(), tableName, partitionName, indexName, backupedTabletId.toString()); for (Replica replica : tablet.getReplicas()) { RestoreTask task = new RestoreTask(null, replica.getBackendId(), jobId, dbId, tableId, partitionId, indexId, tablet.getId(), schemaHash, remoteFilePath, remoteProperties); batchTask.addTask(task); } } // end for tablets } // end for indices } // end for partitions synchronized (unfinishedTabletIds) { for (AgentTask task : batchTask.getAllTasks()) { AgentTaskQueue.addTask(task); unfinishedTabletIds.put(task.getTabletId(), task.getBackendId()); } } AgentTaskExecutor.submit(batchTask); LOG.info("finished send restore tasks for table: {}, job: 
{}", tableName, jobId); } // end for tables state = RestoreJobState.DOWNLOADING; LOG.info("finished send all restore tasks. job: {}", jobId); } private List<Long> getRestoredTabletInfo(String tableName, String partitionName, String indexName) { // pathBuilder.getRoot().print("\t"); DirSaver indexDir = (DirSaver) pathBuilder.getRoot().getChild(getDbName()).getChild(tableName) .getChild(partitionName).getChild(indexName); Collection<String> tabletNames = indexDir.getChildrenName(); Set<Long> orderedBackupedTabletIds = Sets.newTreeSet(); for (String tabletName : tabletNames) { orderedBackupedTabletIds.add(Long.valueOf(tabletName)); } List<Long> orderedBackupedTabletIdList = Lists.newArrayList(orderedBackupedTabletIds); return orderedBackupedTabletIdList; } private void waitDownload() throws DdlException { synchronized (unfinishedTabletIds) { if (!unfinishedTabletIds.isEmpty()) { LOG.debug("waiting for unfinished download task. size: {}", unfinishedTabletIds.size()); return; } } downloadFinishedTime = System.currentTimeMillis(); LOG.info("all tablets restore finished. job: {}", jobId); finishing(Catalog.getInstance(), false); state = RestoreJobState.FINISHED; } public void finishing(Catalog catalog, boolean isReplay) throws DdlException { Database db = catalog.getDb(dbId); if (db == null && !isReplay) { throw new DdlException("Database[{}] does not exist"); } db.writeLock(); try { // check again if table or partition already exist for (Map.Entry<String, Table> entry : restoredTables.entrySet()) { String tableName = entry.getKey(); Table currentTable = db.getTable(tableName); if (currentTable != null) { throw new DdlException("Table[" + tableName + "]' already exist. 
" + "Drop table first or restore to another table"); } } for (long tableId : restoredPartitions.rowKeySet()) { Table table = db.getTable(tableId); if (table == null || table.getType() != TableType.OLAP) { throw new DdlException("Table[" + tableId + "]' does not exist."); } Map<String, Partition> partitions = restoredPartitions.row(tableId); OlapTable olapTable = (OlapTable) table; for (Map.Entry<String, Partition> entry : partitions.entrySet()) { String partitionName = entry.getKey(); Partition currentPartition = olapTable.getPartition(partitionName); if (currentPartition != null) { throw new DdlException("Partition[" + partitionName + "]' already exist in table[" + tableId + "]. Drop partition first or restore to another table"); } } } // add tables for (Map.Entry<String, Table> entry : restoredTables.entrySet()) { String tableName = entry.getKey(); Table restoredTable = entry.getValue(); if (restoredTable.getType() == TableType.OLAP) { OlapTable olapTable = (OlapTable) restoredTable; olapTable.setState(OlapTableState.NORMAL); if (isReplay) { // add inverted index TabletInvertedIndex invertedIndex = Catalog.getCurrentInvertedIndex(); long tableId = olapTable.getId(); for (Partition partition : olapTable.getPartitions()) { long partitionId = partition.getId(); for (MaterializedIndex index : partition.getMaterializedIndices()) { long indexId = index.getId(); int schemaHash = olapTable.getSchemaHashByIndexId(indexId); for (Tablet tablet : index.getTablets()) { long tabletId = tablet.getId(); TabletMeta tabletMeta = new TabletMeta(dbId, tableId, partitionId, indexId, schemaHash); invertedIndex.addTablet(tabletId, tabletMeta); for (Replica replica : tablet.getReplicas()) { invertedIndex.addReplica(tabletId, replica); } } } } } } db.createTable(restoredTable); LOG.info("finished add table: {}, job: {}, replay: {}", tableName, jobId, isReplay); } // add partitions for (long tableId : restoredPartitions.rowKeySet()) { Table table = db.getTable(tableId); String tableName = 
table.getName(); Preconditions.checkState(table != null, tableName); Preconditions.checkState(table.getType() == TableType.OLAP, tableName); OlapTable olapTable = (OlapTable) table; PartitionInfo partitionInfo = olapTable.getPartitionInfo(); Preconditions.checkState(partitionInfo.getType() == PartitionType.RANGE); RangePartitionInfo rangePartitionInfo = (RangePartitionInfo) partitionInfo; Map<String, Partition> partitions = restoredPartitions.row(tableId); for (Map.Entry<String, Partition> entry : partitions.entrySet()) { String partitionName = entry.getKey(); Partition partition = entry.getValue(); long partitionId = partition.getId(); // add restored partition AlterTableStmt stmt = tableToPartitionStmts.get(tableName, partitionName); AddPartitionClause clause = (AddPartitionClause) stmt.getOps().get(0); rangePartitionInfo.handleNewSinglePartitionDesc(clause.getSingeRangePartitionDesc(), partitionId); olapTable.addPartition(partition); // add inverted index if (isReplay) { TabletInvertedIndex invertedIndex = Catalog.getCurrentInvertedIndex(); for (MaterializedIndex index : partition.getMaterializedIndices()) { long indexId = index.getId(); int schemaHash = olapTable.getSchemaHashByIndexId(indexId); for (Tablet tablet : index.getTablets()) { long tabletId = tablet.getId(); for (Replica replica : tablet.getReplicas()) { invertedIndex.addReplica(tabletId, replica); } TabletMeta tabletMeta = new TabletMeta(dbId, tableId, partitionId, indexId, schemaHash); invertedIndex.addTablet(tabletId, tabletMeta); } } } LOG.info("finished add partition: {}, table: {}, job: {}, replay: {}", partitionName, tableName, jobId, isReplay); } // end for partitions olapTable.setState(OlapTableState.NORMAL); } // end for tables } finally { db.writeUnlock(); } } public void handleFinishedRestore(long tabletId, long backendId) { synchronized (unfinishedTabletIds) { if (unfinishedTabletIds.remove(tabletId, backendId)) { LOG.debug("finished restore tablet[{}], backend[{}]", tabletId, 
backendId); } } } @Override public void end(Catalog catalog, boolean isReplay) { if (state == RestoreJobState.CANCELLED) { rollback(catalog); } // 2. set table state // restoreTableState(catalog); if (!isReplay) { // 3. remove agent tasks if left removeLeftTasks(); // 4. remove local file String labelDir = pathBuilder.getRoot().getFullPath(); Util.deleteDirectory(new File(labelDir)); LOG.debug("delete local dir: {}", labelDir); // 5. remove unused tablet in tablet inverted index clearInvertedIndex(); finishedTime = System.currentTimeMillis(); // log Catalog.getInstance().getEditLog().logRestoreFinish(this); } // clear for saving memory clearJob(); LOG.info("finished end job[{}]. state: {}, replay: {}", jobId, state.name(), isReplay); } private void clearInvertedIndex() { TabletInvertedIndex invertedIndex = Catalog.getCurrentInvertedIndex(); if (state == RestoreJobState.CANCELLED) { // clear restored table tablets for (Table restoredTable : restoredTables.values()) { if (restoredTable.getType() != TableType.OLAP) { continue; } OlapTable olapTable = (OlapTable) restoredTable; for (Partition partition : olapTable.getPartitions()) { for (MaterializedIndex index : partition.getMaterializedIndices()) { for (Tablet tablet : index.getTablets()) { invertedIndex.deleteTablet(tablet.getId()); } } } } // partition for (Partition partition : restoredPartitions.values()) { for (MaterializedIndex index : partition.getMaterializedIndices()) { for (Tablet tablet : index.getTablets()) { invertedIndex.deleteTablet(tablet.getId()); } } } } } @Override protected void clearJob() { tableRenameMap = null; tableToCreateTableStmt = null; tableToRollupStmt = null; tableToPartitionStmts = null; tableToReplace = null; restoredTables = null; restoredPartitions = null; unfinishedTabletIds = null; remoteProperties = null; pathBuilder = null; commandBuilder = null; LOG.info("job[{}] cleared for saving memory", jobId); } private void rollback(Catalog catalog) { Database db = catalog.getDb(dbId); if 
(db == null) { errMsg = "Database does not exist[" + getDbName() + "]"; LOG.info("{}. finished restore old meta. job: {}", errMsg, jobId); return; } db.writeLock(); try { // tables for (Table restoredTable : restoredTables.values()) { String tableName = restoredTable.getName(); // use table id rather than table name. // because table with same name may be created when doing restore. // find table by name may get unexpected one. Table currentTable = db.getTable(restoredTable.getId()); // drop restored table if (currentTable != null) { db.dropTable(tableName); LOG.info("drop restored table[{}] in db[{}]", tableName, dbId); } } // partitions for (long tableId : restoredPartitions.rowKeySet()) { OlapTable currentTable = (OlapTable) db.getTable(tableId); if (currentTable == null) { // table may be dropped during FINISHING phase continue; } // drop restored partitions for (String partitionName : restoredPartitions.row(tableId).keySet()) { Partition currentPartition = currentTable.getPartition(partitionName); if (currentPartition != null) { currentTable.dropPartition(dbId, partitionName, true); LOG.info("drop restored partition[{}] in table[{}] in db[{}]", partitionName, tableId, dbId); } currentTable.setState(OlapTableState.NORMAL); } } } finally { db.writeUnlock(); } } private void removeLeftTasks() { for (Map.Entry<Long, Long> entry : unfinishedTabletIds.entries()) { AgentTaskQueue.removeTask(entry.getValue(), TTaskType.RESTORE, entry.getKey()); } } @Override public void write(DataOutput out) throws IOException { super.write(out); Text.writeString(out, state.name()); if (tableToPartitionNames == null) { out.writeBoolean(false); } else { out.writeBoolean(true); int size = tableToPartitionNames.size(); out.writeInt(size); for (Map.Entry<String, Set<String>> entry : tableToPartitionNames.entrySet()) { Text.writeString(out, entry.getKey()); Set<String> partitionNames = entry.getValue(); size = partitionNames.size(); out.writeInt(size); for (String partitionName : 
partitionNames) { Text.writeString(out, partitionName); } } } if (tableRenameMap == null) { out.writeBoolean(false); } else { out.writeBoolean(true); int size = tableRenameMap.size(); out.writeInt(size); for (Map.Entry<String, String> entry : tableRenameMap.entrySet()) { Text.writeString(out, entry.getKey()); Text.writeString(out, entry.getValue()); } } if (tableToCreateTableStmt == null) { out.writeBoolean(false); } else { out.writeBoolean(true); int size = tableToCreateTableStmt.size(); out.writeInt(size); for (Map.Entry<String, CreateTableStmt> entry : tableToCreateTableStmt.entrySet()) { Text.writeString(out, entry.getKey()); entry.getValue().write(out); } } if (tableToRollupStmt == null) { out.writeBoolean(false); } else { out.writeBoolean(true); int size = tableToRollupStmt.size(); out.writeInt(size); for (Map.Entry<String, AlterTableStmt> entry : tableToRollupStmt.entrySet()) { Text.writeString(out, entry.getKey()); entry.getValue().write(out); } } if (tableToPartitionStmts == null) { out.writeBoolean(false); } else { out.writeBoolean(true); int size = tableToPartitionStmts.rowKeySet().size(); out.writeInt(size); for (String tableName : tableToPartitionStmts.rowKeySet()) { Text.writeString(out, tableName); Map<String, AlterTableStmt> row = tableToPartitionStmts.row(tableName); size = row.size(); out.writeInt(size); for (Map.Entry<String, AlterTableStmt> entry : row.entrySet()) { Text.writeString(out, entry.getKey()); entry.getValue().write(out); } } } if (tableToReplace == null) { out.writeBoolean(false); } else { out.writeBoolean(true); int size = tableToReplace.size(); out.writeInt(size); for (Map.Entry<String, Boolean> entry : tableToReplace.entrySet()) { Text.writeString(out, entry.getKey()); out.writeBoolean(entry.getValue()); } } if (restoredTables == null) { out.writeBoolean(false); } else { out.writeBoolean(true); int size = restoredTables.size(); out.writeInt(size); for (Map.Entry<String, Table> entry : restoredTables.entrySet()) { 
Text.writeString(out, entry.getKey()); entry.getValue().write(out); } } if (restoredPartitions == null) { out.writeBoolean(false); } else { out.writeBoolean(true); int size = restoredPartitions.size(); out.writeInt(size); for (long tableId : restoredPartitions.rowKeySet()) { out.writeLong(tableId); Map<String, Partition> row = restoredPartitions.row(tableId); size = row.size(); out.writeInt(size); for (Map.Entry<String, Partition> entry : row.entrySet()) { Text.writeString(out, entry.getKey()); entry.getValue().write(out); } } } out.writeLong(metaRestoredTime); out.writeLong(downloadFinishedTime); } @Override public void readFields(DataInput in) throws IOException { super.readFields(in); state = RestoreJobState.valueOf(Text.readString(in)); if (in.readBoolean()) { tableToPartitionNames = Maps.newHashMap(); int size = in.readInt(); for (int i = 0; i < size; i++) { String tableName = Text.readString(in); int count = in.readInt(); Set<String> partitionNames = Sets.newHashSet(); for (int j = 0; j < count; j++) { String partitionName = Text.readString(in); partitionNames.add(partitionName); } tableToPartitionNames.put(tableName, partitionNames); } } if (in.readBoolean()) { tableRenameMap = Maps.newHashMap(); int size = in.readInt(); for (int i = 0; i < size; i++) { String newTableName = Text.readString(in); String tableName = Text.readString(in); tableRenameMap.put(newTableName, tableName); } } if (in.readBoolean()) { tableToCreateTableStmt = Maps.newHashMap(); int size = in.readInt(); for (int i = 0; i < size; i++) { String tableName = Text.readString(in); CreateTableStmt stmt = CreateTableStmt.read(in); tableToCreateTableStmt.put(tableName, stmt); } } if (in.readBoolean()) { tableToRollupStmt = Maps.newHashMap(); int size = in.readInt(); for (int i = 0; i < size; i++) { String tableName = Text.readString(in); AlterTableStmt stmt = new AlterTableStmt(); stmt.readFields(in); tableToRollupStmt.put(tableName, stmt); } } if (in.readBoolean()) { tableToPartitionStmts = 
HashBasedTable.create(); int size = in.readInt(); for (int i = 0; i < size; i++) { String tableName = Text.readString(in); int count = in.readInt(); for (int j = 0; j < count; j++) { String partitionName = Text.readString(in); AlterTableStmt stmt = new AlterTableStmt(); stmt.readFields(in); tableToPartitionStmts.put(tableName, partitionName, stmt); } } } if (in.readBoolean()) { tableToReplace = Maps.newHashMap(); int size = in.readInt(); for (int i = 0; i < size; i++) { String tableName = Text.readString(in); Boolean replace = in.readBoolean(); tableToReplace.put(tableName, replace); } } if (in.readBoolean()) { restoredTables = Maps.newHashMap(); int size = in.readInt(); for (int i = 0; i < size; i++) { String tableName = Text.readString(in); Table table = Table.read(in); restoredTables.put(tableName, table); } } if (in.readBoolean()) { restoredPartitions = HashBasedTable.create(); int size = in.readInt(); for (int i = 0; i < size; i++) { long tableId = in.readLong(); int count = in.readInt(); for (int j = 0; j < count; j++) { String partitionName = Text.readString(in); Partition partition = Partition.read(in); restoredPartitions.put(tableId, partitionName, partition); } } } metaRestoredTime = in.readLong(); downloadFinishedTime = in.readLong(); } }
/** * $RCSfile$ * $Revision: 10875 $ * $Date: 2008-11-13 12:30:49 -0600 (Thu, 13 Nov 2008) $ * * Copyright 2003-2007 Jive Software. * * All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jivesoftware.smackx; import org.jivesoftware.smack.packet.Packet; import org.jivesoftware.smack.packet.PacketExtension; import org.jivesoftware.smackx.packet.DataForm; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.StringTokenizer; /** * Represents a Form for gathering data. The form could be of the following types: * <ul> * <li>form -> Indicates a form to fill out.</li> * <li>submit -> The form is filled out, and this is the data that is being returned from * the form.</li> * <li>cancel -> The form was cancelled. Tell the asker that piece of information.</li> * <li>result -> Data results being returned from a search, or some other query.</li> * </ul> * * Depending of the form's type different operations are available. For example, it's only possible * to set answers if the form is of type "submit". 
 *
 * @author Gaston Dombiak
 */
public class Form {

    /** Form of type "form": indicates a form to fill out. */
    public static final String TYPE_FORM = "form";
    /** Form of type "submit": the form is filled out and this is the returned data. */
    public static final String TYPE_SUBMIT = "submit";
    /** Form of type "cancel": the form was cancelled. */
    public static final String TYPE_CANCEL = "cancel";
    /** Form of type "result": data results returned from a search or some other query. */
    public static final String TYPE_RESULT = "result";

    // The wrapped "jabber:x:data" packet extension that actually stores the fields.
    private DataForm dataForm;

    /**
     * Returns a new Form if the packet is used for gathering data and includes an
     * extension that matches the elementName and namespace "x","jabber:x:data".
     *
     * @param packet the packet used for gathering data.
     * @return the data form parsed from the packet or <tt>null</tt> if there was not
     *      a form in the packet.
     */
    public static Form getFormFrom(Packet packet) {
        // Check if the packet includes the DataForm extension
        PacketExtension packetExtension = packet.getExtension("x", "jabber:x:data");
        if (packetExtension != null) {
            // Check if the existing DataForm is not a result of a search
            // (forms carrying reported data are search results, not fillable forms)
            DataForm dataForm = (DataForm) packetExtension;
            if (dataForm.getReportedData() == null)
                return new Form(dataForm);
        }
        // Otherwise return null
        return null;
    }

    /**
     * Creates a new Form that will wrap an existing DataForm. The wrapped DataForm must be
     * used for gathering data.
     *
     * @param dataForm the data form used for gathering data.
     */
    public Form(DataForm dataForm) {
        this.dataForm = dataForm;
    }

    /**
     * Creates a new Form of a given type from scratch.<p>
     *
     * Possible form types are:
     * <ul>
     *  <li>form -> Indicates a form to fill out.</li>
     *  <li>submit -> The form is filled out, and this is the data that is being returned from
     *      the form.</li>
     *  <li>cancel -> The form was cancelled. Tell the asker that piece of information.</li>
     *  <li>result -> Data results being returned from a search, or some other query.</li>
     * </ul>
     *
     * @param type the form's type (e.g. form, submit,cancel,result).
     */
    public Form(String type) {
        this.dataForm = new DataForm(type);
    }

    /**
     * Adds a new field to complete as part of the form.
     *
     * @param field the field to complete.
     */
    public void addField(FormField field) {
        dataForm.addField(field);
    }

    /**
     * Sets a new String value to a given form's field. The field whose variable matches the
     * requested variable will be completed with the specified value. If no field could be found
     * for the specified variable then an exception will be raised.<p>
     *
     * If the value to set to the field is not a basic type (e.g. String, boolean, int, etc.) you
     * can use this method where the String value is the String representation of the object.
     *
     * @param variable the variable name that was completed.
     * @param value the String value that was answered.
     * @throws IllegalStateException if the form is not of type "submit".
     * @throws IllegalArgumentException if the form does not include the specified variable or
     *      if the answer type does not correspond with the field type.
     */
    public void setAnswer(String variable, String value) {
        FormField field = getField(variable);
        if (field == null) {
            throw new IllegalArgumentException("Field not found for the specified variable name.");
        }
        // Only field types whose single value is naturally a String may be answered here.
        if (!FormField.TYPE_TEXT_MULTI.equals(field.getType())
                && !FormField.TYPE_TEXT_PRIVATE.equals(field.getType())
                && !FormField.TYPE_TEXT_SINGLE.equals(field.getType())
                && !FormField.TYPE_JID_SINGLE.equals(field.getType())
                && !FormField.TYPE_HIDDEN.equals(field.getType())) {
            throw new IllegalArgumentException("This field is not of type String.");
        }
        setAnswer(field, value);
    }

    /**
     * Sets a new int value to a given form's field. The field whose variable matches the
     * requested variable will be completed with the specified value. If no field could be found
     * for the specified variable then an exception will be raised.
     *
     * @param variable the variable name that was completed.
     * @param value the int value that was answered.
     * @throws IllegalStateException if the form is not of type "submit".
     * @throws IllegalArgumentException if the form does not include the specified variable or
     *      if the answer type does not correspond with the field type.
     */
    public void setAnswer(String variable, int value) {
        FormField field = getField(variable);
        if (field == null) {
            throw new IllegalArgumentException("Field not found for the specified variable name.");
        }
        if (!FormField.TYPE_TEXT_MULTI.equals(field.getType())
                && !FormField.TYPE_TEXT_PRIVATE.equals(field.getType())
                && !FormField.TYPE_TEXT_SINGLE.equals(field.getType())) {
            throw new IllegalArgumentException("This field is not of type int.");
        }
        setAnswer(field, value);
    }

    /**
     * Sets a new long value to a given form's field. The field whose variable matches the
     * requested variable will be completed with the specified value. If no field could be found
     * for the specified variable then an exception will be raised.
     *
     * @param variable the variable name that was completed.
     * @param value the long value that was answered.
     * @throws IllegalStateException if the form is not of type "submit".
     * @throws IllegalArgumentException if the form does not include the specified variable or
     *      if the answer type does not correspond with the field type.
     */
    public void setAnswer(String variable, long value) {
        FormField field = getField(variable);
        if (field == null) {
            throw new IllegalArgumentException("Field not found for the specified variable name.");
        }
        if (!FormField.TYPE_TEXT_MULTI.equals(field.getType())
                && !FormField.TYPE_TEXT_PRIVATE.equals(field.getType())
                && !FormField.TYPE_TEXT_SINGLE.equals(field.getType())) {
            throw new IllegalArgumentException("This field is not of type long.");
        }
        setAnswer(field, value);
    }

    /**
     * Sets a new float value to a given form's field. The field whose variable matches the
     * requested variable will be completed with the specified value. If no field could be found
     * for the specified variable then an exception will be raised.
     *
     * @param variable the variable name that was completed.
     * @param value the float value that was answered.
     * @throws IllegalStateException if the form is not of type "submit".
     * @throws IllegalArgumentException if the form does not include the specified variable or
     *      if the answer type does not correspond with the field type.
     */
    public void setAnswer(String variable, float value) {
        FormField field = getField(variable);
        if (field == null) {
            throw new IllegalArgumentException("Field not found for the specified variable name.");
        }
        if (!FormField.TYPE_TEXT_MULTI.equals(field.getType())
                && !FormField.TYPE_TEXT_PRIVATE.equals(field.getType())
                && !FormField.TYPE_TEXT_SINGLE.equals(field.getType())) {
            throw new IllegalArgumentException("This field is not of type float.");
        }
        setAnswer(field, value);
    }

    /**
     * Sets a new double value to a given form's field. The field whose variable matches the
     * requested variable will be completed with the specified value. If no field could be found
     * for the specified variable then an exception will be raised.
     *
     * @param variable the variable name that was completed.
     * @param value the double value that was answered.
     * @throws IllegalStateException if the form is not of type "submit".
     * @throws IllegalArgumentException if the form does not include the specified variable or
     *      if the answer type does not correspond with the field type.
     */
    public void setAnswer(String variable, double value) {
        FormField field = getField(variable);
        if (field == null) {
            throw new IllegalArgumentException("Field not found for the specified variable name.");
        }
        if (!FormField.TYPE_TEXT_MULTI.equals(field.getType())
                && !FormField.TYPE_TEXT_PRIVATE.equals(field.getType())
                && !FormField.TYPE_TEXT_SINGLE.equals(field.getType())) {
            throw new IllegalArgumentException("This field is not of type double.");
        }
        setAnswer(field, value);
    }

    /**
     * Sets a new boolean value to a given form's field. The field whose variable matches the
     * requested variable will be completed with the specified value. If no field could be found
     * for the specified variable then an exception will be raised.
     *
     * @param variable the variable name that was completed.
     * @param value the boolean value that was answered.
     * @throws IllegalStateException if the form is not of type "submit".
     * @throws IllegalArgumentException if the form does not include the specified variable or
     *      if the answer type does not correspond with the field type.
     */
    public void setAnswer(String variable, boolean value) {
        FormField field = getField(variable);
        if (field == null) {
            throw new IllegalArgumentException("Field not found for the specified variable name.");
        }
        if (!FormField.TYPE_BOOLEAN.equals(field.getType())) {
            throw new IllegalArgumentException("This field is not of type boolean.");
        }
        // Booleans are wired as "1"/"0" on the wire.
        setAnswer(field, (value ? "1" : "0"));
    }

    /**
     * Sets a new Object value to a given form's field. In fact, the object representation
     * (i.e. #toString) will be the actual value of the field.<p>
     *
     * If the value to set to the field is not a basic type (e.g. String, boolean, int, etc.) you
     * will need to use {@link #setAnswer(String, String)} where the String value is the
     * String representation of the object.<p>
     *
     * Before setting the new value to the field we will check if the form is of type submit. If
     * the form isn't of type submit means that it's not possible to complete the form and an
     * exception will be thrown.
     *
     * @param field the form field that was completed.
     * @param value the Object value that was answered. The object representation will be the
     *      actual value.
     * @throws IllegalStateException if the form is not of type "submit".
     */
    private void setAnswer(FormField field, Object value) {
        if (!isSubmitType()) {
            throw new IllegalStateException("Cannot set an answer if the form is not of type "
                    + "\"submit\"");
        }
        // Replace, don't append: old values are discarded before the new one is added.
        field.resetValues();
        field.addValue(value.toString());
    }

    /**
     * Sets a new values to a given form's field. The field whose variable matches the requested
     * variable will be completed with the specified values. If no field could be found for
     * the specified variable then an exception will be raised.<p>
     *
     * The Objects contained in the List could be of any type. The String representation of them
     * (i.e. #toString) will be actually used when sending the answer to the server.
     *
     * @param variable the variable that was completed.
     * @param values the values that were answered.
     * @throws IllegalStateException if the form is not of type "submit".
     * @throws IllegalArgumentException if the form does not include the specified variable.
     */
    public void setAnswer(String variable, List<String> values) {
        if (!isSubmitType()) {
            throw new IllegalStateException("Cannot set an answer if the form is not of type "
                    + "\"submit\"");
        }
        FormField field = getField(variable);
        if (field != null) {
            // Check that the field can accept a collection of values
            if (!FormField.TYPE_JID_MULTI.equals(field.getType())
                    && !FormField.TYPE_LIST_MULTI.equals(field.getType())
                    && !FormField.TYPE_LIST_SINGLE.equals(field.getType())
                    && !FormField.TYPE_HIDDEN.equals(field.getType())) {
                throw new IllegalArgumentException("This field only accept list of values.");
            }
            // Clear the old values
            field.resetValues();
            // Set the new values. The string representation of each value will be actually used.
            field.addValues(values);
        }
        else {
            throw new IllegalArgumentException("Couldn't find a field for the specified variable.");
        }
    }

    /**
     * Sets the default value as the value of a given form's field. The field whose variable matches
     * the requested variable will be completed with its default value. If no field could be found
     * for the specified variable then an exception will be raised.
     *
     * @param variable the variable to complete with its default value.
     * @throws IllegalStateException if the form is not of type "submit".
     * @throws IllegalArgumentException if the form does not include the specified variable.
     */
    public void setDefaultAnswer(String variable) {
        if (!isSubmitType()) {
            throw new IllegalStateException("Cannot set an answer if the form is not of type "
                    + "\"submit\"");
        }
        FormField field = getField(variable);
        if (field != null) {
            // Clear the old values
            field.resetValues();
            // Set the default value
            // NOTE(review): this iterates field.getValues() immediately after resetValues();
            // unless getValues() exposes a separate default-value store, the iterator looks
            // empty and the loop is a no-op — confirm against the FormField implementation.
            for (Iterator<String> it = field.getValues(); it.hasNext();) {
                field.addValue(it.next());
            }
        }
        else {
            throw new IllegalArgumentException("Couldn't find a field for the specified variable.");
        }
    }

    /**
     * Returns an Iterator for the fields that are part of the form.
     *
     * @return an Iterator for the fields that are part of the form.
     */
    public Iterator<FormField> getFields() {
        return dataForm.getFields();
    }

    /**
     * Returns the field of the form whose variable matches the specified variable.
     * The fields of type FIXED will never be returned since they do not specify a
     * variable.
     *
     * @param variable the variable to look for in the form fields.
     * @return the field of the form whose variable matches the specified variable.
     */
    public FormField getField(String variable) {
        if (variable == null || variable.equals("")) {
            throw new IllegalArgumentException("Variable must not be null or blank.");
        }
        // Look for the field whose variable matches the requested variable
        FormField field;
        for (Iterator<FormField> it = getFields(); it.hasNext();) {
            field = it.next();
            if (variable.equals(field.getVariable())) {
                return field;
            }
        }
        return null;
    }

    /**
     * Returns the instructions that explain how to fill out the form and what the form is about.
     *
     * @return instructions that explain how to fill out the form.
     */
    public String getInstructions() {
        StringBuilder sb = new StringBuilder();
        // Join the list of instructions together separated by newlines
        // (raw Iterator: DataForm.getInstructions() is used untyped here, hence the cast)
        for (Iterator it = dataForm.getInstructions(); it.hasNext();) {
            sb.append((String) it.next());
            // If this is not the last instruction then append a newline
            if (it.hasNext()) {
                sb.append("\n");
            }
        }
        return sb.toString();
    }

    /**
     * Returns the description of the data. It is similar to the title on a web page or an X
     * window. You can put a <title/> on either a form to fill out, or a set of data results.
     *
     * @return description of the data.
     */
    public String getTitle() {
        return dataForm.getTitle();
    }

    /**
     * Returns the meaning of the data within the context. The data could be part of a form
     * to fill out, a form submission or data results.<p>
     *
     * Possible form types are:
     * <ul>
     *  <li>form -> Indicates a form to fill out.</li>
     *  <li>submit -> The form is filled out, and this is the data that is being returned from
     *      the form.</li>
     *  <li>cancel -> The form was cancelled. Tell the asker that piece of information.</li>
     *  <li>result -> Data results being returned from a search, or some other query.</li>
     * </ul>
     *
     * @return the form's type.
     */
    public String getType() {
        return dataForm.getType();
    }

    /**
     * Sets instructions that explain how to fill out the form and what the form is about.
     *
     * @param instructions instructions that explain how to fill out the form.
     */
    public void setInstructions(String instructions) {
        // Split the instructions into multiple instructions for each existent newline
        ArrayList<String> instructionsList = new ArrayList<String>();
        StringTokenizer st = new StringTokenizer(instructions, "\n");
        while (st.hasMoreTokens()) {
            instructionsList.add(st.nextToken());
        }
        // Set the new list of instructions
        dataForm.setInstructions(instructionsList);
    }

    /**
     * Sets the description of the data. It is similar to the title on a web page or an X window.
     * You can put a <title/> on either a form to fill out, or a set of data results.
     *
     * @param title description of the data.
     */
    public void setTitle(String title) {
        dataForm.setTitle(title);
    }

    /**
     * Returns a DataForm that serves to send this Form to the server. If the form is of type
     * submit, it may contain fields with no value. These fields will be removed since they only
     * exist to assist the user while editing/completing the form in a UI.
     *
     * @return the wrapped DataForm.
     */
    public DataForm getDataFormToSend() {
        if (isSubmitType()) {
            // Create a new DataForm that contains only the answered fields
            DataForm dataFormToSend = new DataForm(getType());
            for (Iterator<FormField> it = getFields(); it.hasNext();) {
                FormField field = it.next();
                // A field is "answered" iff it has at least one value.
                if (field.getValues().hasNext()) {
                    dataFormToSend.addField(field);
                }
            }
            return dataFormToSend;
        }
        return dataForm;
    }

    /**
     * Returns true if the form is a form to fill out.
     *
     * @return if the form is a form to fill out.
     */
    private boolean isFormType() {
        return TYPE_FORM.equals(dataForm.getType());
    }

    /**
     * Returns true if the form is a form to submit.
     *
     * @return if the form is a form to submit.
     */
    private boolean isSubmitType() {
        return TYPE_SUBMIT.equals(dataForm.getType());
    }

    /**
     * Returns a new Form to submit the completed values. The new Form will include all the fields
     * of the original form except for the fields of type FIXED. Only the HIDDEN fields will
     * include the same value of the original form. The other fields of the new form MUST be
     * completed. If a field remains with no answer when sending the completed form, then it won't
     * be included as part of the completed form.<p>
     *
     * The reason why the fields with variables are included in the new form is to provide a model
     * for binding with any UI. This means that the UIs will use the original form (of type
     * "form") to learn how to render the form, but the UIs will bind the fields to the form of
     * type submit.
     *
     * @return a Form to submit the completed values.
     */
    public Form createAnswerForm() {
        if (!isFormType()) {
            throw new IllegalStateException("Only forms of type \"form\" could be answered");
        }
        // Create a new Form
        Form form = new Form(TYPE_SUBMIT);
        for (Iterator<FormField> fields = getFields(); fields.hasNext();) {
            FormField field = fields.next();
            // Add to the new form any type of field that includes a variable.
            // Note: The fields of type FIXED are the only ones that don't specify a variable
            if (field.getVariable() != null) {
                FormField newField = new FormField(field.getVariable());
                newField.setType(field.getType());
                form.addField(newField);
                // Set the answer ONLY to the hidden fields
                if (FormField.TYPE_HIDDEN.equals(field.getType())) {
                    // Since a hidden field could have many values we need to collect them
                    // in a list
                    List<String> values = new ArrayList<String>();
                    for (Iterator<String> it = field.getValues(); it.hasNext();) {
                        values.add(it.next());
                    }
                    form.setAnswer(field.getVariable(), values);
                }
            }
        }
        return form;
    }
}
package flusim;

/* FluSim.java
 * Created on April 3, 2006
 * @author Burke Squires, richard.squires@utsouthwestern.edu; burkesquires@gmail.com
 * Tags: 6 digit number; first 3 digits 100 + process ID; last 3 digits: port (1) and event or molecule (2)
 * e.g. 108101 -
 */

import eduni.simjava.*;
import eduni.simjava.distributions.*;
//import java.util.*;
//import java.io.*;
import java.util.ArrayList;

/**
 * Entry point of the simulation: constructs one Sim_entity per stage, links
 * their ports into a pipeline, configures the simjava kernel and runs until
 * the elapsed-time termination condition is hit.
 */
public class FluSim {

    public static void main(String[] args) {

        /********************************************************************************/
        double DebugSimRunTime = 10000.0; //Time for simulation based upon 84600 seconds in a day

        //Sim_system.set_seed(10091);
        Sim_system.initialise();

        // One entity per stage; entity names double as the port-linking keys below.
        Binding binding = new Binding("Binding");
        Internalization internalization = new Internalization("Internalization");
        Actin_dependent_transport actin_dependent_transport = new Actin_dependent_transport("Actin_dependent_transport");
        Dynein_directed_transport dynein_directed_transport = new Dynein_directed_transport("Dynein_directed_transport");
        End_directed_transport end_directed_transport = new End_directed_transport("End_directed_transport");
        Fusion fusion = new Fusion("Fusion");
        Uncoating Uncoating = new Uncoating("Uncoating");
        RNP_nuclear_import RNP_nuclear_import = new RNP_nuclear_import("RNP_nuclear_import");
        cRNA_synthesis cRNA_synthesis = new cRNA_synthesis("cRNA_synthesis");
        mRNA_transcription mRNA_transcription = new mRNA_transcription("mRNA_transcription");
        Viral_protein_translation Viral_protein_translation = new Viral_protein_translation("Viral_protein_translation");
        Viral_protein_import Viral_protein_import = new Viral_protein_import("Viral_protein_import");
        vRNA_replication vRNA_replication = new vRNA_replication("vRNA_replication");
        RNP_assembly RNP_assembly = new RNP_assembly("RNP_assembly");
        RNP_nuclear_export RNP_nuclear_export = new RNP_nuclear_export("RNP_nuclear_export");
        M1_translocation M1_translocation = new M1_translocation("M1_translocation");
        Protein_secretion Protein_secretion = new Protein_secretion("Protein_secretion");
        Virion_assembly Virion_assembly = new Virion_assembly("Virion_assembly");
        Virion_release Virion_release = new Virion_release("Virion_release");

        /********************************************************************************/
        // Link the entities' ports.  Every downstream entity receives on a port
        // literally named "In"; the upstream side names the destination stage.
        Sim_system.link_ports("Binding", "to_internalization", "Internalization", "In");
        Sim_system.link_ports("Internalization", "to_actin_dependent_transport", "Actin_dependent_transport", "In");
        Sim_system.link_ports("Actin_dependent_transport", "to_Dynein_directed_transport", "Dynein_directed_transport", "In");
        Sim_system.link_ports("Dynein_directed_transport", "to_End_directed_transport", "End_directed_transport", "In");
        Sim_system.link_ports("End_directed_transport", "to_fusion", "Fusion", "In");
        Sim_system.link_ports("Fusion", "to_uncoating", "Uncoating", "In");
        Sim_system.link_ports("Uncoating", "to_RNP_nuclear_import", "RNP_nuclear_import", "In");
        Sim_system.link_ports("RNP_nuclear_import", "to_mRNA_transcription", "mRNA_transcription", "In");
        Sim_system.link_ports("RNP_nuclear_import", "to_cRNA_synthesis", "cRNA_synthesis", "In");
        // Several stages loop back to themselves (self-rescheduling event chains).
        Sim_system.link_ports("cRNA_synthesis", "to_vRNA_replication", "vRNA_replication", "In");
        Sim_system.link_ports("cRNA_synthesis", "to_cRNA_synthesis", "cRNA_synthesis", "In");
        Sim_system.link_ports("mRNA_transcription", "to_Viral_protein_translation", "Viral_protein_translation", "In");
        Sim_system.link_ports("mRNA_transcription", "to_mRNA_transcription", "mRNA_transcription", "In");
        Sim_system.link_ports("Viral_protein_translation", "to_Protein_secretion", "Protein_secretion", "In");
        Sim_system.link_ports("Viral_protein_translation", "to_M1_translocation", "M1_translocation", "In");
        Sim_system.link_ports("Viral_protein_translation", "to_Viral_protein_import", "Viral_protein_import", "In");
        Sim_system.link_ports("Viral_protein_translation", "to_Viral_protein_translation", "Viral_protein_translation", "In");
        Sim_system.link_ports("Viral_protein_import", "to_mRNA_transcription", "mRNA_transcription", "In");
        Sim_system.link_ports("Viral_protein_import", "to_cRNA_synthesis", "cRNA_synthesis", "In");
        Sim_system.link_ports("Viral_protein_import", "to_vRNA_replication", "vRNA_replication", "In");
        Sim_system.link_ports("Viral_protein_import", "to_RNP_nuclear_export", "RNP_nuclear_export", "In");
        Sim_system.link_ports("vRNA_replication", "to_cRNA_synthesis", "cRNA_synthesis", "In");
        Sim_system.link_ports("vRNA_replication", "to_RNP_assembly", "RNP_assembly", "In");
        Sim_system.link_ports("vRNA_replication", "to_mRNA_transcription", "mRNA_transcription", "In");
        Sim_system.link_ports("vRNA_replication", "to_vRNA_replication", "vRNA_replication", "In");
        Sim_system.link_ports("RNP_assembly", "to_RNP_nuclear_export", "RNP_nuclear_export", "In");
        Sim_system.link_ports("RNP_nuclear_export", "to_Virion_assembly", "Virion_assembly", "In");
        Sim_system.link_ports("M1_translocation", "to_Virion_assembly", "Virion_assembly", "In");
        Sim_system.link_ports("Protein_secretion", "to_Virion_assembly", "Virion_assembly", "In");
        Sim_system.link_ports("Virion_assembly", "to_Virion_release", "Virion_release", "In");

        Sim_system.set_trace_detail(false, false, false);
        // Stop after DebugSimRunTime simulated time units.
        Sim_system.set_termination_condition(Sim_system.TIME_ELAPSED, DebugSimRunTime, false);
        //Sim_system.set_termination_condition(Sim_system.EVENTS_COMPLETED, "Virion_assembly", 118101, 1, true);
        //Sim_system.set_output_analysis(Sim_system.IND_REPLICATIONS, 2, 0.5);
        Sim_system.set_report_detail(false, false);
        //Sim_system.generate_graphs("FluSim_graphs");
        //Sim_system.generate_molecular_graphs("FluSim_mol_graphs");
        Sim_system.run();
        MolecularCounter.GraphData();
        System.out.println("End of simulation!");
    }
}

/**
 * First stage.  body() runs once (no Sim_system.running() loop): it emits a
 * single event with tag 101101 after an exponential delay.
 */
class Binding extends Sim_entity {
    private Sim_port to_internalization;
    private Sim_negexp_obj proc;

    Binding(String name) {
        super(name);
        to_internalization = new Sim_port("to_internalization");
        add_port(to_internalization);
        proc = new Sim_negexp_obj("Processing", 1);
        add_generator(proc);
    }

    @Override
    public void body() {
        System.out.println("Binding: 101101 at " + Sim_system.sim_clock());
        // 4-arg sim_schedule: (port, delay, tag, data).
        sim_schedule(to_internalization, proc.sample(), 101101, 1);
        //sim_trace(1, "Binding: 101101");
    }
}

/**
 * Waits for one 101101 event, then forwards tag 102101 after a fixed offset
 * (54) plus an exponential sample.  body() handles a single event.
 */
class Internalization extends Sim_entity {
    private Sim_port in;
    private Sim_port to_actin_dependent_transport;
    private Sim_negexp_obj proc;

    Internalization(String name) {
        super(name);
        in = new Sim_port("In");
        add_port(in);
        to_actin_dependent_transport = new Sim_port("to_actin_dependent_transport");
        add_port(to_actin_dependent_transport);
        proc = new Sim_negexp_obj("Processing", 6);
        add_generator(proc);
    }

    @Override
    public void body() {
        Sim_event e = new Sim_event();
        sim_get_next(new Sim_type_p(101101), e);
        sim_completed(e);
        System.out.println("Internalization: " + e.get_tag() + " at " + Sim_system.sim_clock());
        sim_schedule(to_actin_dependent_transport, 54 + proc.sample(), 102101, 1);
        //sim_trace(1, "Internalization: " + e.get_src() + " from " + e.get_tag() + " at " + Sim_system.sim_clock());
    }
}

/**
 * Waits for 102101, forwards 103101 after 226 + exp(24).
 * NOTE(review): the 3-arg sim_schedule here (no data argument) differs from the
 * 4-arg form used by Binding/Internalization — confirm the intended overload.
 */
class Actin_dependent_transport extends Sim_entity {
    private Sim_port in;
    private Sim_port to_Dynein_directed_transport;
    private Sim_negexp_obj proc;

    Actin_dependent_transport(String name) {
        super(name);
        in = new Sim_port("In");
        add_port(in);
        to_Dynein_directed_transport = new Sim_port("to_Dynein_directed_transport");
        add_port(to_Dynein_directed_transport);
        proc = new Sim_negexp_obj("Processing", 24);
        add_generator(proc);
    }

    @Override
    public void body() {
        Sim_event e = new Sim_event();
        sim_get_next(new Sim_type_p(102101), e);
        sim_completed(e);
        System.out.println("Actin-dependent transport: " + e.get_tag() + " at " + Sim_system.sim_clock());
        sim_schedule(to_Dynein_directed_transport, 226 + proc.sample(), 103101);
        //sim_trace(1, "Actin-dependent transport: " + e.get_src() + " from " + e.get_tag() + " at " + Sim_system.sim_clock());
    }
}

/** Waits for 103101, forwards 104101 after 9 + exp(1). */
class Dynein_directed_transport extends Sim_entity {
    private Sim_port in;
    private Sim_port to_End_directed_transport;
    private Sim_negexp_obj proc;

    Dynein_directed_transport(String name) {
        super(name);
        in = new Sim_port("In");
        add_port(in);
        to_End_directed_transport = new Sim_port("to_End_directed_transport");
        add_port(to_End_directed_transport);
        proc = new Sim_negexp_obj("Processing", 1);
        add_generator(proc);
    }

    @Override
    public void body() {
        Sim_event e = new Sim_event();
        sim_get_next(new Sim_type_p(103101), e);
        sim_completed(e);
        System.out.println("Dynein_directed_transport: " + e.get_tag() + " at " + Sim_system.sim_clock());
        sim_schedule(to_End_directed_transport, 9 + proc.sample(), 104101);
        //sim_trace(1, "Dynein_directed_transport: " + e.get_src() + " from " + e.get_tag() + " at " + Sim_system.sim_clock());
    }
}

/** Waits for 104101, forwards 105101 after 270 + exp(30). */
class End_directed_transport extends Sim_entity {
    private Sim_port in;
    private Sim_port to_fusion;
    private Sim_negexp_obj proc;

    End_directed_transport(String name) {
        super(name);
        in = new Sim_port("In");
        add_port(in);
        to_fusion = new Sim_port("to_fusion");
        add_port(to_fusion);
        proc = new Sim_negexp_obj("Processing", 30);
        add_generator(proc);
    }

    @Override
    public void body() {
        Sim_event e = new Sim_event();
        sim_get_next(new Sim_type_p(104101), e);
        sim_completed(e);
        System.out.println("Late endosome: " + e.get_tag() + " at " + Sim_system.sim_clock());
        sim_schedule(to_fusion, 270 + proc.sample(), 105101);
        //sim_trace(1, "Late endosome: " + e.get_src() + " from " + e.get_tag() + " at " + Sim_system.sim_clock());
    }
}

/**
 * Loops while the simulation runs, converting each 105101 event into a 106101
 * event after 1080 + exp(120).  The `prob` generator is registered but never
 * sampled in this body.
 */
class Fusion extends Sim_entity {
    private Sim_port in;
    private Sim_port to_uncoating;
    private Sim_negexp_obj proc;
    private Sim_random_obj prob;

    Fusion(String name) {
        super(name);
        in = new Sim_port("In");
        add_port(in);
        to_uncoating = new Sim_port("to_uncoating");
        add_port(to_uncoating);
        proc = new Sim_negexp_obj("Processing", 120);
        add_generator(proc);
        prob = new Sim_random_obj("Probability");
        add_generator(prob);
    }

    @Override
    public void body() {
        while (Sim_system.running()) {
            Sim_event e = new Sim_event();
            sim_get_next(new Sim_type_p(105101), e);
            sim_completed(e);
            System.out.println("Fusion: " + e.get_tag() + " at " + Sim_system.sim_clock());
            sim_schedule(to_uncoating, 1080 + proc.sample(), 106101);
            //sim_trace(1, "Fusion: " + e.get_src() + " from " + e.get_tag() + " at " + Sim_system.sim_clock());
        }
    }
}

/**
 * For each 106101 event, fans out eight events tagged 107101..107108
 * (one per vRNP segment), each delayed by 540 + exp(60).
 */
class Uncoating extends Sim_entity {
    private Sim_port in;
    private Sim_port to_RNP_nuclear_import;
    private Sim_negexp_obj proc;
    private Sim_random_obj prob;

    Uncoating(String name) {
        super(name);
        in = new Sim_port("In");
        add_port(in);
        to_RNP_nuclear_import = new Sim_port("to_RNP_nuclear_import");
        add_port(to_RNP_nuclear_import);
        proc = new Sim_negexp_obj("Processing", 60);
        add_generator(proc);
        prob = new Sim_random_obj("Probability");
        add_generator(prob);
    }

    @Override
    public void body() {
        while (Sim_system.running()) {
            Sim_event e = new Sim_event();
            int[] tags = {106101};
            sim_get_next(new Sim_type_p(tags), e);
            sim_completed(e);
            System.out.println("Uncoating: " + e.get_tag() + " at " + Sim_system.sim_clock());
            for (int vRNPID = 1; vRNPID < 9; vRNPID++) {
                sim_schedule(to_RNP_nuclear_import, 540 + proc.sample(), (107100 + vRNPID));
            }
        }
    }
}

/**
 * Consumes 107101..107108; for each, schedules an mRNA event (10820x) and a
 * cRNA event (10810x) and records the mRNA in MolecularCounter.  The segment
 * id is recovered from the tag's last digit (tag % 10).
 */
class RNP_nuclear_import extends Sim_entity {
    private Sim_port in;
    private Sim_port to_mRNA_transcription;
    private Sim_port to_cRNA_synthesis;
    private Sim_negexp_obj proc;
    // Per-segment multiplier passed to MolecularCounter (indexed by segment - 1).
    private int[] poly = new int[]{29, 29, 27, 22, 19, 17, 12, 11};

    RNP_nuclear_import(String name) {
        super(name);
        in = new Sim_port("In");
        add_port(in);
        to_mRNA_transcription = new Sim_port("to_mRNA_transcription");
        add_port(to_mRNA_transcription);
        to_cRNA_synthesis = new Sim_port("to_cRNA_synthesis");
        add_port(to_cRNA_synthesis);
        proc = new Sim_negexp_obj("Processing", 60);
        add_generator(proc);
    }

    @Override
    public void body() {
        while (Sim_system.running()) {
            Sim_event e = new Sim_event();
            int[] tags = {107101, 107102, 107103, 107104, 107105, 107106, 107107, 107108};
            sim_get_next(new Sim_type_p(tags), e);
            if (e.get_tag() != -1) {
                int tag = e.get_tag();
                int vRNPID = tag % 10;
                sim_completed(e);
                System.out.println("RNP Import: " + tag + " at " + Sim_system.sim_clock());
                sim_schedule(to_mRNA_transcription, 540 + proc.sample(), (108200 + vRNPID));
                MolecularCounter.AddmRNACount(vRNPID, poly[vRNPID - 1], Sim_system.sim_clock());
                sim_schedule(to_cRNA_synthesis, 540 + proc.sample(), 108100 + vRNPID);
            }
        }
    }
}

/**
 * Self-rescheduling cRNA stage.  Each event re-derives its delay from the
 * current protein concentration (by rebuilding the per-segment Sim_negexp_obj),
 * optionally emits a vRNA event (10910x) when enough protein is present, and
 * applies a small random degradation.
 * NOTE(review): the tag-range checks use bitwise `|` rather than logical `||`;
 * the result is the same for booleans but both operands are always evaluated.
 */
class cRNA_synthesis extends Sim_entity {
    private Sim_port in;
    private Sim_port to_vRNA_replication;
    private Sim_port to_cRNA_synthesis;
    private Sim_negexp_obj cRNA1;
    private Sim_negexp_obj cRNA2;
    private Sim_negexp_obj cRNA3;
    private Sim_negexp_obj cRNA4;
    private Sim_negexp_obj cRNA5;
    private Sim_negexp_obj cRNA6;
    private Sim_negexp_obj cRNA7;
    private Sim_negexp_obj cRNA8;
    private Sim_random_obj prob;
    private int[] poly = new int[]{29, 29, 27, 22, 19, 17, 12, 11};
    private double time;
    double initiation = 46.0;

    cRNA_synthesis(String name) {
        super(name);
        //Ports
        in = new Sim_port("In");
        add_port(in);
        to_vRNA_replication = new Sim_port("to_vRNA_replication");
        add_port(to_vRNA_replication);
        to_cRNA_synthesis = new Sim_port("to_cRNA_synthesis");
        add_port(to_cRNA_synthesis);
        //Distributions (one exponential per segment; base mean = initiation + segment cost)
        cRNA1 = new Sim_negexp_obj("cRNA1", initiation + 47);
        add_generator(cRNA1);
        cRNA2 = new Sim_negexp_obj("cRNA2", initiation + 47);
        add_generator(cRNA2);
        cRNA3 = new Sim_negexp_obj("cRNA3", initiation + 45);
        add_generator(cRNA3);
        cRNA4 = new Sim_negexp_obj("cRNA4", initiation + 36);
        add_generator(cRNA4);
        cRNA5 = new Sim_negexp_obj("cRNA5", initiation + 31);
        add_generator(cRNA5);
        cRNA6 = new Sim_negexp_obj("cRNA6", initiation + 28);
        add_generator(cRNA6);
        cRNA7 = new Sim_negexp_obj("cRNA7", initiation + 21);
        add_generator(cRNA7);
        cRNA8 = new Sim_negexp_obj("cRNA8", initiation + 18);
        add_generator(cRNA8);
        prob = new Sim_random_obj("Probability");
        add_generator(prob);
    }

    @Override
    public void body() {
        while (Sim_system.running()) {
            Sim_event e = new Sim_event();
            // Accepts events from nuclear import (1081xx), and feedback tags 1121xx / 1131xx.
            int[] tags = {108101, 108102, 108103, 108104, 108105, 108106, 108107, 108108,
                    112101, 112102, 112103, 112104, 112105, 112106, 112107, 112108,
                    113101, 113102, 113103, 113104, 113105, 113106, 113107, 113108};
            sim_get_next(new Sim_type_p(tags), e);
            int tag = e.get_tag();
            if (e.get_tag() != -1) {
                int cRNAID = tag % 10;
                boolean ready = false;
                if (((tag >= 108101) && (tag <= 108108)) | ((tag >= 113101) && (tag <= 113108))) {
                    ready = MolecularCounter.checkSynthesisProteinCount(cRNAID);
                }
                sim_completed(e);
                double concentrations = MolecularCounter.getSynthProteinConc();
                // Rebuild the segment's distribution scaled by the current concentration,
                // then draw the next delay.  (A fresh Sim_negexp_obj per event.)
                switch (cRNAID) {
                    case 1:
                        cRNA1 = new Sim_negexp_obj("cRNA", (initiation + 47) * concentrations);
                        time = cRNA1.sample();
                        break;
                    case 2:
                        cRNA2 = new Sim_negexp_obj("cRNA", (initiation + 47) * concentrations);
                        time = cRNA2.sample();
                        break;
                    case 3:
                        cRNA3 = new Sim_negexp_obj("cRNA", (initiation + 45) * concentrations);
                        time = cRNA3.sample();
                        break;
                    case 4:
                        cRNA4 = new Sim_negexp_obj("cRNA", (initiation + 36) * concentrations);
                        time = cRNA4.sample();
                        break;
                    case 5:
                        cRNA5 = new Sim_negexp_obj("cRNA", (initiation + 31) * concentrations);
                        time = cRNA5.sample();
                        break;
                    case 6:
                        cRNA6 = new Sim_negexp_obj("cRNA", (initiation + 28) * concentrations);
                        time = cRNA6.sample();
                        break;
                    case 7:
                        cRNA7 = new Sim_negexp_obj("cRNA", (initiation + 21) * concentrations);
                        time = cRNA7.sample();
                        break;
                    case 8:
                        cRNA8 = new Sim_negexp_obj("cRNA", (initiation + 18) * concentrations);
                        time = cRNA8.sample();
                        break;
                }
                if (ready) {
                    sim_schedule(to_vRNA_replication, time, 109100 + cRNAID);
                    MolecularCounter.AddcRNACount(cRNAID, poly[cRNAID - 1], Sim_system.sim_clock());
                    System.out.println("cRNA synthesis " + tag + " at " + Sim_system.sim_clock());
                    //sim_trace(1, "cRNA synthesis: " + e.get_src() + " from " + tag + " at " + Sim_system.sim_clock());
                }
                // Rare degradation: drop one unit when above the per-segment floor.
                if (prob.sample() < 0.00012) {
                    if (MolecularCounter.GetcRNACount(cRNAID) > poly[cRNAID - 1]) {
                        MolecularCounter.RemovecRNACount(cRNAID, poly[cRNAID - 1], Sim_system.sim_clock());
                    }
                }
                // Self-reschedule only for the 1081xx / 1131xx families.
                if (((tag >= 108101) && (tag <= 108108)) | ((tag >= 113101) && (tag <= 113108))) {
                    sim_schedule(to_cRNA_synthesis, time, e.get_tag());
                }
            }
        }
    }
}

/**
 * Self-rescheduling mRNA stage: per event, rebuilds the per-segment delay
 * distribution from the current transcription-protein concentration, emits a
 * translation event (11010x), books the mRNA in MolecularCounter, and applies
 * random degradation.  When concentration is 0 it simply retries after 0.25.
 * NOTE(review): the switch constants (470, 450, 360, 360, 310, 280, 210, 180)
 * do not all match the constructor pattern x10 of (47, 47, 45, 36, ...) —
 * cases 2 and 3 look like copy-paste drift; confirm intended values.
 */
class mRNA_transcription extends Sim_entity {
    private Sim_port in;
    private Sim_port to_Viral_protein_translation;
    private Sim_port to_mRNA_transcription;
    private Sim_negexp_obj mRNA1;
    private Sim_negexp_obj mRNA2;
    private Sim_negexp_obj mRNA3;
    private Sim_negexp_obj mRNA4;
    private Sim_negexp_obj mRNA5;
    private Sim_negexp_obj mRNA6;
    private Sim_negexp_obj mRNA7;
    private Sim_negexp_obj mRNA8;
    private Sim_random_obj prob;
    private int[] poly = new int[]{29, 29, 27, 22, 19, 17, 12, 11};
    double time;
    double initiation = 46.0 * 10;

    mRNA_transcription(String name) {
        super(name);
        in = new Sim_port("In");
        add_port(in);
        //Ports
        to_Viral_protein_translation = new Sim_port("to_Viral_protein_translation");
        add_port(to_Viral_protein_translation);
        to_mRNA_transcription = new Sim_port("to_mRNA_transcription");
        add_port(to_mRNA_transcription);
        //Distributions
        prob = new Sim_random_obj("Probability");
        add_generator(prob);
        mRNA1 = new Sim_negexp_obj("mRNA", initiation + 47);
        add_generator(mRNA1);
        mRNA2 = new Sim_negexp_obj("mRNA", initiation + 47);
        add_generator(mRNA2);
        mRNA3 = new Sim_negexp_obj("mRNA", initiation + 45);
        add_generator(mRNA3);
        mRNA4 = new Sim_negexp_obj("mRNA", initiation + 36);
        add_generator(mRNA4);
        mRNA5 = new Sim_negexp_obj("mRNA", initiation + 31);
        add_generator(mRNA5);
        mRNA6 = new Sim_negexp_obj("mRNA", initiation + 28);
        add_generator(mRNA6);
        mRNA7 = new Sim_negexp_obj("mRNA", initiation + 21);
        add_generator(mRNA7);
        mRNA8 = new Sim_negexp_obj("mRNA", initiation + 18);
        add_generator(mRNA8);
    }

    @Override
    public void body() {
        while (Sim_system.running()) {
            Sim_event e = new Sim_event();
            // Accepts events from nuclear import (1082xx) and feedback (1133xx).
            int[] tags = {108201, 108202, 108203, 108204, 108205, 108206, 108207, 108208,
                    113301, 113302, 113303, 113304, 113305, 113306, 113307, 113308};
            sim_get_next(new Sim_type_p(tags), e);
            if (e.get_tag() != -1) {
                int tag = e.get_tag();
                int mRNAID = tag % 10;
                sim_completed(e);
                double concentrations = (double) MolecularCounter.getTranscriptionProteinConc();
                if (concentrations != 0) {
                    switch (mRNAID) {
                        case 1:
                            mRNA1 = new Sim_negexp_obj("mRNA", (initiation + 470) * concentrations);
                            time = mRNA1.sample();
                            break;
                        case 2:
                            mRNA2 = new Sim_negexp_obj("mRNA", (initiation + 450) * concentrations);
                            time = mRNA2.sample();
                            break;
                        case 3:
                            mRNA3 = new Sim_negexp_obj("mRNA", (initiation + 360) * concentrations);
                            time = mRNA3.sample();
                            break;
                        case 4:
                            mRNA4 = new Sim_negexp_obj("mRNA", (initiation + 360) * concentrations);
                            time = mRNA4.sample();
                            break;
                        case 5:
                            mRNA5 = new Sim_negexp_obj("mRNA", (initiation + 310) * concentrations);
                            time = mRNA5.sample();
                            break;
                        case 6:
                            mRNA6 = new Sim_negexp_obj("mRNA", (initiation + 280) * concentrations);
                            time = mRNA6.sample();
                            break;
                        case 7:
                            mRNA7 = new Sim_negexp_obj("mRNA", (initiation + 210) * concentrations);
                            time = mRNA7.sample();
                            break;
                        case 8:
                            mRNA8 = new Sim_negexp_obj("mRNA", (initiation + 180) * concentrations);
                            time = mRNA8.sample();
                            break;
                    }
                    sim_schedule(to_Viral_protein_translation, time, (110100 + mRNAID));
                    MolecularCounter.AddmRNACount(mRNAID, poly[mRNAID - 1], Sim_system.sim_clock());
                    MolecularCounter.removePrecursormRNA(poly[mRNAID - 1]);
                    System.out.println("mRNA transcription: " + tag + " at " + Sim_system.sim_clock());
                }
                else {
                    // No protein available: short back-off before the retry below.
                    time = 0.25;
                }
                if (prob.sample() < 0.0005) { //Degradation
                    MolecularCounter.RemovemRNACount(mRNAID, poly[mRNAID - 1], Sim_system.sim_clock());
                }
                sim_schedule(to_mRNA_transcription, time, e.get_tag());
            }
        }
    }
}

// NOTE(review): class continues past this chunk; declaration left as-is.
class Viral_protein_translation extends Sim_entity {
    private Sim_port in;
    private Sim_port to_Protein_secretion;
    private Sim_port to_M1_translocation;
    private Sim_port to_Viral_protein_import;
    private Sim_port to_RNP_nuclear_export;
    private Sim_port to_Viral_protein_translation;
    private Sim_negexp_obj trans1;
    private Sim_negexp_obj trans2;
    private Sim_negexp_obj trans3;
    private Sim_negexp_obj trans4;
    private Sim_negexp_obj trans5;
    private Sim_negexp_obj trans6;
    private Sim_negexp_obj trans7;
    private Sim_negexp_obj trans8;
    private Sim_negexp_obj trans9;
private Sim_negexp_obj trans10; private Sim_random_obj prob; private int[] ribo = new int[]{29, 29, 27, 22, 19, 17, 12, 11, 11, 11}; double time1; double time2; double initiation = 500.0; Viral_protein_translation(String name) { super(name); in = new Sim_port("In"); add_port(in); to_Protein_secretion = new Sim_port("to_Protein_secretion"); add_port(to_Protein_secretion); to_M1_translocation = new Sim_port("to_M1_translocation"); add_port(to_M1_translocation); to_Viral_protein_import = new Sim_port("to_Viral_protein_import"); add_port(to_Viral_protein_import); to_RNP_nuclear_export = new Sim_port("to_RNP_nuclear_export"); add_port(to_RNP_nuclear_export); to_Viral_protein_translation = new Sim_port("to_Viral_protein_translation"); add_port(to_Viral_protein_translation); trans1 = new Sim_negexp_obj("Translate", initiation + 42.0); add_generator(trans1); trans2 = new Sim_negexp_obj("Translate", initiation + 42.0); add_generator(trans2); trans3 = new Sim_negexp_obj("Translate", initiation + 40.0); add_generator(trans3); trans4 = new Sim_negexp_obj("Translate", initiation + 31.0); add_generator(trans4); trans5 = new Sim_negexp_obj("Translate", initiation + 28.0); add_generator(trans5); trans6 = new Sim_negexp_obj("Translate", initiation + 25.0); add_generator(trans6); trans7 = new Sim_negexp_obj("Translate", initiation + 14.0); add_generator(trans7); trans8 = new Sim_negexp_obj("Translate", initiation + 5.0); add_generator(trans8); trans9 = new Sim_negexp_obj("Translate", initiation + 13.0); add_generator(trans9); trans10 = new Sim_negexp_obj("Translate", initiation + 7.0); add_generator(trans10); prob = new Sim_random_obj("Probability"); add_generator(prob); } @Override public void body() { while (Sim_system.running()) { Sim_event e = new Sim_event(); int[] tags = {110101, 110102, 110103, 110104, 110105, 110106, 110107, 110108}; sim_get_next(new Sim_type_p(tags), e); if (e.get_tag() != -1) { sim_completed(e); int tag = e.get_tag(); int transID = tag % 10; double 
concentrations = MolecularCounter.getTranscriptConc(transID); switch (transID) { case 1: trans1 = new Sim_negexp_obj("Translate", initiation + 420.0 * concentrations); time1 = trans1.sample(); break; case 2: trans2 = new Sim_negexp_obj("Translate", initiation + 420.0 * concentrations); time1 = trans2.sample(); break; case 3: trans3 = new Sim_negexp_obj("Translate", initiation + 400.0 * concentrations); time1 = trans3.sample(); break; case 4: trans4 = new Sim_negexp_obj("Translate", initiation + 310.0 * concentrations); time1 = trans4.sample(); break; case 5: trans5 = new Sim_negexp_obj("Translate", initiation + 280.0 * concentrations); time1 = trans5.sample(); break; case 6: trans6 = new Sim_negexp_obj("Translate", initiation + 250.0 * concentrations); time1 = trans6.sample(); break; case 7: trans7 = new Sim_negexp_obj("Translate", initiation + 140.0 * concentrations); time1 = trans7.sample(); break; case 8: trans8 = new Sim_negexp_obj("Translate", initiation + 130.0 * concentrations); time1 = trans9.sample(); //Segment 8 really code for protein 9 in this case break; } double clock = Sim_system.sim_clock(); switch (transID) { case 1: case 2: case 3: case 4: case 5: case 6: sim_schedule(to_Viral_protein_import, time1, 111300 + transID); MolecularCounter.AddProteinCount(transID, ribo[transID - 1], clock); break; case 7: if (prob.sample() < 0.5) { sim_schedule(to_M1_translocation, time1, 111207); } else { sim_schedule(to_Viral_protein_import, time1, 111307); } MolecularCounter.AddProteinCount(7, ribo[transID - 1], clock); if (prob.sample() < 0.1) { time2 = trans8.sample(); sim_schedule(to_Protein_secretion, time2, 111108); MolecularCounter.AddProteinCount(8, ribo[transID], clock); } break; case 8: sim_schedule(to_Viral_protein_import, time1, 111309); MolecularCounter.AddProteinCount(9, ribo[transID], clock); if (prob.sample() < 0.1) { time2 = trans10.sample(); sim_schedule(to_Viral_protein_import, time2, 111310); MolecularCounter.AddProteinCount(10, ribo[transID + 1], 
clock); } break; } System.out.println("Translation: time: " + time1 + " at " + clock); sim_schedule(to_Viral_protein_translation, time1, e.get_tag()); } } } } class Viral_protein_import extends Sim_entity { private Sim_port in; private Sim_port to_mRNA_transcription; private Sim_port to_cRNA_synthesis; private Sim_port to_vRNA_replication; private Sim_port to_RNP_nuclear_export; private Sim_negexp_obj proc; private Sim_random_obj prob; Viral_protein_import(String name) { super(name); in = new Sim_port("In"); add_port(in); to_mRNA_transcription = new Sim_port("to_mRNA_transcription"); add_port(to_mRNA_transcription); to_cRNA_synthesis = new Sim_port("to_cRNA_synthesis"); add_port(to_cRNA_synthesis); to_vRNA_replication = new Sim_port("to_vRNA_replication"); add_port(to_vRNA_replication); to_RNP_nuclear_export = new Sim_port("to_RNP_nuclear_export"); add_port(to_RNP_nuclear_export); proc = new Sim_negexp_obj("Processing", 10.0); add_generator(proc); prob = new Sim_random_obj("Probability"); add_generator(prob); } @Override public void body() { while (Sim_system.running()) { Sim_event e = new Sim_event(); int[] tags = {111301, 111302, 111303, 111304, 111305, 111306, 111307, 111308, 111309, 111310};// sim_get_next(new Sim_type_p(tags), e); if (e.get_tag() != -1) { double cRNAprob = 0; double vRNAprob = 0; double vRNPExportprob = 0; sim_completed(e); int tag = e.get_tag(); int proteinID = tag % 100; System.out.println("Protein Import: " + tag + " at " + Sim_system.sim_clock()); switch (proteinID) { case 8: cRNAprob = 0.0; vRNAprob = 1.0; //vRNPExportprob = 1.0; break; case 10: cRNAprob = 0.0; vRNAprob = 0.0; vRNPExportprob = 1.0; break; default: cRNAprob = 0.5; vRNAprob = 1.0; //vRNPExportprob = 1.0; break; } double p = prob.sample(); if (p <= cRNAprob) { sim_schedule(to_cRNA_synthesis, proc.sample(), 112100 + proteinID); } else if (p <= vRNAprob) { sim_schedule(to_vRNA_replication, proc.sample(), 112200 + proteinID); } else if (p <= vRNPExportprob) { 
sim_schedule(to_RNP_nuclear_export, proc.sample(), 112300 + proteinID); } //sim_trace(1, "Viral protein import: " + e.get_src() + " from " + tag + " at " + Sim_system.sim_clock()); } } } } class vRNA_replication extends Sim_entity { private Sim_port in; private Sim_port to_cRNA_synthesis; private Sim_port to_RNP_assembly; private Sim_port to_vRNA_replication; private Sim_port to_mRNA_transcription; private Sim_negexp_obj vRNA1; private Sim_negexp_obj vRNA2; private Sim_negexp_obj vRNA3; private Sim_negexp_obj vRNA4; private Sim_negexp_obj vRNA5; private Sim_negexp_obj vRNA6; private Sim_negexp_obj vRNA7; private Sim_negexp_obj vRNA8; private Sim_negexp_obj vRNP; private Sim_random_obj prob; private static ArrayList vRNParrived = new ArrayList(); private int[] poly = new int[]{29, 29, 27, 22, 19, 17, 12, 11}; double time = 0; double initiation = 46.0; vRNA_replication(String name) { super(name); in = new Sim_port("In"); add_port(in); to_cRNA_synthesis = new Sim_port("to_cRNA_synthesis"); add_port(to_cRNA_synthesis); to_RNP_assembly = new Sim_port("to_RNP_assembly"); add_port(to_RNP_assembly); to_vRNA_replication = new Sim_port("to_vRNA_replication"); add_port(to_vRNA_replication); to_mRNA_transcription = new Sim_port("to_mRNA_transcription"); add_port(to_mRNA_transcription); vRNA1 = new Sim_negexp_obj("vRNA", initiation + 47); add_generator(vRNA1); vRNA2 = new Sim_negexp_obj("vRNA", initiation + 47); add_generator(vRNA2); vRNA3 = new Sim_negexp_obj("vRNA", initiation + 45); add_generator(vRNA3); vRNA4 = new Sim_negexp_obj("vRNA", initiation + 36); add_generator(vRNA4); vRNA5 = new Sim_negexp_obj("vRNA", initiation + 31); add_generator(vRNA5); vRNA6 = new Sim_negexp_obj("vRNA", initiation + 28); add_generator(vRNA6); vRNA7 = new Sim_negexp_obj("vRNA", initiation + 21); add_generator(vRNA7); vRNA8 = new Sim_negexp_obj("vRNA", initiation + 18); add_generator(vRNA8); vRNP = new Sim_negexp_obj("vRNP", 6.0); add_generator(vRNP); prob = new Sim_random_obj("Probability"); 
add_generator(prob); } @Override public void body() { while (Sim_system.running()) { Sim_event e = new Sim_event(); int[] tags = { 109101, 109102, 109103, 109104, 109105, 109106, 109107, 109108, 112201, 112202, 112203, 112204, 112205, 112206, 112207, 112210 }; sim_get_next(new Sim_type_p(tags), e); if (e.get_tag() != -1) { int tag = e.get_tag(); int vRNAID = tag % 10; boolean ready = false; sim_completed(e); if (((tag >= 109101) && (tag <= 109108))) { ready = MolecularCounter.checkReplicationProteinCount(vRNAID); } double concentrations = MolecularCounter.getSynthProteinConc(); switch (vRNAID) { case 1: vRNA1 = new Sim_negexp_obj("vRNA", (initiation + 47) * concentrations); time = vRNA1.sample(); break; case 2: vRNA2 = new Sim_negexp_obj("vRNA", (initiation + 47) * concentrations); time = vRNA2.sample(); break; case 3: vRNA3 = new Sim_negexp_obj("vRNA", (initiation + 47) * concentrations); time = vRNA3.sample(); break; case 4: vRNA4 = new Sim_negexp_obj("vRNA", (initiation + 45) * concentrations); time = vRNA4.sample(); break; case 5: vRNA5 = new Sim_negexp_obj("vRNA", (initiation + 38) * concentrations); time = vRNA5.sample(); break; case 6: vRNA6 = new Sim_negexp_obj("vRNA", (initiation + 28) * concentrations); time = vRNA6.sample(); break; case 7: vRNA7 = new Sim_negexp_obj("vRNA", (initiation + 21) * concentrations); time = vRNA7.sample(); break; case 8: vRNA8 = new Sim_negexp_obj("vRNA", (initiation + 18) * concentrations); time = vRNA8.sample(); //Segment 8 really code for protein 9 in this case break; } if (ready) { double p = prob.sample(); if (p < 0.06) { sim_schedule(to_cRNA_synthesis, time, 113100 + vRNAID); } else { sim_schedule(to_mRNA_transcription, time, 113300 + vRNAID); } MolecularCounter.AddvRNACount(vRNAID, poly[vRNAID - 1], Sim_system.sim_clock()); System.out.println("vRNA replication: " + tag + " at " + Sim_system.sim_clock()); //sim_trace(1, "vRNA synthesis: " + e.get_src() + " from " + tag + " at " + Sim_system.sim_clock()); 
vRNParrived.add(vRNAID); } if (tag == 112207) { int length = vRNParrived.size(); if (length > 0) { double temp = prob.sample() * length; int index = (int) Math.ceil(temp - 1); vRNAID = (Integer) vRNParrived.get(index); vRNA7 = new Sim_negexp_obj("vRNA", (initiation + 21) * concentrations); time = vRNA7.sample(); sim_schedule(to_RNP_assembly, time, 113200 + vRNAID); vRNParrived.remove(index); } } sim_schedule(to_vRNA_replication, time, e.get_tag()); } } } } class RNP_assembly extends Sim_entity { private Sim_port in; private Sim_port to_RNP_nuclear_export; private Sim_negexp_obj proc; private Sim_random_obj prob; RNP_assembly(String name) { super(name); in = new Sim_port("In"); add_port(in); to_RNP_nuclear_export = new Sim_port("to_RNP_nuclear_export"); add_port(to_RNP_nuclear_export); proc = new Sim_negexp_obj("Processing", 10.0); add_generator(proc); prob = new Sim_random_obj("Probability"); add_generator(prob); } @Override public void body() { while (Sim_system.running()) { Sim_event e = new Sim_event(); int[] tags = {113201, 113202, 113203, 113204, 113205, 113206, 113207, 113208}; sim_get_next(new Sim_type_p(tags), e); if (e.get_tag() != -1) { sim_completed(e); int tag = e.get_tag(); int vRNPID = tag % 10; System.out.println("RNP assmebly: " + tag + " at " + Sim_system.sim_clock()); sim_schedule(to_RNP_nuclear_export, proc.sample(), 114100 + vRNPID); MolecularCounter.AddvRNPCount(vRNPID, 1, Sim_system.sim_clock()); //sim_trace(1, "RNP assembly: " + e.get_src() + " from " + tag + " at " + Sim_system.sim_clock()); } } } } class RNP_nuclear_export extends Sim_entity { private Sim_port in; private Sim_port to_Virion_assembly; private Sim_negexp_obj proc; private Sim_random_obj prob; private Sim_stat stat; RNP_nuclear_export(String name) { super(name); in = new Sim_port("In"); add_port(in); to_Virion_assembly = new Sim_port("to_Virion_assembly"); add_port(to_Virion_assembly); proc = new Sim_negexp_obj("Processing", 600.0); add_generator(proc); prob = new 
Sim_random_obj("Probability"); add_generator(prob); } @Override public void body() { while (Sim_system.running()) { Sim_event e = new Sim_event(); int[] tags = { //111101, 111102, 111103, 111104, 111105, 111106, 111107, 111108, 112307, 112310, //M1, NS2/NEP 114101, 114102, 114103, 114104, 114105, 114106, 114107, 114108 }; sim_get_next(new Sim_type_p(tags), e); if (e.get_tag() != -1) { sim_completed(e); if ((e.get_tag() >= 114101) && (e.get_tag() <= 114108)) { int tag = e.get_tag(); int vRNPID = tag % 10; sim_schedule(to_Virion_assembly, proc.sample(), (115100 + vRNPID)); System.out.println("RNP assmebly: " + tag + " at " + Sim_system.sim_clock()); //sim_trace(1, "RNP export: " + e.get_src() + " from " + tag + " at " + Sim_system.sim_clock()); } } } } } class M1_translocation extends Sim_entity { private Sim_port in; private Sim_port to_Virion_assembly; private Sim_negexp_obj proc; private Sim_random_obj prob; M1_translocation(String name) { super(name); in = new Sim_port("In"); add_port(in); to_Virion_assembly = new Sim_port("to_Virion_assembly"); add_port(to_Virion_assembly); proc = new Sim_negexp_obj("Processing", 1.0); add_generator(proc); prob = new Sim_random_obj("Probability"); add_generator(prob); } @Override public void body() { while (Sim_system.running()) { Sim_event e = new Sim_event(); int[] tags = {111207}; sim_get_next(new Sim_type_p(tags), e); if (e.get_tag() != -1) { sim_completed(e); sim_schedule(to_Virion_assembly, proc.sample(), 116107); //sim_trace(1, "M1 translocation: " + e.get_src() + " from " + e.get_tag() + " at " + Sim_system.sim_clock()); } } } } class Protein_secretion extends Sim_entity { private Sim_port in; private Sim_port to_Virion_assembly; private Sim_negexp_obj proc; private Sim_random_obj prob; Protein_secretion(String name) { super(name); in = new Sim_port("In"); add_port(in); to_Virion_assembly = new Sim_port("to_Virion_assembly"); add_port(to_Virion_assembly); proc = new Sim_negexp_obj("Processing", 1.0); add_generator(proc); 
prob = new Sim_random_obj("Probability"); add_generator(prob); } @Override public void body() { while (Sim_system.running()) { Sim_event e = new Sim_event(); int[] tags = {111104, 111106, 111108}; sim_get_next(new Sim_type_p(tags), e); if (e.get_tag() != -1) { sim_completed(e); switch (e.get_tag()) { case 111104: sim_schedule(to_Virion_assembly, proc.sample(), 117104); break; case 111106: sim_schedule(to_Virion_assembly, proc.sample(), 117106); break; case 111108: sim_schedule(to_Virion_assembly, proc.sample(), 117108); break; } //sim_trace(1, "Protein secretion: " + e.get_src() + " from " + e.get_tag() + " at " + Sim_system.sim_clock()); } } } } class Virion_assembly extends Sim_entity { private Sim_port in; private Sim_port to_Virion_release; private Sim_negexp_obj proc; private Sim_random_obj prob; Virion_assembly(String name) { super(name); in = new Sim_port("In"); add_port(in); to_Virion_release = new Sim_port("to_Virion_release"); add_port(to_Virion_release); proc = new Sim_negexp_obj("proc", 10.0); add_generator(proc); prob = new Sim_random_obj("Probability"); add_generator(prob); } @Override public void body() { boolean ready = false; while (Sim_system.running()) { Sim_event e = new Sim_event(); int[] tags = { 115101, 115102, 115103, 115104, 115105, 115106, 115107, 115108, 116107, 117104, 117106, 117108 }; sim_get_next(new Sim_type_p(tags), e); sim_completed(e); if ((e.get_tag() >= 115101) && (e.get_tag() <= 115108)) { ready = MolecularCounter.checkVirionvRNPCount(); if (ready) { ready = MolecularCounter.checkVirionProteinCount(); } if (ready) { sim_schedule(to_Virion_release, proc.sample(), 118101); //sim_trace(1, "Virion Assembly: " + e.get_tag() + " at " + Sim_system.sim_clock()); System.out.println("Virion Assembly: " + e.get_tag() + " at " + Sim_system.sim_clock()); } } } } } class Virion_release extends Sim_entity { private Sim_port in; private Sim_negexp_obj proc; private Sim_random_obj prob; Virion_release(String name) { super(name); in = new 
Sim_port("In"); add_port(in); proc = new Sim_negexp_obj("Processing", 1.0); add_generator(proc); prob = new Sim_random_obj("Probability"); add_generator(prob); } @Override public void body() { while (Sim_system.running()) { Sim_event e = new Sim_event(); int[] tags = {118101}; sim_get_next(new Sim_type_p(tags), e); if (e.get_tag() != -1) { sim_completed(e); MolecularCounter.AddVirionCount(1, Sim_system.sim_clock()); //sim_trace(1, "Virion release: " + e.get_src() + " from " + tag + " at " + Sim_system.sim_clock()); } } } }
/*******************************************************************************
 * Copyright (c) Intel Corporation
 * Copyright (c) 2017
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *******************************************************************************/
package org.osc.core.broker.rest.server.api;

import java.util.List;
import java.util.Set;

import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

import org.osc.core.broker.rest.server.ApiUtil;
import org.osc.core.broker.rest.server.OscAuthFilter;
import org.osc.core.broker.rest.server.ServerRestConstants;
import org.osc.core.broker.rest.server.annotations.OscAuth;
import org.osc.core.broker.service.api.AddSecurityGroupServiceApi;
import org.osc.core.broker.service.api.AddServiceFunctionChainServiceApi;
import org.osc.core.broker.service.api.AddVirtualizationConnectorServiceApi;
import org.osc.core.broker.service.api.BindSecurityGroupServiceApi;
import org.osc.core.broker.service.api.DeleteSecurityGroupServiceApi;
import org.osc.core.broker.service.api.DeleteServiceFunctionChainServiceApi;
import org.osc.core.broker.service.api.GetDtoFromEntityServiceApi;
import org.osc.core.broker.service.api.GetDtoFromEntityServiceFactoryApi;
import org.osc.core.broker.service.api.ListSecurityGroupBindingsBySgServiceApi;
import org.osc.core.broker.service.api.ListSecurityGroupByVcServiceApi;
import org.osc.core.broker.service.api.ListSecurityGroupMembersBySgServiceApi;
import org.osc.core.broker.service.api.ListServiceFunctionChainByVcServiceApi;
import org.osc.core.broker.service.api.ListVirtualizationConnectorServiceApi;
import org.osc.core.broker.service.api.SyncSecurityGroupServiceApi;
import org.osc.core.broker.service.api.UpdateSecurityGroupPropertiesServiceApi;
import org.osc.core.broker.service.api.UpdateSecurityGroupServiceApi;
import org.osc.core.broker.service.api.UpdateServiceFunctionChainServiceApi;
import org.osc.core.broker.service.api.UpdateVirtualizationConnectorServiceApi;
import org.osc.core.broker.service.api.server.UserContextApi;
import org.osc.core.broker.service.api.vc.DeleteVirtualizationConnectorServiceApi;
import org.osc.core.broker.service.dto.BaseDto;
import org.osc.core.broker.service.dto.BaseVirtualSystemPoliciesDto;
import org.osc.core.broker.service.dto.SecurityGroupDto;
import org.osc.core.broker.service.dto.SecurityGroupMemberItemDto;
import org.osc.core.broker.service.dto.ServiceFunctionChainDto;
import org.osc.core.broker.service.dto.VirtualSystemPolicyBindingDto;
import org.osc.core.broker.service.dto.VirtualizationConnectorDto;
import org.osc.core.broker.service.exceptions.ErrorCodeDto;
import org.osc.core.broker.service.request.AddOrUpdateSecurityGroupRequest;
import org.osc.core.broker.service.request.AddOrUpdateServiceFunctionChainRequest;
import org.osc.core.broker.service.request.BaseDeleteRequest;
import org.osc.core.broker.service.request.BaseIdRequest;
import org.osc.core.broker.service.request.BaseRequest;
import org.osc.core.broker.service.request.BindSecurityGroupRequest;
import org.osc.core.broker.service.request.DryRunRequest;
import org.osc.core.broker.service.request.GetDtoFromEntityRequest;
import org.osc.core.broker.service.request.UpdateSecurityGroupMemberRequest;
import org.osc.core.broker.service.request.VirtualizationConnectorRequest;
import org.osc.core.broker.service.response.BaseJobResponse;
import org.osc.core.broker.service.response.BaseResponse;
import org.osc.core.broker.service.response.BindSecurityGroupResponse;
import org.osc.core.broker.service.response.ListResponse;
import org.osc.core.broker.service.response.SetResponse;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.Reference;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import io.swagger.annotations.ApiResponse;
import io.swagger.annotations.ApiResponses;
import io.swagger.annotations.Authorization;

/**
 * JAX-RS resource exposing CRUD operations for Virtualization Connectors and their
 * nested Security Groups (and, per the injected services, Service Function Chains).
 * All endpoints require Basic Auth (@OscAuth) and record the calling user via
 * {@link UserContextApi}; request handling is delegated to injected service APIs.
 *
 * NOTE(review): this class continues beyond this chunk; the trailing method below is
 * truncated by the chunk boundary and is reproduced as-is.
 */
@Component(service = VirtualizationConnectorApis.class)
@Api(tags = "Operations for Virtualization Connectors", authorizations = { @Authorization(value = "Basic Auth") })
@Path(ServerRestConstants.SERVER_API_PATH_PREFIX + "/virtualizationConnectors")
@Consumes({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
@OscAuth
public class VirtualizationConnectorApis {

    private static final Logger logger = LoggerFactory.getLogger(VirtualizationConnectorApis.class);

    // OSGi-injected service delegates; one per exposed operation.
    @Reference
    private ApiUtil apiUtil;

    @Reference
    private UpdateVirtualizationConnectorServiceApi updateVirtualizationConnectorService;

    @Reference
    private AddVirtualizationConnectorServiceApi addVirtualizationConnectorService;

    @Reference
    private AddSecurityGroupServiceApi addSecurityGroupService;

    @Reference
    private UpdateSecurityGroupServiceApi updateSecurityGroupService;

    @Reference
    private UpdateSecurityGroupPropertiesServiceApi updateSecurityGroupPropertiesService;

    @Reference
    private DeleteSecurityGroupServiceApi deleteSecurityGroupService;

    @Reference
    private ListSecurityGroupBindingsBySgServiceApi listSecurityGroupBindingsBySgService;

    @Reference
    private BindSecurityGroupServiceApi bindSecurityGroupService;

    @Reference
    private ListSecurityGroupByVcServiceApi listSecurityGroupByVcService;

    @Reference
    private ListSecurityGroupMembersBySgServiceApi listSecurityGroupMembersBySgService;

    @Reference
    private SyncSecurityGroupServiceApi syncSecurityGroupService;

    @Reference
    private DeleteVirtualizationConnectorServiceApi deleteVirtualizationConnectorService;

    @Reference
    private ListVirtualizationConnectorServiceApi listVirtualizationConnectorService;

    @Reference
    private GetDtoFromEntityServiceFactoryApi getDtoFromEntityServiceFactory;

    @Reference
    private AddServiceFunctionChainServiceApi addServiceFunctionChainService;

    @Reference
    private ListServiceFunctionChainByVcServiceApi listServiceFunctionChainByVcService;

    // NOTE(review): the two fields below are package-private (no 'private' modifier)
    // unlike their siblings — confirm whether that is intentional.
    @Reference
    DeleteServiceFunctionChainServiceApi deleteServiceFunctionChainService;

    @Reference
    UpdateServiceFunctionChainServiceApi updateServiceFunctionChainService;

    @Reference
    private UserContextApi userContext;

    @ApiOperation(value = "Lists All Virtualization Connectors",
            notes = "Password information is not returned as it is sensitive information",
            response = VirtualizationConnectorDto.class, responseContainer = "Set")
    @ApiResponses(value = { @ApiResponse(code = 200, message = "Successful operation"),
            @ApiResponse(code = 400, message = "In case of any error", response = ErrorCodeDto.class) })
    @GET
    public List<VirtualizationConnectorDto> getVirtualizationConnectors(@Context HttpHeaders headers) {
        logger.info("Listing Virtualization Connectors");
        this.userContext.setUser(OscAuthFilter.getUsername(headers));
        // Service returns a raw ListResponse; cast is safe for this service's contract.
        @SuppressWarnings("unchecked")
        ListResponse<VirtualizationConnectorDto> response = (ListResponse<VirtualizationConnectorDto>) this.apiUtil
                .getListResponse(this.listVirtualizationConnectorService, new BaseRequest<>(true));
        return response.getList();
    }

    @ApiOperation(value = "Retrieves the Virtualization Connector by Id",
            notes = "Password information is not returned as it is sensitive information",
            response = VirtualizationConnectorDto.class)
    @ApiResponses(value = { @ApiResponse(code = 200, message = "Successful operation"),
            @ApiResponse(code = 400, message = "In case of any error", response = ErrorCodeDto.class) })
    @Path("/{vcId}")
    @GET
    public VirtualizationConnectorDto getVirtualizationConnector(@Context HttpHeaders headers,
            @ApiParam(value = "The Virtualization Connector Id") @PathParam("vcId") Long vcId) {
        logger.info("getting Virtualization Connector " + vcId);
        this.userContext.setUser(OscAuthFilter.getUsername(headers));
        // Look the entity up by id via the generic DTO-from-entity service.
        GetDtoFromEntityRequest getDtoRequest = new GetDtoFromEntityRequest();
        getDtoRequest.setEntityId(vcId);
        getDtoRequest.setEntityName("VirtualizationConnector");
        return this.apiUtil
                .submitBaseRequestToService(this.getDtoFromEntityServiceFactory.getService(VirtualizationConnectorDto.class), getDtoRequest)
                .getDto();
    }

    /**
     * Creates a Virtualization connector.
     *
     * @param vcRequest the connector definition to create
     * @return HTTP response wrapping the create result
     */
    // Virtualization Connector APIS
    @ApiOperation(value = "Creates a Virtualization Connector",
            notes = "Creates a Virtualization Connector<br/>"
                    + "If we are unable to connect to the endpoint using the credentials provided, this call will fail.<br/>"
                    + "To skip validation of IP and credentials 'skipRemoteValidation' flag can be used.",
            response = BaseResponse.class)
    @ApiResponses(value = { @ApiResponse(code = 200, message = "Successful operation"),
            @ApiResponse(code = 400, message = "In case of any error", response = ErrorCodeDto.class) })
    @POST
    public Response createVirtualizationConnector(@Context HttpHeaders headers,
            @ApiParam(required = true) VirtualizationConnectorRequest vcRequest) {
        logger.info("Creating Virtualization Connector...");
        this.userContext.setUser(OscAuthFilter.getUsername(headers));
        // DryRunRequest lets the caller skip remote IP/credential validation.
        return this.apiUtil.getResponseForBaseRequest(this.addVirtualizationConnectorService,
                new DryRunRequest<>(vcRequest, vcRequest.isSkipRemoteValidation()));
    }

    /**
     * Updates a Virtualization connector.
     *
     * @return the Id of the updated virtualization connector
     */
    @ApiOperation(value = "Updates a Virtualization Connector.",
            notes = "Updates a Virtualization Connector.<br/>"
                    + "If we are unable to connect to the endpoint using the credentials provided, this call will fail.<br/>"
                    + "To skip validation of IP and credentials 'skipRemoteValidation' flag can be used.<br/>"
                    + "Controller type changes from NONE->new-type is allowed unconditionally. "
                    + "For all other cases (current-type->NONE, current-type->new-type), there should not be any"
                    + "virtual systems using it.<br/> Password information is Optional for update requests as OSC will use "
                    + "the current password information.",
            response = BaseResponse.class)
    @ApiResponses(value = { @ApiResponse(code = 200, message = "Successful operation"),
            @ApiResponse(code = 400, message = "In case of any error", response = ErrorCodeDto.class) })
    @Path("/{vcId}")
    @PUT
    public Response updateVirtualizationConnector(@Context HttpHeaders headers,
            @ApiParam(value = "The Virtualization Connector Id") @PathParam("vcId") Long vcId,
            @ApiParam(required = true) VirtualizationConnectorRequest vcRequest) {
        logger.info("Updating Virtualization Connector " + vcId);
        this.userContext.setUser(OscAuthFilter.getUsername(headers));
        // Reject requests whose body id conflicts with the path id.
        this.apiUtil.setIdOrThrow(vcRequest, vcId, "Virtualization Connector");
        return this.apiUtil.getResponseForBaseRequest(this.updateVirtualizationConnectorService,
                new DryRunRequest<>(vcRequest, vcRequest.isSkipRemoteValidation()));
    }

    /**
     * Delete a Virtualization connector.
     *
     * @param vcId id of the connector to delete
     * @return HTTP response wrapping the delete result
     */
    @ApiOperation(value = "Deletes a Virtualization Connector",
            notes = "Deletes a Virtualization Connector if not referenced by any Virtual Systems")
    @ApiResponses(value = { @ApiResponse(code = 200, message = "Successful operation"),
            @ApiResponse(code = 400, message = "In case of any error", response = ErrorCodeDto.class) })
    @Path("/{vcId}")
    @DELETE
    public Response deleteVirtualizationConnector(@Context HttpHeaders headers,
            @ApiParam(value = "The Virtualization Connector Id") @PathParam("vcId") Long vcId) {
        logger.info("Deleting Virtualization Connector " + vcId);
        this.userContext.setUser(OscAuthFilter.getUsername(headers));
        return this.apiUtil.getResponseForBaseRequest(this.deleteVirtualizationConnectorService, new BaseIdRequest(vcId));
    }

    // Security Group APIs
    @ApiOperation(value = "Lists Security Groups",
            notes = "Lists Security Groups owned by the Virtualization Connector",
            response = SecurityGroupDto.class, responseContainer = "Set")
    @ApiResponses(value = { @ApiResponse(code = 200, message = "Successful operation"),
            @ApiResponse(code = 400, message = "In case of any error", response = ErrorCodeDto.class) })
    @Path("/{vcId}/securityGroups")
    @GET
    public List<SecurityGroupDto> getSecurityGroupByVirtualiazationConnector(@Context HttpHeaders headers,
            @ApiParam(value = "The Virtualization Connector Id") @PathParam("vcId") Long vcId) {
        logger.info("Listing Security groups");
        this.userContext.setUser(OscAuthFilter.getUsername(headers));
        @SuppressWarnings("unchecked")
        ListResponse<SecurityGroupDto> response = (ListResponse<SecurityGroupDto>) this.apiUtil
                .getListResponse(this.listSecurityGroupByVcService, new BaseIdRequest(vcId));
        return response.getList();
    }

    @ApiOperation(value = "Retrieves a Security Group",
            notes = "Retrieves the Security Group owned by Virtualization Connector provided and by the specified Security Group Id",
            response = SecurityGroupDto.class)
    @ApiResponses(value = { @ApiResponse(code = 200, message = "Successful operation"),
            @ApiResponse(code = 400, message = "In case of any error", response = ErrorCodeDto.class) })
    @Path("/{vcId}/securityGroups/{sgId}")
    @GET
    public SecurityGroupDto getSecurityGroup(@Context HttpHeaders headers,
            @ApiParam(value = "The Virtualization Connector Id") @PathParam("vcId") Long vcId,
            @ApiParam(value = "The Security Group Id") @PathParam("sgId") Long sgId) {
        logger.info("getting Security Group " + sgId);
        this.userContext.setUser(OscAuthFilter.getUsername(headers));
        GetDtoFromEntityRequest getDtoRequest = new GetDtoFromEntityRequest();
        getDtoRequest.setEntityId(sgId);
        getDtoRequest.setEntityName("SecurityGroup");
        GetDtoFromEntityServiceApi<SecurityGroupDto> getDtoService =
                this.getDtoFromEntityServiceFactory.getService(SecurityGroupDto.class);
        SecurityGroupDto dto = this.apiUtil.submitBaseRequestToService(getDtoService, getDtoRequest).getDto();
        // Ensure the security group actually belongs to the connector in the path.
        this.apiUtil.validateParentIdMatches(dto, vcId, "SecurityGroup");
        return dto;
    }

    @ApiOperation(value = "Creates a Security Group",
            notes = "Creates a Security Group owned by Virtualization Connector provided and kicks off a "
                    + "sync job",
            response = BaseJobResponse.class)
    @ApiResponses(value = { @ApiResponse(code = 200, message = "Successful operation"),
            @ApiResponse(code = 400, message = "In case of any error", response = ErrorCodeDto.class) })
    @Path("/{vcId}/securityGroups")
    @POST
    public Response createSecurityGroup(@Context HttpHeaders headers,
            @ApiParam(value = "The Virtualization Connector Id") @PathParam("vcId") Long vcId,
            @ApiParam(required = true) SecurityGroupDto sgDto) {
        logger.info("Creating Security Group ...");
        this.userContext.setUser(OscAuthFilter.getUsername(headers));
        // id must be null on create; parent id comes from the path.
        this.apiUtil.setIdAndParentIdOrThrow(sgDto, null, vcId, "Security Group");
        AddOrUpdateSecurityGroupRequest request = new AddOrUpdateSecurityGroupRequest();
        request.setDto(sgDto);
        return this.apiUtil.getResponseForBaseRequest(this.addSecurityGroupService, request);
    }

    @ApiOperation(value = "Updates a Security Group",
            notes = "Updates the Security Group owned by Virtualization Connector provided and kicks off a sync job",
            response = BaseJobResponse.class)
    @ApiResponses(value = { @ApiResponse(code = 200, message = "Successful operation"),
            @ApiResponse(code = 400, message = "In case of any error", response = ErrorCodeDto.class) })
    @Path("/{vcId}/securityGroups/{sgId}")
    @PUT
    public Response updateSecurityGroup(@Context HttpHeaders headers,
            @ApiParam(value = "The Virtualization Connector Id") @PathParam("vcId") Long vcId,
            @ApiParam(value = "The Security Group Id") @PathParam("sgId") Long sgId,
            @ApiParam(required = true) SecurityGroupDto sgDto) {
        logger.info("Updating Security Group " + sgId);
        this.userContext.setUser(OscAuthFilter.getUsername(headers));
        this.apiUtil.setIdAndParentIdOrThrow(sgDto, sgId, vcId, "Security Group");
        AddOrUpdateSecurityGroupRequest request = new AddOrUpdateSecurityGroupRequest();
        request.setDto(sgDto);
        // NOTE(review): delegates to the *properties* update service, not
        // updateSecurityGroupService (which is injected but unused here) — confirm.
        return this.apiUtil.getResponseForBaseRequest(this.updateSecurityGroupPropertiesService, request);
    }

    @ApiOperation(value = "Deletes a Security Group",
            notes = "Deletes the Security Group owned by Virtualization Connector provided and kicks off a sync job",
            response = BaseJobResponse.class)
    @ApiResponses(value = { @ApiResponse(code = 200, message = "Successful operation"),
            @ApiResponse(code = 400, message = "In case of any error", response = ErrorCodeDto.class) })
    @Path("/{vcId}/securityGroups/{sgId}")
    @DELETE
    public Response deleteSecurityGroup(@Context HttpHeaders headers,
            @ApiParam(value = "The Virtualization Connector Id") @PathParam("vcId") Long vcId,
            @ApiParam(value = "The Security Group Id") @PathParam("sgId") Long sgId) {
        logger.info("Deleting Security Group.. " + sgId);
        this.userContext.setUser(OscAuthFilter.getUsername(headers));
        return this.apiUtil.getResponseForBaseRequest(this.deleteSecurityGroupService,
                new BaseDeleteRequest(sgId, vcId, false)); // false as this is not force delete
    }

    @ApiOperation(value = "Force Delete a Security Group",
            notes = "Force Deletes a Security Group owned by Virtualization Connector provided and kicks off a sync job.<br/>"
                    + "Warning: Force delete just deletes the entity from OSC, please make sure to clean the related entities outside of OSC.",
            response = BaseJobResponse.class)
    @ApiResponses(value = { @ApiResponse(code = 200, message = "Successful operation"),
            @ApiResponse(code = 400, message = "In case of any error", response = ErrorCodeDto.class) })
    @Path("/{vcId}/securityGroups/{sgId}/force")
    @DELETE
    public Response forceDeleteSecurityGroup(@Context HttpHeaders headers,
            @ApiParam(value = "The Virtualization Connector Id") @PathParam("vcId") Long vcId,
            @ApiParam(value = "The Security Group Id") @PathParam("sgId") Long sgId) {
        // NOTE(review): the source chunk ends mid-statement here; the remainder of
        // this method (and class) lies beyond this view and is reproduced truncated.
        logger.info("Deleting Security Group.. 
" + sgId); this.userContext.setUser(OscAuthFilter.getUsername(headers)); return this.apiUtil.getResponseForBaseRequest(this.deleteSecurityGroupService, new BaseDeleteRequest(sgId, vcId, true)); } @ApiOperation(value = "Sync a Security Group", notes = "Sync a Security Group Object", response = BaseJobResponse.class) @ApiResponses(value = { @ApiResponse(code = 200, message = "Successful operation"), @ApiResponse(code = 400, message = "In case of any error", response = ErrorCodeDto.class) }) @Path("/{vcId}/securityGroups/{sgId}/sync") @PUT public Response syncSecurityGroup(@Context HttpHeaders headers, @ApiParam(value = "The Virtualization Connector Id") @PathParam("vcId") Long vcId, @ApiParam(value = "The Security Group Id") @PathParam("sgId") Long sgId) { logger.info("Sync Security Group" + sgId); this.userContext.setUser(OscAuthFilter.getUsername(headers)); return this.apiUtil.getResponseForBaseRequest(this.syncSecurityGroupService, new BaseIdRequest(sgId, vcId)); } // Security Group member APIs @ApiOperation(value = "Lists Security Group Members", notes = "Lists Security Group Member owned by Security Group and Virtualization Connector provided.", response = SecurityGroupMemberItemDto.class, responseContainer = "Set") @ApiResponses(value = { @ApiResponse(code = 200, message = "Successful operation"), @ApiResponse(code = 400, message = "In case of any error", response = ErrorCodeDto.class) }) @Path("/{vcId}/securityGroups/{sgId}/members") @GET public Set<SecurityGroupMemberItemDto> getSecurityGroupMembers(@Context HttpHeaders headers, @ApiParam(value = "The Virtualization Connector Id") @PathParam("vcId") Long vcId, @ApiParam(value = "The Security Group Id") @PathParam("sgId") Long sgId) { logger.info("Listing Members for Security Group - " + sgId); this.userContext.setUser(OscAuthFilter.getUsername(headers)); GetDtoFromEntityRequest getDtoRequest = new GetDtoFromEntityRequest(); getDtoRequest.setEntityId(sgId); getDtoRequest.setEntityName("SecurityGroup"); 
GetDtoFromEntityServiceApi<SecurityGroupDto> getDtoService = this.getDtoFromEntityServiceFactory.getService(SecurityGroupDto.class); SecurityGroupDto dto = this.apiUtil.submitBaseRequestToService(getDtoService, getDtoRequest).getDto(); this.apiUtil.validateParentIdMatches(dto, vcId, "SecurityGroup"); @SuppressWarnings("unchecked") SetResponse<SecurityGroupMemberItemDto> memberList = (SetResponse<SecurityGroupMemberItemDto>) this.apiUtil .getSetResponse(this.listSecurityGroupMembersBySgService, new BaseIdRequest(sgId)); return memberList.getSet(); } @ApiOperation(value = "Updates the Security Group Members", notes = "Updates the member list in a Security Group.", response = BaseJobResponse.class) @ApiResponses(value = { @ApiResponse(code = 200, message = "Successful operation"), @ApiResponse(code = 400, message = "In case of any error", response = ErrorCodeDto.class) }) @Path("/{vcId}/securityGroups/{sgId}/members") @PUT public Response updateSecurityGroupMembers(@Context HttpHeaders headers, @ApiParam(value = "The Virtualization Connector Id") @PathParam("vcId") Long vcId, @ApiParam(value = "The Security Group Id") @PathParam("sgId") Long sgId, @ApiParam(required = true) UpdateSecurityGroupMemberRequest sgUpdateRequest) { logger.info("Updating Security Group " + sgId); this.userContext.setUser(OscAuthFilter.getUsername(headers)); if (!sgId.equals(sgUpdateRequest.getId())) { throw this.apiUtil.createIdMismatchException(sgUpdateRequest.getId(), "Security Group"); } else if (!vcId.equals(sgUpdateRequest.getParentId())) { throw this.apiUtil.createParentChildMismatchException(sgUpdateRequest.getParentId(), "Security Group"); } AddOrUpdateSecurityGroupRequest request = new AddOrUpdateSecurityGroupRequest(); request.setMembers(sgUpdateRequest.getMembers()); request.setDto(new SecurityGroupDto()); request.getDto().setId(sgId); request.getDto().setParentId(vcId); return this.apiUtil.getResponseForBaseRequest(this.updateSecurityGroupService, request); } // SG Interface APIS 
@ApiOperation(value = "Retrieves the Security Group Bindings", notes = "Retrieves the all available Security Group Bindings to Security Function Service(Distributed Appliance).<br/>" + "The binded flag indicates whether the binding is active.", response = VirtualSystemPolicyBindingDto.class, responseContainer = "Set") @ApiResponses(value = { @ApiResponse(code = 200, message = "Successful operation"), @ApiResponse(code = 400, message = "In case of any error", response = ErrorCodeDto.class) }) @Path("/{vcId}/securityGroups/{sgId}/bindings") @GET public BindSecurityGroupResponse getVirtualSecurityPolicyBindings(@Context HttpHeaders headers, @ApiParam(value = "The Virtualization Connector Id") @PathParam("vcId") Long vcId, @ApiParam(value = "The Security Group Id") @PathParam("sgId") Long sgId) { logger.info("Listing Bindings for Security Group - " + sgId); this.userContext.setUser(OscAuthFilter.getUsername(headers)); GetDtoFromEntityRequest getDtoRequest = new GetDtoFromEntityRequest(); getDtoRequest.setEntityId(sgId); getDtoRequest.setEntityName("SecurityGroup"); SecurityGroupDto dto = this.apiUtil .submitBaseRequestToService(this.getDtoFromEntityServiceFactory.getService(SecurityGroupDto.class), getDtoRequest).getDto(); this.apiUtil.validateParentIdMatches(dto, vcId, "SecurityGroup"); return this.apiUtil.submitBaseRequestToService(this.listSecurityGroupBindingsBySgService, new BaseIdRequest(sgId)); } @ApiOperation(value = "Set Security Group Bindings", notes = "Adds/Update/Remove Security Group Bindings to Security Function Services.<br/>" + "To Remove all services, pass in empty json.<br/>" + "To update services binded to, pass in the updated list of services.<br/>", response = BaseJobResponse.class) @ApiResponses(value = { @ApiResponse(code = 200, message = "Successful operation"), @ApiResponse(code = 400, message = "In case of any error", response = ErrorCodeDto.class) }) @Path("/{vcId}/securityGroups/{sgId}/bindings") @PUT public Response 
updateVirtualSecurityPolicyBindings(@Context HttpHeaders headers, @ApiParam(value = "The Virtualization Connector Id") @PathParam("vcId") Long vcId, @ApiParam(value = "The Security Group Id") @PathParam("sgId") Long sgId, @ApiParam(value = "List of Bindings", required = true) Set<VirtualSystemPolicyBindingDto> bindings) { logger.info("Update Bindings for Security Group - " + sgId); this.userContext.setUser(OscAuthFilter.getUsername(headers)); BindSecurityGroupRequest bindRequest = new BindSecurityGroupRequest(); bindRequest.setVcId(vcId); bindRequest.setSecurityGroupId(sgId); bindRequest.setBindSfc(false); for (VirtualSystemPolicyBindingDto vsBinding : bindings) { bindRequest.addServiceToBindTo(vsBinding); } return this.apiUtil.getResponseForBaseRequest(this.bindSecurityGroupService, bindRequest); } @ApiOperation(value = "Set Security Group Bindings with Service Function Chain (Openstack Only)", notes = "Adds/Updates Security Group Bindings to Security Function Services.<br/>" + "To Remove all policies, pass in empty json.<br/>" + "To update services binded to, pass in the updated list of services.<br/>", response = BaseJobResponse.class) @ApiResponses(value = { @ApiResponse(code = 200, message = "Successful operation"), @ApiResponse(code = 400, message = "In case of any error", response = ErrorCodeDto.class) }) @Path("/{vcId}/securityGroups/{sgId}/sfc/{sfcId}/bindings") @PUT public Response updateVirtualSecurityPolicyBindingsWithSfc(@Context HttpHeaders headers, @ApiParam(value = "The Virtualization Connector Id") @PathParam("vcId") Long vcId, @ApiParam(value = "The Security Group Id") @PathParam("sgId") Long sgId, @ApiParam(value = "Service Function Chain Id, required = false") @PathParam("sfcId") Long sfcId, @ApiParam(value = "List of Policies", required = true) Set<BaseVirtualSystemPoliciesDto> policies) { logger.info("Update Binding SFC and Policies for Security Group - " + sgId); this.userContext.setUser(OscAuthFilter.getUsername(headers)); 
BindSecurityGroupRequest bindRequest = new BindSecurityGroupRequest(); bindRequest.setVcId(vcId); bindRequest.setSecurityGroupId(sgId); bindRequest.setSfcId(sfcId); bindRequest.setBindSfc(true); // stub SFC virtual system polices into VirtualSystemPolicyBindingDto for(BaseVirtualSystemPoliciesDto policy : policies) { VirtualSystemPolicyBindingDto bindDto = new VirtualSystemPolicyBindingDto(policy.getVirtualSystemId(), policy.getName(), policy.getPolicyIds(), policy.getPolicies()); bindRequest.addServiceToBindTo(bindDto); } return this.apiUtil.getResponseForBaseRequest(this.bindSecurityGroupService, bindRequest); } @ApiOperation(value = "Deletes a Service Function Chain binding with Security Group", notes = "Unbind a Serice Function Chain from a Given Security group and Virtualization Connector") @ApiResponses(value = { @ApiResponse(code = 200, message = "Successful operation"), @ApiResponse(code = 400, message = "In case of any error", response = ErrorCodeDto.class) }) @Path("/{vcId}/securityGroups/{sgId}/sfc") @DELETE public Response unbindSecurityGroupWithServiceFunctionChain(@Context HttpHeaders headers, @ApiParam(value = "The Virtualization Connector Id") @PathParam("vcId") Long vcId, @ApiParam(value = "Security Group Id") @PathParam("sgId") Long sgId) { logger.info("Unbind Security Group Id " + sgId + "with Service Function Chain "); this.userContext.setUser(OscAuthFilter.getUsername(headers)); BindSecurityGroupRequest bindRequest = new BindSecurityGroupRequest(); bindRequest.setVcId(vcId); bindRequest.setSecurityGroupId(sgId); bindRequest.setBindSfc(true); return this.apiUtil.getResponseForBaseRequest(this.bindSecurityGroupService, bindRequest); } // Service Function Chain APIs @ApiOperation(value = "Creates a Service Function Chain", notes = "Creates a Service Fucntion Chain owned by Virtualization Connector provided and kicks off a " + "sync job", response = BaseJobResponse.class) @ApiResponses(value = { @ApiResponse(code = 200, message = "Successful 
operation"), @ApiResponse(code = 400, message = "In case of any error", response = ErrorCodeDto.class) }) @Path("/{vcId}/serviceFunctionChain") @POST public Response createServiceFunctionChain(@Context HttpHeaders headers, @ApiParam(value = "The Virtualization Connector Id") @PathParam("vcId") Long vcId, @ApiParam(required = true) AddOrUpdateServiceFunctionChainRequest sfcAddRequest) { logger.info("Creating Service Function Chain ..."); this.userContext.setUser(OscAuthFilter.getUsername(headers)); if (sfcAddRequest.getDto() == null) { sfcAddRequest.setDto(new BaseDto()); } this.apiUtil.setIdAndParentIdOrThrow(sfcAddRequest.getDto(), null, vcId, "Service Function Chain"); return this.apiUtil.getResponseForBaseRequest(this.addServiceFunctionChainService, sfcAddRequest); } @ApiOperation(value = "Lists Service Function Chains", notes = "Lists Service Function Chains owned by the Virtualization Connector", response = ServiceFunctionChainDto.class, responseContainer = "Set") @ApiResponses(value = { @ApiResponse(code = 200, message = "Successful operation"), @ApiResponse(code = 400, message = "In case of any error", response = ErrorCodeDto.class) }) @Path("/{vcId}/serviceFunctionChain") @GET public List<ServiceFunctionChainDto> getServiceFunctionChainByVirtualiazationConnector(@Context HttpHeaders headers, @ApiParam(value = "The Virtualization Connector Id") @PathParam("vcId") Long vcId) { logger.info("Listing Service Function Chains"); this.userContext.setUser(OscAuthFilter.getUsername(headers)); @SuppressWarnings("unchecked") ListResponse<ServiceFunctionChainDto> response = (ListResponse<ServiceFunctionChainDto>) this.apiUtil .getListResponse(this.listServiceFunctionChainByVcService, new BaseIdRequest(vcId)); return response.getList(); } @ApiOperation(value = "Retrieves a Service Function Chain", notes = "Retrieves the Service Function Chain owned by Virtualization Connector provided and by the specified Service Function Chain Id", response = 
ServiceFunctionChainDto.class) @ApiResponses(value = { @ApiResponse(code = 200, message = "Successful operation"), @ApiResponse(code = 400, message = "In case of any error", response = ErrorCodeDto.class) }) @Path("/{vcId}/serviceFunctionChain/{sfcId}") @GET public ServiceFunctionChainDto getServiceFunctionChain(@Context HttpHeaders headers, @ApiParam(value = "The Virtualization Connector Id") @PathParam("vcId") Long vcId, @ApiParam(value = "The Service Function Chain Id") @PathParam("sfcId") Long sfcId) { logger.info("getting Service Function Chain " + sfcId); this.userContext.setUser(OscAuthFilter.getUsername(headers)); GetDtoFromEntityRequest getDtoRequest = new GetDtoFromEntityRequest(); getDtoRequest.setEntityId(sfcId); getDtoRequest.setEntityName("ServiceFunctionChain"); GetDtoFromEntityServiceApi<ServiceFunctionChainDto> getDtoService = this.getDtoFromEntityServiceFactory.getService(ServiceFunctionChainDto.class); ServiceFunctionChainDto dto = this.apiUtil.submitBaseRequestToService(getDtoService, getDtoRequest).getDto(); this.apiUtil.validateParentIdMatches(dto, vcId, "ServiceFunctionChain"); return dto; } @ApiOperation(value = "Deletes a Service Function Chain", notes = "Delete a Service Function Chain owned by a Virutlaization Connector") @ApiResponses(value = { @ApiResponse(code = 200, message = "Successful operation"), @ApiResponse(code = 400, message = "In case of any error", response = ErrorCodeDto.class) }) @Path("/{vcId}/serviceFunctionChain/{sfcId}") @DELETE public Response deleteServiceFunctionChain(@Context HttpHeaders headers, @ApiParam(value = "The Virtualization Connector Id") @PathParam("vcId") Long vcId, @ApiParam(value = "The Service Function Chain Id") @PathParam("sfcId") Long sfcId) { logger.info("Deleting Service Function Chain " + sfcId); this.userContext.setUser(OscAuthFilter.getUsername(headers)); return this.apiUtil.getResponseForBaseRequest(this.deleteServiceFunctionChainService, new BaseIdRequest(sfcId, vcId)); } 
@ApiOperation(value = "Update a Service Function Chain", notes = "Update a Service Fucntion Chain owned by Virtualization Connector provided and kicks off a " + "sync job", response = BaseJobResponse.class) @ApiResponses(value = { @ApiResponse(code = 200, message = "Successful operation"), @ApiResponse(code = 400, message = "In case of any error", response = ErrorCodeDto.class) }) @Path("/{vcId}/serviceFunctionChain/{sfcId}") @PUT public Response updateServiceFunctionChain(@Context HttpHeaders headers, @ApiParam(value = "The Virtualization Connector Id") @PathParam("vcId") Long vcId, @ApiParam(value = "The Service Function Chain Id") @PathParam("sfcId") Long sfcId, @ApiParam(required = true) AddOrUpdateServiceFunctionChainRequest sfcUpdateRequest) { logger.info("Update Service Function Chain ..."); this.userContext.setUser(OscAuthFilter.getUsername(headers)); if (sfcUpdateRequest.getDto() == null) { sfcUpdateRequest.setDto(new BaseDto()); } this.apiUtil.setIdAndParentIdOrThrow(sfcUpdateRequest.getDto(), sfcId, vcId, "Service Function Chain"); return this.apiUtil.getResponseForBaseRequest(this.updateServiceFunctionChainService, sfcUpdateRequest); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.zeppelin.interpreter.launcher; import org.junit.Test; import java.io.File; import java.io.IOException; import java.util.HashMap; import java.util.Properties; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; public class K8sRemoteInterpreterProcessTest { @Test public void testGetHostPort() { // given Kubectl kubectl = mock(Kubectl.class); when(kubectl.getNamespace()).thenReturn("default"); Properties properties = new Properties(); HashMap<String, String> envs = new HashMap<String, String>(); K8sRemoteInterpreterProcess intp = new K8sRemoteInterpreterProcess( kubectl, new File(".skip"), "interpreter-container:1.0", "shared_process", "sh", "shell", properties, envs, "zeppelin.server.hostname", "12320", false, "spark-container:1.0", 10, false); // then assertEquals(String.format("%s.%s.svc", intp.getPodName(), kubectl.getNamespace()), intp.getHost()); assertEquals(12321, intp.getPort()); } @Test public void testPredefinedPortNumbers() { // given Kubectl kubectl = mock(Kubectl.class); 
when(kubectl.getNamespace()).thenReturn("default"); Properties properties = new Properties(); HashMap<String, String> envs = new HashMap<String, String>(); K8sRemoteInterpreterProcess intp = new K8sRemoteInterpreterProcess( kubectl, new File(".skip"), "interpreter-container:1.0", "shared_process", "sh", "shell", properties, envs, "zeppelin.server.hostname", "12320", false, "spark-container:1.0", 10, false); // following values are hardcoded in k8s/interpreter/100-interpreter.yaml. // when change those values, update the yaml file as well. assertEquals(12321, intp.getPort()); assertEquals(22321, intp.getSparkDriverPort()); assertEquals(22322, intp.getSparkBlockmanagerPort()); } @Test public void testGetTemplateBindings() throws IOException { // given Kubectl kubectl = mock(Kubectl.class); when(kubectl.getNamespace()).thenReturn("default"); Properties properties = new Properties(); properties.put("my.key1", "v1"); HashMap<String, String> envs = new HashMap<String, String>(); envs.put("MY_ENV1", "V1"); K8sRemoteInterpreterProcess intp = new K8sRemoteInterpreterProcess( kubectl, new File(".skip"), "interpreter-container:1.0", "shared_process", "sh", "shell", properties, envs, "zeppelin.server.service", "12320", false, "spark-container:1.0", 10, false); // when Properties p = intp.getTemplateBindings(); // then assertEquals("default", p.get("zeppelin.k8s.namespace")); assertEquals(intp.getPodName(), p.get("zeppelin.k8s.interpreter.pod.name")); assertEquals("sh", p.get("zeppelin.k8s.interpreter.container.name")); assertEquals("interpreter-container:1.0", p.get("zeppelin.k8s.interpreter.container.image")); assertEquals("shared_process", p.get("zeppelin.k8s.interpreter.group.id")); assertEquals("sh", p.get("zeppelin.k8s.interpreter.group.name")); assertEquals("shell", p.get("zeppelin.k8s.interpreter.setting.name")); assertEquals(true , p.containsKey("zeppelin.k8s.interpreter.localRepo")); assertEquals("12321:12321" , p.get("zeppelin.k8s.interpreter.rpc.portRange")); 
assertEquals("zeppelin.server.service" , p.get("zeppelin.k8s.server.rpc.service")); assertEquals("12320" , p.get("zeppelin.k8s.server.rpc.portRange")); assertEquals("v1", p.get("my.key1")); assertEquals("V1", envs.get("MY_ENV1")); envs = (HashMap<String, String>) p.get("zeppelin.k8s.envs"); assertTrue(envs.containsKey("SERVICE_DOMAIN")); assertTrue(envs.containsKey("ZEPPELIN_HOME")); } @Test public void testGetTemplateBindingsForSpark() throws IOException { // given Kubectl kubectl = mock(Kubectl.class); when(kubectl.getNamespace()).thenReturn("default"); Properties properties = new Properties(); properties.put("my.key1", "v1"); properties.put("master", "k8s://http://api"); HashMap<String, String> envs = new HashMap<String, String>(); envs.put("MY_ENV1", "V1"); envs.put("SPARK_SUBMIT_OPTIONS", "my options"); envs.put("SERVICE_DOMAIN", "mydomain"); K8sRemoteInterpreterProcess intp = new K8sRemoteInterpreterProcess( kubectl, new File(".skip"), "interpreter-container:1.0", "shared_process", "spark", "myspark", properties, envs, "zeppelin.server.service", "12320", false, "spark-container:1.0", 10, false); // when intp.start("mytestUser"); Properties p = intp.getTemplateBindings(); // then assertEquals("spark-container:1.0", p.get("zeppelin.k8s.spark.container.image")); assertEquals(String.format("//4040-%s.%s", intp.getPodName(), "mydomain"), p.get("zeppelin.spark.uiWebUrl")); envs = (HashMap<String, String>) p.get("zeppelin.k8s.envs"); assertTrue( envs.containsKey("SPARK_HOME")); String sparkSubmitOptions = envs.get("SPARK_SUBMIT_OPTIONS"); assertTrue(sparkSubmitOptions.startsWith("my options ")); assertTrue(sparkSubmitOptions.contains("spark.kubernetes.namespace=" + kubectl.getNamespace())); assertTrue(sparkSubmitOptions.contains("spark.kubernetes.driver.pod.name=" + intp.getPodName())); assertTrue(sparkSubmitOptions.contains("spark.kubernetes.container.image=spark-container:1.0")); assertTrue(sparkSubmitOptions.contains("spark.driver.host=" + intp.getHost())); 
assertTrue(sparkSubmitOptions.contains("spark.driver.port=" + intp.getSparkDriverPort())); assertTrue(sparkSubmitOptions.contains("spark.blockManager.port=" + intp.getSparkBlockmanagerPort())); assertFalse(sparkSubmitOptions.contains("--proxy-user")); assertTrue(intp.isSpark()); } @Test public void testGetTemplateBindingsForSparkWithProxyUser() throws IOException { // given Kubectl kubectl = mock(Kubectl.class); when(kubectl.getNamespace()).thenReturn("default"); Properties properties = new Properties(); properties.put("my.key1", "v1"); properties.put("master", "k8s://http://api"); HashMap<String, String> envs = new HashMap<String, String>(); envs.put("MY_ENV1", "V1"); envs.put("SPARK_SUBMIT_OPTIONS", "my options"); envs.put("SERVICE_DOMAIN", "mydomain"); K8sRemoteInterpreterProcess intp = new K8sRemoteInterpreterProcess( kubectl, new File(".skip"), "interpreter-container:1.0", "shared_process", "spark", "myspark", properties, envs, "zeppelin.server.service", "12320", false, "spark-container:1.0", 10, true); // when intp.start("mytestUser"); Properties p = intp.getTemplateBindings(); // then assertEquals("spark-container:1.0", p.get("zeppelin.k8s.spark.container.image")); assertEquals(String.format("//4040-%s.%s", intp.getPodName(), "mydomain"), p.get("zeppelin.spark.uiWebUrl")); envs = (HashMap<String, String>) p.get("zeppelin.k8s.envs"); assertTrue( envs.containsKey("SPARK_HOME")); String sparkSubmitOptions = envs.get("SPARK_SUBMIT_OPTIONS"); assertTrue(sparkSubmitOptions.startsWith("my options ")); assertTrue(sparkSubmitOptions.contains("spark.kubernetes.namespace=" + kubectl.getNamespace())); assertTrue(sparkSubmitOptions.contains("spark.kubernetes.driver.pod.name=" + intp.getPodName())); assertTrue(sparkSubmitOptions.contains("spark.kubernetes.container.image=spark-container:1.0")); assertTrue(sparkSubmitOptions.contains("spark.driver.host=" + intp.getHost())); assertTrue(sparkSubmitOptions.contains("spark.driver.port=" + intp.getSparkDriverPort())); 
assertTrue(sparkSubmitOptions.contains("spark.blockManager.port=" + intp.getSparkBlockmanagerPort())); assertTrue(sparkSubmitOptions.contains("--proxy-user mytestUser")); assertTrue(intp.isSpark()); } @Test public void testGetTemplateBindingsForSparkWithProxyUserAnonymous() throws IOException { // given Kubectl kubectl = mock(Kubectl.class); when(kubectl.getNamespace()).thenReturn("default"); Properties properties = new Properties(); properties.put("my.key1", "v1"); properties.put("master", "k8s://http://api"); HashMap<String, String> envs = new HashMap<String, String>(); envs.put("MY_ENV1", "V1"); envs.put("SPARK_SUBMIT_OPTIONS", "my options"); envs.put("SERVICE_DOMAIN", "mydomain"); K8sRemoteInterpreterProcess intp = new K8sRemoteInterpreterProcess( kubectl, new File(".skip"), "interpreter-container:1.0", "shared_process", "spark", "myspark", properties, envs, "zeppelin.server.service", "12320", false, "spark-container:1.0", 10, true); // when intp.start("anonymous"); Properties p = intp.getTemplateBindings(); // then assertEquals("spark-container:1.0", p.get("zeppelin.k8s.spark.container.image")); assertEquals(String.format("//4040-%s.%s", intp.getPodName(), "mydomain"), p.get("zeppelin.spark.uiWebUrl")); envs = (HashMap<String, String>) p.get("zeppelin.k8s.envs"); assertTrue( envs.containsKey("SPARK_HOME")); String sparkSubmitOptions = envs.get("SPARK_SUBMIT_OPTIONS"); assertFalse(sparkSubmitOptions.contains("--proxy-user")); assertTrue(intp.isSpark()); } @Test public void testSparkUiWebUrlTemplate() { // given Kubectl kubectl = mock(Kubectl.class); when(kubectl.getNamespace()).thenReturn("default"); Properties properties = new Properties(); HashMap<String, String> envs = new HashMap<String, String>(); envs.put("SERVICE_DOMAIN", "mydomain"); K8sRemoteInterpreterProcess intp = new K8sRemoteInterpreterProcess( kubectl, new File(".skip"), "interpreter-container:1.0", "shared_process", "spark", "myspark", properties, envs, "zeppelin.server.service", "12320", false, 
"spark-container:1.0", 10, false); // when non template url assertEquals("static.url", intp.sparkUiWebUrlFromTemplate( "static.url", 4040, "zeppelin-server", "my.domain.com")); // when template url assertEquals("//4040-zeppelin-server.my.domain.com", intp.sparkUiWebUrlFromTemplate( "//{{PORT}}-{{SERVICE_NAME}}.{{SERVICE_DOMAIN}}", 4040, "zeppelin-server", "my.domain.com")); } @Test public void testSparkPodResources() { // given Kubectl kubectl = mock(Kubectl.class); when(kubectl.getNamespace()).thenReturn("default"); Properties properties = new Properties(); properties.put("spark.driver.memory", "1g"); properties.put("spark.driver.cores", "1"); HashMap<String, String> envs = new HashMap<String, String>(); envs.put("SERVICE_DOMAIN", "mydomain"); K8sRemoteInterpreterProcess intp = new K8sRemoteInterpreterProcess( kubectl, new File(".skip"), "interpreter-container:1.0", "shared_process", "spark", "myspark", properties, envs, "zeppelin.server.service", "12320", false, "spark-container:1.0", 10, false); // when Properties p = intp.getTemplateBindings(); // then assertEquals("1", p.get("zeppelin.k8s.interpreter.cores")); assertEquals("1408Mi", p.get("zeppelin.k8s.interpreter.memory")); } @Test public void testSparkPodResourcesMemoryOverhead() { // given Kubectl kubectl = mock(Kubectl.class); when(kubectl.getNamespace()).thenReturn("default"); Properties properties = new Properties(); properties.put("spark.driver.memory", "1g"); properties.put("spark.driver.memoryOverhead", "256m"); properties.put("spark.driver.cores", "5"); HashMap<String, String> envs = new HashMap<String, String>(); envs.put("SERVICE_DOMAIN", "mydomain"); K8sRemoteInterpreterProcess intp = new K8sRemoteInterpreterProcess( kubectl, new File(".skip"), "interpreter-container:1.0", "shared_process", "spark", "myspark", properties, envs, "zeppelin.server.service", "12320", false, "spark-container:1.0", 10, false); // when Properties p = intp.getTemplateBindings(); // then assertEquals("5", 
p.get("zeppelin.k8s.interpreter.cores")); assertEquals("1280Mi", p.get("zeppelin.k8s.interpreter.memory")); } }
/*
 * Copyright (C) 2015 AppTik Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: protos/phone.proto
//
// NOTE(review): this is machine-generated protobuf (v2-era GeneratedMessage) code;
// change protos/phone.proto and regenerate rather than hand-editing. The message is
// declared in proto package "retrofit" while the Java package is
// io.apptik.comm.jus.converter — presumably inherited from a Retrofit sample proto;
// confirm the proto file's java_package option if this mismatch matters.
package io.apptik.comm.jus.converter;

import com.google.protobuf.AbstractMessage;

/** Outer wrapper class emitted by protoc for protos/phone.proto. */
public final class PhoneProtos {
  // Utility holder class; never instantiated.
  private PhoneProtos() {}

  // phone.proto defines no extensions, so this is a no-op registration hook.
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }

  /** Read-only accessors for the single optional string field {@code number}. */
  public interface PhoneOrBuilder extends
      com.google.protobuf.MessageOrBuilder {

    // optional string number = 1;
    /**
     * <code>optional string number = 1;</code>
     */
    boolean hasNumber();
    /**
     * <code>optional string number = 1;</code>
     */
    String getNumber();
    /**
     * <code>optional string number = 1;</code>
     */
    com.google.protobuf.ByteString getNumberBytes();
  }

  /**
   * Protobuf type {@code retrofit.Phone}
   */
  public static final class Phone extends
      com.google.protobuf.GeneratedMessage
      implements PhoneOrBuilder {
    // Use Phone.newBuilder() to construct.
    private Phone(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor used only for the singleton default instance below.
    private Phone(boolean noInit) {
      this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance();
    }

    private static final Phone defaultInstance;
    public static Phone getDefaultInstance() {
      return defaultInstance;
    }

    public Phone getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }

    // Wire-format parsing constructor: consumes tag/value pairs until end of stream
    // (tag 0). Tag 10 is field 1 ("number", wire type 2 / length-delimited); anything
    // else is preserved in unknownFields. Stored as ByteString and lazily decoded to
    // String on first getNumber() call.
    private Phone(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              number_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Attach the partially-built message so callers can inspect what parsed.
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return PhoneProtos.internal_static_retrofit_Phone_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return PhoneProtos.internal_static_retrofit_Phone_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              PhoneProtos.Phone.class, PhoneProtos.Phone.Builder.class);
    }

    // NOTE(review): generated as a mutable public static field (not final) — that is
    // how this protoc version emits it; do not "fix" by hand, regenerate instead.
    public static com.google.protobuf.Parser<Phone> PARSER =
        new com.google.protobuf.AbstractParser<Phone>() {
      public Phone parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new Phone(input, extensionRegistry);
      }
    };

    @Override
    public com.google.protobuf.Parser<Phone> getParserForType() {
      return PARSER;
    }

    // Presence bits: bit 0 <=> hasNumber().
    private int bitField0_;
    // optional string number = 1;
    public static final int NUMBER_FIELD_NUMBER = 1;
    // Either a String or a ByteString; converted lazily in each direction and cached.
    private Object number_;
    /**
     * <code>optional string number = 1;</code>
     */
    public boolean hasNumber() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional string number = 1;</code>
     */
    public String getNumber() {
      Object ref = number_;
      if (ref instanceof String) {
        return (String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        String s = bs.toStringUtf8();
        // Only cache the decoded String when the bytes are valid UTF-8, so invalid
        // input keeps round-tripping the original bytes.
        if (bs.isValidUtf8()) {
          number_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string number = 1;</code>
     */
    public com.google.protobuf.ByteString
        getNumberBytes() {
      Object ref = number_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (String) ref);
        number_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    private void initFields() {
      number_ = "";
    }
    // Memoized tri-state: -1 unknown, 0 false, 1 true. All fields are optional, so
    // a Phone is always initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getNumberBytes());
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getNumberBytes());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @Override
    protected Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    // --- Static parse entry points; all delegate to PARSER. ---
    public static PhoneProtos.Phone parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static PhoneProtos.Phone parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static PhoneProtos.Phone parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static PhoneProtos.Phone parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static PhoneProtos.Phone parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static PhoneProtos.Phone parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static PhoneProtos.Phone parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static PhoneProtos.Phone parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static PhoneProtos.Phone parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static PhoneProtos.Phone parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(PhoneProtos.Phone prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code retrofit.Phone}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements PhoneProtos.PhoneOrBuilder {
      public static com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return PhoneProtos.internal_static_retrofit_Phone_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return PhoneProtos.internal_static_retrofit_Phone_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                PhoneProtos.Phone.class, PhoneProtos.Phone.Builder.class);
      }

      // Construct using retrofit.PhoneProtos.Phone.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // No nested-message field builders to eagerly initialize for this message.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        number_ = "";
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return PhoneProtos.internal_static_retrofit_Phone_descriptor;
      }

      public PhoneProtos.Phone getDefaultInstanceForType() {
        return PhoneProtos.Phone.getDefaultInstance();
      }

      public PhoneProtos.Phone build() {
        PhoneProtos.Phone result = buildPartial();
        if (!result.isInitialized()) {
          throw AbstractMessage.Builder.newUninitializedMessageException(result);
        }
        return result;
      }

      public PhoneProtos.Phone buildPartial() {
        PhoneProtos.Phone result = new PhoneProtos.Phone(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.number_ = number_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof PhoneProtos.Phone) {
          return mergeFrom((PhoneProtos.Phone)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(PhoneProtos.Phone other) {
        if (other == PhoneProtos.Phone.getDefaultInstance()) return this;
        if (other.hasNumber()) {
          bitField0_ |= 0x00000001;
          // Shares other's String/ByteString reference; safe because both are immutable.
          number_ = other.number_;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        PhoneProtos.Phone parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (PhoneProtos.Phone) e.getUnfinishedMessage();
          throw e;
        } finally {
          // Merge whatever was parsed before a failure, per protobuf merge semantics.
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional string number = 1;
      private Object number_ = "";
      /**
       * <code>optional string number = 1;</code>
       */
      public boolean hasNumber() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional string number = 1;</code>
       */
      public String getNumber() {
        Object ref = number_;
        if (!(ref instanceof String)) {
          // Unconditionally caches the decoded form (unlike the message class, which
          // checks isValidUtf8 first) — generated behavior, kept as-is.
          String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          number_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      /**
       * <code>optional string number = 1;</code>
       */
      public com.google.protobuf.ByteString
          getNumberBytes() {
        Object ref = number_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8(
                  (String) ref);
          number_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string number = 1;</code>
       */
      public Builder setNumber(
          String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        number_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional string number = 1;</code>
       */
      public Builder clearNumber() {
        bitField0_ = (bitField0_ & ~0x00000001);
        number_ = getDefaultInstance().getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>optional string number = 1;</code>
       */
      public Builder setNumberBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        number_ = value;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:retrofit.Phone)
    }

    static {
      defaultInstance = new Phone(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:retrofit.Phone)
  }

  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_retrofit_Phone_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_retrofit_Phone_fieldAccessorTable;

  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    // Serialized FileDescriptorProto for protos/phone.proto (escaped-octal bytes);
    // must stay byte-exact or descriptor loading fails at class-init time.
    String[] descriptorData = {
      "\n\022protos/phone.proto\022\010retrofit\"\027\n\005Phone\022" +
      "\016\n\006number\030\001 \001(\tB!\n\022retrofit.converterB\013P" +
      "honeProtos"
    };
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
        new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
          public com.google.protobuf.ExtensionRegistry assignDescriptors(
              com.google.protobuf.Descriptors.FileDescriptor root) {
            descriptor = root;
            internal_static_retrofit_Phone_descriptor =
              getDescriptor().getMessageTypes().get(0);
            internal_static_retrofit_Phone_fieldAccessorTable = new
              com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                internal_static_retrofit_Phone_descriptor,
                new String[] { "Number", });
            return null;
          }
        };
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
        }, assigner);
  }

  // @@protoc_insertion_point(outer_class_scope)
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.seqno;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.ActionType;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.single.shard.SingleShardRequest;
import org.elasticsearch.action.support.single.shard.TransportSingleShardAction;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.routing.ShardsIterator;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;

import java.io.IOException;
import java.util.Objects;

/**
 * This class holds all actions related to retention leases. Note carefully that these actions are executed under a primary permit. Care is
 * taken to thread the listener through the invocations so that for the sync APIs we do not notify the listener until these APIs have
 * responded with success. Additionally, note the use of
 * {@link TransportSingleShardAction#asyncShardOperation(SingleShardRequest, ShardId, ActionListener)} to handle the case when acquiring
 * permits goes asynchronous because acquiring permits is blocked
 */
public class RetentionLeaseActions {

    // Sentinel retaining sequence number (-1) meaning "retain all operations";
    // also the only negative value AddOrRenewRequest accepts.
    public static final long RETAIN_ALL = -1;

    /**
     * Shared transport plumbing for the Add/Renew/Remove actions below: routes the
     * request to the primary shard, acquires a primary operation permit, and then
     * delegates to {@link #doRetentionLeaseAction}.
     */
    abstract static class TransportRetentionLeaseAction<T extends Request<T>> extends TransportSingleShardAction<T, Response> {

        private final IndicesService indicesService;

        @Inject
        TransportRetentionLeaseAction(
                final String name,
                final ThreadPool threadPool,
                final ClusterService clusterService,
                final TransportService transportService,
                final ActionFilters actionFilters,
                final IndexNameExpressionResolver indexNameExpressionResolver,
                final IndicesService indicesService,
                final Writeable.Reader<T> requestSupplier) {
            super(
                    name,
                    threadPool,
                    clusterService,
                    transportService,
                    actionFilters,
                    indexNameExpressionResolver,
                    requestSupplier,
                    ThreadPool.Names.MANAGEMENT);
            this.indicesService = Objects.requireNonNull(indicesService);
        }

        // Retention leases live on the primary, so always route to the primary shard.
        @Override
        protected ShardsIterator shards(final ClusterState state, final InternalRequest request) {
            return state
                    .routingTable()
                    .shardRoutingTable(request.concreteIndex(), request.request().getShardId().id())
                    .primaryShardIt();
        }

        // Acquires a primary permit (possibly asynchronously), runs the concrete
        // lease action under it, and releases the permit when the try block exits.
        // The listener is only notified from inside doRetentionLeaseAction.
        @Override
        protected void asyncShardOperation(T request, ShardId shardId, final ActionListener<Response> listener) {
            final IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex());
            final IndexShard indexShard = indexService.getShard(shardId.id());
            indexShard.acquirePrimaryOperationPermit(
                    ActionListener.delegateFailure(listener, (delegatedListener, releasable) -> {
                        try (Releasable ignore = releasable) {
                            doRetentionLeaseAction(indexShard, request, delegatedListener);
                        }
                    }),
                    ThreadPool.Names.SAME, request);
        }

        // Never called: all work happens in asyncShardOperation above.
        @Override
        protected Response shardOperation(final T request, final ShardId shardId) {
            throw new UnsupportedOperationException();
        }

        abstract void doRetentionLeaseAction(IndexShard indexShard, T request, ActionListener<Response> listener);

        @Override
        protected Writeable.Reader<Response> getResponseReader() {
            return Response::new;
        }

        // Index resolution already happened; requests carry a concrete ShardId.
        @Override
        protected boolean resolveIndex(final T request) {
            return false;
        }

    }

    /** Action that adds a new retention lease to a primary shard. */
    public static class Add extends ActionType<Response> {

        public static final Add INSTANCE = new Add();
        public static final String ACTION_NAME = "indices:admin/seq_no/add_retention_lease";

        private Add() {
            super(ACTION_NAME, Response::new);
        }

        public static class TransportAction extends TransportRetentionLeaseAction<AddRequest> {

            @Inject
            public TransportAction(
                    final ThreadPool threadPool,
                    final ClusterService clusterService,
                    final TransportService transportService,
                    final ActionFilters actionFilters,
                    final IndexNameExpressionResolver indexNameExpressionResolver,
                    final IndicesService indicesService) {
                super(
                        ACTION_NAME,
                        threadPool,
                        clusterService,
                        transportService,
                        actionFilters,
                        indexNameExpressionResolver,
                        indicesService,
                        AddRequest::new);
            }

            // Add is async: the listener fires once the lease is added and synced.
            @Override
            void doRetentionLeaseAction(final IndexShard indexShard, final AddRequest request,
                                        final ActionListener<Response> listener) {
                indexShard.addRetentionLease(
                        request.getId(),
                        request.getRetainingSequenceNumber(),
                        request.getSource(),
                        ActionListener.map(listener, r -> new Response()));
            }

            // NOTE(review): redundant — identical to the base-class override; harmless.
            @Override
            protected Writeable.Reader<Response> getResponseReader() {
                return Response::new;
            }
        }

    }

    /** Action that renews an existing retention lease on a primary shard. */
    public static class Renew extends ActionType<Response> {

        public static final Renew INSTANCE = new Renew();
        public static final String ACTION_NAME = "indices:admin/seq_no/renew_retention_lease";

        private Renew() {
            super(ACTION_NAME, Response::new);
        }

        public static class TransportAction extends TransportRetentionLeaseAction<RenewRequest> {

            @Inject
            public TransportAction(
                    final ThreadPool threadPool,
                    final ClusterService clusterService,
                    final TransportService transportService,
                    final ActionFilters actionFilters,
                    final IndexNameExpressionResolver indexNameExpressionResolver,
                    final IndicesService indicesService) {
                super(
                        ACTION_NAME,
                        threadPool,
                        clusterService,
                        transportService,
                        actionFilters,
                        indexNameExpressionResolver,
                        indicesService,
                        RenewRequest::new);
            }

            // Renew completes synchronously, so the listener is notified immediately
            // (unlike Add/Remove, which thread the listener through a sync).
            @Override
            void doRetentionLeaseAction(final IndexShard indexShard, final RenewRequest request,
                                        final ActionListener<Response> listener) {
                indexShard.renewRetentionLease(request.getId(), request.getRetainingSequenceNumber(), request.getSource());
                listener.onResponse(new Response());
            }

        }
    }

    /** Action that removes a retention lease from a primary shard. */
    public static class Remove extends ActionType<Response> {

        public static final Remove INSTANCE = new Remove();
        public static final String ACTION_NAME = "indices:admin/seq_no/remove_retention_lease";

        private Remove() {
            super(ACTION_NAME, Response::new);
        }

        public static class TransportAction extends TransportRetentionLeaseAction<RemoveRequest> {

            @Inject
            public TransportAction(
                    final ThreadPool threadPool,
                    final ClusterService clusterService,
                    final TransportService transportService,
                    final ActionFilters actionFilters,
                    final IndexNameExpressionResolver indexNameExpressionResolver,
                    final IndicesService indicesService) {
                super(
                        ACTION_NAME,
                        threadPool,
                        clusterService,
                        transportService,
                        actionFilters,
                        indexNameExpressionResolver,
                        indicesService,
                        RemoveRequest::new);
            }

            // Remove is async: the listener fires once the removal has synced.
            @Override
            void doRetentionLeaseAction(final IndexShard indexShard, final RemoveRequest request,
                                        final ActionListener<Response> listener) {
                indexShard.removeRetentionLease(
                        request.getId(),
                        ActionListener.map(listener, r -> new Response()));
            }

        }

    }

    /** Base request: identifies the target shard and the lease id. */
    private abstract static class Request<T extends SingleShardRequest<T>> extends SingleShardRequest<T> {

        private final ShardId shardId;

        public ShardId getShardId() {
            return shardId;
        }

        private final String id;

        public String getId() {
            return id;
        }

        Request(StreamInput in) throws IOException {
            super(in);
            shardId = new ShardId(in);
            id = in.readString();
        }

        Request(final ShardId shardId, final String id) {
            super(Objects.requireNonNull(shardId).getIndexName());
            this.shardId = shardId;
            this.id = Objects.requireNonNull(id);
        }

        // Construction already validates non-null shard/id, so nothing to add here.
        @Override
        public ActionRequestValidationException validate() {
            return null;
        }

        // Wire order (shardId, id) must match the StreamInput constructor above.
        @Override
        public void writeTo(final StreamOutput out) throws IOException {
            super.writeTo(out);
            shardId.writeTo(out);
            out.writeString(id);
        }

    }

    /** Shared payload for Add/Renew: retaining sequence number plus lease source. */
    private abstract static class AddOrRenewRequest<T extends SingleShardRequest<T>> extends Request<T> {

        private final long retainingSequenceNumber;

        public long getRetainingSequenceNumber() {
            return retainingSequenceNumber;
        }

        private final String source;

        public String getSource() {
            return source;
        }

        AddOrRenewRequest(StreamInput in) throws IOException {
            super(in);
            retainingSequenceNumber = in.readZLong();
            source = in.readString();
        }

        AddOrRenewRequest(final ShardId shardId, final String id, final long retainingSequenceNumber, final String source) {
            super(shardId, id);
            // Negative values are invalid except the RETAIN_ALL (-1) sentinel.
            if (retainingSequenceNumber < 0 && retainingSequenceNumber != RETAIN_ALL) {
                throw new IllegalArgumentException("retaining sequence number [" + retainingSequenceNumber + "] out of range");
            }
            this.retainingSequenceNumber = retainingSequenceNumber;
            this.source = Objects.requireNonNull(source);
        }

        // ZLong keeps the -1 sentinel compact on the wire; order matches the
        // StreamInput constructor above.
        @Override
        public void writeTo(final StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeZLong(retainingSequenceNumber);
            out.writeString(source);
        }

    }

    public static class AddRequest extends AddOrRenewRequest<AddRequest> {

        AddRequest(StreamInput in) throws IOException {
            super(in);
        }

        public AddRequest(final ShardId shardId, final String id, final long retainingSequenceNumber, final String source) {
            super(shardId, id, retainingSequenceNumber, source);
        }

    }

    public static class RenewRequest extends AddOrRenewRequest<RenewRequest> {

        RenewRequest(StreamInput in) throws IOException {
            super(in);
        }

        public RenewRequest(final ShardId shardId, final String id, final long retainingSequenceNumber, final String source) {
            super(shardId, id, retainingSequenceNumber, source);
        }

    }

    public static class RemoveRequest extends Request<RemoveRequest> {

        RemoveRequest(StreamInput in) throws IOException {
            super(in);
        }

        public RemoveRequest(final ShardId shardId, final String id) {
            super(shardId, id);
        }

    }

    /** Empty acknowledgement response shared by all three actions. */
    public static class Response extends ActionResponse {

        public Response() {}

        Response(final StreamInput in) throws IOException {
            super(in);
        }

        // No payload to serialize.
        @Override
        public void writeTo(StreamOutput out) throws IOException {}

    }

}
/** * */ package net.sf.jabb.dstream.kinesis; import java.math.BigInteger; import java.time.Duration; import java.time.Instant; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Predicate; import java.util.stream.Collectors; import net.sf.jabb.dstream.*; import net.sf.jabb.dstream.ex.DataStreamInfrastructureException; import net.sf.jabb.util.attempt.AttemptStrategy; import net.sf.jabb.util.attempt.StopStrategies; import net.sf.jabb.util.ex.ExceptionUncheckUtility; import net.sf.jabb.util.parallel.BackoffStrategies; import net.sf.jabb.util.parallel.WaitStrategies; import net.sf.jabb.util.parallel.WaitStrategy; import org.apache.commons.lang3.Validate; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.amazonaws.auth.BasicAWSCredentials; import com.amazonaws.services.kinesis.AmazonKinesisClient; import com.amazonaws.services.kinesis.clientlibrary.types.UserRecord; import com.amazonaws.services.kinesis.model.DescribeStreamRequest; import com.amazonaws.services.kinesis.model.DescribeStreamResult; import com.amazonaws.services.kinesis.model.GetRecordsRequest; import com.amazonaws.services.kinesis.model.GetRecordsResult; import com.amazonaws.services.kinesis.model.GetShardIteratorResult; import com.amazonaws.services.kinesis.model.ProvisionedThroughputExceededException; import com.amazonaws.services.kinesis.model.Record; import com.amazonaws.services.kinesis.model.Shard; import com.amazonaws.services.kinesis.model.ShardIteratorType; /** * StreamDataSupplier backed by AWS Kinesis. * One KinesisStreamDataSupplier instance covers only one shard of a Kinesis stream. * It supports user record de-aggregation. See http://docs.aws.amazon.com/kinesis/latest/dev/kinesis-kpl-consumer-deaggregation.html * It defines position range as (startPosition, endPosition]. 
* @author James Hu * */ public class KinesisStreamDataSupplier<M> implements StreamDataSupplier<M> { private static final Logger logger = LoggerFactory.getLogger(KinesisStreamDataSupplier.class); private static final int MAX_GET_RECORDS_LIMIT = 1000; // per getRecords(...) as specified by AWS private static final long RETRY_INTERVAL_AFTER_THRESHOLD_EXCEEDED = 2000L; // 2 seconds private static final long DEFAULT_RETRY_INTERVAL_BASE = 1000L; private static final int LAST_POSITION_POLL_SECONDS = 3; // number of seconds to wait for polling the last position protected Function<UserRecord, M> messageConverter; protected AmazonKinesisClient client; // let's assume that it is thread safe protected String streamName; protected String shardId; protected long pollInterval; // number of milliseconds to wait for data to be available before next poll from within fetch(...) and receive(...) methods protected int fetchBatchSize; // the "limit" used in client.getRecords(...) from within fetch(...) methods protected int receiveBatchSize; // the "limit" used in client.getRecords(...) from within receive(...) 
methods protected WaitStrategy waitStrategy = WaitStrategies.threadSleepStrategy(); protected AttemptStrategy attemptStrategy = new AttemptStrategy() .withWaitStrategy(waitStrategy) .withBackoffStrategy(BackoffStrategies.fibonacciBackoff(DEFAULT_RETRY_INTERVAL_BASE, DEFAULT_RETRY_INTERVAL_BASE*5)) .withStopStrategy(StopStrategies.stopAfterTotalDuration(Duration.ofMillis(DEFAULT_RETRY_INTERVAL_BASE*20))); public KinesisStreamDataSupplier(AmazonKinesisClient client, String streamName, String shardId, Function<UserRecord, M> messageConverter, long pollInterval, int fetchBatchSize, int receiveBatchSize){ this.client = client; this.streamName = streamName; this.shardId = shardId; this.messageConverter = messageConverter; this.pollInterval = pollInterval; Validate.isTrue(fetchBatchSize <= MAX_GET_RECORDS_LIMIT, "fetchBatchSize should not be greater than %d: %d", MAX_GET_RECORDS_LIMIT, fetchBatchSize); this.fetchBatchSize = fetchBatchSize; Validate.isTrue(receiveBatchSize <= MAX_GET_RECORDS_LIMIT, "receiveBatchSize should not be greater than %d: %d", MAX_GET_RECORDS_LIMIT, receiveBatchSize); this.receiveBatchSize = receiveBatchSize; } static class Position{ private String sequenceNumber; private long subSequenceNumber; private boolean isLastUserRecord; Position(String position){ if (isBeforeTheVeryFirst(position)){ }else{ int i = position.indexOf('/'); sequenceNumber = position.substring(0, i); if (position.charAt(position.length() - 1) == '/'){ isLastUserRecord = true; subSequenceNumber = Long.parseLong(position.substring(i + 1, position.length() - 1)); }else{ subSequenceNumber = Long.parseLong(position.substring(i + 1)); } } } static Position of(String position){ return new Position(position); } @Override public String toString(){ return toString(sequenceNumber, subSequenceNumber, isLastUserRecord); } static String toString(String sequenceNumber, long subSequenceNumber, boolean isLastUserRecord){ if (isLastUserRecord){ return sequenceNumber + "/" + subSequenceNumber + 
"/"; }else{ return sequenceNumber + "/" + subSequenceNumber; } } static String toString(Position position){ return toString(position.sequenceNumber, position.subSequenceNumber, position.isLastUserRecord); } static String getSequenceNumber(String position){ int i = position.indexOf('/'); return position.substring(0, i); } static boolean isBeforeTheVeryFirst(String position){ return position == null || position.length() == 0 || position.equals("-1"); } public boolean isBeforeTheVeryFirst(){ return sequenceNumber == null; } public BigInteger getSequenceNumberAsBigInteger(){ return new BigInteger(sequenceNumber); } public String getSequenceNumber() { return sequenceNumber; } public long getSubSequenceNumber() { return subSequenceNumber; } public boolean isLastUserRecord() { return isLastUserRecord; } } /** * Create StreamDataSupplierWithIds from a Kinesis stream. * @param <M> type of the elements in the stream * @param endpoint The endpoint (ex: "kinesis.us-east-1.amazonaws.com") or a full URL, including the protocol (ex: "https://kinesis.us-east-1.amazonaws.com") * See http://docs.aws.amazon.com/general/latest/gr/rande.html#ak_region * @param streamName name of the Kinesis stream * @param messageConverter the message converter to transform Kinesis records into desired objects * @param pollInterval number of milliseconds to wait for data to be available before next poll * @param fetchBatchSize the batch size for getting records from Kinesis from within fetch(...) methods * @param receiveBatchSize the batch size for getting records from Kinesis from within receive(...) methods * @return list of StreamDataSupplierWithIds covering all shards of the Kinesis stream. 
*/ public static <M> List<StreamDataSupplierWithId<M>> create(String endpoint, String streamName, Function<UserRecord, M> messageConverter, long pollInterval, int fetchBatchSize, int receiveBatchSize){ return create(null, null, endpoint, streamName, messageConverter, pollInterval, fetchBatchSize, receiveBatchSize); } /** * Create StreamDataSupplierWithIds from a Kinesis stream. * @param <M> type of the elements in the stream * @param awsAccessKeyId explicitly specified AWS access key id, or null if the default should be used * @param awsSecretKey explicitly specified AWS secret key, or null if the default should be used * @param endpoint The endpoint (ex: "kinesis.us-east-1.amazonaws.com") or a full URL, including the protocol (ex: "https://kinesis.us-east-1.amazonaws.com") * See http://docs.aws.amazon.com/general/latest/gr/rande.html#ak_region * @param streamName name of the Kinesis stream * @param messageConverter the message converter to transform Kinesis records into desired objects * @param pollInterval number of milliseconds to wait for data to be available before next poll * @param fetchBatchSize the batch size for getting records from Kinesis from within fetch(...) methods * @param receiveBatchSize the batch size for getting records from Kinesis from within receive(...) methods * @return list of StreamDataSupplierWithIds covering all shards of the Kinesis stream. 
*/ public static <M> List<StreamDataSupplierWithId<M>> create(String awsAccessKeyId, String awsSecretKey, String endpoint, String streamName, Function<UserRecord, M> messageConverter, long pollInterval, int fetchBatchSize, int receiveBatchSize){ AmazonKinesisClient client; if (awsAccessKeyId != null && awsSecretKey != null){ client = new AmazonKinesisClient(new BasicAWSCredentials(awsAccessKeyId, awsSecretKey)); }else{ client = new AmazonKinesisClient(); } client.setEndpoint(endpoint); DescribeStreamRequest describeStreamRequest = new DescribeStreamRequest(); describeStreamRequest.setStreamName( streamName ); List<Shard> shards = new ArrayList<>(); String exclusiveStartShardId = null; do { describeStreamRequest.setExclusiveStartShardId( exclusiveStartShardId ); DescribeStreamResult describeStreamResult = client.describeStream( describeStreamRequest ); shards.addAll( describeStreamResult.getStreamDescription().getShards() ); if (describeStreamResult.getStreamDescription().getHasMoreShards() && shards.size() > 0) { exclusiveStartShardId = shards.get(shards.size() - 1).getShardId(); } else { exclusiveStartShardId = null; } } while ( exclusiveStartShardId != null ); return shards.stream().map(shard->{ String shardId = shard.getShardId(); return new KinesisStreamDataSupplier<>(client, streamName, shardId, messageConverter, pollInterval, fetchBatchSize, receiveBatchSize) .withId(shardId); }).collect(Collectors.toList()); } protected String streamNameAndShardId(){ return streamName + "/" + shardId; } /** * This method always returns "-1" */ @Override public String firstPosition() { return "-1"; } @Override public String firstPosition(Instant enqueuedAfter, Duration waitForArrival) throws InterruptedException, DataStreamInfrastructureException { throw new UnsupportedOperationException("Seeking by enqueuedAfter is not supported by Kinesis"); } /** * Get one Kinesis stream record according to the shard iterator * @param shardIterator the shard iterator specifying the 
position of the record * @param waitSeconds number of seconds to wait for the record to be available * @return the record of null if no such record * @throws DataStreamInfrastructureException if Kinesis replies with any error message */ protected Record getOneRecord(String shardIterator, int waitSeconds) throws DataStreamInfrastructureException { GetRecordsRequest getRecordsRequest = new GetRecordsRequest(); getRecordsRequest.setShardIterator(shardIterator); getRecordsRequest.setLimit(1); return ExceptionUncheckUtility.getThrowingUnchecked(()->new AttemptStrategy(attemptStrategy) .retryIfException(ProvisionedThroughputExceededException.class) .callThrowingSuppressed(()->{ int i = waitSeconds; do{ GetRecordsResult getRecordsResult = client.getRecords(getRecordsRequest); List<Record> records = getRecordsResult.getRecords(); if (records != null && records.size() > 0){ return records.get(0); }else{ String nextIterator = getRecordsResult.getNextShardIterator(); if (nextIterator != null){ getRecordsRequest.setShardIterator(nextIterator); }else{ break; } } }while (i-- > 0); return null; })); } /** * Get the user record in Kinesis stream record by subSequenceNumber * @param record the original Kinesis stream record * @param subSequenceNumber the sub-sequence-number * @return the user record or null if not found */ protected UserRecord getUserRecord(Record record, long subSequenceNumber){ List<UserRecord> userRecords = UserRecord.deaggregate(Collections.singletonList(record)); int i = (int) subSequenceNumber; if (i < userRecords.size()){ UserRecord q = userRecords.get(i); if (q.getSubSequenceNumber() == subSequenceNumber){ return q; } } for (UserRecord r: userRecords){ if (r.getSubSequenceNumber() == subSequenceNumber){ return r; } } return null; } @Override public String lastPosition() throws DataStreamInfrastructureException { return lastPosition(LAST_POSITION_POLL_SECONDS); } public String lastPosition(int maxPollSeconds) throws DataStreamInfrastructureException { String 
shardIterator = client.getShardIterator(streamName, shardId, ShardIteratorType.LATEST.name()).getShardIterator(); if (shardIterator != null){ Record record = getOneRecord(shardIterator, maxPollSeconds); if (record != null){ List<UserRecord> userRecords = UserRecord.deaggregate(Collections.singletonList(record)); return Position.toString(record.getSequenceNumber(), userRecords.get(userRecords.size() - 1).getSubSequenceNumber(), true) ; }else{ String actualFirstPosition = actualFirstPosition(); if (actualFirstPosition == null){ return null; }else{ throw new DataStreamInfrastructureException("No record had been received in the last " + LAST_POSITION_POLL_SECONDS + " seconds, the first position is: " + actualFirstPosition); } } }else{ throw new DataStreamInfrastructureException("Failed to get shard iterator for " + streamNameAndShardId() + " at the end of the stream"); } } /** * Get the actual first position which is at trim_horizon * @return the actual first position, or null if there is no data in this shard * @throws DataStreamInfrastructureException if unable to get it from Kinesis */ public String actualFirstPosition() throws DataStreamInfrastructureException { String shardIterator = client.getShardIterator(streamName, shardId, ShardIteratorType.TRIM_HORIZON.name()).getShardIterator(); if (shardIterator != null){ Record record = getOneRecord(shardIterator, 0); if (record != null){ List<UserRecord> userRecords = UserRecord.deaggregate(Collections.singletonList(record)); return Position.toString(record.getSequenceNumber(), 0, userRecords.size() == 1) ; }else{ return null; } }else{ throw new DataStreamInfrastructureException("Failed to get shard iterator for " + streamNameAndShardId() + " at the start of the stream"); } } @Override public Instant enqueuedTime(String position) throws DataStreamInfrastructureException { String shardIterator = client.getShardIterator(streamName, shardId, ShardIteratorType.AT_SEQUENCE_NUMBER.name(), 
Position.getSequenceNumber(position)).getShardIterator(); if (shardIterator != null){ Record record = getOneRecord(shardIterator, 0); // all user records in this record share the same approximateArrivalTimestamp return record == null ? null : record.getApproximateArrivalTimestamp().toInstant(); }else{ throw new DataStreamInfrastructureException("Failed to get shard iterator for " + streamNameAndShardId() + " starting at " + position); } } @Override public String nextStartPosition(String previousEndPosition) { return previousEndPosition; } @Override public int checkInRange(String position, String endPosition) { Validate.isTrue(position != null, "position cannot be null"); if (endPosition == null){ return -1; }else{ if (Position.isBeforeTheVeryFirst(position)){ return -1; } Position pos = Position.of(position); Position endPos = Position.of(endPosition); BigInteger sequenceNumber = pos.getSequenceNumberAsBigInteger(); BigInteger endSequenceNumber = endPos.getSequenceNumberAsBigInteger(); switch (sequenceNumber.compareTo(endSequenceNumber)){ case -1: return -1; case 1: return 1; default: //case 0: long l = pos.getSubSequenceNumber() - endPos.getSubSequenceNumber(); if (l < 0){ return -1; }else if (l > 0){ return 1; }else{ return 0; } } } } /** * Get shard iterator by start position * @param startPosition the start position, exclusive. If it is null or "" or "-1", that means the position before the first one * @return the iterator or null if there is no data available * @throws DataStreamInfrastructureException if Kinesis replies with any error message */ protected String getShardIterator(Position startPosition) throws DataStreamInfrastructureException{ GetShardIteratorResult getShardIteratorResult; try{ if (startPosition.isBeforeTheVeryFirst()){ getShardIteratorResult = client.getShardIterator(streamName, shardId, ShardIteratorType.TRIM_HORIZON.name()); }else{ getShardIteratorResult = client.getShardIterator(streamName, shardId, startPosition.isLastUserRecord() ? 
ShardIteratorType.AFTER_SEQUENCE_NUMBER.name() : ShardIteratorType.AT_SEQUENCE_NUMBER.name(),
                    startPosition.getSequenceNumber());
        }
    }catch(Exception e){
        throw new DataStreamInfrastructureException("Failed to get shard iterator for " + streamNameAndShardId() + " starting from " + startPosition, e);
    }
    return getShardIteratorResult.getShardIterator();
}

/**
 * Fetch records
 * @param list the list that received data will be put into
 * @param startPosition the start position, exclusive
 * @param inRangePredicate in range checker
 * @param maxItems maximum number of records that can be returned
 * @param timeoutMillis maximum number of milliseconds for this operation
 * @return receive status
 * @throws InterruptedException if interrupted
 * @throws DataStreamInfrastructureException if exception happened in the infrastructure
 */
protected SimpleReceiveStatus fetch(List<? super M> list, String startPosition, Predicate<Record> inRangePredicate, int maxItems, long timeoutMillis) throws InterruptedException, DataStreamInfrastructureException {
    Position startPos = Position.of(startPosition);
    SimpleReceiveStatus status = new SimpleReceiveStatus();
    String shardIterator = getShardIterator(startPos);
    int limit = maxItems;
    GetRecordsRequest getRecordsRequest = new GetRecordsRequest();
    long timeout = System.currentTimeMillis() + timeoutMillis;
    // Pull batches until the item limit, the deadline, or the end of the shard is reached.
    while (shardIterator != null && limit > 0 && System.currentTimeMillis() < timeout){
        getRecordsRequest.setShardIterator(shardIterator);
        // Never ask Kinesis for more than one batch worth of records at a time.
        getRecordsRequest.setLimit(limit > fetchBatchSize ?
fetchBatchSize : limit);
        GetRecordsResult getRecordsResult;
        try{
            getRecordsResult = client.getRecords(getRecordsRequest);
        }catch(ProvisionedThroughputExceededException e){
            // Throttled by Kinesis: back off, then retry with the same iterator.
            logger.debug("ProvisionedThroughputExceeded, will retry after " + RETRY_INTERVAL_AFTER_THRESHOLD_EXCEEDED + "ms");
            waitStrategy.await(RETRY_INTERVAL_AFTER_THRESHOLD_EXCEEDED);
            // will retry later
            continue;
        }catch(Exception e){
            throw new DataStreamInfrastructureException("Failed to get records", e);
        }
        List<Record> resultRecords = getRecordsResult.getRecords();
        if (resultRecords != null && resultRecords.size() > 0){
            // When the start position cannot overlap the first record, every user record counts.
            boolean isStartPositionClear = startPos.isBeforeTheVeryFirst() // should include every user record
                    || startPos.isLastUserRecord(); // no overlap
            for (Record resultRecord: resultRecords){
                boolean isInTheStartRecord = resultRecord.getSequenceNumber().equals(startPos.getSequenceNumber()); // not in the same kinesis stream record
                // Expand KPL-aggregated records into their constituent user records.
                List<UserRecord> records = UserRecord.deaggregate(Collections.singletonList(resultRecord));
                for (int i = 0; i < records.size(); i ++){
                    UserRecord record = records.get(i);
                    // Skip user records at or before the exclusive start position.
                    if (isStartPositionClear || !isInTheStartRecord
                            || record.getSubSequenceNumber() > startPos.getSubSequenceNumber()) // after the sub sequence number
                    {
                        if (!inRangePredicate.test(record)){
                            status.setOutOfRangeReached(true);
                            return status;
                        }
                        list.add(messageConverter.apply(record));
                        // Track where we got to so the caller can resume from here.
                        status.setLastPosition(Position.toString(record.getSequenceNumber(), record.getSubSequenceNumber(), i == records.size() - 1));
                        status.setLastEnqueuedTime(record.getApproximateArrivalTimestamp().toInstant());
                        if (--limit <= 0){
                            return status;
                        }
                    }
                }
            }
        }else{
            // wait a while before next poll
            waitStrategy.await(pollInterval);
        }
        shardIterator = getRecordsResult.getNextShardIterator();
    }
    return status;
}

@Override
public ReceiveStatus fetch(List<?
super M> list, String startPosition, String endPosition, int maxItems, Duration timeoutDuration) throws InterruptedException, DataStreamInfrastructureException {
    // Fetch bounded by an end position.
    return fetch(list, startPosition, record->isInRange(record.getSequenceNumber(), endPosition), maxItems, timeoutDuration.toMillis());
}

@Override
public ReceiveStatus fetch(List<? super M> list, Instant startEnqueuedTime, Instant endEnqueuedTime, int maxItems, Duration timeoutDuration) throws InterruptedException, DataStreamInfrastructureException {
    throw new UnsupportedOperationException("Fetching by startEnqueuedTime is not supported by Kinesis");
}

@Override
public ReceiveStatus fetch(List<? super M> list, String startPosition, Instant endEnqueuedTime, int maxItems, Duration timeoutDuration) throws InterruptedException, DataStreamInfrastructureException {
    // Fetch bounded by an end enqueued (approximate arrival) time.
    return fetch(list, startPosition, record->isInRange(record.getApproximateArrivalTimestamp().toInstant(), endEnqueuedTime), maxItems, timeoutDuration.toMillis());
}

@Override
public String startAsyncReceiving(Consumer<M> receiver, String startPosition) throws DataStreamInfrastructureException {
    throw new UnsupportedOperationException("Not implemented yet");
}

@Override
public String startAsyncReceiving(Consumer<M> receiver, Instant startEnqueuedTime) throws DataStreamInfrastructureException {
    throw new UnsupportedOperationException("Not implemented yet");
}

@Override
public void stopAsyncReceiving(String id) {
    throw new UnsupportedOperationException("Not implemented yet");
}

/**
 * Receive user records one by one, handing each converted message to the receiver.
 * The receiver returns the remaining time budget in milliseconds after each message;
 * calling it with null yields the initial budget (see the apply(null) below and the
 * remainingTime handling inside the loop).
 * @param receiver function receiving each message and returning the remaining time budget (ms)
 * @param startPosition the start position, exclusive
 * @param inRangePredicate in range checker; receiving stops once a record falls out of range
 * @return receive status describing the last position/enqueued time reached
 * @throws DataStreamInfrastructureException if anything fails in the infrastructure
 */
protected SimpleReceiveStatus receive(Function<M, Long> receiver, String startPosition, Predicate<Record> inRangePredicate) throws DataStreamInfrastructureException {
    Position startPos = Position.of(startPosition);
    SimpleReceiveStatus status = new SimpleReceiveStatus();
    String shardIterator = getShardIterator(startPos);
    try{
        GetRecordsRequest getRecordsRequest = new GetRecordsRequest();
        // receiver.apply(null) supplies the initial time budget in milliseconds.
        long timeout = System.currentTimeMillis() + receiver.apply(null);
        while (shardIterator != null &&
System.currentTimeMillis() < timeout){
            getRecordsRequest.setShardIterator(shardIterator);
            getRecordsRequest.setLimit(receiveBatchSize);
            GetRecordsResult getRecordsResult;
            try{
                getRecordsResult = client.getRecords(getRecordsRequest);
            }catch(ProvisionedThroughputExceededException e){
                // Throttled by Kinesis: back off, then retry with the same iterator.
                logger.debug("ProvisionedThroughputExceeded, will retry after " + RETRY_INTERVAL_AFTER_THRESHOLD_EXCEEDED + "ms");
                waitStrategy.await(RETRY_INTERVAL_AFTER_THRESHOLD_EXCEEDED);
                continue; // will retry
            }catch(Exception e){
                throw new DataStreamInfrastructureException("Failed to get records from " + streamNameAndShardId() + " starting from " + startPosition, e);
            }
            List<Record> resultRecords = getRecordsResult.getRecords();
            if (resultRecords != null && resultRecords.size() > 0){
                // When the start position cannot overlap the first record, every user record counts.
                boolean isStartPositionClear = startPos.isBeforeTheVeryFirst() // should include every user record
                        || startPos.isLastUserRecord(); // no overlap
                for (Record resultRecord: resultRecords){
                    boolean isInTheStartRecord = resultRecord.getSequenceNumber().equals(startPos.getSequenceNumber()); // not in the same kinesis stream record
                    // Expand KPL-aggregated records into their constituent user records.
                    List<UserRecord> records = UserRecord.deaggregate(Collections.singletonList(resultRecord));
                    for (int i = 0; i < records.size(); i ++){
                        UserRecord record = records.get(i);
                        // Skip user records at or before the exclusive start position.
                        if (isStartPositionClear || !isInTheStartRecord
                                || record.getSubSequenceNumber() > startPos.getSubSequenceNumber()) // after the sub sequence number
                        {
                            if (!inRangePredicate.test(record)){
                                status.setOutOfRangeReached(true);
                                return status;
                            }
                            long remainingTime = receiver.apply(messageConverter.apply(record));
                            status.setLastPosition(Position.toString(record.getSequenceNumber(), record.getSubSequenceNumber(), i == records.size() - 1));
                            status.setLastEnqueuedTime(record.getApproximateArrivalTimestamp().toInstant());
                            if (remainingTime <= 0){
                                return status;
                            }else{
                                // Extend the deadline by whatever budget the receiver reports.
                                timeout = System.currentTimeMillis() + remainingTime;
                            }
                        }
                    }
                }
            }else{
                // wait a while before next poll
                waitStrategy.await(pollInterval);
            }
            shardIterator =
getRecordsResult.getNextShardIterator();
        }
    }catch(InterruptedException e){
        throw new DataStreamInfrastructureException("Interrupted while receiving from " + streamNameAndShardId() + " starting from " + startPosition, e);
    }catch(DataStreamInfrastructureException e){
        throw e;
    }catch(Exception e){
        throw new DataStreamInfrastructureException("Failed to receive from " + streamNameAndShardId() + " starting from " + startPosition, e);
    }
    return status;
}

@Override
public ReceiveStatus receive(Function<M, Long> receiver, String startPosition, String endPosition) throws DataStreamInfrastructureException {
    // Receive bounded by an end position.
    return receive(receiver, startPosition, record->isInRange(record.getSequenceNumber(), endPosition));
}

@Override
public ReceiveStatus receive(Function<M, Long> receiver, Instant startEnqueuedTime, Instant endEnqueuedTime) throws DataStreamInfrastructureException {
    throw new UnsupportedOperationException("Receiving by startEnqueuedTime is not supported by Kinesis");
}

@Override
public ReceiveStatus receive(Function<M, Long> receiver, String startPosition, Instant endEnqueuedTime) throws DataStreamInfrastructureException {
    // Receive bounded by an end enqueued (approximate arrival) time.
    return receive(receiver, startPosition, record->isInRange(record.getApproximateArrivalTimestamp().toInstant(), endEnqueuedTime));
}

@Override
public ReceiveStatus receive(Function<M, Long> receiver, Instant startEnqueuedTime, String endPosition) throws DataStreamInfrastructureException {
    throw new UnsupportedOperationException("Receiving by startEnqueuedTime is not supported by Kinesis");
}

// Lifecycle hooks: this supplier has nothing to start or stop.
@Override
public void start() throws Exception {
}

@Override
public void stop() throws Exception {
}
}
/* * Copyright 2014 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.apps.muzei.gallery; import android.Manifest; import android.animation.Animator; import android.animation.AnimatorListenerAdapter; import android.annotation.TargetApi; import android.content.ActivityNotFoundException; import android.content.ClipData; import android.content.ComponentName; import android.content.ContentProviderOperation; import android.content.ContentUris; import android.content.ContentValues; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.OperationApplicationException; import android.content.ServiceConnection; import android.content.SharedPreferences; import android.content.pm.ActivityInfo; import android.content.pm.ApplicationInfo; import android.content.pm.PackageManager; import android.content.pm.ResolveInfo; import android.database.Cursor; import android.graphics.drawable.ColorDrawable; import android.net.Uri; import android.os.Build; import android.os.Bundle; import android.os.Handler; import android.os.HandlerThread; import android.os.IBinder; import android.os.RemoteException; import android.provider.BaseColumns; import android.provider.DocumentsContract; import android.provider.Settings; import android.support.annotation.NonNull; import android.support.design.widget.Snackbar; import android.support.v4.app.ActivityCompat; import android.support.v4.app.LoaderManager; import 
android.support.v4.content.ContextCompat; import android.support.v4.content.CursorLoader; import android.support.v4.content.Loader; import android.support.v4.view.OnApplyWindowInsetsListener; import android.support.v4.view.ViewCompat; import android.support.v4.view.WindowInsetsCompat; import android.support.v7.app.AlertDialog; import android.support.v7.app.AppCompatActivity; import android.support.v7.util.DiffUtil; import android.support.v7.widget.DefaultItemAnimator; import android.support.v7.widget.GridLayoutManager; import android.support.v7.widget.RecyclerView; import android.support.v7.widget.Toolbar; import android.text.TextUtils; import android.util.Log; import android.util.SparseIntArray; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuItem; import android.view.MotionEvent; import android.view.View; import android.view.ViewAnimationUtils; import android.view.ViewGroup; import android.view.ViewTreeObserver; import android.widget.Button; import android.widget.ImageView; import android.widget.TextView; import android.widget.Toast; import android.widget.ViewAnimator; import com.google.android.apps.muzei.util.MultiSelectionController; import com.squareup.picasso.Picasso; import java.util.ArrayList; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Queue; import java.util.Random; import java.util.Set; import static com.google.android.apps.muzei.gallery.GalleryArtSource.ACTION_PUBLISH_NEXT_GALLERY_ITEM; import static com.google.android.apps.muzei.gallery.GalleryArtSource.EXTRA_FORCE_URI; public class GallerySettingsActivity extends AppCompatActivity implements LoaderManager.LoaderCallbacks<Cursor> { private static final String TAG = "GallerySettingsActivity"; private static final String DOCUMENTS_UI_PACKAGE_NAME = "com.android.documentsui"; private static final String SHARED_PREF_NAME = "GallerySettingsActivity"; private static final String SHOW_INTERNAL_STORAGE_MESSAGE = 
"show_internal_storage_message";

// Request codes for startActivityForResult / runtime permission requests.
private static final int REQUEST_CHOOSE_PHOTOS = 1;
private static final int REQUEST_CHOOSE_FOLDER = 2;
private static final int REQUEST_STORAGE_PERMISSION = 3;

// Key used by MultiSelectionController to persist the selection across recreation.
private static final String STATE_SELECTION = "selection";

// Binding exists only to keep GalleryArtSource alive while this activity is shown.
private final ServiceConnection mServiceConnection = new ServiceConnection() {
    @Override
    public void onServiceConnected(final ComponentName name, final IBinder service) {
    }

    @Override
    public void onServiceDisconnected(final ComponentName name) {
    }
};

private Cursor mChosenUris;
private Toolbar mSelectionToolbar;

// Lazily created background thread for ContentProvider work (see runOnHandlerThread).
private HandlerThread mHandlerThread;
private Handler mHandler;

private RecyclerView mPhotoGridView;
private int mItemSize = 10;

private final MultiSelectionController<Uri> mMultiSelectionController
        = new MultiSelectionController<>(STATE_SELECTION);

private ColorDrawable mPlaceholderDrawable;
private ColorDrawable mPlaceholderSmallDrawable;

// Two-way lookup between rotate-interval minutes and the menu item ids representing them.
private static final SparseIntArray sRotateMenuIdsByMin = new SparseIntArray();
private static final SparseIntArray sRotateMinsByMenuId = new SparseIntArray();
static {
    sRotateMenuIdsByMin.put(0, R.id.action_rotate_interval_none);
    sRotateMenuIdsByMin.put(60, R.id.action_rotate_interval_1h);
    sRotateMenuIdsByMin.put(60 * 3, R.id.action_rotate_interval_3h);
    sRotateMenuIdsByMin.put(60 * 6, R.id.action_rotate_interval_6h);
    sRotateMenuIdsByMin.put(60 * 24, R.id.action_rotate_interval_24h);
    sRotateMenuIdsByMin.put(60 * 72, R.id.action_rotate_interval_72h);
    // Build the reverse mapping from the forward one.
    for (int i = 0; i < sRotateMenuIdsByMin.size(); i++) {
        sRotateMinsByMenuId.put(sRotateMenuIdsByMin.valueAt(i), sRotateMenuIdsByMin.keyAt(i));
    }
}

// Activities able to handle ACTION_GET_CONTENT; refreshed in onPrepareOptionsMenu.
private List<ActivityInfo> mGetContentActivites = new ArrayList<>();

private int mUpdatePosition = -1;
private View mAddButton;
private View mAddToolbar;

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.gallery_activity);
    Toolbar appBar = (Toolbar) findViewById(R.id.app_bar);
    setSupportActionBar(appBar);
    getSupportLoaderManager().initLoader(0, null, this);
    bindService(new Intent(this, GalleryArtSource.class).setAction(GalleryArtSource.ACTION_BIND_GALLERY),
            mServiceConnection, BIND_AUTO_CREATE);
    mPlaceholderDrawable = new ColorDrawable(ContextCompat.getColor(this,
            R.color.gallery_chosen_photo_placeholder));
    mPlaceholderSmallDrawable = new ColorDrawable(ContextCompat.getColor(this,
            R.color.gallery_chosen_photo_placeholder));
    mPhotoGridView = (RecyclerView) findViewById(R.id.photo_grid);
    DefaultItemAnimator itemAnimator = new DefaultItemAnimator();
    itemAnimator.setSupportsChangeAnimations(false);
    mPhotoGridView.setItemAnimator(itemAnimator);
    setupMultiSelect();
    final GridLayoutManager gridLayoutManager = new GridLayoutManager(
            GallerySettingsActivity.this, 1);
    mPhotoGridView.setLayoutManager(gridLayoutManager);
    // Defer column-count/item-size computation until the grid has been measured.
    final ViewTreeObserver vto = mPhotoGridView.getViewTreeObserver();
    vto.addOnGlobalLayoutListener(new ViewTreeObserver.OnGlobalLayoutListener() {
        @Override
        public void onGlobalLayout() {
            int width = mPhotoGridView.getWidth()
                    - mPhotoGridView.getPaddingStart() - mPhotoGridView.getPaddingEnd();
            if (width <= 0) {
                return;
            }
            // Compute number of columns
            int maxItemWidth = getResources().getDimensionPixelSize(
                    R.dimen.gallery_chosen_photo_grid_max_item_size);
            int numColumns = 1;
            while (true) {
                if (width / numColumns > maxItemWidth) {
                    ++numColumns;
                } else {
                    break;
                }
            }
            int spacing = getResources().getDimensionPixelSize(
                    R.dimen.gallery_chosen_photo_grid_spacing);
            mItemSize = (width - spacing * (numColumns - 1)) / numColumns;
            // Complete setup
            gridLayoutManager.setSpanCount(numColumns);
            mChosenPhotosAdapter.setHasStableIds(true);
            mPhotoGridView.setAdapter(mChosenPhotosAdapter);
            // One-shot listener: detach once the layout pass has supplied a width.
            mPhotoGridView.getViewTreeObserver().removeOnGlobalLayoutListener(this);
            tryUpdateSelection(false);
        }
    });
    // Pad the grid by the system window insets plus spacing and FAB clearance.
    ViewCompat.setOnApplyWindowInsetsListener(mPhotoGridView, new OnApplyWindowInsetsListener() {
        @Override
        public WindowInsetsCompat onApplyWindowInsets(final View v, final WindowInsetsCompat
insets) {
            int gridSpacing = getResources()
                    .getDimensionPixelSize(R.dimen.gallery_chosen_photo_grid_spacing);
            ViewCompat.onApplyWindowInsets(v, insets.replaceSystemWindowInsets(
                    insets.getSystemWindowInsetLeft() + gridSpacing,
                    gridSpacing,
                    insets.getSystemWindowInsetRight() + gridSpacing,
                    insets.getSystemWindowInsetBottom() + insets.getSystemWindowInsetTop() + gridSpacing +
                            getResources().getDimensionPixelSize(R.dimen.gallery_fab_space)));
            return insets;
        }
    });
    Button enableRandomImages = (Button) findViewById(R.id.gallery_enable_random);
    enableRandomImages.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(final View view) {
            ActivityCompat.requestPermissions(GallerySettingsActivity.this,
                    new String[] { Manifest.permission.READ_EXTERNAL_STORAGE },
                    REQUEST_STORAGE_PERMISSION);
        }
    });
    Button permissionSettings = (Button) findViewById(R.id.gallery_edit_permission_settings);
    permissionSettings.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(final View view) {
            // Deep-link to this app's system settings page so the user can grant the permission.
            Intent intent = new Intent(Settings.ACTION_APPLICATION_DETAILS_SETTINGS,
                    Uri.fromParts("package", getPackageName(), null));
            intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
            startActivity(intent);
        }
    });
    mAddButton = findViewById(R.id.add_fab);
    mAddButton.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
                // On Lollipop and higher, we show the add toolbar to allow users to add either
                // individual photos or a whole directory
                showAddToolbar();
            } else {
                requestPhotos();
            }
        }
    });
    mAddToolbar = findViewById(R.id.add_toolbar);
    findViewById(R.id.add_photos).setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(final View v) {
            requestPhotos();
        }
    });
    findViewById(R.id.add_folder).setOnClickListener(new View.OnClickListener() {
        @TargetApi(Build.VERSION_CODES.LOLLIPOP)
        @Override
        public void onClick(final View v) {
            Intent intent = new
Intent(Intent.ACTION_OPEN_DOCUMENT_TREE);
            try {
                startActivityForResult(intent, REQUEST_CHOOSE_FOLDER);
                // Explain once that internal storage may not be visible in the picker.
                SharedPreferences preferences = getSharedPreferences(SHARED_PREF_NAME, Context.MODE_PRIVATE);
                if (preferences.getBoolean(SHOW_INTERNAL_STORAGE_MESSAGE, true)) {
                    Toast.makeText(GallerySettingsActivity.this,
                            R.string.gallery_internal_storage_message, Toast.LENGTH_LONG).show();
                }
            } catch (ActivityNotFoundException e) {
                // No documents UI available on this device.
                Snackbar.make(mPhotoGridView, R.string.gallery_add_folder_error,
                        Snackbar.LENGTH_LONG).show();
                hideAddToolbar(true);
            }
        }
    });
}

private void requestPhotos() {
    // Use ACTION_OPEN_DOCUMENT by default for adding photos.
    // This allows us to use persistent URI permissions to access the underlying photos
    // meaning we don't need to use additional storage space and will pull in edits automatically
    // in addition to syncing deletions.
    // (There's a separate 'Import photos' option which uses ACTION_GET_CONTENT to support legacy apps)
    Intent intent = new Intent(Intent.ACTION_OPEN_DOCUMENT);
    intent.setType("image/*");
    intent.addCategory(Intent.CATEGORY_OPENABLE);
    intent.putExtra(Intent.EXTRA_ALLOW_MULTIPLE, true);
    try {
        startActivityForResult(intent, REQUEST_CHOOSE_PHOTOS);
    } catch (ActivityNotFoundException e) {
        Snackbar.make(mPhotoGridView, R.string.gallery_add_photos_error,
                Snackbar.LENGTH_LONG).show();
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            hideAddToolbar(true);
        }
    }
}

@Override
public void onRequestPermissionsResult(final int requestCode, @NonNull final String[] permissions,
        @NonNull final int[] grantResults) {
    super.onRequestPermissionsResult(requestCode, permissions, grantResults);
    if (requestCode != REQUEST_STORAGE_PERMISSION) {
        return;
    }
    // NOTE(review): grantResults is not inspected here - the refresh below appears to
    // re-derive the permission state itself. Confirm this is intentional.
    onDataSetChanged();
}

@Override
protected void onResume() {
    super.onResume();
    // Permissions might have changed in the background
    onDataSetChanged();
}

@Override
protected void onDestroy() {
    super.onDestroy();
    // Tear down the background thread, letting queued work finish first.
    if (mHandlerThread != null) {
        mHandlerThread.quitSafely();
        mHandlerThread = null;
    }
unbindService(mServiceConnection); } @Override public boolean onCreateOptionsMenu(final Menu menu) { super.onCreateOptionsMenu(menu); getMenuInflater().inflate(R.menu.gallery_activity, menu); int rotateIntervalMin = GalleryArtSource.getSharedPreferences(this) .getInt(GalleryArtSource.PREF_ROTATE_INTERVAL_MIN, GalleryArtSource.DEFAULT_ROTATE_INTERVAL_MIN); int menuId = sRotateMenuIdsByMin.get(rotateIntervalMin); if (menuId != 0) { MenuItem item = menu.findItem(menuId); if (item != null) { item.setChecked(true); } } return true; } @Override public boolean onPrepareOptionsMenu(final Menu menu) { super.onPrepareOptionsMenu(menu); // Make sure the 'Import photos' MenuItem is set up properly based on the number of // activities that handle ACTION_GET_CONTENT // 0 = hide the MenuItem // 1 = show 'Import photos from APP_NAME' to go to the one app that exists // 2 = show 'Import photos...' to have the user pick which app to import photos from Intent intent = new Intent(Intent.ACTION_GET_CONTENT); intent.setType("image/*"); intent.addCategory(Intent.CATEGORY_OPENABLE); List<ResolveInfo> getContentActivities = getPackageManager().queryIntentActivities(intent, 0); mGetContentActivites.clear(); for (ResolveInfo info : getContentActivities) { // Filter out the default system UI if (TextUtils.equals(info.activityInfo.packageName, "com.android.documentsui")) { continue; } // Filter out non-exported activities if (!info.activityInfo.exported) { continue; } // Filter out activities we don't have permission to start if (!TextUtils.isEmpty(info.activityInfo.permission) && getPackageManager().checkPermission(info.activityInfo.permission, getPackageName()) != PackageManager.PERMISSION_GRANTED) { continue; } mGetContentActivites.add(info.activityInfo); } // Hide the 'Import photos' action if there are no activities found MenuItem importPhotosMenuItem = menu.findItem(R.id.action_import_photos); importPhotosMenuItem.setVisible(!mGetContentActivites.isEmpty()); // If there's only one app 
that supports ACTION_GET_CONTENT, tell the user what that app is if (mGetContentActivites.size() == 1) { importPhotosMenuItem.setTitle(getString(R.string.gallery_action_import_photos_from, mGetContentActivites.get(0).loadLabel(getPackageManager()))); } else { importPhotosMenuItem.setTitle(R.string.gallery_action_import_photos); } return true; } @Override public boolean onOptionsItemSelected(final MenuItem item) { int itemId = item.getItemId(); int rotateMin = sRotateMinsByMenuId.get(itemId, -1); if (rotateMin != -1) { GalleryArtSource.getSharedPreferences(GallerySettingsActivity.this).edit() .putInt(GalleryArtSource.PREF_ROTATE_INTERVAL_MIN, rotateMin) .apply(); item.setChecked(true); return true; } if (itemId == R.id.action_import_photos) { if (mGetContentActivites.size() == 1) { // Just start the one ACTION_GET_CONTENT app requestGetContent(mGetContentActivites.get(0)); } else { // Let the user pick which app they want to import photos from PackageManager packageManager = getPackageManager(); final CharSequence[] items = new CharSequence[mGetContentActivites.size()]; for (int h = 0; h < mGetContentActivites.size(); h++) { items[h] = mGetContentActivites.get(h).loadLabel(packageManager); } new AlertDialog.Builder(this) .setTitle(R.string.gallery_import_dialog_title) .setItems(items, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { requestGetContent(mGetContentActivites.get(which)); } }) .show(); } return true; } else if (itemId == R.id.action_clear_photos) { runOnHandlerThread(new Runnable() { @Override public void run() { getContentResolver().delete(GalleryContract.ChosenPhotos.CONTENT_URI, null, null); } }); return true; } return super.onOptionsItemSelected(item); } private void requestGetContent(ActivityInfo info) { Intent intent = new Intent(Intent.ACTION_GET_CONTENT); intent.setType("image/*"); intent.addCategory(Intent.CATEGORY_OPENABLE); intent.setClassName(info.packageName, info.name); 
    intent.putExtra(Intent.EXTRA_ALLOW_MULTIPLE, true);
    startActivityForResult(intent, REQUEST_CHOOSE_PHOTOS);
}

// Run work on a lazily created background handler thread (ContentProvider operations etc.).
private void runOnHandlerThread(Runnable runnable) {
    if (mHandlerThread == null) {
        mHandlerThread = new HandlerThread("GallerySettingsActivity");
        mHandlerThread.start();
        mHandler = new Handler(mHandlerThread.getLooper());
    }
    mHandler.post(runnable);
}

private int mLastTouchPosition;
private int mLastTouchX, mLastTouchY;

// Wire up the contextual selection toolbar and the multi-selection controller callbacks.
private void setupMultiSelect() {
    // Set up toolbar
    mSelectionToolbar = (Toolbar) findViewById(R.id.selection_toolbar);
    // The navigation (back) icon clears the current selection.
    mSelectionToolbar.setNavigationOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
            mMultiSelectionController.reset(true);
        }
    });
    mSelectionToolbar.inflateMenu(R.menu.gallery_selection);
    mSelectionToolbar.setOnMenuItemClickListener(new Toolbar.OnMenuItemClickListener() {
        @Override
        public boolean onMenuItemClick(MenuItem item) {
            int itemId = item.getItemId();
            if (itemId == R.id.action_force_now) {
                Set<Uri> selection = mMultiSelectionController.getSelection();
                if (selection.size() > 0) {
                    Uri selectedUri = selection.iterator().next();
                    // Check to see if it is tree URI, if so, force a random photo from the tree
                    Cursor data = getContentResolver().query(selectedUri,
                            new String[] { GalleryContract.ChosenPhotos.COLUMN_NAME_IS_TREE_URI,
                                    GalleryContract.ChosenPhotos.COLUMN_NAME_URI },
                            null, null, null);
                    if (data != null && data.moveToNext()) {
                        boolean isTreeUri = data.getInt(0) != 0;
                        if (isTreeUri) {
                            Uri treeUri = Uri.parse(data.getString(1));
                            List<Uri> photoUris = getImagesFromTreeUri(treeUri, Integer.MAX_VALUE);
                            selectedUri = photoUris.get(new Random().nextInt(photoUris.size()));
                        }
                    }
                    if (data != null) {
                        data.close();
                    }
                    // Ask the art source to publish the chosen image immediately.
                    startService(
                            new Intent(GallerySettingsActivity.this, GalleryArtSource.class)
                                    .setAction(ACTION_PUBLISH_NEXT_GALLERY_ITEM)
                                    .putExtra(EXTRA_FORCE_URI, selectedUri));
                    Toast.makeText(GallerySettingsActivity.this,
                            R.string.gallery_temporary_force_image, Toast.LENGTH_SHORT).show();
                }
                mMultiSelectionController.reset(true);
                return true;
            } else if (itemId == R.id.action_remove) {
                // Snapshot the selection before clearing it; deletion happens in the background.
                final ArrayList<Uri> removeUris = new ArrayList<>(
                        mMultiSelectionController.getSelection());
                runOnHandlerThread(new Runnable() {
                    @Override
                    public void run() {
                        // Update chosen URIs
                        ArrayList<ContentProviderOperation> operations = new ArrayList<>();
                        for (Uri uri : removeUris) {
                            operations.add(ContentProviderOperation.newDelete(uri)
                                    .build());
                        }
                        try {
                            getContentResolver().applyBatch(GalleryContract.AUTHORITY, operations);
                        } catch (RemoteException | OperationApplicationException e) {
                            Log.e(TAG, "Error deleting URIs from the ContentProvider", e);
                        }
                    }
                });
                mMultiSelectionController.reset(true);
                return true;
            }
            return false;
        }
    });
    // Set up controller
    mMultiSelectionController.setCallbacks(new MultiSelectionController.Callbacks() {
        @Override
        public void onSelectionChanged(boolean restored, boolean fromUser) {
            tryUpdateSelection(!restored);
        }
    });
}

@Override
public void onBackPressed() {
    // Back dismisses, in order: the selection, the add toolbar, then the activity itself.
    if (mMultiSelectionController.getSelectedCount() > 0) {
        mMultiSelectionController.reset(true);
    } else if (mAddToolbar.getVisibility() == View.VISIBLE) {
        hideAddToolbar(true);
    } else {
        super.onBackPressed();
    }
}

@TargetApi(Build.VERSION_CODES.LOLLIPOP)
private void showAddToolbar() {
    // Divide by two since we're doing two animations but we want the total time to the short animation time
    final int duration = getResources().getInteger(android.R.integer.config_shortAnimTime) / 2;
    // Hide the add button
    mAddButton.animate()
            .scaleX(0f)
            .scaleY(0f)
            .translationY(getResources().getDimension(R.dimen.gallery_fab_margin))
            .setDuration(duration)
            .withEndAction(new Runnable() {
                @TargetApi(Build.VERSION_CODES.LOLLIPOP)
                @Override
                public void run() {
                    mAddButton.setVisibility(View.INVISIBLE);
                    // Then show the toolbar
                    mAddToolbar.setVisibility(View.VISIBLE);
                    // Circular reveal from the toolbar's center out to its half-width.
                    ViewAnimationUtils.createCircularReveal(
                            mAddToolbar,
                            mAddToolbar.getWidth() / 2,
                            mAddToolbar.getHeight() / 2,
                            0,
                            mAddToolbar.getWidth() / 2)
                            .setDuration(duration)
                            .start();
                }
            });
}

@TargetApi(Build.VERSION_CODES.LOLLIPOP)
private void hideAddToolbar(final boolean showAddButton) {
    // Divide by two since we're doing two animations but we want the total time to the short animation time
    final int duration = getResources().getInteger(android.R.integer.config_shortAnimTime) / 2;
    // Hide the toolbar
    Animator hideAnimator = ViewAnimationUtils.createCircularReveal(
            mAddToolbar,
            mAddToolbar.getWidth() / 2,
            mAddToolbar.getHeight() / 2,
            mAddToolbar.getWidth() / 2,
            0).setDuration(showAddButton ? duration : duration * 2);
    hideAnimator.addListener(new AnimatorListenerAdapter() {
        @Override
        public void onAnimationEnd(final Animator animation) {
            mAddToolbar.setVisibility(View.INVISIBLE);
            if (showAddButton) {
                mAddButton.setVisibility(View.VISIBLE);
                mAddButton.animate()
                        .scaleX(1f)
                        .scaleY(1f)
                        .translationY(0)
                        .setDuration(duration);
            } else {
                // Just reset the translationY
                mAddButton.setTranslationY(0);
            }
        }
    });
    hideAnimator.start();
}

// Refresh the selection toolbar and grid to match the current multi-selection state.
private void tryUpdateSelection(boolean allowAnimate) {
    final View selectionToolbarContainer = findViewById(R.id.selection_toolbar_container);
    if (mUpdatePosition >= 0) {
        // Only one item changed - avoid rebinding the whole grid.
        mChosenPhotosAdapter.notifyItemChanged(mUpdatePosition);
        mUpdatePosition = -1;
    } else {
        mChosenPhotosAdapter.notifyDataSetChanged();
    }
    int selectedCount = mMultiSelectionController.getSelectedCount();
    final boolean toolbarVisible = selectedCount > 0;
    boolean showForceNow = selectedCount == 1;
    if (showForceNow) {
        // Double check to make sure we can force a URI for the selected URI
        Uri selectedUri = mMultiSelectionController.getSelection().iterator().next();
        Cursor data = getContentResolver().query(selectedUri,
                new String[] { GalleryContract.ChosenPhotos.COLUMN_NAME_IS_TREE_URI,
                        GalleryContract.ChosenPhotos.COLUMN_NAME_URI },
                null, null, null);
        if (data != null && data.moveToNext()) {
            boolean isTreeUri = data.getInt(0) != 0;
            // Only show the force now icon if it isn't a tree URI or there is at least one image in the tree
showForceNow = !isTreeUri || !getImagesFromTreeUri(Uri.parse(data.getString(1)), 1).isEmpty(); } if (data != null) { data.close(); } } mSelectionToolbar.getMenu().findItem(R.id.action_force_now).setVisible( showForceNow); Boolean previouslyVisible = (Boolean) selectionToolbarContainer.getTag(0xDEADBEEF); if (previouslyVisible == null) { previouslyVisible = Boolean.FALSE; } if (previouslyVisible != toolbarVisible) { selectionToolbarContainer.setTag(0xDEADBEEF, toolbarVisible); int duration = allowAnimate ? getResources().getInteger(android.R.integer.config_shortAnimTime) : 0; if (toolbarVisible) { selectionToolbarContainer.setVisibility(View.VISIBLE); selectionToolbarContainer.setTranslationY( -selectionToolbarContainer.getHeight()); selectionToolbarContainer.animate() .translationY(0f) .setDuration(duration) .withEndAction(null); if (mAddToolbar.getVisibility() == View.VISIBLE) { hideAddToolbar(false); } else { mAddButton.animate() .scaleX(0f) .scaleY(0f) .setDuration(duration) .withEndAction(new Runnable() { @Override public void run() { mAddButton.setVisibility(View.INVISIBLE); } }); } } else { selectionToolbarContainer.animate() .translationY(-selectionToolbarContainer.getHeight()) .setDuration(duration) .withEndAction(new Runnable() { @Override public void run() { selectionToolbarContainer.setVisibility(View.INVISIBLE); } }); mAddButton.setVisibility(View.VISIBLE); mAddButton.animate() .scaleY(1f) .scaleX(1f) .setDuration(duration) .withEndAction(null); } } if (toolbarVisible) { String title = Integer.toString(selectedCount); if (selectedCount == 1) { // If they've selected a tree URI, show the DISPLAY_NAME instead of just '1' Uri selectedUri = mMultiSelectionController.getSelection().iterator().next(); Cursor data = getContentResolver().query(selectedUri, new String[] { GalleryContract.ChosenPhotos.COLUMN_NAME_IS_TREE_URI, GalleryContract.ChosenPhotos.COLUMN_NAME_URI }, null, null, null); if (data != null && data.moveToNext()) { boolean isTreeUri = 
data.getInt(0) != 0; if (isTreeUri) { String displayName = getDisplayNameForTreeUri(Uri.parse(data.getString(1))); if (!TextUtils.isEmpty(displayName)) { title = displayName; } } } if (data != null) { data.close(); } } mSelectionToolbar.setTitle(title); } } @TargetApi(Build.VERSION_CODES.LOLLIPOP) private String getDisplayNameForTreeUri(Uri treeUri) { Uri documentUri = DocumentsContract.buildDocumentUriUsingTree(treeUri, DocumentsContract.getTreeDocumentId(treeUri)); Cursor data = getContentResolver().query(documentUri, new String[] { DocumentsContract.Document.COLUMN_DISPLAY_NAME }, null, null, null); String displayName = null; if (data != null && data.moveToNext()) { displayName = data.getString(data.getColumnIndex(DocumentsContract.Document.COLUMN_DISPLAY_NAME)); } if (data != null) { data.close(); } return displayName; } private void onDataSetChanged() { View emptyView = findViewById(android.R.id.empty); TextView emptyDescription = (TextView) findViewById(R.id.empty_description); if (mChosenUris != null && mChosenUris.getCount() > 0) { emptyView.setVisibility(View.GONE); // We have at least one image, so consider the Gallery source properly setup setResult(RESULT_OK); } else { // No chosen images, show the empty View emptyView.setVisibility(View.VISIBLE); ViewAnimator animator = (ViewAnimator) findViewById(R.id.empty_animator); if (ContextCompat.checkSelfPermission(this, Manifest.permission.READ_EXTERNAL_STORAGE) == PackageManager.PERMISSION_GRANTED) { // Permission is granted, we can show the random camera photos image animator.setDisplayedChild(0); emptyDescription.setText(R.string.gallery_empty); setResult(RESULT_OK); } else { // We have no images until they enable the permission setResult(RESULT_CANCELED); if (ActivityCompat.shouldShowRequestPermissionRationale(this, Manifest.permission.READ_EXTERNAL_STORAGE)) { // We should show rationale on why they should enable the storage permission and // random camera photos animator.setDisplayedChild(1); 
emptyDescription.setText(R.string.gallery_permission_rationale); } else { // The user has permanently denied the storage permission. Give them a link to app settings animator.setDisplayedChild(2); emptyDescription.setText(R.string.gallery_denied_explanation); } } } } @Override protected void onPostCreate(Bundle savedInstanceState) { super.onPostCreate(savedInstanceState); mMultiSelectionController.restoreInstanceState(savedInstanceState); } abstract static class CheckableViewHolder extends RecyclerView.ViewHolder { final View mRootView; final View mCheckedOverlayView; CheckableViewHolder(View root) { super(root); mRootView = root; mCheckedOverlayView = root.findViewById(R.id.checked_overlay); } } static class PhotoViewHolder extends CheckableViewHolder { final ImageView mThumbView; PhotoViewHolder(View root) { super(root); mThumbView = (ImageView) root.findViewById(R.id.thumbnail); } } static class TreeViewHolder extends CheckableViewHolder { final List<ImageView> mThumbViews = new ArrayList<>(); TreeViewHolder(View root) { super(root); mThumbViews.add((ImageView) root.findViewById(R.id.thumbnail1)); mThumbViews.add((ImageView) root.findViewById(R.id.thumbnail2)); mThumbViews.add((ImageView) root.findViewById(R.id.thumbnail3)); mThumbViews.add((ImageView) root.findViewById(R.id.thumbnail4)); } } private final RecyclerView.Adapter<CheckableViewHolder> mChosenPhotosAdapter = new RecyclerView.Adapter<CheckableViewHolder>() { @Override public int getItemViewType(final int position) { mChosenUris.moveToPosition(position); // This will return 1 for tree URIs and 0 for photo URIs return mChosenUris.getInt(mChosenUris.getColumnIndex(GalleryContract.ChosenPhotos.COLUMN_NAME_IS_TREE_URI)); } @Override public CheckableViewHolder onCreateViewHolder(ViewGroup parent, int viewType) { boolean isTreeUri = viewType != 0; View v; final CheckableViewHolder vh; if (isTreeUri) { v = LayoutInflater.from(GallerySettingsActivity.this) .inflate(R.layout.gallery_chosen_photo_tree_item, 
parent, false); vh = new TreeViewHolder(v); } else { v = LayoutInflater.from(GallerySettingsActivity.this) .inflate(R.layout.gallery_chosen_photo_item, parent, false); vh = new PhotoViewHolder(v); } v.getLayoutParams().height = mItemSize; v.setOnTouchListener(new View.OnTouchListener() { @Override public boolean onTouch(View view, MotionEvent motionEvent) { if (motionEvent.getActionMasked() != MotionEvent.ACTION_CANCEL) { mLastTouchPosition = vh.getAdapterPosition(); mLastTouchX = (int) motionEvent.getX(); mLastTouchY = (int) motionEvent.getY(); } return false; } }); v.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { mUpdatePosition = vh.getAdapterPosition(); if (mUpdatePosition != RecyclerView.NO_POSITION) { Uri contentUri = ContentUris.withAppendedId(GalleryContract.ChosenPhotos.CONTENT_URI, getItemId(mUpdatePosition)); mMultiSelectionController.toggle(contentUri, true); } } }); return vh; } @Override public void onBindViewHolder(final CheckableViewHolder vh, int position) { mChosenUris.moveToPosition(position); Uri contentUri = ContentUris.withAppendedId(GalleryContract.ChosenPhotos.CONTENT_URI, mChosenUris.getLong(mChosenUris.getColumnIndex(BaseColumns._ID))); boolean isTreeUri = getItemViewType(position) != 0; if (isTreeUri) { TreeViewHolder treeVh = (TreeViewHolder) vh; int maxImages = treeVh.mThumbViews.size(); Uri imageUri = Uri.parse(mChosenUris.getString( mChosenUris.getColumnIndex(GalleryContract.ChosenPhotos.COLUMN_NAME_URI))); List<Uri> images = getImagesFromTreeUri(imageUri, maxImages); int numImages = images.size(); for (int h=0; h<numImages; h++) { Picasso.with(GallerySettingsActivity.this) .load(images.get(h)) .resize(mItemSize / 2, mItemSize / 2) .centerCrop() .placeholder(mPlaceholderSmallDrawable) .into(treeVh.mThumbViews.get(h)); } for (int h=numImages; h<maxImages; h++) { treeVh.mThumbViews.get(h).setImageDrawable(mPlaceholderSmallDrawable); } } else { PhotoViewHolder photoVh = (PhotoViewHolder) vh; 
Picasso.with(GallerySettingsActivity.this) .load(contentUri) .resize(mItemSize, mItemSize) .centerCrop() .placeholder(mPlaceholderDrawable) .into(photoVh.mThumbView); } final boolean checked = mMultiSelectionController.isSelected(contentUri); vh.mRootView.setTag(R.id.gallery_viewtag_position, position); if (mLastTouchPosition == vh.getAdapterPosition() && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { new Handler().post(new Runnable() { @TargetApi(Build.VERSION_CODES.LOLLIPOP) @Override public void run() { if (!vh.mCheckedOverlayView.isAttachedToWindow()) { // Can't animate detached Views vh.mCheckedOverlayView.setVisibility( checked ? View.VISIBLE : View.GONE); return; } if (checked) { vh.mCheckedOverlayView.setVisibility(View.VISIBLE); } // find the smallest radius that'll cover the item float coverRadius = maxDistanceToCorner( mLastTouchX, mLastTouchY, 0, 0, vh.mRootView.getWidth(), vh.mRootView.getHeight()); Animator revealAnim = ViewAnimationUtils.createCircularReveal( vh.mCheckedOverlayView, mLastTouchX, mLastTouchY, checked ? 0 : coverRadius, checked ? coverRadius : 0) .setDuration(150); if (!checked) { revealAnim.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { vh.mCheckedOverlayView.setVisibility(View.GONE); } }); } revealAnim.start(); } }); } else { vh.mCheckedOverlayView.setVisibility( checked ? View.VISIBLE : View.GONE); } } private float maxDistanceToCorner(int x, int y, int left, int top, int right, int bottom) { float maxDistance = 0; maxDistance = Math.max(maxDistance, (float) Math.hypot(x - left, y - top)); maxDistance = Math.max(maxDistance, (float) Math.hypot(x - right, y - top)); maxDistance = Math.max(maxDistance, (float) Math.hypot(x - left, y - bottom)); maxDistance = Math.max(maxDistance, (float) Math.hypot(x - right, y - bottom)); return maxDistance; } @Override public int getItemCount() { return mChosenUris != null ? 
mChosenUris.getCount() : 0; } @Override public long getItemId(int position) { mChosenUris.moveToPosition(position); return mChosenUris.getLong(mChosenUris.getColumnIndex(BaseColumns._ID)); } }; @TargetApi(Build.VERSION_CODES.LOLLIPOP) private List<Uri> getImagesFromTreeUri(final Uri treeUri, final int maxImages) { List<Uri> images = new ArrayList<>(); Queue<String> directories = new LinkedList<>(); directories.add(DocumentsContract.getTreeDocumentId(treeUri)); while (images.size() < maxImages && !directories.isEmpty()) { String parentDocumentId = directories.poll(); final Uri childrenUri = DocumentsContract.buildChildDocumentsUriUsingTree(treeUri, parentDocumentId); Cursor children; try { children = getContentResolver().query(childrenUri, new String[]{DocumentsContract.Document.COLUMN_DOCUMENT_ID, DocumentsContract.Document.COLUMN_MIME_TYPE}, null, null, null); } catch (SecurityException e) { // No longer can read this URI, which means no images from this URI // This a temporary state as the next onLoadFinished() will remove this item entirely children = null; } if (children == null) { continue; } while (children.moveToNext()) { String documentId = children.getString( children.getColumnIndex(DocumentsContract.Document.COLUMN_DOCUMENT_ID)); String mimeType = children.getString( children.getColumnIndex(DocumentsContract.Document.COLUMN_MIME_TYPE)); if (DocumentsContract.Document.MIME_TYPE_DIR.equals(mimeType)) { directories.add(documentId); } else if (mimeType != null && mimeType.startsWith("image/")) { // Add images to the list images.add(DocumentsContract.buildDocumentUriUsingTree(treeUri, documentId)); } if (images.size() == maxImages) { break; } } children.close(); } return images; } @Override protected void onActivityResult(int requestCode, int resultCode, Intent result) { super.onActivityResult(requestCode, resultCode, result); if (requestCode != REQUEST_CHOOSE_PHOTOS && requestCode != REQUEST_CHOOSE_FOLDER) { return; } if (Build.VERSION.SDK_INT >= 
Build.VERSION_CODES.LOLLIPOP) { if (!mAddToolbar.isAttachedToWindow()) { // Can't animate detached Views mAddToolbar.setVisibility(View.INVISIBLE); mAddButton.setVisibility(View.VISIBLE); } else { hideAddToolbar(true); } } if (resultCode != RESULT_OK) { return; } if (result == null) { return; } if (requestCode == REQUEST_CHOOSE_FOLDER) { SharedPreferences preferences = getSharedPreferences(SHARED_PREF_NAME, Context.MODE_PRIVATE); preferences.edit().putBoolean(SHOW_INTERNAL_STORAGE_MESSAGE, false).apply(); } // Add chosen items final Set<Uri> uris = new HashSet<>(); if (result.getData() != null) { uris.add(result.getData()); } // When selecting multiple images, "Photos" returns the first URI in getData and all URIs // in getClipData. ClipData clipData = result.getClipData(); if (clipData != null) { int count = clipData.getItemCount(); for (int i = 0; i < count; i++) { Uri uri = clipData.getItemAt(i).getUri(); if (uri != null) { uris.add(uri); } } } if (uris.isEmpty()) { // Nothing to do, so we can avoid posting the runnable at all return; } // Update chosen URIs runOnHandlerThread(new Runnable() { @Override public void run() { ArrayList<ContentProviderOperation> operations = new ArrayList<>(); for (Uri uri : uris) { ContentValues values = new ContentValues(); values.put(GalleryContract.ChosenPhotos.COLUMN_NAME_URI, uri.toString()); operations.add(ContentProviderOperation.newInsert(GalleryContract.ChosenPhotos.CONTENT_URI) .withValues(values).build()); } try { getContentResolver().applyBatch(GalleryContract.AUTHORITY, operations); } catch (RemoteException | OperationApplicationException e) { Log.e(TAG, "Error writing uris to ContentProvider", e); } } }); } @Override public Loader<Cursor> onCreateLoader(int id, Bundle args) { return new CursorLoader(this, GalleryContract.ChosenPhotos.CONTENT_URI, new String[] {BaseColumns._ID, GalleryContract.ChosenPhotos.COLUMN_NAME_URI, GalleryContract.ChosenPhotos.COLUMN_NAME_IS_TREE_URI }, null, null, null); } @Override public 
void onLoadFinished(Loader<Cursor> loader, final Cursor data) { if (mChosenUris == data) { return; } final Cursor previousData = mChosenUris; mChosenUris = data; DiffUtil.calculateDiff(new DiffUtil.Callback() { @Override public int getOldListSize() { return previousData != null ? previousData.getCount() : 0; } @Override public int getNewListSize() { return data.getCount(); } @Override public boolean areItemsTheSame(final int oldItemPosition, final int newItemPosition) { previousData.moveToPosition(oldItemPosition); String oldImageUri = previousData.getString( previousData.getColumnIndex(GalleryContract.ChosenPhotos.COLUMN_NAME_URI)); data.moveToPosition(newItemPosition); String newImageUri = data.getString( data.getColumnIndex(GalleryContract.ChosenPhotos.COLUMN_NAME_URI)); return oldImageUri.equals(newImageUri); } @Override public boolean areContentsTheSame(final int oldItemPosition, final int newItemPosition) { // If the items are the same (same image URI), then they are equivalent and // no change animation is needed return true; } }).dispatchUpdatesTo(mChosenPhotosAdapter); onDataSetChanged(); } @Override public void onLoaderReset(Loader<Cursor> loader) { mChosenUris = null; mChosenPhotosAdapter.notifyItemRangeRemoved(0, mChosenPhotosAdapter.getItemCount()); onDataSetChanged(); } @Override protected void onSaveInstanceState(Bundle outState) { super.onSaveInstanceState(outState); mMultiSelectionController.saveInstanceState(outState); } }
/*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.facebook.buck.jvm.java.abi;

import com.google.common.base.Preconditions;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import javax.annotation.Nullable;
import org.objectweb.asm.Attribute;
import org.objectweb.asm.ClassReader;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.FieldVisitor;
import org.objectweb.asm.Handle;
import org.objectweb.asm.Label;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;

/** A {@link ClassVisitor} that only passes to its delegate events for the class's ABI. */
class AbiFilteringClassVisitor extends ClassVisitor {

  /**
   * Suffix appended by the compiler to synthetic default-argument companion methods (e.g. a method
   * named {@code foo$default} accompanies {@code foo}). Such a companion keeps its body whenever
   * its base method does.
   */
  private static final String DEFAULT_COMPANION_SUFFIX = "$default";

  /** Names of methods whose bodies are kept intact rather than stripped. */
  private final List<String> methodsWithRetainedBody;

  /**
   * Class names referenced by the ABI, used to filter inner-class entries; null on the first pass,
   * meaning inner classes are not filtered yet.
   */
  @Nullable private final Set<String> referencedClassNames;

  /** Internal name of the class being visited; set in {@link #visit}. */
  @Nullable private String name;

  /** Internal name of this class's immediate enclosing class, if it is an inner class. */
  @Nullable private String outerName = null;

  private int classAccess;
  private int classVersion;

  /** Whether a constructor survives filtering; checked in {@link #visitEnd}. */
  private boolean hasVisibleConstructor = false;

  /** Inner-class entries that passed the filter; used to filter nest members in visitEnd. */
  private final Set<String> includedInnerClasses = new HashSet<>();

  /** Nest members seen so far; emission is deferred to {@link #visitEnd} so they can be filtered. */
  private final List<String> nestMembers = new ArrayList<>();

  public AbiFilteringClassVisitor(ClassVisitor cv, List<String> methodsWithRetainedBody) {
    this(cv, methodsWithRetainedBody, null);
  }

  public AbiFilteringClassVisitor(
      ClassVisitor cv,
      List<String> methodsWithRetainedBody,
      @Nullable Set<String> referencedClassNames) {
    super(Opcodes.ASM7, cv);
    this.methodsWithRetainedBody = methodsWithRetainedBody;
    this.referencedClassNames = referencedClassNames;
  }

  @Override
  public void visit(
      int version,
      int access,
      String name,
      String signature,
      String superName,
      String[] interfaces) {
    this.name = name;
    classAccess = access;
    classVersion = version;
    super.visit(version, access, name, signature, superName, interfaces);
  }

  @Override
  public void visitInnerClass(String name, String outerName, String innerName, int access) {
    // Record our own enclosing class so visitEnd can pick the right default-constructor
    // descriptor for inner classes
    if (name.equals(this.name)) {
      this.outerName = outerName;
    }
    if (!shouldIncludeInnerClass(access, name, outerName)) {
      return;
    }
    includedInnerClasses.add(name);
    super.visitInnerClass(name, outerName, innerName, access);
  }

  @Override
  public void visitNestMember(String nestMember) {
    // NestMembers attribute only exists from class file version 55 (Java 11) onward
    Preconditions.checkState(classVersion >= Opcodes.V11);
    nestMembers.add(nestMember);
  }

  @Override
  @Nullable
  public FieldVisitor visitField(
      int access, String name, String desc, String signature, Object value) {
    if (!shouldInclude(access)) {
      return null;
    }
    return super.visitField(access, name, desc, signature, value);
  }

  @Override
  @Nullable
  public MethodVisitor visitMethod(
      int access, String name, String desc, String signature, String[] exceptions) {
    // Retained methods (and their "$default" companions) keep their full bodies.
    if (methodsWithRetainedBody.contains(name)
        || (name.endsWith(DEFAULT_COMPANION_SUFFIX)
            && methodsWithRetainedBody.contains(
                name.substring(0, name.length() - DEFAULT_COMPANION_SUFFIX.length())))) {
      if (name.equals("<init>") && (access & (Opcodes.ACC_PRIVATE | Opcodes.ACC_SYNTHETIC)) == 0) {
        hasVisibleConstructor = true;
      }
      return super.visitMethod(access, name, desc, signature, exceptions);
    }

    // Per JVMS8 2.9, "Class and interface initialization methods are invoked
    // implicitly by the Java Virtual Machine; they are never invoked directly from any
    // Java Virtual Machine instruction, but are invoked only indirectly as part of the class
    // initialization process." Thus we don't need to emit a stub of <clinit>.
    if (!shouldInclude(access) || (name.equals("<clinit>") && (access & Opcodes.ACC_STATIC) > 0)) {
      return null;
    }

    // We don't stub private constructors, but if stripping these constructors results in no
    // constructors at all, we want to include a default private constructor. This is because
    // removing all these private methods will make the class look like it has no constructors at
    // all, which is not possible. We track if this class has a public, non-synthetic constructor
    // and is not an interface or annotation to determine if a default private constructor is
    // generated when visitEnd() is called.
    if (name.equals("<init>") && (access & Opcodes.ACC_SYNTHETIC) == 0) {
      hasVisibleConstructor = true;
    }

    // Bridge methods are created by the compiler, and don't appear in source. It would be nice to
    // skip them, but they're used by the compiler to cover the fact that type erasure has occurred.
    // Normally the compiler adds these as public methods, but if you're compiling against a stub
    // produced using our ABI generator, we don't want people calling it accidentally. Oh well, I
    // guess it happens IRL too.
    //
    // Synthetic methods are also generated by the compiler, unless it's one of the methods named in
    // section 4.7.8 of the JVM spec, which are "<init>" and "Enum.valueOf()" and "Enum.values".
    // None of these are actually harmful to the ABI, so we allow synthetic methods through.
    // http://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.7.8
    return new SkipCodeMethodVisitor(
        Opcodes.ASM7, super.visitMethod(access, name, desc, signature, exceptions));
  }

  @Override
  public void visitEnd() {
    // If every constructor was stripped, synthesize a private default constructor so the class
    // doesn't appear to have none at all (enums and inner classes need the matching descriptor).
    if (!hasVisibleConstructor && !isInterface(classAccess) && !isAnnotation(classAccess)) {
      String desc;
      if (isEnum(classAccess)) {
        desc =
            Type.getMethodType(
                    Type.VOID_TYPE, Type.getObjectType("java/lang/String"), Type.INT_TYPE)
                .getDescriptor();
      } else {
        desc =
            outerName == null
                ? Type.getMethodType(Type.VOID_TYPE).getDescriptor()
                : Type.getMethodType(Type.VOID_TYPE, Type.getObjectType(outerName))
                    .getDescriptor();
      }
      super.visitMethod(Opcodes.ACC_PRIVATE, "<init>", desc, null, null);
    }

    // Filter nest members to included inner classes. Other nest members don't matter for ABI
    // purposes. Even though other members will point to this class as their nest host, this
    // asymmetry only matters at runtime when access control checks happen.
    for (String nestMember : nestMembers) {
      if (includedInnerClasses.contains(nestMember)) {
        super.visitNestMember(nestMember);
      }
    }

    super.visitEnd();
  }

  /**
   * Decides whether an InnerClasses entry belongs in the ABI: always our own entry; on the second
   * pass, only classes the ABI references; never anonymous/local classes or synthetic non-bridge
   * classes; otherwise only direct members of this class.
   */
  private boolean shouldIncludeInnerClass(int access, String name, @Nullable String outerName) {
    if (referencedClassNames == null || referencedClassNames.contains(name)) {
      // Either it's the first pass, and we're not filtering inner classes yet,
      // or it's the second one, and this inner class is part of the ABI and should
      // therefore be included
      return true;
    }

    String currentClassName = Objects.requireNonNull(this.name);
    if (name.equals(currentClassName)) {
      // Must always include the entry for our own class, since that's what makes it an inner
      // class.
      return true;
    }

    boolean isAnonymousOrLocalClass = (outerName == null);
    if (isAnonymousOrLocalClass) {
      // Anonymous and local classes are never part of the ABI.
      return false;
    }

    if ((access & (Opcodes.ACC_SYNTHETIC | Opcodes.ACC_BRIDGE)) == Opcodes.ACC_SYNTHETIC) {
      // Don't include synthetic classes
      return false;
    }

    return currentClassName.equals(outerName);
  }

  /** Members are kept unless they are private or synthetic-but-not-bridge. */
  private boolean shouldInclude(int access) {
    if ((access & Opcodes.ACC_PRIVATE) == Opcodes.ACC_PRIVATE) {
      return false;
    }
    return (access & (Opcodes.ACC_SYNTHETIC | Opcodes.ACC_BRIDGE)) != Opcodes.ACC_SYNTHETIC;
  }

  private boolean isInterface(int access) {
    return (access & Opcodes.ACC_INTERFACE) > 0;
  }

  private boolean isAnnotation(int access) {
    return (access & Opcodes.ACC_ANNOTATION) > 0;
  }

  private boolean isEnum(int access) {
    return (access & Opcodes.ACC_ENUM) > 0;
  }

  /** A {@link MethodVisitor} that replicates the behavior of {@link ClassReader#SKIP_CODE}. */
  private static class SkipCodeMethodVisitor extends MethodVisitor {
    public SkipCodeMethodVisitor(int api, MethodVisitor methodVisitor) {
      super(api, methodVisitor);
    }

    @Override
    public void visitAttribute(Attribute attribute) {}

    @Override
    public void visitCode() {}

    @Override
    public void visitFrame(
        int type, int numLocal, Object[] local, int numStack, Object[] stack) {}

    @Override
    public void visitInsn(int opcode) {}

    @Override
    public void visitIntInsn(int opcode, int operand) {}

    @Override
    public void visitVarInsn(int opcode, int var) {}

    @Override
    public void visitTypeInsn(int opcode, String type) {}

    @Override
    public void visitFieldInsn(int opcode, String owner, String name, String descriptor) {}

    /** @deprecated */
    @Override
    @Deprecated
    public void visitMethodInsn(int opcode, String owner, String name, String descriptor) {}

    @Override
    public void visitMethodInsn(
        int opcode, String owner, String name, String descriptor, boolean isInterface) {}

    @Override
    public void visitInvokeDynamicInsn(
        String name,
        String descriptor,
        Handle bootstrapMethodHandle,
        Object... bootstrapMethodArguments) {}

    @Override
    public void visitJumpInsn(int opcode, Label label) {}

    @Override
    public void visitLabel(Label label) {}

    @Override
    public void visitLdcInsn(Object value) {}

    @Override
    public void visitIincInsn(int var, int increment) {}

    @Override
    public void visitTableSwitchInsn(int min, int max, Label dflt, Label... labels) {}

    @Override
    public void visitLookupSwitchInsn(Label dflt, int[] keys, Label[] labels) {}

    @Override
    public void visitMultiANewArrayInsn(String descriptor, int numDimensions) {}

    @Override
    public void visitTryCatchBlock(Label start, Label end, Label handler, String type) {}

    @Override
    public void visitLocalVariable(
        String name, String descriptor, String signature, Label start, Label end, int index) {}

    @Override
    public void visitLineNumber(int line, Label start) {}

    @Override
    public void visitMaxs(int maxStack, int maxLocals) {}
  }
}
/*L
 * Copyright Ekagra Software Technologies Ltd.
 * Copyright SAIC, SAIC-Frederick
 *
 * Distributed under the OSI-approved BSD 3-Clause License.
 * See http://ncip.github.com/cacore-sdk/LICENSE.txt for details.
 */

package test.gov.nih.nci.cacoresdk.domain.onetoone.multipleassociation;

import gov.nih.nci.cacoresdk.domain.onetoone.multipleassociation.Parent;

import javax.ws.rs.core.Response;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.InputStream;
import java.io.FileWriter;
import java.io.File;
import java.util.List;

import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.entity.FileEntity;
import org.apache.cxf.jaxrs.client.WebClient;
import org.apache.cxf.common.util.Base64Utility;

import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
// NOTE(review): duplicate import of javax.ws.rs.core.Response (also imported above)
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.Response.Status;
import javax.ws.rs.core.UriInfo;

import java.lang.reflect.Modifier;
import java.util.Collection;
import java.util.Iterator;

import test.gov.nih.nci.cacoresdk.SDKRESTfulTestBase;
import gov.nih.nci.system.applicationservice.ApplicationException;

/** RESTful resource tests for the {@code Parent} domain object (GET/search/DELETE/POST). */
public class ParentResourceTest extends SDKRESTfulTestBase {

    public static String getTestCaseName() {
        return "Parent RESTful Resource Test Case";
    }

    /**
     * Uses Nested Search Criteria for search
     * Verifies that the results are returned
     * Verifies size of the result set
     * Verifies that none of the attributes are null
     *
     * @throws Exception
     */
    public void testGet() throws Exception {
        try {
            // Find an existing Parent id via the application service; bail out if none exists
            Parent searchObject = new Parent();
            Collection results = getApplicationService().search(
                    "gov.nih.nci.cacoresdk.domain.onetoone.multipleassociation.Parent",
                    searchObject);

            String id = "";
            if (results != null && results.size() > 0) {
                Parent obj = (Parent) ((List) results).get(0);
                Integer idVal = obj.getId();
                id = new Integer(idVal).toString();
            } else
                return;

            if (id.equals("")) return;

            // Fetch the resource as XML
            String url = baseURL + "/rest/Parent/" + id;
            WebClient client = WebClient.create(url);
            client.type("application/xml").accept("application/xml");
            Response response = client.get();

            // Error statuses are expected to carry an XML body rooted at <response>
            if (response.getStatus() == Status.NOT_ACCEPTABLE.getStatusCode()) {
                InputStream is = (InputStream) response.getEntity();
                org.jdom.input.SAXBuilder builder = new org.jdom.input.SAXBuilder(false);
                org.jdom.Document jDoc = builder.build(is);
                assertEquals(jDoc.getRootElement().getName(), "response");
            } else if (response.getStatus() == Status.NOT_FOUND.getStatusCode()) {
                InputStream is = (InputStream) response.getEntity();
                org.jdom.input.SAXBuilder builder = new org.jdom.input.SAXBuilder(false);
                org.jdom.Document jDoc = builder.build(is);
                assertEquals(jDoc.getRootElement().getName(), "response");
            } else if (response.getStatus() != 200) {
                throw new RuntimeException("Failed : HTTP error code : "
                        + response.getStatus());
            }

            // Dump the response body to ParentXML.xml (also consumed later by testPost)
            File myFile = new File("Parent" + "XML.xml");
            System.out.println("writing data to file " + myFile.getAbsolutePath());
            FileWriter myWriter = new FileWriter(myFile);

            BufferedReader br = new BufferedReader(
                    new InputStreamReader(((InputStream) response.getEntity())));

            String output;
            System.out.println("Output from Server .... \n");
            while ((output = br.readLine()) != null) {
                myWriter.write(output);
                System.out.println(output);
            }
            myWriter.flush();
            myWriter.close();
        } catch (Exception e) {
            e.printStackTrace();
            throw e;
        }
    }

    /** Exercises the search endpoint ({@code search;id=*}) and writes the XML result to a file. */
    public void testSearch() throws Exception {
        try {
            String url = baseURL + "/rest/Parent/search;id=*";
            WebClient client = WebClient.create(url);
            client.type("application/xml").accept("application/xml");
            Response response = client.get();

            if (response.getStatus() == Status.NOT_ACCEPTABLE.getStatusCode()) {
                InputStream is = (InputStream) response.getEntity();
                org.jdom.input.SAXBuilder builder = new org.jdom.input.SAXBuilder(false);
                org.jdom.Document jDoc = builder.build(is);
                assertEquals(jDoc.getRootElement().getName(), "response");
            } else if (response.getStatus() == Status.NOT_FOUND.getStatusCode()) {
                InputStream is = (InputStream) response.getEntity();
                org.jdom.input.SAXBuilder builder = new org.jdom.input.SAXBuilder(false);
                org.jdom.Document jDoc = builder.build(is);
                assertEquals(jDoc.getRootElement().getName(), "response");
            } else if (response.getStatus() != 200) {
                throw new RuntimeException("Failed : HTTP error code : "
                        + response.getStatus());
            }

            File myFile = new File("Parent_Search" + "XML.xml");
            System.out.println("writing data to file " + myFile.getAbsolutePath());
            FileWriter myWriter = new FileWriter(myFile);

            BufferedReader br = new BufferedReader(
                    new InputStreamReader(((InputStream) response.getEntity())));

            String output;
            System.out.println("Output from Server .... \n");
            while ((output = br.readLine()) != null) {
                myWriter.write(output);
                System.out.println(output);
            }
            myWriter.flush();
            myWriter.close();
        } catch (Exception e) {
            // NOTE(review): unlike testGet/testDelete this swallows the exception instead of
            // rethrowing, so a failure here will not fail the test — confirm this is intended
            e.printStackTrace();
        }
    }

    //***************************************************
    //********************************************************End

    /** Looks up an existing Parent id and issues a DELETE against its resource URL. */
    public void testDelete() throws Exception {
        try {
            Parent searchObject = new Parent();
            Collection results = getApplicationService().search(
                    "gov.nih.nci.cacoresdk.domain.onetoone.multipleassociation.Parent",
                    searchObject);

            String id = "";
            if (results != null && results.size() > 0) {
                Parent obj = (Parent) ((List) results).get(0);
                Integer idVal = obj.getId();
                id = new Integer(idVal).toString();
            } else
                return;

            if (id.equals("")) return;

            String url = baseURL + "/rest/Parent/" + id;
            WebClient client = WebClient.create(url);
            Response response = client.delete();

            if (response.getStatus() == Status.NOT_ACCEPTABLE.getStatusCode()) {
                InputStream is = (InputStream) response.getEntity();
                org.jdom.input.SAXBuilder builder = new org.jdom.input.SAXBuilder(false);
                org.jdom.Document jDoc = builder.build(is);
                assertEquals(jDoc.getRootElement().getName(), "response");
            } else if (response.getStatus() == Status.NOT_FOUND.getStatusCode()) {
                InputStream is = (InputStream) response.getEntity();
                org.jdom.input.SAXBuilder builder = new org.jdom.input.SAXBuilder(false);
                org.jdom.Document jDoc = builder.build(is);
                assertEquals(jDoc.getRootElement().getName(), "response");
            } else if (response.getStatus() != 200) {
                throw new RuntimeException("Failed : HTTP error code : "
                        + response.getStatus());
            }
        } catch (Exception e) {
            e.printStackTrace();
            throw e;
        }
    }

    /** POSTs the XML captured by {@link #testGet} back to the Parent resource. */
    public void testPost() throws Exception {
        try {
            DefaultHttpClient httpClient = new DefaultHttpClient();
            String url = baseURL + "/rest/Parent";
            WebClient client = WebClient.create(url);
            HttpPost postRequest = new HttpPost(url);

            // Regenerate the input file via testGet if it doesn't exist yet
            File myFile = new File("Parent" + "XML.xml");
            if (!myFile.exists()) {
                testGet();
                myFile = new File("Parent" + "XML.xml");
if(!myFile.exists()) return; } FileEntity input = new FileEntity(myFile); input.setContentType("application/xml"); System.out.println("input: "+myFile); postRequest.setEntity(input); HttpResponse response = httpClient.execute(postRequest); BufferedReader br = new BufferedReader( new InputStreamReader((response.getEntity().getContent()))); String output; System.out.println("Output from Server .... \n"); while ((output = br.readLine()) != null) { System.out.println(output); } httpClient.getConnectionManager().shutdown(); } catch (Exception e) { e.printStackTrace(); throw e; } } public void testPut() throws Exception { try { DefaultHttpClient httpClient = new DefaultHttpClient(); String url = baseURL + "/rest/Parent"; HttpPut putRequest = new HttpPut(url); File myFile = new File("Parent"+"XML.xml"); if(!myFile.exists()) { testGet(); myFile = new File("Parent"+"XML.xml"); if(!myFile.exists()) return; } FileEntity input = new FileEntity(myFile); input.setContentType("application/xml"); putRequest.setEntity(input); HttpResponse response = httpClient.execute(putRequest); if(response.getEntity() != null) { BufferedReader br = new BufferedReader( new InputStreamReader((response.getEntity().getContent()))); String output; System.out.println("Output from Server .... \n"); while ((output = br.readLine()) != null) { System.out.println(output); } } httpClient.getConnectionManager().shutdown(); } catch (Exception e) { e.printStackTrace(); throw e; } } }
/*
 * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.elasticfilesystem.model;

import java.io.Serializable;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request object for the CreateTags operation: identifies a file system and
 * the set of key-value tags to attach to it. Only the tags are modified, not
 * the file system itself.
 */
public class CreateTagsRequest extends AmazonWebServiceRequest implements
        Serializable, Cloneable {

    /**
     * The ID of the file system whose tags are to be modified. Tagging never
     * changes the file system itself.
     */
    private String fileSystemId;

    /**
     * The <code>Tag</code> objects (key-value pairs) to add. Lazily
     * initialized by {@link #getTags()}.
     */
    private com.amazonaws.internal.SdkInternalList<Tag> tags;

    /**
     * Sets the ID of the file system whose tags you want to modify. This
     * operation modifies only the tags and not the file system.
     *
     * @param fileSystemId
     *        the target file system ID
     */
    public void setFileSystemId(String fileSystemId) {
        this.fileSystemId = fileSystemId;
    }

    /**
     * Returns the ID of the file system whose tags you want to modify. This
     * operation modifies only the tags and not the file system.
     *
     * @return the target file system ID
     */
    public String getFileSystemId() {
        return this.fileSystemId;
    }

    /**
     * Fluent variant of {@link #setFileSystemId(String)}.
     *
     * @param fileSystemId
     *        the target file system ID
     * @return this request, for call chaining
     */
    public CreateTagsRequest withFileSystemId(String fileSystemId) {
        setFileSystemId(fileSystemId);
        return this;
    }

    /**
     * Returns the list of <code>Tag</code> objects to add, creating an empty
     * internal list on first access so callers never observe {@code null}.
     *
     * @return the (possibly empty) list of tags
     */
    public java.util.List<Tag> getTags() {
        if (tags == null) {
            tags = new com.amazonaws.internal.SdkInternalList<Tag>();
        }
        return tags;
    }

    /**
     * Replaces the tags to add. A {@code null} argument clears the field; any
     * other collection is defensively copied into an internal list.
     *
     * @param tags
     *        the <code>Tag</code> objects (key-value pairs) to add
     */
    public void setTags(java.util.Collection<Tag> tags) {
        if (tags == null) {
            this.tags = null;
            return;
        }

        this.tags = new com.amazonaws.internal.SdkInternalList<Tag>(tags);
    }

    /**
     * Appends the given tags, initializing the internal list when absent.
     *
     * @param tags
     *        the <code>Tag</code> objects (key-value pairs) to add
     * @return this request, for call chaining
     */
    public CreateTagsRequest withTags(Tag... tags) {
        if (this.tags == null) {
            setTags(new com.amazonaws.internal.SdkInternalList<Tag>(tags.length));
        }
        for (int i = 0; i < tags.length; i++) {
            this.tags.add(tags[i]);
        }
        return this;
    }

    /**
     * Fluent variant of {@link #setTags(java.util.Collection)}.
     *
     * @param tags
     *        the <code>Tag</code> objects (key-value pairs) to add
     * @return this request, for call chaining
     */
    public CreateTagsRequest withTags(java.util.Collection<Tag> tags) {
        setTags(tags);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder text = new StringBuilder("{");
        if (getFileSystemId() != null) {
            text.append("FileSystemId: " + getFileSystemId() + ",");
        }
        if (getTags() != null) {
            text.append("Tags: " + getTags());
        }
        return text.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof CreateTagsRequest)) {
            return false;
        }
        CreateTagsRequest that = (CreateTagsRequest) obj;
        // Objects.equals collapses the original both-null / one-null / equals
        // checks into one call with identical semantics.
        return java.util.Objects.equals(that.getFileSystemId(), this.getFileSystemId())
                && java.util.Objects.equals(that.getTags(), this.getTags());
    }

    @Override
    public int hashCode() {
        // Same 31-based accumulation as the generated original, so hash values
        // are unchanged.
        final int prime = 31;
        int result = 1;
        result = prime * result + java.util.Objects.hashCode(getFileSystemId());
        result = prime * result + java.util.Objects.hashCode(getTags());
        return result;
    }

    @Override
    public CreateTagsRequest clone() {
        return (CreateTagsRequest) super.clone();
    }
}
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.android_webview.gfx;

import android.graphics.Rect;

import androidx.annotation.IntDef;

import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.util.Arrays;
import java.util.List;

/**
 * Utility functions for calculating Rectangle properties (i.e. Area of a single Rect)
 * and the covered area of a set of axis-aligned rectangles via a sweep-line.
 *
 * <p>NOT thread-safe: {@link #calculatePixelsOfCoverage} reuses static scratch
 * buffers to avoid per-call allocation, so it must only be called from one
 * thread at a time.
 */
public final class RectUtils {
    private RectUtils() {}

    /** Returns the area ({@code width * height}) of {@code rect}. */
    public static int getRectArea(Rect rect) {
        return rect.width() * rect.height();
    }

    /** Creates a new {@link Rect} with the same bounds as the given {@link Rect}. */
    public static Rect copyRect(Rect rect) {
        return new Rect(rect.left, rect.top, rect.right, rect.bottom);
    }

    /**
     * Segment Type Constants: whether a decomposed edge opens (START) or
     * closes (END) a covered interval.
     */
    @Retention(RetentionPolicy.SOURCE)
    @IntDef({SegmentType.START, SegmentType.END})
    private @interface SegmentType {
        int START = 0;
        int END = 1;
    }

    /** Orders START before END when two segments share a coordinate. */
    private static int compareSegmentTypes(int s1, int s2) {
        if (s1 == s2) {
            return 0;
        } else if (s1 == SegmentType.START && s2 == SegmentType.END) {
            return -1;
        } else {
            return 1;
        }
    }

    /**
     * A vertical rectangle edge at x = {@code mX} spanning [mTop, mBottom),
     * entering (START) or leaving (END) a rectangle as the sweep moves right.
     */
    private static class HorizontalSegment implements Comparable<HorizontalSegment> {
        public int mX;
        public int mTop;
        public int mBottom;
        public @SegmentType int mSegmentType;

        public HorizontalSegment() {
            set(0, 0, 0, SegmentType.START);
        }

        public void set(int x, int top, int bottom, @SegmentType int segmentType) {
            this.mX = x;
            this.mTop = top;
            this.mBottom = bottom;
            this.mSegmentType = segmentType;
        }

        @Override
        public int compareTo(HorizontalSegment other) {
            if (mX == other.mX) {
                return compareSegmentTypes(mSegmentType, other.mSegmentType);
            }
            // Integer.compare instead of (mX - other.mX): plain subtraction can
            // overflow for extreme coordinates and break the Comparable contract.
            return Integer.compare(mX, other.mX);
        }
    }

    /** A 1-d interval endpoint at y = {@code mY} opening or closing coverage. */
    private static class VerticalSegment implements Comparable<VerticalSegment> {
        public int mY;
        public @SegmentType int mSegmentType;

        public VerticalSegment() {
            set(0, SegmentType.START);
        }

        public void set(int y, @SegmentType int segmentType) {
            this.mY = y;
            this.mSegmentType = segmentType;
        }

        public void set(VerticalSegment other) {
            set(other.mY, other.mSegmentType);
        }

        @Override
        public int compareTo(VerticalSegment other) {
            if (mY == other.mY) {
                return compareSegmentTypes(mSegmentType, other.mSegmentType);
            }
            // Overflow-safe comparison (see HorizontalSegment.compareTo).
            return Integer.compare(mY, other.mY);
        }
    }

    /**
     * Inserts {@code verticalSegment} into the sorted prefix arr[0..n), shifting
     * larger elements right. {@code n} must be < {@code capacity}.
     */
    private static void insertSorted(
            VerticalSegment arr[], int n, VerticalSegment verticalSegment, int capacity) {
        assert n < capacity;
        int i;
        for (i = n - 1; (i >= 0 && arr[i].compareTo(verticalSegment) > 0); i--) {
            arr[i + 1].set(arr[i]);
        }
        int insertIndex = i + 1;
        assert insertIndex >= 0 && insertIndex < capacity;
        arr[insertIndex].set(verticalSegment);
    }

    /**
     * Removes one element equal to {@code verticalSegment} from the sorted
     * prefix arr[0..n).
     *
     * @return the new element count, or -1 if the element was not found.
     */
    private static int deleteElement(
            VerticalSegment arr[], int n, VerticalSegment verticalSegment) {
        int pos = Arrays.binarySearch(arr, 0, n, verticalSegment);
        if (pos < 0) {
            return -1;
        }
        // In the case of duplicate values, either one can be removed
        for (int i = pos + 1; i < n; i++) {
            arr[i - 1].set(arr[i]);
        }
        return n - 1;
    }

    /**
     * Computes the total length of y covered by the union of the sorted
     * intervals encoded in vSegments[0..numVerticalSegments): counts pixels
     * only on 0-&gt;1 (start) and 1-&gt;0 (end) nesting transitions.
     */
    private static int getCoverageOfVerticalSegments(
            VerticalSegment vSegments[], int numVerticalSegments) {
        int scanCount = 0;
        int coveredPixels = 0;
        int start = -1;
        for (int i = 0; i < numVerticalSegments; i++) {
            VerticalSegment verticalSegment = vSegments[i];
            if (scanCount == 0 && verticalSegment.mSegmentType == SegmentType.START) {
                start = verticalSegment.mY;
            } else if (scanCount == 1 && verticalSegment.mSegmentType == SegmentType.END) {
                coveredPixels += verticalSegment.mY - start;
            }
            scanCount += verticalSegment.mSegmentType == SegmentType.START ? 1 : -1;
        }
        return coveredPixels;
    }

    // Scratch buffers reused across calls (grown, never trimmed) to avoid
    // per-call allocation. These make the class single-threaded by design.
    private static HorizontalSegment sHorizontalSegments[];
    private static VerticalSegment sVerticalSegments[];
    private static VerticalSegment sVerticalSegment1 = new VerticalSegment();
    private static VerticalSegment sVerticalSegment2 = new VerticalSegment();
    private static Rect sClippedRects[];

    /**
     * Returns the number of pixels of {@code screenRect} covered by the union
     * of {@code coverageRects}, computed with a left-to-right sweep line.
     *
     * <p>2-d extension of 1-d range-union: each rectangle decomposes into a
     * left (START) and right (END) edge. Sweeping edges in x order, the
     * current 1-d y-coverage (maintained as a sorted interval set) multiplied
     * by the x advanced since the previous edge accumulates the covered area.
     *
     * <p>WARNING: rects in {@code coverageRects} are clipped in place against
     * {@code screenRect} ({@link Rect#intersect} mutates its receiver).
     *
     * @param screenRect the bounds to clip against.
     * @param coverageRects the covering rectangles; may be modified.
     * @return total covered pixels within {@code screenRect}.
     */
    public static int calculatePixelsOfCoverage(Rect screenRect, List<Rect> coverageRects) {
        if (coverageRects.size() == 0) {
            return 0;
        }
        // Always allocate enough space for all passed rects and never trim
        // allocations as a result of clipping.
        if ((sClippedRects == null ? 0 : sClippedRects.length) < coverageRects.size()) {
            sClippedRects = new Rect[coverageRects.size()];
        }
        int numClippedRects = 0;
        for (int i = 0; i < coverageRects.size(); i++) {
            Rect clipRect = coverageRects.get(i);
            if (clipRect.intersect(screenRect)) {
                // This line may modify the value of the passed in coverage rects.
                sClippedRects[numClippedRects++] = clipRect;
            }
        }
        if (numClippedRects == 0) {
            return 0;
        }

        // Two edges (left/right) per clipped rect.
        int maxSegments = numClippedRects * 2;
        int numVerticalSegments = 0;
        if ((sHorizontalSegments == null ? 0 : sHorizontalSegments.length) < maxSegments) {
            sHorizontalSegments = new HorizontalSegment[maxSegments];
            sVerticalSegments = new VerticalSegment[maxSegments];
            for (int i = 0; i < maxSegments; i++) {
                sHorizontalSegments[i] = new HorizontalSegment();
                sVerticalSegments[i] = new VerticalSegment();
            }
        }
        for (int i = 0; i < maxSegments; i += 2) {
            Rect coverageRect = sClippedRects[i / 2];
            sHorizontalSegments[i].set(
                    coverageRect.left, coverageRect.top, coverageRect.bottom, SegmentType.START);
            sHorizontalSegments[i + 1].set(
                    coverageRect.right, coverageRect.top, coverageRect.bottom, SegmentType.END);
        }
        Arrays.sort(sHorizontalSegments, 0, maxSegments);

        int prevX = -1;
        int coveredPixels = 0;
        for (int i = 0; i < maxSegments; i++) {
            HorizontalSegment hSegment = sHorizontalSegments[i];
            // Area accumulated since the previous edge: current y-coverage
            // times the x distance swept.
            coveredPixels += getCoverageOfVerticalSegments(sVerticalSegments, numVerticalSegments)
                    * (hSegment.mX - prevX);
            sVerticalSegment1.set(hSegment.mTop, SegmentType.START);
            sVerticalSegment2.set(hSegment.mBottom, SegmentType.END);
            if (hSegment.mSegmentType == SegmentType.START) {
                insertSorted(
                        sVerticalSegments, numVerticalSegments, sVerticalSegment1, maxSegments);
                numVerticalSegments++;
                insertSorted(
                        sVerticalSegments, numVerticalSegments, sVerticalSegment2, maxSegments);
                numVerticalSegments++;
            } else {
                int ret;
                ret = deleteElement(sVerticalSegments, numVerticalSegments, sVerticalSegment1);
                assert ret != -1;
                numVerticalSegments = ret;
                ret = deleteElement(sVerticalSegments, numVerticalSegments, sVerticalSegment2);
                assert ret != -1;
                numVerticalSegments = ret;
            }
            prevX = hSegment.mX;
        }

        return coveredPixels;
    }
}
/*
 * Copyright (c) 2013, 2020, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation. Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package java.util.zip;

import java.nio.ByteBuffer;
import java.nio.file.attribute.FileTime;
import java.time.DateTimeException;
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.util.Date;
import java.util.concurrent.TimeUnit;

import static java.util.zip.ZipConstants.ENDHDR;

import jdk.internal.misc.Unsafe;

/**
 * Internal helpers shared by the zip implementation: conversions between
 * Windows/Unix/DOS and Java time-stamps, and little-endian accessors for the
 * fixed fields of LOC/EXT/CEN/END (and ZIP64) zip headers.
 */
class ZipUtils {

    // used to adjust values between Windows and java epoch
    private static final long WINDOWS_EPOCH_IN_MICROSECONDS = -11644473600000000L;

    // used to indicate the corresponding windows time is not available
    public static final long WINDOWS_TIME_NOT_AVAILABLE = Long.MIN_VALUE;

    // static final ByteBuffer defaultBuf = ByteBuffer.allocateDirect(0);
    static final ByteBuffer defaultBuf = ByteBuffer.allocate(0);

    /**
     * Converts Windows time (in microseconds, UTC/GMT) time to FileTime.
     */
    public static final FileTime winTimeToFileTime(long wtime) {
        // Windows time ticks are 100ns units; /10 yields microseconds.
        return FileTime.from(wtime / 10 + WINDOWS_EPOCH_IN_MICROSECONDS,
                             TimeUnit.MICROSECONDS);
    }

    /**
     * Converts FileTime to Windows time.
     */
    public static final long fileTimeToWinTime(FileTime ftime) {
        return (ftime.to(TimeUnit.MICROSECONDS) - WINDOWS_EPOCH_IN_MICROSECONDS) * 10;
    }

    /**
     * The upper bound of the 32-bit unix time, the "year 2038 problem".
     */
    public static final long UPPER_UNIXTIME_BOUND = 0x7fffffff;

    /**
     * Converts "standard Unix time"(in seconds, UTC/GMT) to FileTime
     */
    public static final FileTime unixTimeToFileTime(long utime) {
        return FileTime.from(utime, TimeUnit.SECONDS);
    }

    /**
     * Converts FileTime to "standard Unix time".
     */
    public static final long fileTimeToUnixTime(FileTime ftime) {
        return ftime.to(TimeUnit.SECONDS);
    }

    /**
     * Converts DOS time to Java time (number of milliseconds since epoch).
     */
    public static long dosToJavaTime(long dtime) {
        // DOS time packs year-1980/month/day/hour/minute/second-halved into
        // bit fields; extract each component.
        int year = (int) (((dtime >> 25) & 0x7f) + 1980);
        int month = (int) ((dtime >> 21) & 0x0f);
        int day = (int) ((dtime >> 16) & 0x1f);
        int hour = (int) ((dtime >> 11) & 0x1f);
        int minute = (int) ((dtime >> 5) & 0x3f);
        int second = (int) ((dtime << 1) & 0x3e);

        if (month > 0 && month < 13 && day > 0 && hour < 24 && minute < 60 && second < 60) {
            try {
                LocalDateTime ldt = LocalDateTime.of(year, month, day, hour, minute, second);
                return TimeUnit.MILLISECONDS.convert(ldt.toEpochSecond(
                        ZoneId.systemDefault().getRules().getOffset(ldt)), TimeUnit.SECONDS);
            } catch (DateTimeException dte) {
                // ignore — fall through to the legacy Date-based conversion
            }
        }
        return overflowDosToJavaTime(year, month, day, hour, minute, second);
    }

    /*
     * Deal with corner cases where an arguably mal-formed DOS time is used
     */
    @SuppressWarnings("deprecation") // Use of Date constructor
    private static long overflowDosToJavaTime(int year, int month, int day,
                                              int hour, int minute, int second) {
        return new Date(year - 1900, month - 1, day, hour, minute, second).getTime();
    }

    /**
     * Converts extended DOS time to Java time, where up to 1999 milliseconds
     * might be encoded into the upper half of the returned long.
     *
     * @param xdostime the extended DOS time value
     * @return milliseconds since epoch
     */
    public static long extendedDosToJavaTime(long xdostime) {
        long time = dosToJavaTime(xdostime);
        return time + (xdostime >> 32);
    }

    /**
     * Converts Java time to DOS time.
     */
    private static long javaToDosTime(LocalDateTime ldt) {
        int year = ldt.getYear() - 1980;
        return (year << 25
            | ldt.getMonthValue() << 21
            | ldt.getDayOfMonth() << 16
            | ldt.getHour() << 11
            | ldt.getMinute() << 5
            | ldt.getSecond() >> 1) & 0xffffffffL;
    }

    /**
     * Converts Java time to DOS time, encoding any milliseconds lost
     * in the conversion into the upper half of the returned long.
     *
     * @param time milliseconds since epoch
     * @return DOS time with 2s remainder encoded into upper half
     */
    static long javaToExtendedDosTime(long time) {
        LocalDateTime ldt = javaEpochToLocalDateTime(time);
        if (ldt.getYear() >= 1980) {
            return javaToDosTime(ldt) + ((time % 2000) << 32);
        }
        // DOS time cannot represent dates before 1980; use the sentinel.
        return ZipEntry.DOSTIME_BEFORE_1980;
    }

    // Converts epoch milliseconds to a LocalDateTime in the default zone.
    static LocalDateTime javaEpochToLocalDateTime(long time) {
        Instant instant = Instant.ofEpochMilli(time);
        return LocalDateTime.ofInstant(instant, ZoneId.systemDefault());
    }

    /**
     * Fetches unsigned 16-bit value from byte array at specified offset.
     * The bytes are assumed to be in Intel (little-endian) byte order.
     */
    public static final int get16(byte b[], int off) {
        return (b[off] & 0xff) | ((b[off + 1] & 0xff) << 8);
    }

    /**
     * Fetches unsigned 32-bit value from byte array at specified offset.
     * The bytes are assumed to be in Intel (little-endian) byte order.
     */
    public static final long get32(byte b[], int off) {
        return (get16(b, off) | ((long)get16(b, off+2) << 16)) & 0xffffffffL;
    }

    /**
     * Fetches signed 64-bit value from byte array at specified offset.
     * The bytes are assumed to be in Intel (little-endian) byte order.
     */
    public static final long get64(byte b[], int off) {
        return get32(b, off) | (get32(b, off+4) << 32);
    }

    /**
     * Fetches signed 32-bit value from byte array at specified offset.
     * The bytes are assumed to be in Intel (little-endian) byte order.
     */
    public static final int get32S(byte b[], int off) {
        return (get16(b, off) | (get16(b, off+2) << 16));
    }

    // fields access methods
    // CH: unsigned byte; SH: unsigned 16-bit LE; LG: unsigned 32-bit LE;
    // LL: 64-bit LE composed from two LG reads.
    static final int CH(byte[] b, int n) {
        return b[n] & 0xff ;
    }

    static final int SH(byte[] b, int n) {
        return (b[n] & 0xff) | ((b[n + 1] & 0xff) << 8);
    }

    static final long LG(byte[] b, int n) {
        return ((SH(b, n)) | (SH(b, n + 2) << 16)) & 0xffffffffL;
    }

    static final long LL(byte[] b, int n) {
        return (LG(b, n)) | (LG(b, n + 4) << 32);
    }

    // Reads the 4-byte signature at the start of a header record.
    static final long GETSIG(byte[] b) {
        return LG(b, 0);
    }

    /*
     * File attribute compatibility types of CEN field "version made by"
     */
    static final int FILE_ATTRIBUTES_UNIX = 3; // Unix

    /*
     * Base values for CEN field "version made by"
     */
    static final int VERSION_MADE_BY_BASE_UNIX = FILE_ATTRIBUTES_UNIX << 8; // Unix

    // local file (LOC) header fields
    static final long LOCSIG(byte[] b) { return LG(b, 0); } // signature
    static final int  LOCVER(byte[] b) { return SH(b, 4); } // version needed to extract
    static final int  LOCFLG(byte[] b) { return SH(b, 6); } // general purpose bit flags
    static final int  LOCHOW(byte[] b) { return SH(b, 8); } // compression method
    static final long LOCTIM(byte[] b) { return LG(b, 10);} // modification time
    static final long LOCCRC(byte[] b) { return LG(b, 14);} // crc of uncompressed data
    static final long LOCSIZ(byte[] b) { return LG(b, 18);} // compressed data size
    static final long LOCLEN(byte[] b) { return LG(b, 22);} // uncompressed data size
    static final int  LOCNAM(byte[] b) { return SH(b, 26);} // filename length
    static final int  LOCEXT(byte[] b) { return SH(b, 28);} // extra field length

    // extra local (EXT) header fields
    static final long EXTCRC(byte[] b) { return LG(b, 4);} // crc of uncompressed data
    static final long EXTSIZ(byte[] b) { return LG(b, 8);} // compressed size
    static final long EXTLEN(byte[] b) { return LG(b, 12);} // uncompressed size

    // end of central directory header (END) fields
    static final int  ENDSUB(byte[] b) { return SH(b, 8); } // number of entries on this disk
    static final int  ENDTOT(byte[] b) { return SH(b, 10);} // total number of entries
    static final long ENDSIZ(byte[] b) { return LG(b, 12);} // central directory size
    static final long ENDOFF(byte[] b) { return LG(b, 16);} // central directory offset
    static final int  ENDCOM(byte[] b) { return SH(b, 20);} // size of zip file comment
    static final int  ENDCOM(byte[] b, int off) { return SH(b, off + 20);}

    // zip64 end of central directory record fields
    static final long ZIP64_ENDTOD(byte[] b) { return LL(b, 24);} // total number of entries on disk
    static final long ZIP64_ENDTOT(byte[] b) { return LL(b, 32);} // total number of entries
    static final long ZIP64_ENDSIZ(byte[] b) { return LL(b, 40);} // central directory size
    static final long ZIP64_ENDOFF(byte[] b) { return LL(b, 48);} // central directory offset
    static final long ZIP64_LOCOFF(byte[] b) { return LL(b, 8);} // zip64 end offset

    // central directory header (CEN) fields
    static final long CENSIG(byte[] b, int pos) { return LG(b, pos + 0); }
    static final int  CENVEM(byte[] b, int pos) { return SH(b, pos + 4); }
    static final int  CENVEM_FA(byte[] b, int pos) { return CH(b, pos + 5); } // file attribute compatibility
    static final int  CENVER(byte[] b, int pos) { return SH(b, pos + 6); }
    static final int  CENFLG(byte[] b, int pos) { return SH(b, pos + 8); }
    static final int  CENHOW(byte[] b, int pos) { return SH(b, pos + 10);}
    static final long CENTIM(byte[] b, int pos) { return LG(b, pos + 12);}
    static final long CENCRC(byte[] b, int pos) { return LG(b, pos + 16);}
    static final long CENSIZ(byte[] b, int pos) { return LG(b, pos + 20);}
    static final long CENLEN(byte[] b, int pos) { return LG(b, pos + 24);}
    static final int  CENNAM(byte[] b, int pos) { return SH(b, pos + 28);}
    static final int  CENEXT(byte[] b, int pos) { return SH(b, pos + 30);}
    static final int  CENCOM(byte[] b, int pos) { return SH(b, pos + 32);}
    static final int  CENDSK(byte[] b, int pos) { return SH(b, pos + 34);}
    static final int  CENATT(byte[] b, int pos) { return SH(b, pos + 36);}
    static final long CENATX(byte[] b, int pos) { return LG(b, pos + 38);}
    static final int  CENATX_PERMS(byte[] b, int pos) { return SH(b, pos + 40);} // posix permission data
    static final long CENOFF(byte[] b, int pos) { return LG(b, pos + 42);}

    // The END header is followed by a variable length comment of size < 64k.
    static final long END_MAXLEN = 0xFFFF + ENDHDR;
    static final int READBLOCKSZ = 128;

    /**
     * Loads zip native library, if not already loaded
     */
    static void loadLibrary() {
        jdk.internal.loader.BootLoader.loadLibrary("zip");
    }

    private static final Unsafe unsafe = Unsafe.getUnsafe();

    // Field offsets used to reach ByteBuffer's backing array ("hb") and its
    // array offset directly via Unsafe.
    private static final long byteBufferArrayOffset = unsafe.objectFieldOffset(ByteBuffer.class, "hb");
    private static final long byteBufferOffsetOffset = unsafe.objectFieldOffset(ByteBuffer.class, "offset");

    // Returns the backing byte[] of a heap ByteBuffer (read via Unsafe).
    static byte[] getBufferArray(ByteBuffer byteBuffer) {
        return (byte[]) unsafe.getReference(byteBuffer, byteBufferArrayOffset);
    }

    // Returns the array offset of a heap ByteBuffer (read via Unsafe).
    static int getBufferOffset(ByteBuffer byteBuffer) {
        return unsafe.getInt(byteBuffer, byteBufferOffsetOffset);
    }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.operator; import com.facebook.presto.Session; import com.facebook.presto.spi.Page; import com.facebook.presto.spi.PageBuilder; import com.facebook.presto.spi.block.Block; import com.facebook.presto.spi.block.BlockBuilder; import com.facebook.presto.spi.block.SortOrder; import com.facebook.presto.spi.type.Type; import com.facebook.presto.sql.gen.JoinCompiler; import com.facebook.presto.sql.gen.JoinCompiler.LookupSourceSupplierFactory; import com.facebook.presto.sql.gen.JoinFilterFunctionCompiler.JoinFilterFunctionFactory; import com.facebook.presto.sql.gen.OrderingCompiler; import com.google.common.collect.ImmutableList; import io.airlift.log.Logger; import io.airlift.slice.Slice; import io.airlift.units.DataSize; import it.unimi.dsi.fastutil.Swapper; import it.unimi.dsi.fastutil.longs.LongArrayList; import it.unimi.dsi.fastutil.objects.ObjectArrayList; import java.util.List; import java.util.Optional; import java.util.function.Supplier; import java.util.stream.IntStream; import static com.facebook.presto.operator.SyntheticAddress.decodePosition; import static com.facebook.presto.operator.SyntheticAddress.decodeSliceIndex; import static com.facebook.presto.operator.SyntheticAddress.encodeSyntheticAddress; import static com.facebook.presto.util.ImmutableCollectors.toImmutableList; import static com.google.common.base.MoreObjects.toStringHelper; import static io.airlift.slice.SizeOf.sizeOf; import static 
io.airlift.units.DataSize.Unit.BYTE;
import static java.util.Objects.requireNonNull;

/**
 * PagesIndex a low-level data structure which contains the address of every value position of every channel.
 * This data structure is not general purpose and is designed for a few specific uses:
 * <ul>
 * <li>Sort via the {@link #sort} method</li>
 * <li>Hash build via the {@link #createLookupSourceSupplier} method</li>
 * <li>Positional output via the {@link #appendTo} method</li>
 * </ul>
 */
public class PagesIndex
        implements Swapper
{
    private static final Logger log = Logger.get(PagesIndex.class);

    // todo this should be a services assigned in the constructor
    private static final OrderingCompiler orderingCompiler = new OrderingCompiler();

    // todo this should be a services assigned in the constructor
    private static final JoinCompiler joinCompiler = new JoinCompiler();

    // one output type per channel, fixed at construction
    private final List<Type> types;
    // one synthetic address (block index + position within block) per indexed row;
    // sorting reorders these addresses, never the underlying blocks
    private final LongArrayList valueAddresses;
    // channels[c] holds the sequence of blocks appended for channel c; all channels
    // grow in lockstep, one block per added page
    private final ObjectArrayList<Block>[] channels;

    // index of the first block that compact() has not yet visited (earlier blocks are already compact)
    private int nextBlockToCompact;
    private int positionCount;
    // retained bytes of all stored blocks (kept incrementally up to date)
    private long pagesMemorySize;
    // pagesMemorySize plus bookkeeping-array overhead; refreshed by calculateEstimatedSize()
    private long estimatedSize;

    /**
     * @param types output type of each channel; defensively copied
     * @param expectedPositions initial capacity hint for the address list
     */
    public PagesIndex(List<Type> types, int expectedPositions)
    {
        this.types = ImmutableList.copyOf(requireNonNull(types, "types is null"));
        this.valueAddresses = new LongArrayList(expectedPositions);

        //noinspection rawtypes
        channels = (ObjectArrayList<Block>[]) new ObjectArrayList[types.size()];
        for (int i = 0; i < channels.length; i++) {
            channels[i] = ObjectArrayList.wrap(new Block[1024], 0);
        }
    }

    public List<Type> getTypes()
    {
        return types;
    }

    public int getPositionCount()
    {
        return positionCount;
    }

    public LongArrayList getValueAddresses()
    {
        return valueAddresses;
    }

    public ObjectArrayList<Block> getChannel(int channel)
    {
        return channels[channel];
    }

    /** Drops all indexed data and resets memory accounting. */
    public void clear()
    {
        for (ObjectArrayList<Block> channel : channels) {
            channel.clear();
        }
        valueAddresses.clear();
        positionCount = 0;
        pagesMemorySize = 0;

        estimatedSize = calculateEstimatedSize();
    }

    /**
     * Appends every position of {@code page} to the index. Blocks are stored by
     * reference; one synthetic address is recorded per position.
     */
    public void addPage(Page page)
    {
        // ignore empty pages
        if (page.getPositionCount() == 0) {
            return;
        }

        positionCount += page.getPositionCount();

        // the new blocks land at the current end of every channel list, so the
        // block index for this page is the pre-append size
        int pageIndex = (channels.length > 0) ? channels[0].size() : 0;
        for (int i = 0; i < channels.length; i++) {
            Block block = page.getBlock(i);
            channels[i].add(block);
            pagesMemorySize += block.getRetainedSizeInBytes();
        }

        for (int position = 0; position < page.getPositionCount(); position++) {
            long sliceAddress = encodeSyntheticAddress(pageIndex, position);
            valueAddresses.add(sliceAddress);
        }

        estimatedSize = calculateEstimatedSize();
    }

    public DataSize getEstimatedSize()
    {
        return new DataSize(estimatedSize, BYTE);
    }

    /**
     * Copies any block whose retained size exceeds its data size, releasing the
     * over-allocated memory. Blocks compacted on a previous call are skipped via
     * nextBlockToCompact.
     */
    public void compact()
    {
        for (int channel = 0; channel < types.size(); channel++) {
            ObjectArrayList<Block> blocks = channels[channel];
            for (int i = nextBlockToCompact; i < blocks.size(); i++) {
                Block block = blocks.get(i);
                if (block.getSizeInBytes() < block.getRetainedSizeInBytes()) {
                    // Copy the block to compact its size
                    Block compactedBlock = block.copyRegion(0, block.getPositionCount());
                    blocks.set(i, compactedBlock);
                    pagesMemorySize -= block.getRetainedSizeInBytes();
                    pagesMemorySize += compactedBlock.getRetainedSizeInBytes();
                }
            }
        }
        // NOTE(review): assumes at least one channel exists — channels[0] would throw for
        // a zero-channel index; presumably compact() is never called in that case. Verify.
        nextBlockToCompact = channels[0].size();
        estimatedSize = calculateEstimatedSize();
    }

    // blocks' retained bytes + the backing arrays of the channel lists and the address list
    private long calculateEstimatedSize()
    {
        long elementsSize = (channels.length > 0) ? sizeOf(channels[0].elements()) : 0;
        long channelsArraySize = elementsSize * channels.length;
        long addressesArraySize = sizeOf(valueAddresses.elements());
        return pagesMemorySize + channelsArraySize + addressesArraySize;
    }

    public Type getType(int channel)
    {
        return types.get(channel);
    }

    // Swapper implementation used by the compiled sort: exchanges two addresses in place
    @Override
    public void swap(int a, int b)
    {
        long[] elements = valueAddresses.elements();
        long temp = elements[a];
        elements[a] = elements[b];
        elements[b] = temp;
    }

    /**
     * Appends rows starting at {@code position} into {@code pageBuilder} until the
     * builder is full or the index is exhausted.
     *
     * @return the first position not consumed (resume point for the next call)
     */
    public int buildPage(int position, int[] outputChannels, PageBuilder pageBuilder)
    {
        while (!pageBuilder.isFull() && position < positionCount) {
            long pageAddress = valueAddresses.getLong(position);
            int blockIndex = decodeSliceIndex(pageAddress);
            int blockPosition = decodePosition(pageAddress);

            // append the row
            pageBuilder.declarePosition();
            for (int i = 0; i < outputChannels.length; i++) {
                int outputChannel = outputChannels[i];
                Type type = types.get(outputChannel);
                Block block = this.channels[outputChannel].get(blockIndex);
                type.appendTo(block, blockPosition, pageBuilder.getBlockBuilder(i));
            }

            position++;
        }

        return position;
    }

    /** Appends the single value at (channel, position) to {@code output}. */
    public void appendTo(int channel, int position, BlockBuilder output)
    {
        long pageAddress = valueAddresses.getLong(position);

        Type type = types.get(channel);
        Block block = channels[channel].get(decodeSliceIndex(pageAddress));
        int blockPosition = decodePosition(pageAddress);
        type.appendTo(block, blockPosition, output);
    }

    public boolean isNull(int channel, int position)
    {
        long pageAddress = valueAddresses.getLong(position);

        Block block = channels[channel].get(decodeSliceIndex(pageAddress));
        int blockPosition = decodePosition(pageAddress);
        return block.isNull(blockPosition);
    }

    public boolean getBoolean(int channel, int position)
    {
        long pageAddress = valueAddresses.getLong(position);

        Block block = channels[channel].get(decodeSliceIndex(pageAddress));
        int blockPosition = decodePosition(pageAddress);
        return types.get(channel).getBoolean(block, blockPosition);
    }

    public long getLong(int channel, int position)
    {
        long pageAddress = valueAddresses.getLong(position);

        Block block = channels[channel].get(decodeSliceIndex(pageAddress));
        int blockPosition = decodePosition(pageAddress);
        return types.get(channel).getLong(block, blockPosition);
    }

    public double getDouble(int channel, int position)
    {
        long pageAddress = valueAddresses.getLong(position);

        Block block = channels[channel].get(decodeSliceIndex(pageAddress));
        int blockPosition = decodePosition(pageAddress);
        return types.get(channel).getDouble(block, blockPosition);
    }

    public Slice getSlice(int channel, int position)
    {
        long pageAddress = valueAddresses.getLong(position);

        Block block = channels[channel].get(decodeSliceIndex(pageAddress));
        int blockPosition = decodePosition(pageAddress);
        return types.get(channel).getSlice(block, blockPosition);
    }

    /** Sorts the entire index by the given channels and orders. */
    public void sort(List<Integer> sortChannels, List<SortOrder> sortOrders)
    {
        sort(sortChannels, sortOrders, 0, getPositionCount());
    }

    /**
     * Sorts positions [startPosition, endPosition) by rearranging valueAddresses
     * only; the stored blocks are untouched.
     */
    public void sort(List<Integer> sortChannels, List<SortOrder> sortOrders, int startPosition, int endPosition)
    {
        createPagesIndexComparator(sortChannels, sortOrders).sort(this, startPosition, endPosition);
    }

    public boolean positionEqualsPosition(PagesHashStrategy partitionHashStrategy, int leftPosition, int rightPosition)
    {
        long leftAddress = valueAddresses.getLong(leftPosition);
        int leftPageIndex = decodeSliceIndex(leftAddress);
        int leftPagePosition = decodePosition(leftAddress);

        long rightAddress = valueAddresses.getLong(rightPosition);
        int rightPageIndex = decodeSliceIndex(rightAddress);
        int rightPagePosition = decodePosition(rightAddress);

        return partitionHashStrategy.positionEqualsPosition(leftPageIndex, leftPagePosition, rightPageIndex, rightPagePosition);
    }

    public boolean positionEqualsRow(PagesHashStrategy pagesHashStrategy, int indexPosition, int rightPosition, Page rightPage)
    {
        long pageAddress = valueAddresses.getLong(indexPosition);
        int pageIndex = decodeSliceIndex(pageAddress);
        int pagePosition = decodePosition(pageAddress);

        return pagesHashStrategy.positionEqualsRow(pageIndex, pagePosition, rightPosition, rightPage);
    }

    // compiles a bytecode comparator specialized to the sort channels/orders
    private PagesIndexOrdering createPagesIndexComparator(List<Integer> sortChannels, List<SortOrder> sortOrders)
    {
        List<Type> sortTypes = sortChannels.stream()
                .map(types::get)
                .collect(toImmutableList());
        return orderingCompiler.compilePagesIndexOrdering(sortTypes, sortChannels, sortOrders);
    }

    public Supplier<LookupSource> createLookupSourceSupplier(Session session, List<Integer> joinChannels)
    {
        return createLookupSourceSupplier(session, joinChannels, Optional.empty(), Optional.empty(), Optional.empty());
    }

    public PagesHashStrategy createPagesHashStrategy(List<Integer> joinChannels, Optional<Integer> hashChannel)
    {
        return createPagesHashStrategy(joinChannels, hashChannel, Optional.empty());
    }

    /**
     * Builds a hash strategy over the indexed channels; falls back to the
     * interpreted SimplePagesHashStrategy when bytecode compilation fails.
     */
    public PagesHashStrategy createPagesHashStrategy(List<Integer> joinChannels, Optional<Integer> hashChannel, Optional<List<Integer>> outputChannels)
    {
        try {
            return joinCompiler.compilePagesHashStrategyFactory(types, joinChannels, outputChannels)
                    .createPagesHashStrategy(ImmutableList.copyOf(channels), hashChannel);
        }
        catch (Exception e) {
            log.error(e, "Lookup source compile failed for types=%s error=%s", types, e);
        }

        // if compilation fails, use interpreter
        return new SimplePagesHashStrategy(types, outputChannels.orElse(rangeList(types.size())), ImmutableList.copyOf(channels), joinChannels, hashChannel);
    }

    public Supplier<LookupSource> createLookupSourceSupplier(
            Session session,
            List<Integer> joinChannels,
            Optional<Integer> hashChannel,
            Optional<JoinFilterFunctionFactory> filterFunctionFactory)
    {
        return createLookupSourceSupplier(session, joinChannels, hashChannel, filterFunctionFactory, Optional.empty());
    }

    /**
     * Builds the hash-join lookup source. Prefers a compiled implementation;
     * on compile failure (or empty join channels) falls back to the interpreted
     * strategy wrapped in a JoinHashSupplier.
     */
    public Supplier<LookupSource> createLookupSourceSupplier(
            Session session,
            List<Integer> joinChannels,
            Optional<Integer> hashChannel,
            Optional<JoinFilterFunctionFactory> filterFunctionFactory,
            Optional<List<Integer>> outputChannels)
    {
        List<List<Block>> channels = ImmutableList.copyOf(this.channels);
        if (!joinChannels.isEmpty()) {
            // todo compiled implementation of lookup join does not support when we are joining with empty join channels.
            // This code path will trigger only for OUTER joins. To fix that we need to add support for
            // OUTER joins into NestedLoopsJoin and remove "type == INNER" condition in LocalExecutionPlanner.visitJoin()
            try {
                LookupSourceSupplierFactory lookupSourceFactory = joinCompiler.compileLookupSourceFactory(types, joinChannels, outputChannels);
                return lookupSourceFactory.createLookupSourceSupplier(
                        session.toConnectorSession(),
                        valueAddresses,
                        channels,
                        hashChannel,
                        filterFunctionFactory);
            }
            catch (Exception e) {
                log.error(e, "Lookup source compile failed for types=%s error=%s", types, e);
            }
        }

        // if compilation fails
        PagesHashStrategy hashStrategy = new SimplePagesHashStrategy(
                types,
                outputChannels.orElse(rangeList(types.size())),
                channels,
                joinChannels,
                hashChannel);

        return new JoinHashSupplier(
                session.toConnectorSession(),
                hashStrategy,
                valueAddresses,
                channels,
                filterFunctionFactory);
    }

    // [0, 1, ..., endExclusive - 1]
    private List<Integer> rangeList(int endExclusive)
    {
        return IntStream.range(0, endExclusive)
                .boxed()
                .collect(toImmutableList());
    }

    @Override
    public String toString()
    {
        return toStringHelper(this)
                .add("positionCount", positionCount)
                .add("types", types)
                .add("estimatedSize", estimatedSize)
                .toString();
    }
}
/* * Copyright 2015 Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.store.flowext.impl; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.SettableFuture; import org.apache.felix.scr.annotations.Activate; import org.apache.felix.scr.annotations.Component; import org.apache.felix.scr.annotations.Deactivate; import org.apache.felix.scr.annotations.Reference; import org.apache.felix.scr.annotations.ReferenceCardinality; import org.apache.felix.scr.annotations.Service; import org.onlab.util.KryoNamespace; import org.onosproject.cluster.ClusterService; import org.onosproject.net.DeviceId; import org.onosproject.net.device.DeviceService; import org.onosproject.net.flow.CompletedBatchOperation; import org.onosproject.net.flow.FlowRuleBatchEntry; import org.onosproject.net.flow.FlowRuleBatchEvent; import org.onosproject.net.flow.FlowRuleBatchRequest; import org.onosproject.net.flowext.DefaultFlowRuleExt; import org.onosproject.net.flowext.DownStreamFlowEntry; import org.onosproject.net.flowext.FlowExtCompletedOperation; import org.onosproject.net.flowext.FlowRuleExtRouter; import org.onosproject.net.flowext.FlowRuleExtRouterListener; import org.onosproject.store.cluster.messaging.ClusterCommunicationService; import 
org.onosproject.store.cluster.messaging.ClusterMessage;
import org.onosproject.store.cluster.messaging.ClusterMessageHandler;
import org.onosproject.store.flow.ReplicaInfo;
import org.onosproject.store.flow.ReplicaInfoEventListener;
import org.onosproject.store.flow.ReplicaInfoService;
import org.onosproject.store.serializers.DecodeTo;
import org.onosproject.store.serializers.KryoSerializer;
import org.onosproject.store.serializers.StoreSerializer;
import org.onosproject.store.serializers.impl.DistributedStoreSerializers;
import org.slf4j.Logger;

import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

import static org.onlab.util.Tools.groupedThreads;
import static org.onosproject.store.flowext.impl.FlowExtRouterMessageSubjects.APPLY_EXTEND_FLOWS;
import static org.slf4j.LoggerFactory.getLogger;

/**
 * Experimental extension to the flow rule subsystem; still under development.
 * Implement a simple routing-like mechanism to directly send service data to its master and push to device.
 * This Router does not save any flow rule extension data in cache, it focus on routing mechanism.
 */
@Component(immediate = true, enabled = false)
@Service
public class DefaultFlowRuleExtRouter
        implements FlowRuleExtRouter {

    private final Logger log = getLogger(getClass());

    // TODO: Make configurable.
    private static final int MESSAGE_HANDLER_THREAD_POOL_SIZE = 4;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected ReplicaInfoService replicaInfoManager;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected ClusterCommunicationService clusterCommunicator;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected ClusterService clusterService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected DeviceService deviceService;

    // how long a pending batch future may stay unanswered before it is evicted
    private int pendingFutureTimeoutMinutes = 5;

    // listeners notified when a batch must be applied locally (see notify(...))
    protected Set<FlowRuleExtRouterListener> routerListener = new HashSet<>();

    // futures keyed by batch id, completed by batchOperationComplete(...)
    private Cache<Long, SettableFuture<FlowExtCompletedOperation>> pendingExtendFutures = CacheBuilder
            .newBuilder()
            .expireAfterWrite(pendingFutureTimeoutMinutes, TimeUnit.MINUTES)
            // .removalListener(new TimeoutFuture())
            .build();

    private final ExecutorService futureListeners = Executors
            .newCachedThreadPool(groupedThreads("onos/flow", "store-peer-responders"));

    private ExecutorService messageHandlingExecutor;

    protected static final StoreSerializer SERIALIZER = new KryoSerializer() {
        @Override
        protected void setupKryoPool() {
            serializerPool = KryoNamespace.newBuilder()
                    .register(DistributedStoreSerializers.STORE_COMMON)
                    .nextId(DistributedStoreSerializers.STORE_CUSTOM_BEGIN)
                    .register(FlowExtCompletedOperation.class)
                    .register(FlowRuleBatchRequest.class)
                    .register(DownStreamFlowEntry.class)
                    .register(DefaultFlowRuleExt.class)
                    .build();
        }
    };

    // NOTE(review): this field is never assigned anywhere in this file, so the
    // original code registered a null listener in activate(). Guarded below;
    // confirm whether an implementation was meant to be instantiated here.
    private ReplicaInfoEventListener replicaInfoEventListener;

    @Activate
    public void activate() {
        messageHandlingExecutor = Executors.newFixedThreadPool(
                MESSAGE_HANDLER_THREAD_POOL_SIZE,
                groupedThreads("onos/flow", "message-handlers"));

        clusterCommunicator.addSubscriber(APPLY_EXTEND_FLOWS, new ClusterMessageHandler() {

            @Override
            public void handle(ClusterMessage message) {
                // decode the extended flow entry and store them in memory.
                FlowRuleBatchRequest operation = SERIALIZER.decode(message.payload());
                log.info("received batch request {}", operation);
                final ListenableFuture<FlowExtCompletedOperation> f = applyBatchInternal(operation);

                f.addListener(new Runnable() {
                    @Override
                    public void run() {
                        FlowExtCompletedOperation result = Futures.getUnchecked(f);
                        try {
                            message.respond(SERIALIZER.encode(result));
                        } catch (IOException e) {
                            log.error("Failed to respond back", e);
                        }
                    }
                }, futureListeners);
            }
        }, messageHandlingExecutor);

        // fix: only register when a listener actually exists (field may be null)
        if (replicaInfoEventListener != null) {
            replicaInfoManager.addListener(replicaInfoEventListener);
        }

        log.info("Started");
    }

    @Deactivate
    public void deactivate() {
        clusterCommunicator.removeSubscriber(APPLY_EXTEND_FLOWS);
        messageHandlingExecutor.shutdown();
        if (replicaInfoEventListener != null) {
            replicaInfoManager.removeListener(replicaInfoEventListener);
        }
        log.info("Stopped");
    }

    /**
     * apply the sub batch of flow extension rules.
     *
     * @param batchOperation batch of flow rules.
     *                       A batch can contain flow rules for a single device only.
     * @return Future response indicating success/failure of the batch operation
     * all the way down to the device.
     */
    @Override
    public Future<FlowExtCompletedOperation> applySubBatch(FlowRuleBatchRequest batchOperation) {
        // an empty batch trivially succeeds
        if (batchOperation.ops().isEmpty()) {
            return Futures.immediateFuture(new FlowExtCompletedOperation(
                    batchOperation.batchId(), true, Collections.emptySet()));
        }
        // get the deviceId all the collection belongs to
        DeviceId deviceId = getBatchDeviceId(batchOperation.ops());

        if (deviceId == null) {
            log.error("This Batch exists more than two deviceId");
            return null;
        }
        ReplicaInfo replicaInfo = replicaInfoManager
                .getReplicaInfoFor(deviceId);

        // apply locally when this node is the device's master, otherwise
        // forward the serialized batch to the master and decode its reply
        if (replicaInfo.master().get()
                .equals(clusterService.getLocalNode().id())) {
            return applyBatchInternal(batchOperation);
        }

        log.trace("Forwarding storeBatch to {}, which is the primary (master) for device {}",
                  replicaInfo.master().orNull(), deviceId);

        ClusterMessage message = new ClusterMessage(clusterService
                .getLocalNode().id(), APPLY_EXTEND_FLOWS, SERIALIZER.encode(batchOperation));

        try {
            ListenableFuture<byte[]> responseFuture = clusterCommunicator
                    .sendAndReceive(message, replicaInfo.master().get());
            // here should add another decode process
            return Futures.transform(responseFuture,
                                     new DecodeTo<FlowExtCompletedOperation>(SERIALIZER));
        } catch (IOException e) {
            return Futures.immediateFailedFuture(e);
        }
    }

    /**
     * apply the batch in local node.
     * It means this instance is master of the device the flow entry belongs to.
     *
     * @param batchOperation a collection of flow entry, all they should send down to one device
     * @return Future response indicating success/failure of the batch operation
     * all the way down to the device.
     */
    private ListenableFuture<FlowExtCompletedOperation> applyBatchInternal(FlowRuleBatchRequest batchOperation) {
        SettableFuture<FlowExtCompletedOperation> r = SettableFuture.create();
        // the future completes later, in batchOperationComplete(...)
        pendingExtendFutures.put(batchOperation.batchId(), r);
        // here should notify manager to complete
        notify(batchOperation);
        return r;
    }

    /**
     * Get the deviceId of this batch.
     * The whole Batch should belong to one deviceId.
     *
     * @param batchOperation a collection of flow entry, all they should send down to one device;
     *                       must be non-empty (callers check ops().isEmpty() first)
     * @return the deviceId the whole batch belongs to, or null if entries span devices
     */
    private DeviceId getBatchDeviceId(Collection<FlowRuleBatchEntry> batchOperation) {
        Iterator<FlowRuleBatchEntry> head = batchOperation.iterator();
        FlowRuleBatchEntry headOp = head.next();
        boolean sameId = true;
        for (FlowRuleBatchEntry operation : batchOperation) {
            // fix: compare DeviceId values with equals(), not reference identity
            if (!operation.target().deviceId().equals(headOp.target().deviceId())) {
                log.warn("this batch does not apply on one device Id ");
                sameId = false;
                break;
            }
        }
        return sameId ? headOp.target().deviceId() : null;
    }

    /**
     * Notify the listener of Router to do some reaction.
     *
     * @param request the requested operation to do
     */
    public void notify(FlowRuleBatchRequest request) {
        for (FlowRuleExtRouterListener listener : routerListener) {
            listener.notify(FlowRuleBatchEvent
                                    // TODO fill in the deviceId
                                    .requested(request, null));
        }
    }

    /**
     * Invoked on the completion of a storeBatch operation.
     *
     * @param event flow rule batch event
     */
    @Override
    public void batchOperationComplete(FlowRuleBatchEvent event) {
        final Long batchId = event.subject().batchId();
        SettableFuture<FlowExtCompletedOperation> future = pendingExtendFutures
                .getIfPresent(batchId);
        if (future != null) {
            FlowRuleBatchRequest request = event.subject();
            CompletedBatchOperation result = event.result();
            FlowExtCompletedOperation completed =
                    new FlowExtCompletedOperation(request.batchId(), result.isSuccess(), result.failedItems());
            future.set(completed);
            pendingExtendFutures.invalidate(batchId);
        }
    }

    /**
     * Register the listener to monitor Router,
     * The Router find master to send downStream.
     *
     * @param listener the listener to register
     */
    @Override
    public void addListener(FlowRuleExtRouterListener listener) {
        routerListener.add(listener);
    }

    /**
     * Remove the listener of Router.
     *
     * @param listener the listener to remove
     */
    @Override
    public void removeListener(FlowRuleExtRouterListener listener) {
        routerListener.remove(listener);
    }
}
import java.io.BufferedReader;
import java.io.Console;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;

/**
 * Generalized suffix tree over a list of words.
 *
 * <p>{@link #add(List)} inserts every suffix of every word (words are numbered
 * from 1 in list order); {@link #find(String, List)} then collects the 1-based
 * indices of all words containing a given substring.
 */
public class SuffixTree {

    // tree root; EmptyNode only overrides toString for nicer dumps
    private Node root = new EmptyNode(null);

    /**
     * Interactive driver: loads a word list (path from args[0], falling back to
     * the original hard-coded location), then answers substring queries from
     * the console until EOF.
     */
    public static void main(String[] args) throws IOException {
        SuffixTree tree = new SuffixTree();
        List<String> words = new LinkedList<String>();
        File f = new File(args.length > 0
                ? args[0]
                : "/Users/anand/Apps/workspace/Test/src/com/test/words.txt");
        // try-with-resources: the original leaked the reader on exception
        try (BufferedReader reader = new BufferedReader(new FileReader(f))) {
            String word;
            while ((word = reader.readLine()) != null) {
                words.add(word.trim());
            }
        }
        tree.add(words);
        System.out.println("Ready for search...");

        Console console = System.console();
        if (console == null) {
            // no interactive terminal attached (e.g. piped input / IDE run)
            System.out.println("No console available; exiting.");
            return;
        }
        String query;
        List<Integer> indices = new LinkedList<>();
        while ((query = console.readLine()) != null) {
            long start = System.nanoTime();
            tree.find(query, indices);
            long elapsed = System.nanoTime() - start;
            System.out.println(indices);
            System.out.println("Found " + indices.size() + " words in " + elapsed);
            indices.clear();
        }
    }

    /**
     * Appends the 1-based indices of all words containing {@code suffix} to
     * {@code indices}. An empty or null pattern matches nothing (the original
     * threw StringIndexOutOfBoundsException on "").
     */
    public void find(String suffix, List<Integer> indices) {
        if (suffix == null || suffix.isEmpty()) {
            return;
        }
        root.find(suffix, indices);
    }

    /** Visitor that renders the tree in GraphViz dot notation. */
    public static class GraphVizNodeVisitor implements NodeVisitor {
        StringBuilder builder = new StringBuilder("digraph {");

        @Override
        public void visit(Node node) {
            for (Edge edge : node.characterEdgeMap.values()) {
                builder.append(node.description() + " -> " + edge.node.description()
                        + "[label=\"" + edge.getLabelAsString() + "-" + edge.indices + "\"];\n");
            }
        }

        public String getDotNotation() {
            builder.append("}");
            return builder.toString();
        }
    }

    /** Callback applied to each node during {@link #bfs(NodeVisitor)}. */
    public interface NodeVisitor {
        void visit(Node node);
    }

    /** Breadth-first traversal applying {@code visitor} to every node. */
    public void bfs(NodeVisitor visitor) {
        Queue<Node> queue = new LinkedList<Node>();
        queue.add(root);
        while (!queue.isEmpty()) {
            Node node = queue.remove();
            visitor.visit(node);
            for (Edge edge : node.characterEdgeMap.values()) {
                queue.add(edge.node);
            }
        }
    }

    /** (suffix, word-index) pair ordered by the suffix text only. */
    private static class NameValuePair<T extends Comparable<T>, V extends Comparable<V>>
            implements Comparable<NameValuePair<T, V>> {
        T name;
        V value;

        public NameValuePair(T name, V value) {
            this.name = name;
            this.value = value;
        }

        @Override
        public int compareTo(NameValuePair<T, V> o) {
            return name.compareTo(o.name);
        }

        public String toString() {
            return "Name = " + name + ", Value = " + value;
        }
    }

    /**
     * Builds the tree from {@code words}: generates every suffix of every word
     * tagged with the word's 1-based index, sorts them (insertion in sorted
     * order keeps edge splitting simple), and inserts each at the root.
     */
    public void add(List<String> words) {
        if (words == null) {
            return;
        }
        List<NameValuePair<String, Integer>> suffixList = new LinkedList<>();

        long start = System.currentTimeMillis();
        System.out.println("Generating suffixes");
        Integer index = 1;
        for (String key : words) {
            for (int i = key.length() - 1; i >= 0; i--) {
                suffixList.add(new NameValuePair<String, Integer>(key.substring(i), index));
            }
            ++index;
        }
        System.out.println("Done generating suffixes in " + (System.currentTimeMillis() - start));

        start = System.currentTimeMillis();
        System.out.println("Sorting suffixes");
        Collections.sort(suffixList);
        System.out.println("Done sorting suffixes in " + (System.currentTimeMillis() - start));

        start = System.currentTimeMillis();
        System.out.println("Building tree");
        for (NameValuePair<String, Integer> pair : suffixList) {
            root.addSuffix(pair.name, pair.value);
        }
        System.out.println("Done building tree in " + (System.currentTimeMillis() - start));
    }

    /** Internal tree node; children are keyed by the first char of each edge label. */
    public static class Node {
        Edge parent;
        Map<Character, Edge> characterEdgeMap = new TreeMap<>();
        int id;

        public Node(Edge parent, int id) {
            this.parent = parent;
            this.id = id;
        }

        /** Inserts a (non-empty) suffix below this node, splitting edges as needed. */
        public void addSuffix(String suffix, int index) {
            if (characterEdgeMap.containsKey(suffix.charAt(0))) {
                addToEdge(suffix.charAt(0), suffix, index);
            } else {
                Edge newEdge = new Edge(index, this);
                newEdge.addLabel(suffix.toCharArray());
                characterEdgeMap.put(suffix.charAt(0), newEdge);
            }
        }

        /** Follows the edge for the pattern's first char, if any (suffix is non-empty here). */
        public void find(String suffix, List<Integer> indices) {
            if (characterEdgeMap.containsKey(suffix.charAt(0))) {
                characterEdgeMap.get(suffix.charAt(0)).find(suffix, indices);
            }
        }

        private void addToEdge(char charAt, String suffix, int index) {
            characterEdgeMap.get(charAt).addSuffix(suffix, index);
        }

        public String toString() {
            return characterEdgeMap.toString();
        }

        public String description() {
            if (parent == null) {
                return "root";
            }
            return parent.description() + id;
        }

        /**
         * Edge split: pushes this node's current children (and the parent edge's
         * accumulated indices) one level down under a new edge labeled
         * {@code suffix}, leaving this node with that single child.
         */
        public void addSuffixLevel(String suffix, int index) {
            Edge newEdge = new Edge(this);
            newEdge.addLabel(suffix.toCharArray());
            newEdge.node.characterEdgeMap.putAll(characterEdgeMap);
            if (newEdge.indices.isEmpty()) {
                newEdge.indices.addAll(this.parent.indices);
            }
            characterEdgeMap.clear();
            characterEdgeMap.put(suffix.charAt(0), newEdge);
        }
    }

    /** Root marker node. */
    public static class EmptyNode extends Node {
        public EmptyNode(Edge parent) {
            super(parent, 0);
        }

        @Override
        public String toString() {
            return "root";
        }
    }

    /** Labeled edge; {@code indices} holds every word index whose suffixes pass through it. */
    public static class Edge {
        char[] labels = new char[0];
        Node node;
        Set<Integer> indices = new TreeSet<Integer>();

        public Edge(int index, Node parent) {
            this.node = new Node(this, parent.id + 1);
            indices.add(index);
        }

        public Edge(Node parent) {
            this.node = new Node(this, parent.id + 1);
        }

        /**
         * Matches {@code suffix} against this edge's label: a full match within
         * the label reports this edge's word indices; a label fully consumed by
         * a longer pattern recurses into the child node; any mismatch fails.
         */
        public void find(String suffix, List<Integer> indices) {
            boolean matches = true;
            for (int i = 0; i < suffix.length(); i++) {
                if (i > labels.length - 1 || this.labels[i] != suffix.charAt(i)) {
                    matches = false;
                    break;
                }
            }
            if (!matches && suffix.length() <= labels.length) {
                return;
            }
            if (!matches) {
                node.find(suffix.substring(labels.length), indices);
            } else {
                indices.addAll(this.indices);
            }
        }

        // NOTE(review): '$' terminators are never inserted by add(); presumably a
        // leftover from an earlier single-word design — confirm before relying on it
        public boolean isTerminating() {
            return labels[0] == '$';
        }

        /** Appends {@code label} to the current label array. */
        public void addLabel(char[] label) {
            labels = Arrays.copyOf(labels, labels.length + label.length);
            System.arraycopy(label, 0, labels, labels.length - label.length, label.length);
        }

        public void addIndex(int index) {
            indices.add(index);
        }

        public String description() {
            if (isTerminating()) {
                return "t";
            }
            return getLabelAsString().replace("$", "");
        }

        public String getLabelAsString() {
            return new String(labels).intern();
        }

        /**
         * Inserts {@code suffix} along this edge. On the first divergence inside
         * the label, the edge is split via {@link Node#addSuffixLevel} and the
         * remainder of the suffix is inserted below; the word index is always
         * recorded on this edge.
         */
        public void addSuffix(String suffix, int index) {
            int i = 0;
            for (int j = 0; j < suffix.length(); j++) {
                if (i >= labels.length || labels[i] != suffix.charAt(j)) {
                    if (i < labels.length) {
                        node.addSuffixLevel(getLabelAsString().substring(i), index);
                        resetLabels(suffix.substring(0, i));
                    }
                    node.addSuffix(suffix.substring(i), index);
                    break;
                }
                ++i;
            }
            indices.add(index);
        }

        public void resetLabels(String newSuffix) {
            labels = newSuffix.toCharArray();
        }

        public String toString() {
            // fix: char[].toString() printed the array's identity hash, not the label
            return new String(labels);
        }
    }
}
package com.musicocracy.fpgk.domain.net;

import com.google.protobuf.InvalidProtocolBufferException;
import com.musicocracy.fpgk.domain.dal.Database;
import com.musicocracy.fpgk.domain.dal.Guest;
import com.musicocracy.fpgk.domain.dj.DjAlgorithm;
import com.musicocracy.fpgk.domain.spotify.Browser;
import com.musicocracy.fpgk.domain.util.Logger;
import com.musicocracy.fpgk.domain.util.PartySettings;
import com.musicocracy.fpgk.domain.util.RxUtils;
import com.musicocracy.fpgk.domain.spotify.SpotifyPlayerHandler;
import com.musicocracy.fpgk.net.proto.BasicReply;
import com.musicocracy.fpgk.net.proto.BrowseSongsReply;
import com.musicocracy.fpgk.net.proto.BrowseSongsRequest;
import com.musicocracy.fpgk.net.proto.ConnectRequest;
import com.musicocracy.fpgk.net.proto.MessageType;
import com.musicocracy.fpgk.net.proto.PlayRequestRequest;
import com.musicocracy.fpgk.net.proto.SendVoteRequest;
import com.musicocracy.fpgk.net.proto.VotableSongsReply;
import com.musicocracy.fpgk.net.proto.VotableSongsRequest;

import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;

import kaaes.spotify.webapi.android.SpotifyApi;
import kaaes.spotify.webapi.android.models.Track;
import rx.Observable;
import rx.Subscriber;
import rx.Subscription;
import rx.functions.Action1;

/**
 * Server-side request dispatcher. Subscribes to typed protobuf messages coming
 * off the {@link ServerEventBus} and services each client request type:
 * connect, song browsing, votable-song listing, play requests and votes.
 * Every handler replies to the sender via {@code msgBySender.replyWith(...)}.
 */
public class ServerHandler {
    // Shared sentinel for "no active subscriptions"; also used as an identity
    // guard in onCreate() so subscriptions are only created once.
    private static final Subscription[] emptySubs = new Subscription[0];
    private static final String TAG = "ServerHandler";
    // Maximum number of tracks returned for a single browse query.
    private static final int NUM_BROWSE_RESULTS = 10;

    private final ServerEventBus eventBus;
    private final PartySettings partySettings;
    private final Browser browser;
    private final SpotifyApi api;
    private final Logger log;
    private final DjAlgorithm djAlgorithm;
    private final Database database;
    // Emits the URI of every accepted play request; exposed via newPlayRequest().
    private final SharedSubject<String> newPlayRequest = SharedSubject.create();
    private SpotifyPlayerHandler spotifyPlayerHandler;
    private Subscription[] subscriptions = emptySubs;

    public ServerHandler(ServerEventBus eventBus, PartySettings partySettings, Browser browser,
                         SpotifyApi api, Logger log, SpotifyPlayerHandler spotifyPlayerHandler,
                         DjAlgorithm djAlgorithm, Database database) {
        this.eventBus = eventBus;
        this.partySettings = partySettings;
        this.browser = browser;
        this.api = api;
        this.log = log;
        this.spotifyPlayerHandler = spotifyPlayerHandler;
        this.djAlgorithm = djAlgorithm;
        this.database = database;
    }

    /**
     * Configures the Spotify API token and wires up all event-bus
     * subscriptions. Idempotent: subscriptions are only created while none
     * are active (subscriptions == emptySubs).
     */
    public void onCreate() {
        api.setAccessToken(partySettings.getSpotifyToken());
        if (subscriptions == emptySubs) {
            subscriptions = new Subscription[] {
                createLogSub(),
                createClientConnectSub(),
                createBrowseRequestSub(),
                createVotableSongsRequestSub(),
                createPlayRequestSub(),
                createVoteRequestSub()
            };
        }
    }

    /** Forwards the event bus's own log stream to the verbose log. */
    private Subscription createLogSub() {
        return eventBus.getObservableLog()
                .subscribe(new Action1<String>() {
                    @Override
                    public void call(String s) {
                        log.verbose(TAG, s);
                    }
                });
    }

    /**
     * Handles CONNECT_REQUEST messages: validates the party name against the
     * current settings and, on success, registers/updates the guest record.
     */
    private Subscription createClientConnectSub() {
        return eventBus.getObservable(MessageType.CONNECT_REQUEST)
                .subscribe(new Subscriber<ProtoMessageBySender>() {
                    @Override
                    public void onCompleted() {
                        log.warning(TAG, "Unexpected createClientConnectSub: onCompleted");
                    }

                    @Override
                    public void onError(Throwable e) {
                        log.error(TAG, "Unexpected createClientConnectSub: onError " + e.toString());
                    }

                    @Override
                    public void onNext(ProtoMessageBySender msgBySender) {
                        ConnectRequest request;
                        try {
                            request = ConnectRequest.parseFrom(msgBySender.message.getBody());
                        } catch (InvalidProtocolBufferException e) {
                            // Fall back to the default instance; treated as invalid below.
                            request = ConnectRequest.getDefaultInstance();
                            e.printStackTrace();
                        }
                        BasicReply reply;
                        if (request != ConnectRequest.getDefaultInstance() && request.getPartyName().equals(partySettings.getPartyName())) {
                            reply = BasicReply.newBuilder().setSuccess(true).setMessage("").setReplyingTo(msgBySender.message.getHeader().getType()).build();
                            try {
                                // Record (or refresh) the guest in the party database.
                                database.getGuestDao().createOrUpdate(new Guest(partySettings.raw(), "g", request.getRequesterId(), new Timestamp(System.currentTimeMillis()), false));
                            } catch (SQLException e) {
                                log.error(TAG, e.toString());
                            }
                        } else {
                            reply = BasicReply.newBuilder().setSuccess(false).setMessage("Invalid party credentials").setReplyingTo(msgBySender.message.getHeader().getType()).build();
                        }
                        log.verbose(TAG, "Sent: " + reply);
                        msgBySender.replyWith(reply);
                    }
                });
    }

    /**
     * Handles BROWSE_SONGS_REQUEST messages: searches tracks by title and
     * replies with at most NUM_BROWSE_RESULTS results.
     */
    private Subscription createBrowseRequestSub() {
        return eventBus.getObservable(MessageType.BROWSE_SONGS_REQUEST)
                .subscribe(new Subscriber<ProtoMessageBySender>() {
                    @Override
                    public void onCompleted() {
                        log.warning(TAG, "Unexpected createBrowseRequestSub: onCompleted");
                    }

                    @Override
                    public void onError(Throwable e) {
                        log.error(TAG, "Unexpected createBrowseRequestSub: onError " + e.toString());
                    }

                    @Override
                    public void onNext(ProtoMessageBySender msgBySender) {
                        BrowseSongsRequest request;
                        try {
                            request = BrowseSongsRequest.parseFrom(msgBySender.message.getBody());
                            log.verbose(TAG, "Successful parse");
                        } catch (InvalidProtocolBufferException e) {
                            log.error(TAG, e.toString());
                            e.printStackTrace();
                            // Default instance has an empty title; browse simply matches on that.
                            request = BrowseSongsRequest.getDefaultInstance();
                        }
                        List<Track> browseTracks = browser.browseTracks(request.getSongTitle());
                        log.verbose(TAG, browseTracks.size() + " songs found");
                        BrowseSongsReply.Builder builder = BrowseSongsReply.newBuilder();
                        for (int i = 0; i < browseTracks.size() && i < NUM_BROWSE_RESULTS; i++) {
                            builder
                                .addSongs(BrowseSongsReply.BrowsableSong.newBuilder()
                                    .setTitle(browseTracks.get(i).name)
                                    .setArtist(browseTracks.get(i).artists.get(0).name)
                                    .setUri(browseTracks.get(i).uri)
                                    .setMusicService("Spotify")
                                    .build());
                        }
                        BrowseSongsReply reply = builder.build();
                        log.verbose(TAG, "Sent (BrowseReply): " + reply);
                        msgBySender.replyWith(reply);
                    }
                });
    }

    /**
     * Handles VOTABLE_SONGS_REQUEST messages: resolves the DJ algorithm's
     * current votable URIs to tracks and replies with the choice list.
     * Choice ids are the positions in the votable list at reply time.
     */
    private Subscription createVotableSongsRequestSub() {
        return eventBus.getObservable(MessageType.VOTABLE_SONGS_REQUEST)
                .subscribe(new Subscriber<ProtoMessageBySender>() {
                    @Override
                    public void onCompleted() {
                        log.warning(TAG, "Unexpected createVotableSongsRequestSub: onCompleted");
                    }

                    @Override
                    public void onError(Throwable e) {
                        log.error(TAG, "Unexpected createVotableSongsRequestSub: onError " + e.toString());
                    }

                    @Override
                    public void onNext(ProtoMessageBySender msgBySender) {
                        List<String> votableURIs = getVotableURIs();
                        List<Track> votableTracks = getVotableTracks(votableURIs);
                        VotableSongsReply.Builder builder = VotableSongsReply.newBuilder();
                        for (int i = 0; i < votableTracks.size(); i++) {
                            Track track = votableTracks.get(i);
                            builder
                                .addSongs(VotableSongsReply.VotableSong.newBuilder()
                                    .setArtist(track.artists.get(0).name)
                                    .setTitle(track.name)
                                    .setChoiceId(i)
                                    .build());
                        }
                        VotableSongsReply reply = builder.build();
                        log.verbose(TAG, "Sending msg " + reply);
                        msgBySender.replyWith(reply);
                        log.verbose(TAG, "Send complete. ~" + reply.toByteArray().length + " byte body");
                    }
                });
    }

    /**
     * Handles PLAY_REQUEST_REQUEST messages: registers the request with the DJ
     * algorithm, publishes the URI on newPlayRequest, and starts playback.
     * Failures are reported back to the sender as an unsuccessful BasicReply.
     */
    private Subscription createPlayRequestSub() {
        return eventBus.getObservable(MessageType.PLAY_REQUEST_REQUEST)
                .subscribe(new Subscriber<ProtoMessageBySender>() {
                    @Override
                    public void onCompleted() {
                        log.warning(TAG, "Unexpected createPlayRequestSub: onCompleted");
                    }

                    @Override
                    public void onError(Throwable e) {
                        log.error(TAG, "Unexpected createPlayRequestSub: onError " + e.toString());
                    }

                    @Override
                    public void onNext(ProtoMessageBySender msgBySender) {
                        PlayRequestRequest request;
                        try {
                            // NOTE(review): logged before parseFrom() runs, so "Successful
                            // parse" appears even when parsing subsequently fails.
                            log.verbose(TAG, "Successful parse");
                            request = PlayRequestRequest.parseFrom(msgBySender.message.getBody());
                        } catch (InvalidProtocolBufferException e) {
                            log.error(TAG, e.toString());
                            request = PlayRequestRequest.getDefaultInstance();
                        }
                        BasicReply.Builder builder = BasicReply.newBuilder().setReplyingTo(msgBySender.message.getHeader().getType());
                        try {
                            if (request == PlayRequestRequest.getDefaultInstance()) {
                                throw new InvalidProtocolBufferException("Invalid Play Request");
                            }
                            djAlgorithm.request(request.getUri(), request.getRequesterId());
                            newPlayRequest.onNext(request.getUri());
                            builder.setSuccess(true).setMessage("");
                            spotifyPlayerHandler.play();
                        } catch (SQLException | IllegalArgumentException | InvalidProtocolBufferException e) {
                            log.error(TAG, e.toString());
builder.setSuccess(false).setMessage(e.getMessage()); } BasicReply reply = builder.build(); log.verbose(TAG, "Sent: " + reply); msgBySender.replyWith(reply); } }); } private Subscription createVoteRequestSub() { return eventBus.getObservable(MessageType.SEND_VOTE_REQUEST) .subscribe(new Subscriber<ProtoMessageBySender>() { @Override public void onCompleted() { log.warning(TAG, "Unexpected createVoteRequestSub: onCompleted"); } @Override public void onError(Throwable e) { log.error(TAG, "Unexpected createVoteRequestSub: onError " + e.toString()); } @Override public void onNext(ProtoMessageBySender msgBySender) { SendVoteRequest request; try { request = SendVoteRequest.parseFrom(msgBySender.message.getBody()); } catch (InvalidProtocolBufferException e) { request = SendVoteRequest.getDefaultInstance(); e.printStackTrace(); } BasicReply.Builder builder = BasicReply.newBuilder().setReplyingTo(msgBySender.message.getHeader().getType()); try { if (request == SendVoteRequest.getDefaultInstance()) { throw new InvalidProtocolBufferException("Invalid Vote Request"); } List<String> votableSongURIs = getVotableURIs(); String voteURI = votableSongURIs.get(request.getChoiceId()); djAlgorithm.voteFor(voteURI, request.getRequesterId()); builder.setSuccess(true).setMessage(""); } catch (SQLException | IllegalArgumentException | InvalidProtocolBufferException e) { log.error(TAG, e.toString()); builder.setSuccess(false).setMessage(e.getMessage()); } BasicReply reply = builder.build(); log.verbose(TAG, "Sent: " + reply); msgBySender.replyWith(reply); } }); } public List<String> getVotableURIs() { List<String> votableSongURIs = null; try { votableSongURIs = djAlgorithm.getVotableSongUris(); } catch (SQLException e) { log.error(TAG, e.toString()); } return votableSongURIs; } public List<Track> getVotableTracks(List<String> votableSongURIs) { List<Track> votableTracks = new ArrayList<>(); for (String uri : votableSongURIs) { Track track = browser.getTrackByURI(uri); 
votableTracks.add(track); } return votableTracks; } public Observable<String> newPlayRequest() { return newPlayRequest.asObservable(); } public void onDestroy() { spotifyPlayerHandler.onDestroy(); for (Subscription subscription : subscriptions) { RxUtils.safeUnsubscribe(subscription); } subscriptions = emptySubs; } }
package org.knowm.xchange;

import java.util.HashMap;
import java.util.Map;

/**
 * <p>
 * Specification to provide the following to {@link ExchangeFactory}:
 * </p>
 * <ul>
 * <li>Provision of required exchangeSpecificParameters for creating an {@link Exchange}</li>
 * <li>Provision of optional exchangeSpecificParameters for additional configuration</li>
 * </ul>
 */
public class ExchangeSpecification {

  private String exchangeName;
  private String exchangeDescription;
  private String userName;
  private String password;
  private String secretKey;
  private String apiKey;
  private String sslUri;
  private String plainTextUri;
  private String host;
  private int port = 80;
  private String metaDataJsonFileOverride = null;
  private boolean shouldLoadRemoteMetaData = true; // default value
  private final String exchangeClassName;

  /** arbitrary exchange params that can be set for unique cases */
  private Map<String, Object> exchangeSpecificParameters = new HashMap<>();

  /**
   * Dynamic binding
   *
   * @param exchangeClassName The exchange class name (e.g. "org.knowm.xchange.mtgox.v1.MtGoxExchange")
   */
  public ExchangeSpecification(String exchangeClassName) {
    this.exchangeClassName = exchangeClassName;
  }

  /**
   * Static binding
   *
   * @param exchangeClass The exchange class
   */
  public ExchangeSpecification(Class<?> exchangeClass) { // was raw Class; Class<?> is source-compatible

    this.exchangeClassName = exchangeClass.getCanonicalName();
  }

  /**
   * @return The exchange class name for loading at runtime
   */
  public String getExchangeClassName() {
    return exchangeClassName;
  }

  /**
   * @param key The key into the parameter map (recommend using the provided standard static entries)
   * @return Any additional exchangeSpecificParameters that the {@link Exchange} may consume to configure services
   */
  public Object getParameter(String key) {
    return exchangeSpecificParameters.get(key);
  }

  /**
   * Get the host name of the server providing data (e.g. "mtgox.com").
   *
   * @return the host name
   */
  public String getHost() {
    return host;
  }

  /**
   * Set the host name of the server providing data.
   *
   * @param host the host name
   */
  public void setHost(String host) {
    this.host = host;
  }

  /**
   * Get the API key. For MtGox this would be the "Rest-Key" field.
   *
   * @return the API key
   */
  public String getApiKey() {
    return apiKey;
  }

  /**
   * Set the API key. For MtGox this would be the "Rest-Key" field.
   *
   * @param apiKey the API key
   */
  public void setApiKey(String apiKey) {
    this.apiKey = apiKey;
  }

  /**
   * Get the port number of the server providing direct socket data (e.g. "1337").
   *
   * @return the port number
   */
  public int getPort() {
    return port;
  }

  /**
   * Set the port number of the server providing direct socket data (e.g. "1337").
   *
   * @param port the port number
   */
  public void setPort(int port) {
    this.port = port;
  }

  /**
   * Get the API secret key typically used in HMAC signing of requests. For MtGox this would be the "Rest-Sign" field.
   *
   * @return the secret key
   */
  public String getSecretKey() {
    return secretKey;
  }

  /**
   * Set the API secret key typically used in HMAC signing of requests. For MtGox this would be the "Rest-Sign" field.
   *
   * @param secretKey the secret key
   */
  public void setSecretKey(String secretKey) {
    this.secretKey = secretKey;
  }

  /**
   * Get the URI to reach the <b>root</b> of the exchange API for SSL queries (e.g. use "https://example.com:8443/exchange", not
   * "https://example.com:8443/exchange/api/v3/trades").
   *
   * @return the SSL URI
   */
  public String getSslUri() {
    return sslUri;
  }

  /**
   * Set the URI to reach the <b>root</b> of the exchange API for SSL queries (e.g. use "https://example.com:8443/exchange", not
   * "https://example.com:8443/exchange/api/v3/trades").
   *
   * @param uri the SSL URI
   */
  public void setSslUri(String uri) {
    this.sslUri = uri;
  }

  /**
   * Get the URI to reach the <b>root</b> of the exchange API for plaintext (non-SSL) queries (e.g. use "http://example.com:8080/exchange", not
   * "http://example.com:8080/exchange/api/v3/trades")
   *
   * @return the plain text URI
   */
  public String getPlainTextUri() {
    return plainTextUri;
  }

  /**
   * Set the URI to reach the <b>root</b> of the exchange API for plaintext (non-SSL) queries (e.g. use "http://example.com:8080/exchange", not
   * "http://example.com:8080/exchange/api/v3/trades")
   *
   * @param plainTextUri the plain text URI
   */
  public void setPlainTextUri(String plainTextUri) {
    this.plainTextUri = plainTextUri;
  }

  /**
   * Get the arbitrary exchange-specific parameters to be passed to the exchange implementation.
   *
   * @return a Map of named exchange-specific parameter values
   */
  public Map<String, Object> getExchangeSpecificParameters() {
    return exchangeSpecificParameters;
  }

  /**
   * Set the arbitrary exchange-specific parameters to be passed to the exchange implementation.
   *
   * @param exchangeSpecificParameters a Map of named exchange-specific parameter values
   */
  public void setExchangeSpecificParameters(Map<String, Object> exchangeSpecificParameters) {
    this.exchangeSpecificParameters = exchangeSpecificParameters;
  }

  /**
   * Get an item from the arbitrary exchange-specific parameters to be passed to the exchange implementation.
   *
   * @param key the parameter key
   * @return the parameter value, or null if not set
   */
  public Object getExchangeSpecificParametersItem(String key) {
    return exchangeSpecificParameters.get(key);
  }

  /**
   * Set an item in the arbitrary exchange-specific parameters to be passed to the exchange implementation.
   *
   * @param key the parameter key
   * @param value the parameter value
   */
  public void setExchangeSpecificParametersItem(String key, Object value) {
    this.exchangeSpecificParameters.put(key, value);
  }

  /**
   * Get the password for authentication.
   *
   * @return the password
   */
  public String getPassword() {
    return password;
  }

  /**
   * Set the password for authentication.
   *
   * @param password the password
   */
  public void setPassword(String password) {
    this.password = password;
  }

  /**
   * Get the username for authentication.
   *
   * @return the username
   */
  public String getUserName() {
    return userName;
  }

  /**
   * Set the username for authentication.
   *
   * @param userName the username
   */
  public void setUserName(String userName) {
    this.userName = userName;
  }

  /**
   * Get the exchange name.
   *
   * @return the exchange name (e.g. "Mt Gox")
   */
  public String getExchangeName() {
    return exchangeName;
  }

  /**
   * Set the exchange name (e.g. "Mt Gox").
   *
   * @param exchangeName the exchange name
   */
  public void setExchangeName(String exchangeName) {
    this.exchangeName = exchangeName;
  }

  /**
   * Get the exchange description (e.g. "Major exchange specialising in USD, EUR, GBP").
   *
   * @return the exchange description
   */
  public String getExchangeDescription() {
    return exchangeDescription;
  }

  /**
   * Set the exchange description (e.g. "Major exchange specialising in USD, EUR, GBP").
   *
   * @param exchangeDescription the exchange description
   */
  public void setExchangeDescription(String exchangeDescription) {
    this.exchangeDescription = exchangeDescription;
  }

  /**
   * Get the override file for generating the {@link org.knowm.xchange.dto.meta.ExchangeMetaData} object. By default, the
   * {@link org.knowm.xchange.dto.meta.ExchangeMetaData} object is loaded at startup from a json file on the classpath with the same name as the name
   * of the exchange as defined in {@link ExchangeSpecification}. With this parameter, you can override that file with a file of your choice located
   * outside of the classpath.
   *
   * @return the path of the override file, or null if none is set
   */
  public String getMetaDataJsonFileOverride() {
    return metaDataJsonFileOverride;
  }

  /**
   * Set the override file for generating the {@link org.knowm.xchange.dto.meta.ExchangeMetaData} object. By default, the
   * {@link org.knowm.xchange.dto.meta.ExchangeMetaData} object is loaded at startup from a json file on the classpath with the same name as the name
   * of the exchange as defined in {@link ExchangeSpecification}. With this parameter, you can override that file with a file of your choice located
   * outside of the classpath.
   *
   * @param metaDataJsonFileOverride the path of the override file
   */
  public void setMetaDataJsonFileOverride(String metaDataJsonFileOverride) {
    this.metaDataJsonFileOverride = metaDataJsonFileOverride;
  }

  /**
   * By default, some meta data from the exchange is remotely loaded (if implemented).
   *
   * @return true if remote meta data should be loaded (the default)
   */
  public boolean isShouldLoadRemoteMetaData() {
    return shouldLoadRemoteMetaData;
  }

  /**
   * By default, some meta data from the exchange is remotely loaded (if implemented). Here you can set this default behavior.
   *
   * @param shouldLoadRemoteMetaData whether remote meta data should be loaded
   */
  public void setShouldLoadRemoteMetaData(boolean shouldLoadRemoteMetaData) {
    this.shouldLoadRemoteMetaData = shouldLoadRemoteMetaData;
  }

}
/* * Copyright 2017-present Open Networking Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.routing.bgp; import org.apache.felix.scr.annotations.Activate; import org.apache.felix.scr.annotations.Component; import org.apache.felix.scr.annotations.Deactivate; import org.apache.felix.scr.annotations.Modified; import org.apache.felix.scr.annotations.Reference; import org.apache.felix.scr.annotations.ReferenceCardinality; import org.apache.felix.scr.annotations.Service; import org.jboss.netty.bootstrap.ServerBootstrap; import org.jboss.netty.channel.Channel; import org.jboss.netty.channel.ChannelException; import org.jboss.netty.channel.ChannelFactory; import org.jboss.netty.channel.ChannelPipeline; import org.jboss.netty.channel.ChannelPipelineFactory; import org.jboss.netty.channel.Channels; import org.jboss.netty.channel.group.ChannelGroup; import org.jboss.netty.channel.group.DefaultChannelGroup; import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory; import org.onlab.packet.Ip4Address; import org.onlab.packet.Ip4Prefix; import org.onlab.packet.Ip6Prefix; import org.onlab.packet.IpPrefix; import org.onosproject.cluster.ClusterService; import org.onosproject.routeservice.Route; import org.onosproject.routeservice.RouteAdminService; import org.osgi.service.component.ComponentContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.net.InetAddress; import java.net.InetSocketAddress; import 
java.net.SocketAddress;
import java.util.Collection;
import java.util.Dictionary;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

import static java.util.concurrent.Executors.newCachedThreadPool;
import static org.onlab.util.Tools.groupedThreads;

/**
 * BGP Session Manager class.
 */
@Component(immediate = true, enabled = false)
@Service
public class BgpSessionManager implements BgpInfoService {
    private static final Logger log =
        LoggerFactory.getLogger(BgpSessionManager.class);

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected RouteAdminService routeService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected ClusterService clusterService;

    boolean isShutdown = true;
    private Channel serverChannel;     // Listener for incoming BGP connections
    private ServerBootstrap serverBootstrap;
    private ChannelGroup allChannels = new DefaultChannelGroup();
    // All active peer sessions, keyed by the peer's remote socket address.
    private ConcurrentMap<SocketAddress, BgpSession> bgpSessions =
        new ConcurrentHashMap<>();
    private Ip4Address myBgpId;        // Same BGP ID for all peers
    private BgpRouteSelector bgpRouteSelector;
    // Best-path routes selected across all sessions, split by address family.
    private ConcurrentMap<Ip4Prefix, BgpRouteEntry> bgpRoutes4 =
        new ConcurrentHashMap<>();
    private ConcurrentMap<Ip6Prefix, BgpRouteEntry> bgpRoutes6 =
        new ConcurrentHashMap<>();

    private static final int DEFAULT_BGP_PORT = 2000;
    private int bgpPort;

    // Component lifecycle: read configuration, then open the listen socket.
    @Activate
    protected void activate(ComponentContext context) {
        bgpRouteSelector = new BgpRouteSelector(this, clusterService);
        readComponentConfiguration(context);
        start();
        log.info("BgpSessionManager started");
    }

    @Deactivate
    protected void deactivate() {
        stop();
        log.info("BgpSessionManager stopped");
    }

    /**
     * Extracts properties from the component configuration context.
     *
     * @param context the component context
     */
    private void readComponentConfiguration(ComponentContext context) {
        Dictionary<?, ?> properties = context.getProperties();
        try {
            String strPort = (String) properties.get("bgpPort");
            if (strPort != null) {
                bgpPort = Integer.parseInt(strPort);
            } else {
                bgpPort = DEFAULT_BGP_PORT;
            }
        } catch (NumberFormatException | ClassCastException e) {
            // Malformed configuration falls back to the default port.
            bgpPort = DEFAULT_BGP_PORT;
        }
        log.debug("BGP port is set to {}", bgpPort);
    }

    @Modified
    public void modified(ComponentContext context) {
        // Blank @Modified method to catch modifications to the context.
        // If no @Modified method exists, it seems @Activate is called again
        // when the context is modified.
    }

    /**
     * Checks whether the BGP Session Manager is shutdown.
     *
     * @return true if the BGP Session Manager is shutdown, otherwise false
     */
    boolean isShutdown() {
        return this.isShutdown;
    }

    /**
     * Gets the BGP sessions.
     *
     * @return the BGP sessions
     */
    @Override
    public Collection<BgpSession> getBgpSessions() {
        return bgpSessions.values();
    }

    /**
     * Gets the selected IPv4 BGP routes among all BGP sessions.
     *
     * @return the selected IPv4 BGP routes among all BGP sessions
     */
    @Override
    public Collection<BgpRouteEntry> getBgpRoutes4() {
        return bgpRoutes4.values();
    }

    /**
     * Gets the selected IPv6 BGP routes among all BGP sessions.
     *
     * @return the selected IPv6 BGP routes among all BGP sessions
     */
    @Override
    public Collection<BgpRouteEntry> getBgpRoutes6() {
        return bgpRoutes6.values();
    }

    /**
     * Finds a BGP route for a prefix. The prefix can be either IPv4 or IPv6.
     *
     * @param prefix the prefix to use
     * @return the BGP route if found, otherwise null
     */
    BgpRouteEntry findBgpRoute(IpPrefix prefix) {
        if (prefix.isIp4()) {
            return bgpRoutes4.get(prefix.getIp4Prefix());               // IPv4
        }
        return bgpRoutes6.get(prefix.getIp6Prefix());                   // IPv6
    }

    /**
     * Adds a BGP route. The route can be either IPv4 or IPv6.
* * @param bgpRouteEntry the BGP route entry to use */ void addBgpRoute(BgpRouteEntry bgpRouteEntry) { if (bgpRouteEntry.isIp4()) { bgpRoutes4.put(bgpRouteEntry.prefix().getIp4Prefix(), // IPv4 bgpRouteEntry); } else { bgpRoutes6.put(bgpRouteEntry.prefix().getIp6Prefix(), // IPv6 bgpRouteEntry); } } /** * Removes a BGP route for a prefix. The prefix can be either IPv4 or IPv6. * * @param prefix the prefix to use * @return true if the route was found and removed, otherwise false */ boolean removeBgpRoute(IpPrefix prefix) { if (prefix.isIp4()) { return (bgpRoutes4.remove(prefix.getIp4Prefix()) != null); // IPv4 } return (bgpRoutes6.remove(prefix.getIp6Prefix()) != null); // IPv6 } /** * Adds the channel for a BGP session. * * @param channel the channel to add */ void addSessionChannel(Channel channel) { allChannels.add(channel); } /** * Removes the channel for a BGP session. * * @param channel the channel to remove */ void removeSessionChannel(Channel channel) { allChannels.remove(channel); } /** * Processes the connection from a BGP peer. * * @param bgpSession the BGP session for the peer * @return true if the connection can be established, otherwise false */ boolean peerConnected(BgpSession bgpSession) { // Test whether there is already a session from the same remote if (bgpSessions.get(bgpSession.remoteInfo().address()) != null) { return false; // Duplicate BGP session } bgpSessions.put(bgpSession.remoteInfo().address(), bgpSession); // // If the first connection, set my BGP ID to the local address // of the socket. // if (bgpSession.localInfo().address() instanceof InetSocketAddress) { InetAddress inetAddr = ((InetSocketAddress) bgpSession.localInfo().address()).getAddress(); Ip4Address ip4Address = Ip4Address.valueOf(inetAddr.getAddress()); updateMyBgpId(ip4Address); } return true; } /** * Processes the disconnection from a BGP peer. 
     * @param bgpSession the BGP session for the peer
     */
    void peerDisconnected(BgpSession bgpSession) {
        bgpSessions.remove(bgpSession.remoteInfo().address());
    }

    /**
     * Conditionally updates the local BGP ID if it wasn't set already.
     * <p/>
     * NOTE: A BGP instance should use same BGP ID across all BGP sessions.
     *
     * @param ip4Address the IPv4 address to use as BGP ID
     */
    private synchronized void updateMyBgpId(Ip4Address ip4Address) {
        if (myBgpId == null) {
            myBgpId = ip4Address;
            log.debug("BGP: My BGP ID is {}", myBgpId);
        }
    }

    /**
     * Gets the local BGP Identifier as an IPv4 address.
     *
     * @return the local BGP Identifier as an IPv4 address
     */
    Ip4Address getMyBgpId() {
        return myBgpId;
    }

    /**
     * Gets the BGP Route Selector.
     *
     * @return the BGP Route Selector
     */
    BgpRouteSelector getBgpRouteSelector() {
        return bgpRouteSelector;
    }

    /**
     * Sends updates routes to the route service.
     *
     * @param updates routes to update
     */
    void update(Collection<Route> updates) {
        routeService.update(updates);
    }

    /**
     * Sends withdrawn routes to the routes service.
     *
     * @param withdraws routes to withdraw
     */
    void withdraw(Collection<Route> withdraws) {
        routeService.withdraw(withdraws);
    }

    // Opens the BGP listen socket and installs a per-connection pipeline
    // (frame decoder + session handler). Called from activate().
    public void start() {
        log.debug("BGP Session Manager start.");
        isShutdown = false;

        ChannelFactory channelFactory = new NioServerSocketChannelFactory(
                newCachedThreadPool(groupedThreads("onos/bgp", "sm-boss-%d", log)),
                newCachedThreadPool(groupedThreads("onos/bgp", "sm-worker-%d", log)));
        ChannelPipelineFactory pipelineFactory = () -> {
            // Allocate a new session per connection
            BgpSession bgpSessionHandler =
                new BgpSession(BgpSessionManager.this);
            BgpFrameDecoder bgpFrameDecoder =
                new BgpFrameDecoder(bgpSessionHandler);

            // Setup the processing pipeline
            ChannelPipeline pipeline = Channels.pipeline();
            pipeline.addLast("BgpFrameDecoder", bgpFrameDecoder);
            pipeline.addLast("BgpSession", bgpSessionHandler);
            return pipeline;
        };
        InetSocketAddress listenAddress =
            new InetSocketAddress(bgpPort);

        serverBootstrap = new ServerBootstrap(channelFactory);
        // serverBootstrap.setOptions("reuseAddr", true);
        serverBootstrap.setOption("child.keepAlive", true);
        serverBootstrap.setOption("child.tcpNoDelay", true);
        serverBootstrap.setPipelineFactory(pipelineFactory);
        try {
            serverChannel = serverBootstrap.bind(listenAddress);
            allChannels.add(serverChannel);
        } catch (ChannelException e) {
            // Bind failure is logged but not rethrown; the component stays
            // up without a listen socket.
            log.debug("Exception binding to BGP port {}: ",
                      listenAddress.getPort(), e);
        }
    }

    // Closes every session channel plus the listen channel, then releases
    // the Netty thread pools. Called from deactivate().
    public void stop() {
        isShutdown = true;
        allChannels.close().awaitUninterruptibly();
        serverBootstrap.releaseExternalResources();
    }
}
/** * Copyright The Apache Software Foundation * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.io.hfile.bucket; import java.util.Arrays; import java.util.Comparator; import java.util.HashSet; import java.util.Iterator; import java.util.Map; import java.util.Queue; import java.util.Set; import java.util.concurrent.atomic.AtomicLong; import org.apache.hadoop.hbase.shaded.com.google.common.collect.MinMaxPriorityQueue; import org.apache.commons.collections.map.LinkedMap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.io.hfile.BlockCacheKey; import org.apache.hadoop.hbase.io.hfile.CacheConfig; import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.BucketEntry; import org.codehaus.jackson.annotate.JsonIgnoreProperties; import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.primitives.Ints; /** * This class is used to allocate a block with specified size and free the block * when evicting. 
It manages an array of buckets, each bucket is associated with
 * a size and caches elements up to this size. For a completely empty bucket, this
 * size could be re-specified dynamically.
 *
 * This class is not thread safe.
 */
@InterfaceAudience.Private
@JsonIgnoreProperties({"indexStatistics", "freeSize", "usedSize"})
public final class BucketAllocator {
  private static final Log LOG = LogFactory.getLog(BucketAllocator.class);

  /**
   * A fixed-size region of the cache carved into equal items of
   * itemAllocationSize bytes. Free items are tracked in a simple array-based
   * free list (freeList[0..freeCount-1] holds the free item indexes).
   */
  @JsonIgnoreProperties({"completelyFree", "uninstantiated"})
  public final static class Bucket {
    private long baseOffset;
    private int itemAllocationSize, sizeIndex;
    private int itemCount;
    private int freeList[];
    private int freeCount, usedCount;

    public Bucket(long offset) {
      baseOffset = offset;
      sizeIndex = -1;       // -1 marks an uninstantiated bucket
    }

    // (Re)binds this bucket to the item size at sizeIndex and rebuilds the
    // free list so every item is initially free. Only legal for buckets that
    // are uninstantiated or completely free.
    void reconfigure(int sizeIndex, int[] bucketSizes, long bucketCapacity) {
      Preconditions.checkElementIndex(sizeIndex, bucketSizes.length);
      this.sizeIndex = sizeIndex;
      itemAllocationSize = bucketSizes[sizeIndex];
      itemCount = (int) (bucketCapacity / (long) itemAllocationSize);
      freeCount = itemCount;
      usedCount = 0;
      freeList = new int[itemCount];
      for (int i = 0; i < freeCount; ++i)
        freeList[i] = i;
    }

    public boolean isUninstantiated() {
      return sizeIndex == -1;
    }

    public int sizeIndex() {
      return sizeIndex;
    }

    public int getItemAllocationSize() {
      return itemAllocationSize;
    }

    public boolean hasFreeSpace() {
      return freeCount > 0;
    }

    public boolean isCompletelyFree() {
      return usedCount == 0;
    }

    public int freeCount() {
      return freeCount;
    }

    public int usedCount() {
      return usedCount;
    }

    public int getFreeBytes() {
      return freeCount * itemAllocationSize;
    }

    public int getUsedBytes() {
      return usedCount * itemAllocationSize;
    }

    public long getBaseOffset() {
      return baseOffset;
    }

    /**
     * Allocate a block in this bucket, return the offset representing the
     * position in physical space
     * @return the offset in the IOEngine
     */
    public long allocate() {
      assert freeCount > 0; // Else should not have been called
      assert sizeIndex != -1;
      ++usedCount;
      // Pop the last free item and convert its index to a physical offset.
      long offset = baseOffset + (freeList[--freeCount] * itemAllocationSize);
      assert offset >= 0;
      return offset;
    }

    // Marks a pre-existing allocation (e.g. recovered from a persisted cache)
    // as used: removes its item index from the free list, compacting the list
    // in place as it scans.
    public void addAllocation(long offset) throws BucketAllocatorException {
      offset -= baseOffset;
      if (offset < 0 || offset % itemAllocationSize != 0)
        throw new BucketAllocatorException(
            "Attempt to add allocation for bad offset: " + offset + " base="
                + baseOffset + ", bucket size=" + itemAllocationSize);
      int idx = (int) (offset / itemAllocationSize);
      boolean matchFound = false;
      for (int i = 0; i < freeCount; ++i) {
        if (matchFound) freeList[i - 1] = freeList[i];
        else if (freeList[i] == idx) matchFound = true;
      }
      if (!matchFound)
        throw new BucketAllocatorException("Couldn't find match for index "
            + idx + " in free list");
      ++usedCount;
      --freeCount;
    }

    // Returns the item at the given physical offset to the free list.
    private void free(long offset) {
      offset -= baseOffset;
      assert offset >= 0;
      assert offset < itemCount * itemAllocationSize;
      assert offset % itemAllocationSize == 0;
      assert usedCount > 0;
      assert freeCount < itemCount; // Else duplicate free
      int item = (int) (offset / (long) itemAllocationSize);
      assert !freeListContains(item);
      --usedCount;
      freeList[freeCount++] = item;
    }

    // Linear scan of the live portion of the free list; assert-only helper.
    private boolean freeListContains(int blockNo) {
      for (int i = 0; i < freeCount; ++i) {
        if (freeList[i] == blockNo) return true;
      }
      return false;
    }
  }

  final class BucketSizeInfo {
    // Free bucket means it has space to allocate a block;
    // Completely free bucket means it has no block.
private LinkedMap bucketList, freeBuckets, completelyFreeBuckets; private int sizeIndex; BucketSizeInfo(int sizeIndex) { bucketList = new LinkedMap(); freeBuckets = new LinkedMap(); completelyFreeBuckets = new LinkedMap(); this.sizeIndex = sizeIndex; } public synchronized void instantiateBucket(Bucket b) { assert b.isUninstantiated() || b.isCompletelyFree(); b.reconfigure(sizeIndex, bucketSizes, bucketCapacity); bucketList.put(b, b); freeBuckets.put(b, b); completelyFreeBuckets.put(b, b); } public int sizeIndex() { return sizeIndex; } /** * Find a bucket to allocate a block * @return the offset in the IOEngine */ public long allocateBlock() { Bucket b = null; if (freeBuckets.size() > 0) { // Use up an existing one first... b = (Bucket) freeBuckets.lastKey(); } if (b == null) { b = grabGlobalCompletelyFreeBucket(); if (b != null) instantiateBucket(b); } if (b == null) return -1; long result = b.allocate(); blockAllocated(b); return result; } void blockAllocated(Bucket b) { if (!b.isCompletelyFree()) completelyFreeBuckets.remove(b); if (!b.hasFreeSpace()) freeBuckets.remove(b); } public Bucket findAndRemoveCompletelyFreeBucket() { Bucket b = null; assert bucketList.size() > 0; if (bucketList.size() == 1) { // So we never get complete starvation of a bucket for a size return null; } if (completelyFreeBuckets.size() > 0) { b = (Bucket) completelyFreeBuckets.firstKey(); removeBucket(b); } return b; } private synchronized void removeBucket(Bucket b) { assert b.isCompletelyFree(); bucketList.remove(b); freeBuckets.remove(b); completelyFreeBuckets.remove(b); } public void freeBlock(Bucket b, long offset) { assert bucketList.containsKey(b); // else we shouldn't have anything to free... 
assert (!completelyFreeBuckets.containsKey(b)); b.free(offset); if (!freeBuckets.containsKey(b)) freeBuckets.put(b, b); if (b.isCompletelyFree()) completelyFreeBuckets.put(b, b); } public synchronized IndexStatistics statistics() { long free = 0, used = 0; for (Object obj : bucketList.keySet()) { Bucket b = (Bucket) obj; free += b.freeCount(); used += b.usedCount(); } return new IndexStatistics(free, used, bucketSizes[sizeIndex]); } @Override public String toString() { return MoreObjects.toStringHelper(this.getClass()) .add("sizeIndex", sizeIndex) .add("bucketSize", bucketSizes[sizeIndex]) .toString(); } } // Default block size in hbase is 64K, so we choose more sizes near 64K, you'd better // reset it according to your cluster's block size distribution // TODO Support the view of block size distribution statistics // TODO: Why we add the extra 1024 bytes? Slop? private static final int DEFAULT_BUCKET_SIZES[] = { 4 * 1024 + 1024, 8 * 1024 + 1024, 16 * 1024 + 1024, 32 * 1024 + 1024, 40 * 1024 + 1024, 48 * 1024 + 1024, 56 * 1024 + 1024, 64 * 1024 + 1024, 96 * 1024 + 1024, 128 * 1024 + 1024, 192 * 1024 + 1024, 256 * 1024 + 1024, 384 * 1024 + 1024, 512 * 1024 + 1024 }; /** * Round up the given block size to bucket size, and get the corresponding * BucketSizeInfo */ public BucketSizeInfo roundUpToBucketSizeInfo(int blockSize) { for (int i = 0; i < bucketSizes.length; ++i) if (blockSize <= bucketSizes[i]) return bucketSizeInfos[i]; return null; } /** * So, what is the minimum amount of items we'll tolerate in a single bucket? */ static public final int FEWEST_ITEMS_IN_BUCKET = 4; private final int[] bucketSizes; private final int bigItemSize; // The capacity size for each bucket private final long bucketCapacity; private Bucket[] buckets; private BucketSizeInfo[] bucketSizeInfos; private final long totalSize; private long usedSize = 0; BucketAllocator(long availableSpace, int[] bucketSizes) throws BucketAllocatorException { this.bucketSizes = bucketSizes == null ? 
DEFAULT_BUCKET_SIZES : bucketSizes; Arrays.sort(this.bucketSizes); this.bigItemSize = Ints.max(this.bucketSizes); this.bucketCapacity = FEWEST_ITEMS_IN_BUCKET * bigItemSize; buckets = new Bucket[(int) (availableSpace / bucketCapacity)]; if (buckets.length < this.bucketSizes.length) throw new BucketAllocatorException("Bucket allocator size too small (" + buckets.length + "); must have room for at least " + this.bucketSizes.length + " buckets"); bucketSizeInfos = new BucketSizeInfo[this.bucketSizes.length]; for (int i = 0; i < this.bucketSizes.length; ++i) { bucketSizeInfos[i] = new BucketSizeInfo(i); } for (int i = 0; i < buckets.length; ++i) { buckets[i] = new Bucket(bucketCapacity * i); bucketSizeInfos[i < this.bucketSizes.length ? i : this.bucketSizes.length - 1] .instantiateBucket(buckets[i]); } this.totalSize = ((long) buckets.length) * bucketCapacity; if (LOG.isInfoEnabled()) { LOG.info("Cache totalSize=" + this.totalSize + ", buckets=" + this.buckets.length + ", bucket capacity=" + this.bucketCapacity + "=(" + FEWEST_ITEMS_IN_BUCKET + "*" + this.bigItemSize + ")=" + "(FEWEST_ITEMS_IN_BUCKET*(largest configured bucketcache size))"); } } /** * Rebuild the allocator's data structures from a persisted map. * @param availableSpace capacity of cache * @param map A map stores the block key and BucketEntry(block's meta data * like offset, length) * @param realCacheSize cached data size statistics for bucket cache * @throws BucketAllocatorException */ BucketAllocator(long availableSpace, int[] bucketSizes, Map<BlockCacheKey, BucketEntry> map, AtomicLong realCacheSize) throws BucketAllocatorException { this(availableSpace, bucketSizes); // each bucket has an offset, sizeindex. probably the buckets are too big // in our default state. so what we do is reconfigure them according to what // we've found. we can only reconfigure each bucket once; if more than once, // we know there's a bug, so we just log the info, throw, and start again... 
boolean[] reconfigured = new boolean[buckets.length]; int sizeNotMatchedCount = 0; int insufficientCapacityCount = 0; Iterator<Map.Entry<BlockCacheKey, BucketEntry>> iterator = map.entrySet().iterator(); while (iterator.hasNext()) { Map.Entry<BlockCacheKey, BucketEntry> entry = iterator.next(); long foundOffset = entry.getValue().offset(); int foundLen = entry.getValue().getLength(); int bucketSizeIndex = -1; for (int i = 0; i < this.bucketSizes.length; ++i) { if (foundLen <= this.bucketSizes[i]) { bucketSizeIndex = i; break; } } if (bucketSizeIndex == -1) { sizeNotMatchedCount++; iterator.remove(); continue; } int bucketNo = (int) (foundOffset / bucketCapacity); if (bucketNo < 0 || bucketNo >= buckets.length) { insufficientCapacityCount++; iterator.remove(); continue; } Bucket b = buckets[bucketNo]; if (reconfigured[bucketNo]) { if (b.sizeIndex() != bucketSizeIndex) { throw new BucketAllocatorException("Inconsistent allocation in bucket map;"); } } else { if (!b.isCompletelyFree()) { throw new BucketAllocatorException( "Reconfiguring bucket " + bucketNo + " but it's already allocated; corrupt data"); } // Need to remove the bucket from whichever list it's currently in at // the moment... 
BucketSizeInfo bsi = bucketSizeInfos[bucketSizeIndex]; BucketSizeInfo oldbsi = bucketSizeInfos[b.sizeIndex()]; oldbsi.removeBucket(b); bsi.instantiateBucket(b); reconfigured[bucketNo] = true; } realCacheSize.addAndGet(foundLen); buckets[bucketNo].addAllocation(foundOffset); usedSize += buckets[bucketNo].getItemAllocationSize(); bucketSizeInfos[bucketSizeIndex].blockAllocated(b); } if (sizeNotMatchedCount > 0) { LOG.warn("There are " + sizeNotMatchedCount + " blocks which can't be rebuilt because " + "there is no matching bucket size for these blocks"); } if (insufficientCapacityCount > 0) { LOG.warn("There are " + insufficientCapacityCount + " blocks which can't be rebuilt - " + "did you shrink the cache?"); } } public String toString() { StringBuilder sb = new StringBuilder(1024); for (int i = 0; i < buckets.length; ++i) { Bucket b = buckets[i]; if (i > 0) sb.append(", "); sb.append("bucket.").append(i).append(": size=").append(b.getItemAllocationSize()); sb.append(", freeCount=").append(b.freeCount()).append(", used=").append(b.usedCount()); } return sb.toString(); } public long getUsedSize() { return this.usedSize; } public long getFreeSize() { return this.totalSize - getUsedSize(); } public long getTotalSize() { return this.totalSize; } /** * Allocate a block with specified size. Return the offset * @param blockSize size of block * @throws BucketAllocatorException * @throws CacheFullException * @return the offset in the IOEngine */ public synchronized long allocateBlock(int blockSize) throws CacheFullException, BucketAllocatorException { assert blockSize > 0; BucketSizeInfo bsi = roundUpToBucketSizeInfo(blockSize); if (bsi == null) { throw new BucketAllocatorException("Allocation too big size=" + blockSize + "; adjust BucketCache sizes " + CacheConfig.BUCKET_CACHE_BUCKETS_KEY + " to accomodate if size seems reasonable and you want it cached."); } long offset = bsi.allocateBlock(); // Ask caller to free up space and try again! 
if (offset < 0) throw new CacheFullException(blockSize, bsi.sizeIndex()); usedSize += bucketSizes[bsi.sizeIndex()]; return offset; } private Bucket grabGlobalCompletelyFreeBucket() { for (BucketSizeInfo bsi : bucketSizeInfos) { Bucket b = bsi.findAndRemoveCompletelyFreeBucket(); if (b != null) return b; } return null; } /** * Free a block with the offset * @param offset block's offset * @return size freed */ public synchronized int freeBlock(long offset) { int bucketNo = (int) (offset / bucketCapacity); assert bucketNo >= 0 && bucketNo < buckets.length; Bucket targetBucket = buckets[bucketNo]; bucketSizeInfos[targetBucket.sizeIndex()].freeBlock(targetBucket, offset); usedSize -= targetBucket.getItemAllocationSize(); return targetBucket.getItemAllocationSize(); } public int sizeIndexOfAllocation(long offset) { int bucketNo = (int) (offset / bucketCapacity); assert bucketNo >= 0 && bucketNo < buckets.length; Bucket targetBucket = buckets[bucketNo]; return targetBucket.sizeIndex(); } public int sizeOfAllocation(long offset) { int bucketNo = (int) (offset / bucketCapacity); assert bucketNo >= 0 && bucketNo < buckets.length; Bucket targetBucket = buckets[bucketNo]; return targetBucket.getItemAllocationSize(); } static class IndexStatistics { private long freeCount, usedCount, itemSize, totalCount; public long freeCount() { return freeCount; } public long usedCount() { return usedCount; } public long totalCount() { return totalCount; } public long freeBytes() { return freeCount * itemSize; } public long usedBytes() { return usedCount * itemSize; } public long totalBytes() { return totalCount * itemSize; } public long itemSize() { return itemSize; } public IndexStatistics(long free, long used, long itemSize) { setTo(free, used, itemSize); } public IndexStatistics() { setTo(-1, -1, 0); } public void setTo(long free, long used, long itemSize) { this.itemSize = itemSize; this.freeCount = free; this.usedCount = used; this.totalCount = free + used; } } public Bucket [] 
getBuckets() { return this.buckets; } void logStatistics() { IndexStatistics total = new IndexStatistics(); IndexStatistics[] stats = getIndexStatistics(total); LOG.info("Bucket allocator statistics follow:\n"); LOG.info(" Free bytes=" + total.freeBytes() + "+; used bytes=" + total.usedBytes() + "; total bytes=" + total.totalBytes()); for (IndexStatistics s : stats) { LOG.info(" Object size " + s.itemSize() + " used=" + s.usedCount() + "; free=" + s.freeCount() + "; total=" + s.totalCount()); } } IndexStatistics[] getIndexStatistics(IndexStatistics grandTotal) { IndexStatistics[] stats = getIndexStatistics(); long totalfree = 0, totalused = 0; for (IndexStatistics stat : stats) { totalfree += stat.freeBytes(); totalused += stat.usedBytes(); } grandTotal.setTo(totalfree, totalused, 1); return stats; } IndexStatistics[] getIndexStatistics() { IndexStatistics[] stats = new IndexStatistics[bucketSizes.length]; for (int i = 0; i < stats.length; ++i) stats[i] = bucketSizeInfos[i].statistics(); return stats; } public long freeBlock(long freeList[]) { long sz = 0; for (int i = 0; i < freeList.length; ++i) sz += freeBlock(freeList[i]); return sz; } public int getBucketIndex(long offset) { return (int) (offset / bucketCapacity); } /** * Returns a set of indices of the buckets that are least filled * excluding the offsets, we also the fully free buckets for the * BucketSizes where everything is empty and they only have one * completely free bucket as a reserved * * @param excludedBuckets the buckets that need to be excluded due to * currently being in used * @param bucketCount max Number of buckets to return * @return set of bucket indices which could be used for eviction */ public Set<Integer> getLeastFilledBuckets(Set<Integer> excludedBuckets, int bucketCount) { Queue<Integer> queue = MinMaxPriorityQueue.<Integer>orderedBy( new Comparator<Integer>() { @Override public int compare(Integer left, Integer right) { // We will always get instantiated buckets return Float.compare( 
((float) buckets[left].usedCount) / buckets[left].itemCount, ((float) buckets[right].usedCount) / buckets[right].itemCount); } }).maximumSize(bucketCount).create(); for (int i = 0; i < buckets.length; i ++ ) { if (!excludedBuckets.contains(i) && !buckets[i].isUninstantiated() && // Avoid the buckets that are the only buckets for a sizeIndex bucketSizeInfos[buckets[i].sizeIndex()].bucketList.size() != 1) { queue.add(i); } } Set<Integer> result = new HashSet<>(bucketCount); result.addAll(queue); return result; } }
/* The following code was generated by JFlex 1.5.1 */ package org.jetbrains.haskell.parser.lexer; import java.util.*; import com.intellij.lexer.*; import com.intellij.psi.*; import org.jetbrains.haskell.parser.token.*; import com.intellij.psi.tree.IElementType; import org.jetbrains.haskell.parser.cpp.CPPTokens; import org.jetbrains.grammar.HaskellLexerTokens; /** * This class is a scanner generated by * <a href="http://www.jflex.de/">JFlex</a> 1.5.1 * from the specification file <tt>/home/atsky/work/haskell-idea-plugin/plugin/src/org/jetbrains/haskell/parser/lexer/Haskell.flex</tt> */ class _HaskellLexer implements FlexLexer { /** initial size of the lookahead buffer */ private static final int ZZ_BUFFERSIZE = 16384; /** lexical states */ public static final int YYINITIAL = 0; public static final int BLOCK_COMMENT = 2; public static final int TEX = 4; public static final int LAMBDA = 6; public static final int QUASI_QUOTE = 8; /** * ZZ_LEXSTATE[l] is the state in the DFA for the lexical state l * ZZ_LEXSTATE[l+1] is the state in the DFA for the lexical state l * at the beginning of a line * l is of the form l = 2*k, k a non negative integer */ private static final int ZZ_LEXSTATE[] = { 0, 0, 1, 1, 2, 2, 3, 3, 4, 4 }; /** * Translates characters to character classes */ private static final String ZZ_CMAP_PACKED = "\3\0\1\4\1\0\1\1\3\0\1\1\1\2\1\11\1\10\1\10"+ "\22\0\1\34\1\124\1\7\1\113\1\35\1\6\1\12\1\33\1\111"+ "\1\112\1\40\1\6\1\116\1\101\1\115\1\6\1\135\1\73\1\73"+ "\1\73\1\73\3\30\2\3\1\27\1\114\1\120\1\117\1\121\1\136"+ "\1\43\1\63\1\66\1\64\1\32\1\61\1\67\1\76\1\56\1\72"+ "\1\25\1\65\1\53\1\75\1\51\1\55\1\77\1\62\1\71\1\54"+ "\1\57\1\52\1\70\1\134\1\60\1\74\1\25\1\42\1\41\1\110"+ "\1\44\1\26\1\5\1\50\1\16\1\31\1\37\1\102\1\46\1\103"+ "\1\130\1\36\1\133\1\23\1\20\1\15\1\104\1\17\1\127\1\132"+ "\1\47\1\13\1\125\1\126\1\45\1\131\1\100\1\14\1\23\1\105"+ "\1\107\1\106\1\123\6\0\1\11\32\0\1\1\11\6\1\23\2\6"+ "\1\0\4\6\2\0\1\6\1\23\3\6\1\0\1\23\1\6\3\0"+ 
"\1\6\27\21\1\6\7\21\30\23\1\6\10\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\2\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\2\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\2\21\1\23\1\21\1\23\1\21"+ "\3\23\2\21\1\23\1\21\1\23\2\21\1\23\3\21\2\23\4\21"+ "\1\23\2\21\1\23\3\21\3\23\2\21\1\23\2\21\1\23\1\21"+ "\1\23\1\21\1\23\2\21\1\23\1\21\2\23\1\21\1\23\2\21"+ "\1\23\3\21\1\23\1\21\1\23\2\21\2\23\1\0\1\21\3\23"+ "\4\0\1\21\1\0\1\23\1\21\1\0\1\23\1\21\1\0\1\23"+ "\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23"+ "\1\21\1\23\1\21\1\23\1\21\2\23\1\21\1\23\1\21\1\23"+ "\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23"+ "\1\21\1\23\1\21\2\23\1\21\1\0\1\23\1\21\1\23\3\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\7\23\2\21"+ "\1\23\2\21\2\23\1\21\1\23\4\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\105\23\1\0\44\23\7\0\2\23\4\6"+ "\14\0\16\6\5\23\7\6\1\0\1\6\1\0\21\6\105\0\1\23"+ "\52\0\1\21\1\23\1\21\1\23\1\0\1\6\1\21\1\23\2\0"+ "\4\23\1\6\5\0\2\6\1\21\1\6\3\21\1\0\1\21\1\0"+ "\2\21\1\23\21\21\1\0\11\21\43\23\1\21\2\23\3\21\3\23"+ "\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23"+ "\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23"+ "\1\21\1\23\1\21\5\23\1\21\1\23\1\6\1\21\1\23\2\21"+ 
"\2\23\63\21\60\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\6\7\0\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\2\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\2\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\11\0\46\21\3\0\6\6\1\0"+ "\47\23\1\0\2\6\4\0\1\6\56\0\1\6\1\0\1\6\2\0"+ "\1\6\2\0\1\6\54\0\2\6\21\0\12\6\13\0\1\6\2\0"+ "\2\6\112\0\4\6\146\0\1\6\11\0\1\6\12\0\1\6\23\0"+ "\2\6\1\0\16\6\350\0\4\6\66\0\17\6\37\0\1\6\u0105\0"+ "\2\6\12\0\1\6\201\0\2\6\6\0\2\6\364\0\2\6\176\0"+ "\1\6\202\0\10\6\204\0\1\6\371\0\1\6\172\0\1\6\112\0"+ "\1\6\17\0\1\6\12\0\2\6\245\0\27\6\2\0\6\6\24\0"+ "\1\6\1\0\1\6\1\0\1\6\1\0\4\6\107\0\1\6\70\0"+ "\10\6\1\0\6\6\1\0\15\6\157\0\6\6\116\0\2\6\46\21"+ "\1\0\1\21\5\0\1\21\55\0\1\6\u0264\0\11\6\47\0\12\6"+ "\146\0\1\6\u026c\0\2\6\54\0\2\6\116\0\3\6\107\0\2\6"+ "\235\0\3\6\1\0\4\6\44\0\13\6\u0135\0\1\6\3\0\2\6"+ "\230\0\42\6\36\0\2\6\200\0\7\6\1\0\6\6\254\0\21\6"+ "\11\0\11\6\177\0\4\6\73\0\5\6\76\0\2\6\100\0\10\6"+ "\13\0\1\6\54\0\300\23\100\0\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ 
"\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\11\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\11\23\10\21\6\23\2\0\6\21\2\0\10\23\10\21"+ "\10\23\10\21\6\23\2\0\6\21\2\0\10\23\1\0\1\21\1\0"+ "\1\21\1\0\1\21\1\0\1\21\10\23\10\21\16\23\2\0\10\23"+ "\10\0\10\23\10\0\10\23\10\0\5\23\1\0\2\23\4\21\1\0"+ "\1\6\1\23\3\6\3\23\1\0\2\23\4\21\1\0\3\6\4\23"+ "\2\0\2\23\4\21\1\0\3\6\10\23\5\21\3\6\2\0\3\23"+ "\1\0\2\23\4\21\1\0\2\6\21\0\30\6\1\11\1\11\6\0"+ "\57\6\22\0\1\23\10\0\5\6\1\23\12\0\5\6\1\0\15\23"+ "\3\0\33\6\105\0\2\6\1\21\4\6\1\21\2\6\1\23\3\21"+ "\2\23\3\21\1\23\1\6\1\21\3\6\5\21\6\6\1\21\1\6"+ "\1\21\1\6\1\21\1\6\4\21\1\6\1\23\4\21\1\23\4\0"+ "\1\23\2\6\2\23\2\21\5\6\1\21\4\23\4\6\1\23\1\6"+ "\20\0\20\21\20\23\3\0\1\21\1\23\13\0\2\6\1\122\155\6"+ "\1\6\u01f3\6\14\0\47\6\31\0\13\6\121\0\32\6\32\22\32\24"+ "\26\0\u0200\6\1\0\165\6\36\0\u03b9\6\3\0\12\6\246\0\57\21"+ 
"\1\0\57\23\1\0\1\21\1\23\3\21\2\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\4\21\1\23\1\21\2\23\1\21\10\23\3\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\2\23\6\6"+ "\1\21\1\23\1\21\1\23\3\0\1\21\1\23\5\0\4\6\1\0"+ "\2\6\46\23\1\0\1\23\5\0\1\23\102\0\1\6\217\0\57\6"+ "\1\0\14\6\104\0\32\6\1\0\131\6\14\0\326\6\32\0\14\6"+ "\5\0\4\6\3\0\31\6\17\0\1\6\5\0\2\6\5\0\3\6"+ "\133\0\2\6\3\0\1\6\132\0\1\6\224\0\2\6\4\0\12\6"+ "\40\0\44\6\34\0\37\6\13\0\36\6\10\0\1\6\17\0\40\6"+ "\12\0\47\6\17\0\77\6\1\0\u0100\6\u19c0\0\100\6\u5690\0\67\6"+ "\67\0\2\6\u010d\0\3\6\60\0\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21"+ "\1\23\5\0\1\6\12\0\1\6\1\0\1\21\1\23\1\21\1\23"+ "\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23"+ "\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23"+ "\132\0\6\6\10\0\27\6\11\0\2\6\1\21\1\23\1\21\1\23"+ "\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\3\23"+ "\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23"+ "\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23"+ "\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23"+ "\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23"+ "\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23"+ "\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23"+ "\1\21\12\23\1\21\1\23\1\21\1\23\2\21\1\23\1\21\1\23"+ "\1\21\1\23\1\21\1\23\1\21\1\23\1\0\2\6\1\21\1\23"+ 
"\1\21\1\23\1\0\1\21\1\23\1\21\1\23\14\0\1\21\1\23"+ "\1\21\1\23\1\21\1\23\1\21\1\23\1\21\1\23\1\21\115\0"+ "\3\23\55\0\4\6\12\0\4\6\72\0\4\6\126\0\2\6\50\0"+ "\3\6\63\0\2\6\57\0\1\6\141\0\15\6\20\0\2\6\174\0"+ "\4\6\27\0\3\6\144\0\2\6\20\0\2\6\371\0\1\6\u4f14\0"+ "\7\23\14\0\5\23\21\0\1\6\210\0\20\6\u017c\0\2\6\274\0"+ "\2\6\22\0\12\6\26\0\43\6\1\0\23\6\1\0\4\6\225\0"+ "\17\6\12\0\7\6\32\21\6\6\32\23\13\6\172\0\7\6\1\0"+ "\7\6\15\0\2\6\2\0"; /** * Translates characters to character classes */ private static final char [] ZZ_CMAP = zzUnpackCMap(ZZ_CMAP_PACKED); /** * Translates DFA states to action switch labels. */ private static final int [] ZZ_ACTION = zzUnpackAction(); private static final String ZZ_ACTION_PACKED_0 = "\2\0\1\1\2\0\1\2\1\3\1\4\1\5\1\6"+ "\1\7\1\2\5\10\1\11\2\7\1\12\1\13\1\10"+ "\1\14\1\7\2\10\1\15\1\16\1\17\1\20\3\10"+ "\1\21\2\10\1\22\1\23\1\24\1\25\1\26\1\27"+ "\1\7\1\30\1\31\1\32\1\33\1\7\1\34\1\35"+ "\1\36\7\10\1\5\1\7\3\37\2\1\2\40\1\37"+ "\1\5\1\0\1\41\1\0\4\10\1\42\1\10\1\11"+ "\1\0\1\7\1\43\1\44\1\45\3\10\1\0\1\46"+ "\1\0\1\7\1\47\1\10\1\50\1\51\1\52\2\10"+ "\2\0\1\53\1\0\1\54\3\10\1\55\1\56\3\10"+ "\1\57\1\7\1\60\1\61\1\0\1\62\1\0\1\7"+ "\1\0\1\63\1\64\1\65\1\66\10\10\2\5\1\67"+ "\1\7\1\70\1\71\1\0\1\1\1\0\1\72\1\5"+ "\17\0\3\10\1\73\1\74\1\75\2\7\4\10\1\76"+ "\17\0\1\77\6\10\1\0\2\100\1\101\6\10\1\0"+ "\1\102\1\0\1\103\1\104\2\0\10\10\14\0\1\105"+ "\2\10\1\74\1\75\2\10\1\106\1\107\15\0\3\10"+ "\1\110\2\10\1\0\3\10\1\111\1\112\2\10\6\0"+ "\1\113\1\114\1\10\1\115\4\10\1\0\1\116\2\10"+ "\1\117\1\120\2\10\1\121\2\10\1\0\5\10\17\0"+ "\1\122\1\0\2\10\1\123\2\10\1\0\1\10\1\124"+ "\1\125\1\10\1\126\1\127\2\10\1\0\1\130\1\10"+ "\1\131\1\132\1\10\20\0\1\133\3\0\1\134\1\135"+ "\1\136\2\10\1\0\1\137\1\10\1\140\1\10\1\0"+ "\1\141\1\142\5\0\1\143\2\0\1\144\12\0\2\10"+ "\1\0\1\145\1\146\10\0\1\147\11\0\2\10\11\0"+ "\1\150\1\0\1\151\6\0\1\152\1\10\1\0\1\153"+ "\3\0\1\154\1\155\3\0\1\156\4\0\1\157\1\160"+ "\10\0\1\161\1\0\1\162\2\0\1\163\2\0\1\164"+ 
"\4\0\1\165\5\0\1\166\1\0\1\167\1\170\1\0"+ "\1\171\3\0\1\172\1\173\2\0\1\174\2\0\1\175"+ "\10\0\1\176\1\177"; private static int [] zzUnpackAction() { int [] result = new int[503]; int offset = 0; offset = zzUnpackAction(ZZ_ACTION_PACKED_0, offset, result); return result; } private static int zzUnpackAction(String packed, int offset, int [] result) { int i = 0; /* index in packed string */ int j = offset; /* index in unpacked array */ int l = packed.length(); while (i < l) { int count = packed.charAt(i++); int value = packed.charAt(i++); do result[j++] = value; while (--count > 0); } return j; } /** * Translates a state to a row index in the transition table */ private static final int [] ZZ_ROWMAP = zzUnpackRowMap(); private static final String ZZ_ROWMAP_PACKED_0 = "\0\0\0\137\0\276\0\u011d\0\u017c\0\u01db\0\u023a\0\u01db"+ "\0\u0299\0\u01db\0\u02f8\0\u0357\0\u03b6\0\u0415\0\u0474\0\u04d3"+ "\0\u0532\0\u0591\0\u05f0\0\u064f\0\u0415\0\u06ae\0\u070d\0\u076c"+ "\0\u07cb\0\u082a\0\u0889\0\u02f8\0\u08e8\0\u0947\0\u02f8\0\u09a6"+ "\0\u0a05\0\u0a64\0\u0ac3\0\u0b22\0\u0b81\0\u0be0\0\u01db\0\u0c3f"+ "\0\u01db\0\u0c9e\0\u01db\0\u0cfd\0\u01db\0\u0d5c\0\u01db\0\u0dbb"+ "\0\u0e1a\0\u02f8\0\u02f8\0\u02f8\0\u0e79\0\u0ed8\0\u0f37\0\u0f96"+ "\0\u0ff5\0\u1054\0\u10b3\0\u1112\0\u1171\0\u01db\0\u11d0\0\u122f"+ "\0\u128e\0\u12ed\0\u01db\0\u134c\0\u13ab\0\u140a\0\u0357\0\u01db"+ "\0\u1469\0\u14c8\0\u1527\0\u1586\0\u15e5\0\u0415\0\u1644\0\u16a3"+ "\0\u1702\0\u1761\0\u17c0\0\u17c0\0\u01db\0\u181f\0\u187e\0\u18dd"+ "\0\u193c\0\u01db\0\u199b\0\u19fa\0\u01db\0\u1a59\0\u0415\0\u1ab8"+ "\0\u0415\0\u1b17\0\u1b76\0\u1bd5\0\u1c34\0\u01db\0\u1c93\0\u1cf2"+ "\0\u1d51\0\u1db0\0\u1e0f\0\u0415\0\u1e6e\0\u1ecd\0\u1f2c\0\u1f8b"+ "\0\u1fea\0\u2049\0\u01db\0\u01db\0\u20a8\0\u01db\0\u2107\0\u2166"+ "\0\u21c5\0\u01db\0\u02f8\0\u02f8\0\u02f8\0\u2224\0\u2283\0\u22e2"+ "\0\u2341\0\u23a0\0\u23ff\0\u245e\0\u24bd\0\u251c\0\u257b\0\u25da"+ "\0\u2639\0\u01db\0\u01db\0\u2698\0\u26f7\0\u2756\0\u01db\0\u01db"+ 
"\0\u27b5\0\u2814\0\u2873\0\u28d2\0\u2931\0\u2990\0\u29ef\0\u2a4e"+ "\0\u2aad\0\u2b0c\0\u2b6b\0\u2bca\0\u2c29\0\u2c88\0\u2ce7\0\u2d46"+ "\0\u2da5\0\u2e04\0\u0415\0\u2e63\0\u2ec2\0\u2f21\0\u2f80\0\u2fdf"+ "\0\u303e\0\u309d\0\u30fc\0\u01db\0\u315b\0\u31ba\0\u3219\0\u3278"+ "\0\u32d7\0\u3336\0\u3395\0\u33f4\0\u3453\0\u34b2\0\u3511\0\u3570"+ "\0\u35cf\0\u362e\0\u368d\0\u01db\0\u36ec\0\u374b\0\u37aa\0\u3809"+ "\0\u3868\0\u38c7\0\u3926\0\u01db\0\u1cf2\0\u01db\0\u3985\0\u39e4"+ "\0\u3a43\0\u3aa2\0\u3b01\0\u3b60\0\u3bbf\0\u01db\0\u3c1e\0\u3c7d"+ "\0\u01db\0\u3cdc\0\u3d3b\0\u3d9a\0\u3df9\0\u3e58\0\u3eb7\0\u3f16"+ "\0\u3f75\0\u3fd4\0\u4033\0\u4092\0\u40f1\0\u4150\0\u41af\0\u420e"+ "\0\u426d\0\u42cc\0\u432b\0\u438a\0\u43e9\0\u4448\0\u44a7\0\u0415"+ "\0\u4506\0\u4565\0\u45c4\0\u4623\0\u4682\0\u46e1\0\u0415\0\u0415"+ "\0\u4740\0\u479f\0\u47fe\0\u485d\0\u48bc\0\u491b\0\u497a\0\u49d9"+ "\0\u4a38\0\u4a97\0\u4af6\0\u4b55\0\u4bb4\0\u4c13\0\u4c72\0\u4cd1"+ "\0\u0415\0\u4d30\0\u4d8f\0\u4dee\0\u4e4d\0\u4eac\0\u4f0b\0\u0415"+ "\0\u0415\0\u4f6a\0\u4fc9\0\u5028\0\u5087\0\u50e6\0\u5145\0\u51a4"+ "\0\u5203\0\u0415\0\u0415\0\u5262\0\u0415\0\u52c1\0\u5320\0\u537f"+ "\0\u53de\0\u543d\0\u01db\0\u549c\0\u54fb\0\u0415\0\u0415\0\u555a"+ "\0\u55b9\0\u5618\0\u5677\0\u56d6\0\u5735\0\u5794\0\u57f3\0\u5852"+ "\0\u58b1\0\u5910\0\u596f\0\u59ce\0\u5a2d\0\u5a8c\0\u5aeb\0\u5b4a"+ "\0\u5ba9\0\u5c08\0\u5c67\0\u5cc6\0\u5d25\0\u5d84\0\u5de3\0\u5e42"+ "\0\u5ea1\0\u5f00\0\u5f5f\0\u5fbe\0\u601d\0\u0415\0\u607c\0\u60db"+ "\0\u613a\0\u6199\0\u0415\0\u0415\0\u61f8\0\u0415\0\u0415\0\u6257"+ "\0\u62b6\0\u6315\0\u0415\0\u6374\0\u0415\0\u0415\0\u63d3\0\u6432"+ "\0\u6491\0\u64f0\0\u654f\0\u65ae\0\u660d\0\u666c\0\u66cb\0\u672a"+ "\0\u6789\0\u67e8\0\u6847\0\u68a6\0\u6905\0\u6964\0\u69c3\0\u5028"+ "\0\u6a22\0\u6a81\0\u6ae0\0\u6b3f\0\u0415\0\u0415\0\u6b9e\0\u6bfd"+ "\0\u6c5c\0\u0415\0\u6cbb\0\u0415\0\u6d1a\0\u6d79\0\u0415\0\u0415"+ "\0\u6dd8\0\u6e37\0\u6e96\0\u6ef5\0\u6f54\0\u5028\0\u6fb3\0\u7012"+ 
"\0\u5028\0\u7071\0\u70d0\0\u712f\0\u718e\0\u71ed\0\u724c\0\u72ab"+ "\0\u730a\0\u7369\0\u73c8\0\u7427\0\u7486\0\u74e5\0\u0415\0\u0415"+ "\0\u7544\0\u75a3\0\u7602\0\u7661\0\u76c0\0\u771f\0\u777e\0\u77dd"+ "\0\u5028\0\u783c\0\u789b\0\u78fa\0\u7959\0\u79b8\0\u7a17\0\u7a76"+ "\0\u7ad5\0\u7b34\0\u7b93\0\u7bf2\0\u7c51\0\u7cb0\0\u7d0f\0\u7d6e"+ "\0\u7dcd\0\u7e2c\0\u7e8b\0\u7eea\0\u7f49\0\u5028\0\u7fa8\0\u5028"+ "\0\u8007\0\u8066\0\u80c5\0\u8124\0\u8183\0\u81e2\0\u0415\0\u8241"+ "\0\u82a0\0\u01db\0\u82ff\0\u835e\0\u83bd\0\u5028\0\u5028\0\u841c"+ "\0\u847b\0\u84da\0\u5028\0\u8539\0\u8598\0\u85f7\0\u8656\0\u6a22"+ "\0\u0415\0\u86b5\0\u8714\0\u8773\0\u87d2\0\u8831\0\u8890\0\u88ef"+ "\0\u894e\0\u5028\0\u89ad\0\u5028\0\u8a0c\0\u8a6b\0\u5028\0\u8aca"+ "\0\u8b29\0\u5028\0\u8b88\0\u8be7\0\u8c46\0\u8ca5\0\u01db\0\u8d04"+ "\0\u8d63\0\u8dc2\0\u8e21\0\u8e80\0\u8edf\0\u8f3e\0\u5028\0\u5028"+ "\0\u8f9d\0\u8ffc\0\u905b\0\u90ba\0\u9119\0\u5028\0\u5028\0\u9178"+ "\0\u91d7\0\u5028\0\u9236\0\u9295\0\u5028\0\u92f4\0\u9353\0\u93b2"+ "\0\u9411\0\u9470\0\u94cf\0\u952e\0\u958d\0\u5028\0\u5028"; private static int [] zzUnpackRowMap() { int [] result = new int[503]; int offset = 0; offset = zzUnpackRowMap(ZZ_ROWMAP_PACKED_0, offset, result); return result; } private static int zzUnpackRowMap(String packed, int offset, int [] result) { int i = 0; /* index in packed string */ int j = offset; /* index in unpacked array */ int l = packed.length(); while (i < l) { int high = packed.charAt(i++) << 16; result[j++] = high | packed.charAt(i++); } return j; } /** * The transition table of the DFA */ private static final int [] ZZ_TRANS = zzUnpackTrans(); private static final String ZZ_TRANS_PACKED_0 = "\1\6\1\7\1\10\2\11\1\12\1\13\1\14\1\7"+ "\1\0\1\13\1\15\1\16\1\17\1\16\1\20\1\21"+ "\1\22\1\23\1\16\1\24\1\22\1\25\1\26\1\11"+ "\1\27\1\22\1\30\1\7\1\31\1\32\1\33\1\34"+ "\1\35\1\36\1\37\1\13\1\16\1\40\1\41\1\42"+ "\22\22\1\11\4\22\1\16\1\43\1\44\1\16\1\45"+ "\1\46\1\47\1\50\1\51\1\52\1\53\1\54\1\55"+ 
"\1\56\1\57\1\60\1\61\1\13\1\62\1\63\1\64"+ "\1\65\1\66\1\67\1\70\1\71\1\72\1\73\1\22"+ "\1\74\1\75\11\76\1\0\67\76\1\77\3\76\1\100"+ "\31\76\41\101\1\102\75\101\11\103\1\0\17\103\1\104"+ "\105\103\11\76\1\0\75\76\1\105\27\76\140\0\1\7"+ "\6\0\1\7\23\0\1\7\105\0\2\11\23\0\1\11"+ "\42\0\1\11\17\0\1\106\21\0\1\11\7\0\1\13"+ "\3\0\1\13\7\0\1\13\1\0\1\13\2\0\1\13"+ "\5\0\1\13\2\0\2\13\1\0\2\13\34\0\1\13"+ "\5\0\1\13\3\0\1\13\1\0\1\13\1\0\6\13"+ "\11\0\1\13\2\107\1\110\4\107\1\110\31\107\1\111"+ "\75\107\3\0\2\16\6\0\14\16\1\0\4\16\2\0"+ "\2\16\5\0\3\16\1\112\30\16\1\0\3\16\6\0"+ "\1\113\11\0\1\114\10\16\4\0\2\16\6\0\14\16"+ "\1\0\4\16\2\0\2\16\5\0\34\16\1\0\3\16"+ "\6\0\1\113\11\0\11\16\4\0\2\16\6\0\4\16"+ "\1\115\7\16\1\0\4\16\2\0\2\16\5\0\34\16"+ "\1\0\3\16\6\0\1\113\11\0\11\16\4\0\2\16"+ "\6\0\14\16\1\0\4\16\2\0\2\16\5\0\1\16"+ "\1\116\32\16\1\0\3\16\6\0\1\113\11\0\11\16"+ "\4\0\2\16\6\0\14\16\1\0\4\16\2\0\2\16"+ "\5\0\34\16\1\0\1\117\2\16\6\0\1\113\11\0"+ "\11\16\4\0\2\22\6\0\14\22\1\0\4\22\2\0"+ "\2\22\5\0\34\22\1\0\3\22\6\0\1\120\1\0"+ "\1\121\7\0\11\22\4\0\2\22\1\0\1\13\3\0"+ "\1\13\7\22\1\23\1\22\1\23\2\22\1\13\4\22"+ "\1\0\1\13\2\22\2\13\1\0\2\13\34\22\1\13"+ "\3\22\2\0\1\13\3\0\1\13\1\0\1\122\1\0"+ "\6\13\11\22\1\13\3\0\2\16\1\0\1\13\3\0"+ "\1\13\7\16\1\24\1\16\1\24\2\16\1\13\4\16"+ "\1\0\1\13\2\16\2\13\1\0\2\13\34\16\1\13"+ "\3\16\2\0\1\13\3\0\1\13\1\0\1\13\1\0"+ "\6\13\11\16\1\13\6\0\1\123\3\0\1\123\7\0"+ "\1\123\1\0\1\123\2\0\1\124\5\0\1\123\2\0"+ "\2\123\1\0\2\123\34\0\1\123\5\0\1\123\1\125"+ "\2\0\1\123\1\0\1\123\1\0\6\123\11\0\1\123"+ "\3\0\2\16\6\0\5\16\1\126\6\16\1\0\1\16"+ "\1\127\2\16\2\0\2\16\5\0\3\16\1\130\30\16"+ "\1\0\3\16\6\0\1\113\11\0\11\16\1\0\2\131"+ "\1\0\30\131\1\132\5\131\1\133\75\131\6\0\1\13"+ "\3\0\1\13\7\0\1\13\1\0\1\13\2\0\1\13"+ "\5\0\1\134\2\0\2\13\1\0\2\13\34\0\1\13"+ "\5\0\1\13\1\0\1\135\1\0\1\13\1\0\1\13"+ "\1\0\6\13\11\0\1\13\3\0\2\16\6\0\2\16"+ "\1\136\11\16\1\0\4\16\2\0\2\16\5\0\1\16"+ 
"\1\137\32\16\1\0\2\16\1\140\6\0\1\113\11\0"+ "\11\16\4\0\2\16\6\0\4\16\1\141\7\16\1\0"+ "\4\16\2\0\2\16\5\0\3\16\1\142\30\16\1\0"+ "\1\143\2\16\6\0\1\113\11\0\11\16\7\0\1\13"+ "\3\0\1\13\7\0\1\13\1\0\1\13\2\0\1\13"+ "\5\0\1\13\2\0\2\13\1\0\2\13\34\0\1\13"+ "\1\144\4\0\1\13\3\0\1\13\1\0\1\13\1\0"+ "\6\13\11\0\1\13\13\0\6\145\2\0\2\145\1\0"+ "\1\145\1\146\1\0\1\145\4\0\2\145\5\0\4\145"+ "\27\0\1\145\1\0\1\147\2\145\2\0\1\150\15\0"+ "\7\145\6\0\2\16\6\0\4\16\1\151\7\16\1\0"+ "\4\16\2\0\2\16\5\0\3\16\1\152\30\16\1\0"+ "\3\16\6\0\1\113\11\0\11\16\4\0\2\16\6\0"+ "\4\16\1\153\7\16\1\0\4\16\2\0\2\16\5\0"+ "\34\16\1\0\3\16\6\0\1\113\11\0\11\16\4\0"+ "\2\16\6\0\1\154\13\16\1\0\4\16\2\0\2\16"+ "\5\0\34\16\1\0\3\16\6\0\1\113\11\0\11\16"+ "\7\0\1\13\3\0\1\13\7\0\1\13\1\0\1\13"+ "\2\0\1\13\5\0\1\13\2\0\2\13\1\0\2\13"+ "\34\0\1\155\5\0\1\13\3\0\1\13\1\0\1\13"+ "\1\0\2\13\1\62\3\13\11\0\1\13\3\0\2\16"+ "\6\0\5\16\1\156\6\16\1\0\4\16\2\0\2\16"+ "\5\0\33\16\1\157\1\0\3\16\6\0\1\113\11\0"+ "\11\16\4\0\2\16\6\0\14\16\1\0\4\16\2\0"+ "\2\16\5\0\34\16\1\0\1\160\2\16\6\0\1\113"+ "\11\0\11\16\102\0\1\161\43\0\1\13\3\0\1\13"+ "\7\0\1\13\1\0\1\13\2\0\1\13\5\0\1\13"+ "\2\0\2\13\1\0\2\13\34\0\1\13\5\0\1\162"+ "\1\163\1\0\1\164\1\13\1\0\1\13\1\0\6\13"+ "\11\0\1\13\107\0\1\165\3\0\1\166\31\0\1\13"+ "\3\0\1\13\7\0\1\13\1\0\1\13\2\0\1\13"+ "\5\0\1\13\1\167\1\0\2\13\1\0\2\13\34\0"+ "\1\170\1\171\4\0\1\13\2\0\1\172\1\13\1\0"+ "\1\13\1\0\6\13\11\0\1\13\6\0\1\13\3\0"+ "\1\13\7\0\1\13\1\0\1\13\2\0\1\13\5\0"+ "\1\13\2\0\2\13\1\0\2\13\34\0\1\13\5\0"+ "\1\13\3\0\1\13\1\0\1\173\1\0\6\13\11\0"+ "\1\13\6\0\1\13\3\0\1\13\7\0\1\13\1\0"+ "\1\13\2\0\1\13\5\0\1\13\2\0\2\13\1\0"+ "\2\13\34\0\1\13\5\0\1\13\3\0\1\13\1\0"+ "\1\13\1\0\2\13\1\174\3\13\11\0\1\13\6\0"+ "\1\13\3\0\1\13\7\0\1\13\1\0\1\13\2\0"+ "\1\13\5\0\1\13\2\0\2\13\1\0\2\13\34\0"+ "\1\175\5\0\1\13\3\0\1\13\1\0\1\13\1\0"+ "\6\13\11\0\1\13\3\0\2\16\6\0\1\16\1\176"+ "\12\16\1\0\4\16\2\0\2\16\5\0\34\16\1\0"+ "\3\16\6\0\1\113\11\0\3\16\1\177\5\16\4\0"+ 
"\2\16\6\0\14\16\1\0\4\16\2\0\2\16\5\0"+ "\34\16\1\0\2\16\1\200\6\0\1\113\11\0\11\16"+ "\4\0\2\16\6\0\14\16\1\0\4\16\2\0\2\16"+ "\5\0\2\16\1\201\31\16\1\0\3\16\6\0\1\113"+ "\11\0\11\16\4\0\2\16\6\0\14\16\1\0\4\16"+ "\2\0\1\202\1\16\5\0\34\16\1\0\3\16\6\0"+ "\1\113\11\0\11\16\4\0\2\16\6\0\14\16\1\0"+ "\4\16\2\0\2\16\5\0\34\16\1\0\3\16\6\0"+ "\1\113\11\0\3\16\1\203\5\16\4\0\2\16\6\0"+ "\14\16\1\0\4\16\2\0\2\16\5\0\34\16\1\0"+ "\3\16\6\0\1\113\11\0\1\16\1\204\7\16\4\0"+ "\2\16\6\0\14\16\1\0\4\16\2\0\2\16\5\0"+ "\3\16\1\205\30\16\1\0\3\16\6\0\1\113\11\0"+ "\11\16\4\0\2\11\12\0\1\206\10\0\1\11\24\0"+ "\1\206\2\0\1\207\12\0\1\11\4\0\1\207\12\0"+ "\1\106\21\0\1\11\7\0\1\13\3\0\1\13\6\210"+ "\1\0\1\13\1\210\1\211\1\0\1\210\1\13\1\0"+ "\1\210\3\0\1\13\2\210\2\13\1\0\2\13\4\210"+ "\27\0\1\210\1\13\3\210\2\0\1\13\3\0\1\13"+ "\1\0\1\13\1\0\6\13\7\210\2\0\1\13\106\0"+ "\1\212\131\0\1\213\35\0\41\101\1\0\75\101\16\0"+ "\1\214\22\0\1\215\145\0\1\216\176\0\1\217\141\0"+ "\1\220\24\0\2\221\1\107\3\0\1\107\1\221\1\0"+ "\1\107\3\0\1\107\1\222\10\0\1\107\1\0\1\223"+ "\1\107\1\221\4\0\1\107\2\0\1\224\4\107\1\225"+ "\1\226\1\227\1\230\1\0\1\231\2\0\1\232\1\0"+ "\1\233\1\234\1\0\1\235\1\236\1\231\1\226\1\0"+ "\1\107\2\0\1\226\1\0\1\237\3\0\1\107\20\0"+ "\1\107\7\0\1\107\4\0\2\16\6\0\14\16\1\0"+ "\4\16\2\0\2\16\5\0\1\16\1\240\32\16\1\0"+ "\3\16\6\0\1\113\11\0\11\16\114\0\1\113\26\0"+ "\2\16\6\0\14\16\1\0\4\16\2\0\1\16\1\241"+ "\5\0\34\16\1\0\3\16\6\0\1\113\11\0\11\16"+ "\4\0\2\16\6\0\14\16\1\0\4\16\2\0\1\16"+ "\1\242\5\0\34\16\1\0\3\16\6\0\1\113\11\0"+ "\11\16\4\0\2\16\6\0\14\16\1\0\4\16\2\0"+ "\2\16\5\0\34\16\1\0\3\16\6\0\1\113\11\0"+ "\1\243\10\16\114\0\1\120\36\0\6\244\2\245\2\244"+ "\1\245\1\244\2\0\1\244\1\245\3\0\2\244\5\0"+ "\4\244\22\245\1\0\4\245\1\244\1\0\3\244\20\0"+ "\7\244\1\245\10\0\1\13\3\0\1\13\6\244\1\245"+ "\1\246\1\244\1\247\1\245\1\244\1\13\1\0\1\244"+ "\1\245\2\0\1\13\2\244\2\13\1\0\2\13\4\244"+ "\22\245\1\0\4\245\1\244\1\13\3\244\2\0\1\13"+ 
"\3\0\1\13\1\0\1\13\1\0\6\13\7\244\1\245"+ "\1\0\1\13\6\0\1\123\3\0\1\123\7\0\1\123"+ "\1\0\1\123\2\0\1\123\5\0\1\123\2\0\2\123"+ "\1\0\2\123\34\0\1\123\5\0\1\123\3\0\1\123"+ "\1\0\1\123\1\0\6\123\11\0\1\123\3\0\2\16"+ "\6\0\14\16\1\0\4\16\2\0\2\16\5\0\3\16"+ "\1\250\30\16\1\0\3\16\6\0\1\113\11\0\11\16"+ "\4\0\2\16\6\0\14\16\1\0\4\16\2\0\2\16"+ "\5\0\3\16\1\251\30\16\1\0\3\16\6\0\1\113"+ "\11\0\11\16\4\0\2\16\6\0\1\252\13\16\1\0"+ "\4\16\2\0\2\16\5\0\34\16\1\0\3\16\6\0"+ "\1\113\11\0\2\16\1\253\6\16\34\0\1\254\106\0"+ "\1\255\3\0\1\131\2\0\1\131\3\0\1\131\1\256"+ "\10\0\1\255\1\0\1\257\1\131\5\0\1\131\2\0"+ "\1\260\4\131\1\261\1\262\1\263\1\264\1\0\1\265"+ "\2\0\1\266\1\0\1\267\1\270\1\0\1\271\1\272"+ "\1\265\1\262\1\0\1\255\2\0\1\262\1\0\1\273"+ "\3\0\1\131\20\0\1\131\7\0\1\255\7\0\1\13"+ "\3\0\1\13\7\0\1\13\1\0\1\13\2\0\1\13"+ "\5\0\1\13\2\0\2\13\1\0\2\13\34\0\1\13"+ "\5\0\1\13\1\0\1\274\1\0\1\13\1\0\1\13"+ "\1\0\6\13\11\0\1\13\3\0\2\16\6\0\14\16"+ "\1\0\4\16\2\0\2\16\5\0\34\16\1\0\3\16"+ "\6\0\1\113\11\0\2\16\1\275\6\16\4\0\2\16"+ "\6\0\1\276\13\16\1\0\4\16\2\0\2\16\5\0"+ "\1\16\1\277\32\16\1\0\3\16\6\0\1\113\11\0"+ "\11\16\4\0\2\16\6\0\14\16\1\0\4\16\2\0"+ "\2\16\5\0\34\16\1\0\3\16\6\0\1\113\11\0"+ "\1\300\10\16\4\0\2\16\6\0\14\16\1\0\4\16"+ "\2\0\2\16\5\0\1\16\1\301\1\302\31\16\1\0"+ "\3\16\6\0\1\113\11\0\11\16\105\0\1\303\35\0"+ "\2\145\6\0\14\145\1\0\4\145\2\0\2\145\5\0"+ "\34\145\1\0\3\145\2\0\1\304\15\0\11\145\4\0"+ "\2\145\6\0\14\145\1\0\4\145\2\0\2\145\5\0"+ "\34\145\1\0\3\145\2\0\1\305\15\0\11\145\110\0"+ "\1\306\32\0\2\16\6\0\14\16\1\0\4\16\2\0"+ "\2\16\5\0\2\16\1\307\31\16\1\0\3\16\6\0"+ "\1\113\11\0\11\16\4\0\2\16\6\0\2\16\1\310"+ "\11\16\1\0\4\16\2\0\2\16\5\0\34\16\1\0"+ "\3\16\6\0\1\113\11\0\11\16\4\0\2\16\6\0"+ "\5\16\1\311\6\16\1\0\4\16\2\0\2\16\5\0"+ "\34\16\1\0\3\16\6\0\1\113\11\0\11\16\1\0"+ "\2\155\1\0\134\155\3\0\2\16\6\0\1\312\13\16"+ "\1\0\4\16\2\0\2\16\5\0\34\16\1\0\3\16"+ "\6\0\1\113\11\0\11\16\4\0\2\16\6\0\14\16"+ 
"\1\0\4\16\2\0\2\16\5\0\34\16\1\0\3\16"+ "\6\0\1\113\11\0\2\16\1\313\6\16\4\0\2\16"+ "\6\0\14\16\1\0\4\16\2\0\2\16\5\0\34\16"+ "\1\0\3\16\6\0\1\113\11\0\4\16\1\314\4\16"+ "\114\0\1\315\31\0\1\13\3\0\1\13\7\0\1\13"+ "\1\0\1\13\2\0\1\13\5\0\1\13\2\0\2\13"+ "\1\0\2\13\34\0\1\13\5\0\1\13\1\316\2\0"+ "\1\13\1\0\1\13\1\0\6\13\11\0\1\13\2\317"+ "\1\0\5\317\2\0\125\317\46\0\1\320\76\0\1\13"+ "\3\0\1\13\7\0\1\13\1\0\1\13\2\0\1\13"+ "\5\0\1\13\2\0\2\13\1\0\2\13\34\0\1\13"+ "\4\0\1\321\1\13\3\0\1\13\1\0\1\13\1\0"+ "\6\13\11\0\1\13\20\0\1\322\63\0\1\323\35\0"+ "\2\16\6\0\14\16\1\0\4\16\2\0\2\16\5\0"+ "\34\16\1\0\3\16\6\0\1\113\11\0\2\16\1\324"+ "\6\16\4\0\2\16\6\0\14\16\1\0\4\16\2\0"+ "\2\16\5\0\34\16\1\0\1\325\2\16\6\0\1\113"+ "\11\0\11\16\4\0\2\16\6\0\1\326\13\16\1\0"+ "\4\16\2\0\2\16\5\0\34\16\1\0\3\16\6\0"+ "\1\113\11\0\11\16\4\0\2\16\6\0\14\16\1\0"+ "\4\16\2\0\1\327\1\16\5\0\34\16\1\0\3\16"+ "\6\0\1\113\11\0\11\16\4\0\2\16\6\0\14\16"+ "\1\0\4\16\2\0\1\16\1\330\5\0\34\16\1\0"+ "\3\16\6\0\1\113\11\0\11\16\4\0\2\16\6\0"+ "\14\16\1\0\4\16\2\0\2\16\5\0\34\16\1\0"+ "\1\331\2\16\6\0\1\113\11\0\11\16\4\0\2\16"+ "\6\0\14\16\1\0\4\16\2\0\2\16\5\0\3\16"+ "\1\332\30\16\1\0\3\16\6\0\1\113\11\0\11\16"+ "\4\0\2\16\6\0\14\16\1\0\4\16\2\0\2\16"+ "\5\0\1\333\33\16\1\0\3\16\6\0\1\113\11\0"+ "\11\16\31\0\1\206\42\0\1\206\41\0\1\206\4\0"+ "\1\207\12\0\1\207\11\0\3\207\4\0\1\207\6\0"+ "\1\207\1\0\1\207\10\0\1\207\1\0\2\207\1\0"+ "\2\207\3\0\1\207\6\0\1\207\32\0\1\207\4\0"+ "\2\210\6\0\14\210\1\0\4\210\2\0\2\210\5\0"+ "\34\210\1\0\3\210\20\0\11\210\4\0\2\210\1\0"+ "\1\13\3\0\1\13\7\210\1\211\1\210\1\211\2\210"+ "\1\13\4\210\1\0\1\13\2\210\2\13\1\0\2\13"+ "\34\210\1\13\3\210\2\0\1\13\3\0\1\13\1\0"+ "\1\13\1\0\6\13\11\210\1\13\102\0\1\334\75\0"+ "\1\215\110\0\1\335\124\0\2\221\5\0\1\221\23\0"+ "\1\221\4\0\1\107\125\0\1\107\42\0\1\107\41\0"+ "\1\107\54\0\1\336\5\0\1\337\2\0\1\340\77\0"+ "\2\107\3\0\1\107\6\0\4\107\4\0\22\107\1\0"+ "\4\107\10\0\1\107\23\0\1\107\54\0\1\337\10\0"+ 
"\1\341\127\0\1\107\151\0\1\107\121\0\1\342\2\0"+ "\1\107\1\0\1\343\12\0\1\107\1\0\1\344\2\0"+ "\1\107\116\0\1\107\130\0\1\345\2\0\1\346\1\231"+ "\1\0\1\347\15\0\1\107\125\0\1\341\135\0\1\344"+ "\5\0\1\107\121\0\1\107\4\0\1\337\131\0\1\107"+ "\12\0\1\107\52\0\1\107\12\0\1\107\11\0\3\107"+ "\4\0\1\107\6\0\1\107\1\0\1\107\10\0\1\107"+ "\1\0\2\107\1\0\2\107\3\0\1\107\6\0\1\107"+ "\32\0\1\107\4\0\2\16\6\0\14\16\1\0\4\16"+ "\2\0\2\16\5\0\34\16\1\0\1\350\2\16\6\0"+ "\1\113\11\0\11\16\4\0\2\16\6\0\14\16\1\0"+ "\1\16\1\351\2\16\2\0\2\16\5\0\34\16\1\0"+ "\3\16\6\0\1\113\11\0\11\16\4\0\2\16\6\0"+ "\14\16\1\0\4\16\2\0\2\16\5\0\34\16\1\0"+ "\3\16\6\0\1\113\11\0\1\16\1\352\7\16\4\0"+ "\2\244\6\0\14\244\1\0\4\244\2\0\2\244\5\0"+ "\34\244\1\0\3\244\6\0\1\353\11\0\11\244\4\0"+ "\2\245\6\0\14\245\1\0\4\245\2\0\2\245\5\0"+ "\34\245\1\0\3\245\6\0\1\354\1\0\1\121\7\0"+ "\11\245\4\0\2\245\1\0\1\13\3\0\1\13\7\245"+ "\1\246\1\245\1\246\2\245\1\13\4\245\1\0\1\13"+ "\2\245\2\13\1\0\2\13\34\245\1\13\3\245\2\0"+ "\1\13\3\0\1\13\1\0\1\122\1\0\6\13\11\245"+ "\1\13\3\0\2\244\1\0\1\13\3\0\1\13\7\244"+ "\1\247\1\244\1\247\2\244\1\13\4\244\1\0\1\13"+ "\2\244\2\13\1\0\2\13\34\244\1\13\3\244\2\0"+ "\1\13\3\0\1\13\1\0\1\13\1\0\6\13\11\244"+ "\1\13\3\0\2\16\6\0\1\355\13\16\1\0\4\16"+ "\2\0\2\16\5\0\34\16\1\0\3\16\6\0\1\113"+ "\11\0\11\16\4\0\2\16\6\0\5\16\1\356\6\16"+ "\1\0\4\16\2\0\2\16\5\0\34\16\1\0\3\16"+ "\6\0\1\113\11\0\11\16\4\0\2\16\6\0\14\16"+ "\1\0\4\16\2\0\2\16\5\0\34\16\1\0\1\357"+ "\2\16\6\0\1\113\11\0\11\16\4\0\2\16\6\0"+ "\14\16\1\0\4\16\2\0\1\360\1\16\5\0\34\16"+ "\1\0\3\16\6\0\1\113\11\0\11\16\4\0\1\255"+ "\24\0\1\255\2\0\1\254\37\0\1\255\41\0\1\255"+ "\31\0\1\361\42\0\1\361\41\0\1\361\54\0\1\362"+ "\5\0\1\363\2\0\1\364\77\0\2\131\3\0\1\131"+ "\6\0\4\131\4\0\22\131\1\0\4\131\10\0\1\131"+ "\23\0\1\131\54\0\1\363\10\0\1\365\127\0\1\131"+ "\151\0\1\131\121\0\1\366\2\0\1\367\1\0\1\370"+ "\12\0\1\131\1\0\1\371\2\0\1\131\116\0\1\131"+ "\130\0\1\372\2\0\1\373\1\265\1\0\1\374\15\0"+ 
"\1\131\125\0\1\365\135\0\1\371\5\0\1\131\121\0"+ "\1\131\4\0\1\363\131\0\1\131\12\0\1\131\52\0"+ "\1\375\12\0\1\375\11\0\3\375\4\0\1\375\6\0"+ "\1\375\1\0\1\375\10\0\1\375\1\0\2\375\1\0"+ "\2\375\3\0\1\375\6\0\1\375\32\0\1\375\4\0"+ "\2\16\6\0\4\16\1\376\7\16\1\0\4\16\2\0"+ "\2\16\5\0\34\16\1\0\3\16\6\0\1\113\11\0"+ "\11\16\4\0\2\16\6\0\14\16\1\0\4\16\2\0"+ "\2\16\5\0\34\16\1\0\3\16\6\0\1\113\11\0"+ "\1\377\10\16\4\0\2\16\6\0\14\16\1\0\4\16"+ "\2\0\1\u0100\1\16\5\0\34\16\1\0\3\16\6\0"+ "\1\113\11\0\11\16\4\0\2\16\6\0\14\16\1\0"+ "\4\16\2\0\2\16\5\0\3\16\1\u0101\30\16\1\0"+ "\3\16\6\0\1\113\11\0\11\16\4\0\2\16\6\0"+ "\14\16\1\0\4\16\2\0\2\16\5\0\3\16\1\u0102"+ "\30\16\1\0\3\16\6\0\1\113\11\0\11\16\4\0"+ "\2\16\6\0\14\16\1\0\4\16\2\0\1\u0103\1\16"+ "\5\0\34\16\1\0\3\16\6\0\1\113\11\0\11\16"+ "\40\0\1\u0104\102\0\2\16\6\0\14\16\1\0\4\16"+ "\2\0\2\16\5\0\3\16\1\u0105\30\16\1\0\1\u0106"+ "\2\16\6\0\1\113\11\0\11\16\4\0\2\16\6\0"+ "\14\16\1\0\4\16\2\0\1\u0107\1\16\5\0\34\16"+ "\1\0\3\16\6\0\1\113\11\0\11\16\4\0\2\16"+ "\6\0\14\16\1\0\4\16\2\0\2\16\5\0\34\16"+ "\1\0\1\u0108\2\16\6\0\1\113\11\0\11\16\4\0"+ "\2\16\6\0\14\16\1\0\4\16\2\0\2\16\5\0"+ "\34\16\1\0\1\u0109\2\16\6\0\1\113\11\0\11\16"+ "\4\0\2\16\6\0\4\16\1\u010a\7\16\1\0\4\16"+ "\2\0\2\16\5\0\34\16\1\0\3\16\6\0\1\113"+ "\11\0\11\16\4\0\2\16\6\0\14\16\1\0\4\16"+ "\2\0\2\16\5\0\34\16\1\0\3\16\6\0\1\113"+ "\11\0\1\u010b\10\16\1\0\2\u010c\1\0\5\u010c\2\0"+ "\22\u010c\1\u010d\56\u010c\1\u010e\23\u010c\12\0\1\u010f\124\0"+ "\2\320\1\0\134\320\13\0\1\u0110\162\0\1\u0111\102\0"+ "\2\16\6\0\14\16\1\0\4\16\2\0\2\16\5\0"+ "\34\16\1\0\1\u0112\2\16\6\0\1\113\11\0\11\16"+ "\4\0\2\16\6\0\14\16\1\0\4\16\2\0\2\16"+ "\5\0\34\16\1\0\2\16\1\u0113\6\0\1\113\11\0"+ "\11\16\4\0\2\16\6\0\14\16\1\0\4\16\2\0"+ "\2\16\5\0\3\16\1\u0114\30\16\1\0\3\16\6\0"+ "\1\113\11\0\11\16\4\0\2\16\6\0\2\16\1\u0115"+ "\11\16\1\0\4\16\2\0\2\16\5\0\34\16\1\0"+ "\3\16\6\0\1\113\11\0\11\16\4\0\2\16\6\0"+ "\14\16\1\0\4\16\2\0\1\u0116\1\16\5\0\34\16"+ 
"\1\0\3\16\6\0\1\113\11\0\11\16\4\0\2\16"+ "\6\0\14\16\1\0\4\16\2\0\2\16\5\0\2\16"+ "\1\u0117\31\16\1\0\3\16\6\0\1\113\11\0\11\16"+ "\4\0\2\16\6\0\5\16\1\u0118\6\16\1\0\4\16"+ "\2\0\2\16\5\0\34\16\1\0\3\16\6\0\1\113"+ "\11\0\11\16\4\0\2\16\6\0\14\16\1\0\4\16"+ "\2\0\2\16\5\0\3\16\1\u0119\30\16\1\0\3\16"+ "\6\0\1\113\11\0\11\16\104\0\1\u011a\135\0\1\u011b"+ "\115\0\1\107\130\0\1\107\156\0\1\107\130\0\1\107"+ "\137\0\1\107\130\0\1\107\127\0\1\107\147\0\1\107"+ "\140\0\1\107\132\0\1\107\5\0\1\107\53\0\2\16"+ "\6\0\14\16\1\0\4\16\2\0\2\16\5\0\3\16"+ "\1\u011c\30\16\1\0\3\16\6\0\1\113\11\0\11\16"+ "\4\0\2\16\6\0\5\16\1\u011d\6\16\1\0\4\16"+ "\2\0\2\16\5\0\34\16\1\0\3\16\6\0\1\113"+ "\11\0\11\16\114\0\1\353\136\0\1\354\26\0\2\16"+ "\6\0\1\u011e\13\16\1\0\4\16\2\0\2\16\5\0"+ "\34\16\1\0\3\16\6\0\1\113\11\0\11\16\4\0"+ "\2\16\6\0\5\16\1\u011f\6\16\1\0\4\16\2\0"+ "\2\16\5\0\34\16\1\0\3\16\6\0\1\113\11\0"+ "\11\16\31\0\1\361\2\0\1\254\37\0\1\361\41\0"+ "\1\361\62\0\1\131\130\0\1\131\156\0\1\131\130\0"+ "\1\131\137\0\1\131\103\0\1\254\22\0\1\131\140\0"+ "\1\131\127\0\1\131\147\0\1\131\140\0\1\131\132\0"+ "\1\131\5\0\1\131\53\0\1\375\12\0\1\375\11\0"+ "\3\375\1\254\3\0\1\375\6\0\1\375\1\0\1\375"+ "\10\0\1\375\1\0\2\375\1\0\2\375\3\0\1\375"+ "\6\0\1\375\32\0\1\375\4\0\2\16\6\0\14\16"+ "\1\0\4\16\2\0\2\16\5\0\2\16\1\u0120\31\16"+ "\1\0\3\16\6\0\1\113\11\0\11\16\4\0\2\16"+ "\6\0\14\16\1\0\4\16\2\0\2\16\5\0\3\16"+ "\1\u0121\30\16\1\0\3\16\6\0\1\113\11\0\11\16"+ "\4\0\2\16\6\0\14\16\1\0\4\16\2\0\2\16"+ "\5\0\33\16\1\u0122\1\0\3\16\6\0\1\113\11\0"+ "\11\16\4\0\2\16\6\0\14\16\1\0\4\16\2\0"+ "\2\16\5\0\34\16\1\0\3\16\6\0\1\113\11\0"+ "\1\16\1\u0123\7\16\4\0\2\16\6\0\14\16\1\0"+ "\4\16\2\0\2\16\5\0\1\u0124\33\16\1\0\3\16"+ "\6\0\1\113\11\0\11\16\106\0\1\u0125\34\0\2\16"+ "\6\0\5\16\1\u0126\6\16\1\0\4\16\2\0\2\16"+ "\5\0\34\16\1\0\3\16\6\0\1\113\11\0\11\16"+ "\4\0\2\16\6\0\14\16\1\0\4\16\2\0\1\u0127"+ "\1\16\5\0\34\16\1\0\3\16\6\0\1\113\11\0"+ 
"\11\16\4\0\2\16\6\0\5\16\1\u0128\6\16\1\0"+ "\4\16\2\0\2\16\5\0\34\16\1\0\3\16\6\0"+ "\1\113\11\0\11\16\4\0\2\16\6\0\14\16\1\0"+ "\4\16\2\0\2\16\5\0\2\16\1\u0129\31\16\1\0"+ "\3\16\6\0\1\113\11\0\11\16\4\0\2\16\6\0"+ "\1\16\1\u012a\12\16\1\0\4\16\2\0\2\16\5\0"+ "\34\16\1\0\3\16\6\0\1\113\11\0\11\16\1\0"+ "\2\u010c\1\0\5\u010c\2\0\101\u010c\1\u010e\25\u010c\1\0"+ "\5\u010c\2\0\20\u010c\1\u012b\16\u010c\1\u012c\1\u012d\1\u010c"+ "\1\u012e\1\u012f\5\u010c\1\u0130\1\u0131\3\u010c\1\u0132\1\u0133"+ "\1\u0134\2\u010c\1\u0135\1\u0136\14\u010c\1\u010e\20\u010c\1\u0137"+ "\4\u010c\1\0\5\u010c\2\0\67\u010c\1\u0138\11\u010c\1\u010e"+ "\23\u010c\12\0\1\u0139\226\0\1\u013a\72\0\1\u013b\103\0"+ "\2\16\6\0\14\16\1\0\4\16\2\0\2\16\5\0"+ "\1\16\1\u013c\32\16\1\0\3\16\6\0\1\113\11\0"+ "\11\16\4\0\2\16\6\0\14\16\1\0\4\16\2\0"+ "\2\16\5\0\34\16\1\0\2\16\1\u013d\6\0\1\113"+ "\11\0\11\16\4\0\2\16\6\0\14\16\1\0\4\16"+ "\2\0\2\16\5\0\34\16\1\0\1\u013e\2\16\6\0"+ "\1\113\11\0\11\16\4\0\2\16\6\0\14\16\1\0"+ "\4\16\2\0\1\u013f\1\16\5\0\34\16\1\0\3\16"+ "\6\0\1\113\11\0\11\16\4\0\2\16\6\0\1\u0140"+ "\13\16\1\0\4\16\2\0\2\16\5\0\34\16\1\0"+ "\3\16\6\0\1\113\11\0\11\16\37\0\1\u0141\103\0"+ "\2\16\6\0\5\16\1\u0142\6\16\1\0\4\16\2\0"+ "\2\16\5\0\34\16\1\0\3\16\6\0\1\113\11\0"+ "\11\16\4\0\2\16\6\0\14\16\1\0\4\16\2\0"+ "\2\16\5\0\34\16\1\0\1\u0143\2\16\6\0\1\113"+ "\11\0\11\16\4\0\2\16\6\0\14\16\1\0\4\16"+ "\2\0\2\16\5\0\34\16\1\0\3\16\6\0\1\113"+ "\11\0\1\u0144\10\16\4\0\2\16\6\0\14\16\1\0"+ "\4\16\2\0\2\16\5\0\34\16\1\0\2\16\1\u0145"+ "\6\0\1\113\11\0\11\16\4\0\2\16\6\0\5\16"+ "\1\u0146\6\16\1\0\4\16\2\0\2\16\5\0\2\16"+ "\1\u0147\31\16\1\0\3\16\6\0\1\113\11\0\11\16"+ "\4\0\2\16\6\0\5\16\1\u0148\6\16\1\0\4\16"+ "\2\0\2\16\5\0\34\16\1\0\3\16\6\0\1\113"+ "\11\0\11\16\4\0\2\16\6\0\14\16\1\0\4\16"+ "\2\0\1\u0149\1\16\5\0\34\16\1\0\3\16\6\0"+ "\1\113\11\0\11\16\32\0\1\u014a\110\0\2\16\6\0"+ "\5\16\1\u014b\6\16\1\0\4\16\2\0\2\16\5\0"+ "\34\16\1\0\3\16\6\0\1\113\11\0\11\16\4\0"+ 
"\2\16\6\0\14\16\1\0\4\16\2\0\2\16\5\0"+ "\34\16\1\0\1\16\1\u014c\1\16\6\0\1\113\11\0"+ "\11\16\4\0\2\16\6\0\1\16\1\u014d\12\16\1\0"+ "\4\16\2\0\2\16\5\0\34\16\1\0\3\16\6\0"+ "\1\113\11\0\11\16\4\0\2\16\6\0\14\16\1\0"+ "\4\16\2\0\2\16\5\0\34\16\1\0\3\16\6\0"+ "\1\113\11\0\1\u014e\10\16\4\0\2\16\6\0\14\16"+ "\1\0\4\16\2\0\2\16\5\0\34\16\1\0\3\16"+ "\6\0\1\113\11\0\2\16\1\u014f\6\16\1\0\2\u010c"+ "\1\0\5\u010c\2\0\47\u010c\1\u0150\31\u010c\1\u010e\25\u010c"+ "\1\0\5\u010c\2\0\43\u010c\1\u0151\35\u010c\1\u010e\25\u010c"+ "\1\0\5\u010c\2\0\37\u010c\1\u0152\41\u010c\1\u010e\25\u010c"+ "\1\0\5\u010c\2\0\43\u010c\1\u0153\6\u010c\1\u0154\12\u010c"+ "\1\u0155\13\u010c\1\u010e\25\u010c\1\0\5\u010c\2\0\56\u010c"+ "\1\u0156\22\u010c\1\u010e\25\u010c\1\0\5\u010c\2\0\37\u010c"+ "\1\u0157\41\u010c\1\u010e\25\u010c\1\0\5\u010c\2\0\43\u010c"+ "\1\u0158\1\u010c\1\u0159\33\u010c\1\u010e\25\u010c\1\0\5\u010c"+ "\2\0\47\u010c\1\u015a\31\u010c\1\u010e\25\u010c\1\0\5\u010c"+ "\2\0\40\u010c\1\u015b\40\u010c\1\u010e\25\u010c\1\0\5\u010c"+ "\2\0\37\u010c\1\u015c\41\u010c\1\u010e\25\u010c\1\0\5\u010c"+ "\2\0\60\u010c\1\u015d\20\u010c\1\u010e\25\u010c\1\0\5\u010c"+ "\2\0\47\u010c\1\u015e\31\u010c\1\u010e\25\u010c\1\0\5\u010c"+ "\2\0\51\u010c\1\u015f\27\u010c\1\u010e\25\u010c\1\0\5\u010c"+ "\2\0\74\u010c\1\u0160\4\u010c\1\u010e\23\u010c\13\u0161\1\u0162"+ "\1\u0163\122\u0161\2\u013a\1\0\134\u013a\46\0\1\u0164\73\0"+ "\2\16\6\0\14\16\1\0\4\16\2\0\2\16\5\0"+ "\34\16\1\0\1\u0165\2\16\6\0\1\113\11\0\11\16"+ "\4\0\2\16\6\0\14\16\1\0\4\16\2\0\2\16"+ "\5\0\34\16\1\0\1\16\1\u0166\1\16\6\0\1\113"+ "\11\0\11\16\4\0\2\16\6\0\14\16\1\0\4\16"+ "\2\0\2\16\5\0\1\16\1\u0167\32\16\1\0\3\16"+ "\6\0\1\113\11\0\11\16\4\0\2\16\6\0\14\16"+ "\1\0\1\16\1\u0168\2\16\2\0\2\16\5\0\34\16"+ "\1\0\3\16\6\0\1\113\11\0\11\16\105\0\1\u0169"+ "\35\0\2\16\6\0\5\16\1\u016a\6\16\1\0\4\16"+ "\2\0\2\16\5\0\34\16\1\0\3\16\6\0\1\113"+ "\11\0\11\16\4\0\2\16\6\0\14\16\1\0\1\16"+ "\1\u016b\2\16\2\0\2\16\5\0\34\16\1\0\3\16"+ 
"\6\0\1\113\11\0\11\16\4\0\2\16\6\0\14\16"+ "\1\0\4\16\2\0\2\16\5\0\34\16\1\0\3\16"+ "\6\0\1\113\11\0\1\u016c\10\16\4\0\2\16\6\0"+ "\14\16\1\0\4\16\2\0\2\16\5\0\34\16\1\0"+ "\2\16\1\u016d\6\0\1\113\11\0\11\16\20\0\1\u016e"+ "\122\0\2\16\6\0\14\16\1\0\4\16\2\0\2\16"+ "\5\0\34\16\1\0\2\16\1\u016f\6\0\1\113\11\0"+ "\11\16\4\0\2\16\6\0\14\16\1\0\4\16\2\0"+ "\2\16\5\0\34\16\1\0\1\u0170\2\16\6\0\1\113"+ "\11\0\11\16\1\0\2\u010c\1\0\5\u010c\2\0\65\u010c"+ "\1\u0171\13\u010c\1\u010e\25\u010c\1\0\5\u010c\2\0\40\u010c"+ "\1\u0172\15\u010c\1\u0173\22\u010c\1\u010e\25\u010c\1\0\5\u010c"+ "\2\0\65\u010c\1\u0174\13\u010c\1\u010e\25\u010c\1\0\5\u010c"+ "\2\0\40\u010c\1\u0175\40\u010c\1\u010e\25\u010c\1\0\5\u010c"+ "\2\0\52\u010c\1\u0176\26\u010c\1\u010e\25\u010c\1\0\5\u010c"+ "\2\0\47\u010c\1\u0177\31\u010c\1\u010e\25\u010c\1\0\5\u010c"+ "\2\0\47\u010c\1\u0178\31\u010c\1\u010e\25\u010c\1\0\5\u010c"+ "\2\0\37\u010c\1\u0179\41\u010c\1\u010e\25\u010c\1\0\5\u010c"+ "\2\0\57\u010c\1\u017a\21\u010c\1\u010e\25\u010c\1\0\5\u010c"+ "\2\0\62\u010c\1\u017b\16\u010c\1\u010e\25\u010c\1\0\5\u010c"+ "\2\0\52\u010c\1\u017c\26\u010c\1\u010e\25\u010c\1\0\5\u010c"+ "\2\0\41\u010c\1\u017d\37\u010c\1\u010e\25\u010c\1\0\5\u010c"+ "\2\0\41\u010c\1\u017e\10\u010c\1\u017f\26\u010c\1\u010e\25\u010c"+ "\1\0\5\u010c\2\0\37\u010c\1\u0180\41\u010c\1\u010e\25\u010c"+ "\1\0\5\u010c\2\0\37\u010c\1\u0181\41\u010c\1\u010e\25\u010c"+ "\1\0\5\u010c\2\0\57\u010c\1\u0182\21\u010c\1\u010e\23\u010c"+ "\14\u0161\1\u0163\275\u0161\1\u0163\1\u0183\121\u0161\2\u0164\1\0"+ "\134\u0164\3\0\2\16\6\0\14\16\1\0\4\16\2\0"+ "\1\u0184\1\16\5\0\34\16\1\0\3\16\6\0\1\113"+ "\11\0\11\16\4\0\2\16\6\0\14\16\1\0\4\16"+ "\2\0\2\16\5\0\2\16\1\u0185\31\16\1\0\3\16"+ "\6\0\1\113\11\0\11\16\106\0\1\u0186\34\0\2\16"+ "\6\0\14\16\1\0\4\16\2\0\2\16\5\0\34\16"+ "\1\0\1\u0187\2\16\6\0\1\113\11\0\11\16\4\0"+ "\2\16\6\0\14\16\1\0\4\16\2\0\2\16\5\0"+ "\34\16\1\0\1\16\1\u0188\1\16\6\0\1\113\11\0"+ "\11\16\40\0\1\u0189\77\0\2\u010c\1\0\5\u010c\2\0"+ 
"\57\u010c\1\u018a\21\u010c\1\u010e\25\u010c\1\0\5\u010c\2\0"+ "\37\u010c\1\u018b\41\u010c\1\u010e\25\u010c\1\0\5\u010c\2\0"+ "\47\u010c\1\u018c\31\u010c\1\u010e\25\u010c\1\0\5\u010c\2\0"+ "\51\u010c\1\u018d\27\u010c\1\u010e\25\u010c\1\0\5\u010c\2\0"+ "\57\u010c\1\u018e\21\u010c\1\u010e\25\u010c\1\0\5\u010c\2\0"+ "\52\u010c\1\u018f\26\u010c\1\u010e\25\u010c\1\0\5\u010c\2\0"+ "\57\u010c\1\u0190\21\u010c\1\u010e\25\u010c\1\0\5\u010c\2\0"+ "\47\u010c\1\u0191\31\u010c\1\u010e\25\u010c\1\0\5\u010c\2\0"+ "\65\u010c\1\u0192\13\u010c\1\u010e\25\u010c\1\0\5\u010c\2\0"+ "\45\u010c\1\u0193\33\u010c\1\u010e\25\u010c\1\0\5\u010c\2\0"+ "\47\u010c\1\u0194\31\u010c\1\u010e\25\u010c\1\0\5\u010c\2\0"+ "\60\u010c\1\u0195\20\u010c\1\u010e\25\u010c\1\0\5\u010c\2\0"+ "\43\u010c\1\u0196\35\u010c\1\u010e\25\u010c\1\0\5\u010c\2\0"+ "\60\u010c\1\u0197\20\u010c\1\u010e\25\u010c\1\0\5\u010c\2\0"+ "\47\u010c\1\u0198\31\u010c\1\u010e\25\u010c\1\0\5\u010c\2\0"+ "\37\u010c\1\u0199\41\u010c\1\u010e\23\u010c\14\u0161\1\u0163\1\u0161"+ "\1\u019a\120\u0161\3\0\2\16\6\0\14\16\1\0\4\16"+ "\2\0\2\16\5\0\34\16\1\0\1\u019b\2\16\6\0"+ "\1\113\11\0\11\16\4\0\2\16\6\0\14\16\1\0"+ "\4\16\2\0\1\u019c\1\16\5\0\34\16\1\0\3\16"+ "\6\0\1\113\11\0\11\16\32\0\1\u019d\207\0\1\u019e"+ "\34\0\2\u010c\1\0\5\u010c\2\0\47\u010c\1\u019f\31\u010c"+ "\1\u010e\25\u010c\1\0\5\u010c\2\0\65\u010c\1\u01a0\13\u010c"+ "\1\u010e\25\u010c\1\0\5\u010c\2\0\52\u010c\1\u01a1\26\u010c"+ "\1\u010e\25\u010c\1\0\5\u010c\2\0\52\u010c\1\u01a2\26\u010c"+ "\1\u010e\25\u010c\1\0\5\u010c\2\0\52\u010c\1\u01a3\26\u010c"+ "\1\u010e\25\u010c\1\0\5\u010c\2\0\60\u010c\1\u01a4\20\u010c"+ "\1\u010e\25\u010c\1\0\5\u010c\2\0\41\u010c\1\u01a5\37\u010c"+ "\1\u010e\25\u010c\1\0\5\u010c\2\0\47\u010c\1\u01a6\31\u010c"+ "\1\u010e\25\u010c\1\0\5\u010c\2\0\43\u010c\1\u01a7\35\u010c"+ "\1\u010e\25\u010c\1\0\5\u010c\2\0\42\u010c\1\u01a8\36\u010c"+ "\1\u010e\25\u010c\1\0\5\u010c\2\0\37\u010c\1\u01a9\41\u010c"+ 
"\1\u010e\25\u010c\1\0\5\u010c\2\0\44\u010c\1\u01aa\34\u010c"+ "\1\u010e\25\u010c\1\0\5\u010c\2\0\63\u010c\1\u01ab\15\u010c"+ "\1\u010e\25\u010c\1\0\5\u010c\2\0\57\u010c\1\u01ac\21\u010c"+ "\1\u010e\25\u010c\1\0\5\u010c\2\0\60\u010c\1\u01ad\20\u010c"+ "\1\u010e\23\u010c\14\u0161\1\u0163\2\u0161\1\u01ae\117\u0161\3\0"+ "\2\16\6\0\14\16\1\0\4\16\2\0\1\16\1\u01af"+ "\5\0\34\16\1\0\3\16\6\0\1\113\11\0\11\16"+ "\4\0\2\16\6\0\14\16\1\0\4\16\2\0\2\16"+ "\5\0\34\16\1\0\3\16\6\0\1\113\11\0\2\16"+ "\1\u01b0\6\16\20\0\1\u01b1\225\0\1\u01b2\30\0\2\u010c"+ "\1\0\5\u010c\2\0\52\u010c\1\u01b3\26\u010c\1\u010e\25\u010c"+ "\1\0\5\u010c\2\0\51\u010c\1\u01b4\27\u010c\1\u010e\25\u010c"+ "\1\0\5\u010c\2\0\45\u010c\1\u01b5\33\u010c\1\u010e\25\u010c"+ "\1\0\5\u010c\2\0\53\u010c\1\u01b6\25\u010c\1\u010e\25\u010c"+ "\1\0\5\u010c\2\0\47\u010c\1\u01b7\31\u010c\1\u010e\25\u010c"+ "\1\0\5\u010c\2\0\51\u010c\1\u01b8\27\u010c\1\u010e\25\u010c"+ "\1\0\5\u010c\2\0\51\u010c\1\u01b9\27\u010c\1\u010e\25\u010c"+ "\1\0\5\u010c\2\0\57\u010c\1\u01ba\21\u010c\1\u010e\25\u010c"+ "\1\0\5\u010c\2\0\47\u010c\1\u01bb\31\u010c\1\u010e\25\u010c"+ "\1\0\5\u010c\2\0\47\u010c\1\u01bc\31\u010c\1\u010e\25\u010c"+ "\1\0\5\u010c\2\0\51\u010c\1\u01bd\27\u010c\1\u010e\25\u010c"+ "\1\0\5\u010c\2\0\51\u010c\1\u01be\27\u010c\1\u010e\25\u010c"+ "\1\0\5\u010c\2\0\37\u010c\1\u01bf\41\u010c\1\u010e\23\u010c"+ "\14\u0161\1\u0163\3\u0161\1\u01c0\116\u0161\3\0\2\16\6\0"+ "\14\16\1\0\4\16\2\0\2\16\5\0\34\16\1\0"+ "\3\16\6\0\1\113\11\0\1\u01c1\10\16\40\0\1\u01c2"+ "\77\0\2\u010c\1\0\5\u010c\2\0\51\u010c\1\u01c3\27\u010c"+ "\1\u010e\25\u010c\1\0\5\u010c\2\0\52\u010c\1\u01c4\26\u010c"+ "\1\u010e\25\u010c\1\0\5\u010c\2\0\43\u010c\1\u01c5\35\u010c"+ "\1\u010e\25\u010c\1\0\5\u010c\2\0\41\u010c\1\u01c6\37\u010c"+ "\1\u010e\25\u010c\1\0\5\u010c\2\0\65\u010c\1\u01c7\13\u010c"+ "\1\u010e\25\u010c\1\0\5\u010c\2\0\60\u010c\1\u01c8\20\u010c"+ "\1\u010e\25\u010c\1\0\5\u010c\2\0\57\u010c\1\u01c9\21\u010c"+ 
"\1\u010e\25\u010c\1\0\5\u010c\2\0\41\u010c\1\u01ca\37\u010c"+ "\1\u010e\25\u010c\1\0\5\u010c\2\0\45\u010c\1\u01cb\33\u010c"+ "\1\u010e\25\u010c\1\0\5\u010c\2\0\64\u010c\1\u01cc\14\u010c"+ "\1\u010e\23\u010c\102\0\1\u01cd\34\0\2\u010c\1\0\5\u010c"+ "\2\0\45\u010c\1\u01ce\33\u010c\1\u010e\25\u010c\1\0\5\u010c"+ "\2\0\53\u010c\1\u01cf\25\u010c\1\u010e\25\u010c\1\0\5\u010c"+ "\2\0\57\u010c\1\u01d0\21\u010c\1\u010e\25\u010c\1\0\5\u010c"+ "\2\0\60\u010c\1\u01d1\20\u010c\1\u010e\25\u010c\1\0\5\u010c"+ "\2\0\42\u010c\1\u01d2\22\u010c\1\u01d3\13\u010c\1\u010e\25\u010c"+ "\1\0\5\u010c\2\0\42\u010c\1\u01d4\36\u010c\1\u010e\25\u010c"+ "\1\0\5\u010c\2\0\47\u010c\1\u01d5\31\u010c\1\u010e\25\u010c"+ "\1\0\5\u010c\2\0\47\u010c\1\u01d6\31\u010c\1\u010e\23\u010c"+ "\106\0\1\u01d7\30\0\2\u010c\1\0\5\u010c\2\0\47\u010c"+ "\1\u01d8\31\u010c\1\u010e\25\u010c\1\0\5\u010c\2\0\60\u010c"+ "\1\u01d9\20\u010c\1\u010e\25\u010c\1\0\5\u010c\2\0\42\u010c"+ "\1\u01da\36\u010c\1\u010e\25\u010c\1\0\5\u010c\2\0\51\u010c"+ "\1\u01db\6\u010c\1\u01dc\20\u010c\1\u010e\25\u010c\1\0\5\u010c"+ "\2\0\47\u010c\1\u01dd\31\u010c\1\u010e\25\u010c\1\0\5\u010c"+ "\2\0\37\u010c\1\u01de\41\u010c\1\u010e\25\u010c\1\0\5\u010c"+ "\2\0\20\u010c\1\u01df\60\u010c\1\u010e\25\u010c\1\0\5\u010c"+ "\2\0\20\u010c\1\u01e0\60\u010c\1\u010e\25\u010c\1\0\5\u010c"+ "\2\0\42\u010c\1\u01e1\36\u010c\1\u010e\25\u010c\1\0\5\u010c"+ "\2\0\47\u010c\1\u01e2\31\u010c\1\u010e\25\u010c\1\0\5\u010c"+ "\2\0\54\u010c\1\u01e3\24\u010c\1\u010e\25\u010c\1\0\5\u010c"+ "\2\0\37\u010c\1\u01e4\41\u010c\1\u010e\25\u010c\1\0\5\u010c"+ "\2\0\14\u010c\1\u01e5\64\u010c\1\u010e\25\u010c\1\0\5\u010c"+ "\2\0\45\u010c\1\u01e6\33\u010c\1\u010e\25\u010c\1\0\5\u010c"+ "\2\0\47\u010c\1\u01e7\31\u010c\1\u010e\25\u010c\1\0\5\u010c"+ "\2\0\14\u010c\1\u01e8\64\u010c\1\u010e\25\u010c\1\0\5\u010c"+ "\2\0\41\u010c\1\u01e9\37\u010c\1\u010e\25\u010c\1\0\5\u010c"+ "\2\0\64\u010c\1\u01ea\14\u010c\1\u010e\25\u010c\1\0\5\u010c"+ 
"\2\0\42\u010c\1\u01eb\36\u010c\1\u010e\25\u010c\1\0\5\u010c"+
"\2\0\60\u010c\1\u01ec\20\u010c\1\u010e\25\u010c\1\0\5\u010c"+
"\2\0\47\u010c\1\u01ed\31\u010c\1\u010e\25\u010c\1\0\5\u010c"+
"\2\0\52\u010c\1\u01ee\26\u010c\1\u010e\25\u010c\1\0\5\u010c"+
"\2\0\37\u010c\1\u01ef\41\u010c\1\u010e\25\u010c\1\0\5\u010c"+
"\2\0\51\u010c\1\u01f0\27\u010c\1\u010e\25\u010c\1\0\5\u010c"+
"\2\0\41\u010c\1\u01f1\37\u010c\1\u010e\25\u010c\1\0\5\u010c"+
"\2\0\41\u010c\1\u01f2\37\u010c\1\u010e\25\u010c\1\0\5\u010c"+
"\2\0\60\u010c\1\u01f3\20\u010c\1\u010e\25\u010c\1\0\5\u010c"+
"\2\0\51\u010c\1\u01f4\27\u010c\1\u010e\25\u010c\1\0\5\u010c"+
"\2\0\37\u010c\1\u01f5\41\u010c\1\u010e\25\u010c\1\0\5\u010c"+
"\2\0\57\u010c\1\u01f6\21\u010c\1\u010e\25\u010c\1\0\5\u010c"+
"\2\0\47\u010c\1\u01f7\31\u010c\1\u010e\23\u010c";

/**
 * Allocates and fills the full DFA transition table by decoding the
 * run-length-encoded {@code ZZ_TRANS_PACKED_0} string.
 *
 * @return the unpacked transition table (38380 entries, generated size)
 */
private static int [] zzUnpackTrans() {
    int [] result = new int[38380];
    int offset = 0;
    offset = zzUnpackTrans(ZZ_TRANS_PACKED_0, offset, result);
    return result;
}

/**
 * Decodes one packed transition string into {@code result}.
 * The packed format is (count, value) character pairs; each pair expands
 * to {@code count} copies of {@code value - 1}.  The decrement is what
 * distinguishes this decoder from {@code zzUnpackAttribute}: transition
 * targets are stored shifted by one so that the "no transition" marker
 * can be encoded as character 0 and decoded back to -1.
 *
 * @param packed the packed (count, value) pair string
 * @param offset first index in {@code result} to write
 * @param result destination array
 * @return the index one past the last entry written
 */
private static int zzUnpackTrans(String packed, int offset, int [] result) {
    int i = 0;       /* index in packed string  */
    int j = offset;  /* index in unpacked array */
    int l = packed.length();
    while (i < l) {
        int count = packed.charAt(i++);
        int value = packed.charAt(i++);
        value--;  // transitions are stored off by one; -1 means "no transition"
        do result[j++] = value; while (--count > 0);
    }
    return j;
}

/* error codes used as indices into ZZ_ERROR_MSG (see zzScanError) */
private static final int ZZ_UNKNOWN_ERROR = 0;
private static final int ZZ_NO_MATCH = 1;
private static final int ZZ_PUSHBACK_2BIG = 2;

private static final char[] EMPTY_BUFFER = new char[0];

/** Sentinel input value signalling end of input to the scanning loop. */
private static final int YYEOF = -1;

// NOTE(review): static field assigned from an instance constructor below; this
// is generated skeleton filler (the IntelliJ lexer API feeds text via reset()
// instead of a Reader), hence the original "Fake" remark.
private static java.io.Reader zzReader = null; // Fake

/* error messages for the codes above */
// NOTE: "Unkown" typo is in the generated runtime string; kept byte-identical.
private static final String ZZ_ERROR_MSG[] = {
    "Unkown internal scanner error",
    "Error: could not match input",
    "Error: pushback value was too large"
};

/**
 * ZZ_ATTRIBUTE[aState] contains the attributes of state <code>aState</code>
 * (bit 1 = accepting state, bit 8 = stop scanning on match; see advance()).
 */
private static final int [] ZZ_ATTRIBUTE = zzUnpackAttribute();

/* Packed (count, value) pairs for ZZ_ATTRIBUTE; initializer continues below. */
private static final String ZZ_ATTRIBUTE_PACKED_0
= "\2\0\1\1\2\0\1\11\1\1\1\11\1\1\1\11"+
"\34\1\1\11\1\1\1\11\1\1\1\11\1\1\1\11"+
"\1\1\1\11\16\1\1\11\4\1\1\11\3\1\1\0"+
"\1\11\1\0\7\1\1\0\3\1\1\11\3\1\1\0"+
"\1\11\1\0\1\1\1\11\6\1\2\0\1\11\1\0"+
"\13\1\2\11\1\0\1\11\1\0\1\1\1\0\1\11"+
"\17\1\2\11\1\0\1\1\1\0\2\11\17\0\14\1"+
"\1\11\17\0\1\11\6\1\1\0\1\11\1\1\1\11"+
"\6\1\1\0\1\11\1\0\1\1\1\11\2\0\10\1"+
"\14\0\11\1\15\0\6\1\1\0\7\1\6\0\10\1"+
"\1\0\1\11\11\1\1\0\5\1\17\0\1\1\1\0"+
"\5\1\1\0\10\1\1\0\5\1\20\0\1\1\3\0"+
"\5\1\1\0\4\1\1\0\2\1\5\0\1\1\2\0"+
"\1\1\12\0\2\1\1\0\2\1\10\0\1\1\11\0"+
"\2\1\11\0\1\1\1\0\1\1\6\0\2\1\1\0"+
"\1\11\3\0\2\1\3\0\1\1\4\0\2\1\10\0"+
"\1\1\1\0\1\1\2\0\1\1\2\0\1\1\4\0"+
"\1\11\5\0\1\1\1\0\2\1\1\0\1\1\3\0"+
"\2\1\2\0\1\1\2\0\1\1\10\0\2\1";

/**
 * Allocates and fills the per-state attribute table by decoding the
 * run-length-encoded {@code ZZ_ATTRIBUTE_PACKED_0} string.
 *
 * @return the unpacked attribute table (503 entries, one per DFA state)
 */
private static int [] zzUnpackAttribute() {
    int [] result = new int[503];
    int offset = 0;
    offset = zzUnpackAttribute(ZZ_ATTRIBUTE_PACKED_0, offset, result);
    return result;
}

/**
 * Decodes one packed attribute string into {@code result}.
 * The packed format is (count, value) character pairs; each pair expands
 * to {@code count} copies of {@code value}.  Unlike zzUnpackTrans, values
 * are stored as-is (no off-by-one shift).
 *
 * @param packed the packed (count, value) pair string
 * @param offset first index in {@code result} to write
 * @param result destination array
 * @return the index one past the last entry written
 */
private static int zzUnpackAttribute(String packed, int offset, int [] result) {
    int i = 0;       /* index in packed string  */
    int j = offset;  /* index in unpacked array */
    int l = packed.length();
    while (i < l) {
        int count = packed.charAt(i++);
        int value = packed.charAt(i++);
        do result[j++] = value; while (--count > 0);
    }
    return j;
}

/** the current state of the DFA */
private int zzState;

/** the current lexical state */
private int zzLexicalState = YYINITIAL;

/**
 * this buffer contains the current text to be matched and is
 * the source of the yytext() string
 */
private char[] zzBuffer = new char[0];

/**
 * this buffer may contain the current text array to be matched
 * when it is cheap to acquire it (null otherwise; see yycharat())
 */
private char[] zzBufferArray;

/** the textposition at the last accepting state */
private int zzMarkedPos;

/** the textposition at the last state to be included in yytext */
private int zzPushbackPos;

/** the current text position in the buffer */
private int zzCurrentPos;

/** startRead marks the beginning of the yytext() string in the buffer */
private int zzStartRead;

/**
 * endRead marks the last character in the buffer, that has been read
 * from input
 */
private int zzEndRead;

/**
 * zzAtBOL == true <=> the scanner is currently at the beginning of a line
 */
private boolean zzAtBOL = true;

/** zzAtEOF == true <=> the scanner is at the EOF */
private boolean zzAtEOF;

/* user code: */
// State carried across rules by the hand-written lexer actions (defined in
// the .flex source).  Note: "qoute" is a typo in the generated identifier;
// it cannot be renamed here without seeing all its uses.
private int qouteStart;
private int commentStart;
private int commentDepth;

/**
 * Creates a new scanner
 * There is also a java.io.InputStream version of this constructor.
 *
 * @param in  the java.io.Reader to read input from.
 */
_HaskellLexer(java.io.Reader in) {
    // NOTE(review): zzReader is static ("Fake"); the IntelliJ lexer API feeds
    // text through reset(CharSequence, ...) rather than this Reader.
    this.zzReader = in;
}

/**
 * Creates a new scanner.
 * There is also java.io.Reader version of this constructor.
 *
 * @param in  the java.io.Inputstream to read input from.
 */
_HaskellLexer(java.io.InputStream in) {
    this(new java.io.InputStreamReader(in, java.nio.charset.Charset.forName("UTF-8")));
}

/**
 * Unpacks the compressed character translation table.
 *
 * @param packed   the packed character translation table
 * @return         the unpacked character translation table
 *                 (one entry per UTF-16 code unit; 3368 is the generated
 *                 packed length)
 */
private static char [] zzUnpackCMap(String packed) {
    char [] map = new char[0x10000];
    int i = 0;  /* index in packed string  */
    int j = 0;  /* index in unpacked array */
    while (i < 3368) {
        int count = packed.charAt(i++);
        char value = packed.charAt(i++);
        do map[j++] = value; while (--count > 0);
    }
    return map;
}

/** Returns the start offset of the current token in the buffer. */
public final int getTokenStart(){
    return zzStartRead;
}

/** Returns the end offset (exclusive) of the current token in the buffer. */
public final int getTokenEnd(){
    return getTokenStart() + yylength();
}

/**
 * Re-targets the scanner at a new piece of text (IntelliJ lexer API entry
 * point).  Copies the text into zzBuffer and resets all scan positions.
 *
 * @param buffer        the text to lex
 * @param start         offset at which lexing begins
 * @param end           offset at which lexing stops
 * @param initialState  lexical state to start in
 */
public void reset(CharSequence buffer, int start, int end, int initialState){
    zzBuffer = buffer.toString().toCharArray();
    // Borrow the backing char[] without copying when the CharSequence allows it.
    zzBufferArray = com.intellij.util.text.CharArrayUtil.fromSequenceWithoutCopying(buffer);
    zzCurrentPos = zzMarkedPos = zzStartRead = start;
    zzPushbackPos = 0;
    zzAtEOF = false;
    zzAtBOL = true;
    zzEndRead = end;
    yybegin(initialState);
}

/**
 * Refills the input buffer.
 *
 * @return      <code>false</code>, iff there was new input.
* * @exception java.io.IOException if any I/O-Error occurs */ private boolean zzRefill() throws java.io.IOException { return true; } /** * Returns the current lexical state. */ public final int yystate() { return zzLexicalState; } /** * Enters a new lexical state * * @param newState the new lexical state */ public final void yybegin(int newState) { zzLexicalState = newState; } /** * Returns the text matched by the current regular expression. */ public final CharSequence yytext() { return new String(zzBuffer, zzStartRead, zzMarkedPos); } /** * Returns the character at position <tt>pos</tt> from the * matched text. * * It is equivalent to yytext().charAt(pos), but faster * * @param pos the position of the character to fetch. * A value from 0 to yylength()-1. * * @return the character at position pos */ public final char yycharat(int pos) { return zzBufferArray != null ? zzBufferArray[zzStartRead+pos]:zzBuffer[zzStartRead+pos]; } /** * Returns the length of the matched text region. */ public final int yylength() { return zzMarkedPos-zzStartRead; } /** * Reports an error that occured while scanning. * * In a wellformed scanner (no or only correct usage of * yypushback(int) and a match-all fallback rule) this method * will only be called with things that "Can't Possibly Happen". * If this method is called, something is seriously wrong * (e.g. a JFlex bug producing a faulty scanner etc.). * * Usual syntax/scanner level error handling should be done * in error fallback rules. * * @param errorCode the code of the errormessage to display */ private void zzScanError(int errorCode) { String message; try { message = ZZ_ERROR_MSG[errorCode]; } catch (ArrayIndexOutOfBoundsException e) { message = ZZ_ERROR_MSG[ZZ_UNKNOWN_ERROR]; } throw new Error(message); } /** * Pushes the specified amount of characters back into the input stream. * * They will be read again by then next call of the scanning method * * @param number the number of characters to be read again. 
 *                This number must not be greater than yylength()!
 */
public void yypushback(int number)  {
  if ( number > yylength() )
    zzScanError(ZZ_PUSHBACK_2BIG);

  zzMarkedPos -= number;
}

/**
 * Resumes scanning until the next regular expression is matched,
 * the end of input is encountered or an I/O-Error occurs.
 *
 * NOTE(review): this method is generated by JFlex from the lexer grammar —
 * prefer regenerating from the .flex source over hand edits. It drives the
 * packed DFA tables (ZZ_TRANS / ZZ_ROWMAP / ZZ_ATTRIBUTE) and then maps the
 * final accepting action to a token via the big switch below. Returns null
 * at end of input (see the default branch).
 *
 * @return the next token
 * @exception java.io.IOException if any I/O-Error occurs
 */
public IElementType advance() throws java.io.IOException {
  int zzInput;
  int zzAction;

  // cached fields:
  int zzCurrentPosL;
  int zzMarkedPosL;
  int zzEndReadL = zzEndRead;
  char[] zzBufferL = zzBuffer;
  char[] zzBufferArrayL = zzBufferArray;
  char [] zzCMapL = ZZ_CMAP;

  int [] zzTransL = ZZ_TRANS;
  int [] zzRowMapL = ZZ_ROWMAP;
  int [] zzAttrL = ZZ_ATTRIBUTE;

  while (true) {
    zzMarkedPosL = zzMarkedPos;

    zzAction = -1;

    zzCurrentPosL = zzCurrentPos = zzStartRead = zzMarkedPosL;

    zzState = ZZ_LEXSTATE[zzLexicalState];

    // set up zzAction for empty match case:
    int zzAttributes = zzAttrL[zzState];
    if ( (zzAttributes & 1) == 1 ) {
      zzAction = zzState;
    }

    // Run the DFA until it can no longer advance; remember the last
    // accepting state reached (longest-match semantics).
    zzForAction: {
      while (true) {

        if (zzCurrentPosL < zzEndReadL)
          zzInput = zzBufferL[zzCurrentPosL++];
        else if (zzAtEOF) {
          zzInput = YYEOF;
          break zzForAction;
        }
        else {
          // store back cached positions
          zzCurrentPos  = zzCurrentPosL;
          zzMarkedPos   = zzMarkedPosL;
          boolean eof = zzRefill();
          // get translated positions and possibly new buffer
          zzCurrentPosL  = zzCurrentPos;
          zzMarkedPosL   = zzMarkedPos;
          zzBufferL      = zzBuffer;
          zzEndReadL     = zzEndRead;
          if (eof) {
            zzInput = YYEOF;
            break zzForAction;
          }
          else {
            zzInput = zzBufferL[zzCurrentPosL++];
          }
        }
        int zzNext = zzTransL[ zzRowMapL[zzState] + zzCMapL[zzInput] ];
        if (zzNext == -1) break zzForAction;
        zzState = zzNext;

        zzAttributes = zzAttrL[zzState];
        if ( (zzAttributes & 1) == 1 ) {
          zzAction = zzState;
          zzMarkedPosL = zzCurrentPosL;
          // attribute bit 8 marks a "pushback on accept" / final state:
          // stop immediately.
          if ( (zzAttributes & 8) == 8 ) break zzForAction;
        }

      }
    }

    // store back cached position
    zzMarkedPos = zzMarkedPosL;

    // Map the accepting state to its action number and emit the token.
    switch (zzAction < 0 ? zzAction : ZZ_ACTION[zzAction]) {
      case 1:
        { return HaskellLexerTokensKt.getBLOCK_COMMENT();
        }
      case 128: break;
      case 2:
        { return TokenType.BAD_CHARACTER;
        }
      case 129: break;
      case 3:
        { return TokenType.WHITE_SPACE;
        }
      case 130: break;
      case 4:
        { return HaskellLexerTokensKt.getNEW_LINE();
        }
      case 131: break;
      case 5:
        { return HaskellLexerTokens.INTEGER;
        }
      case 132: break;
      case 6:
        { return HaskellLexerTokens.BACKQUOTE;
        }
      case 133: break;
      case 7:
        { return HaskellLexerTokens.VARSYM;
        }
      case 134: break;
      case 8:
        { return HaskellLexerTokens.VARID;
        }
      case 135: break;
      case 9:
        { return HaskellLexerTokens.CONID;
        }
      case 136: break;
      case 10:
        { return HaskellLexerTokens.UNDERSCORE;
        }
      case 137: break;
      case 11:
        { return HaskellLexerTokens.COLON;
        }
      case 138: break;
      case 12:
        { return HaskellLexerTokens.SIMPLEQUOTE;
        }
      case 139: break;
      case 13:
        { return HaskellLexerTokens.STAR;
        }
      case 140: break;
      case 14:
        // '\' starts a lambda: switch state so "\case" can be recognized.
        { yybegin(LAMBDA); return HaskellLexerTokens.LAM;
        }
      case 141: break;
      case 15:
        { return HaskellLexerTokens.OBRACK;
        }
      case 142: break;
      case 16:
        { return HaskellLexerTokens.AT;
        }
      case 143: break;
      case 17:
        { return HaskellLexerTokens.MINUS;
        }
      case 144: break;
      case 18:
        { return HaskellLexerTokens.OCURLY;
        }
      case 145: break;
      case 19:
        { return HaskellLexerTokens.CCURLY;
        }
      case 146: break;
      case 20:
        { return HaskellLexerTokens.VBAR;
        }
      case 147: break;
      case 21:
        { return HaskellLexerTokens.CBRACK;
        }
      case 148: break;
      case 22:
        { return HaskellLexerTokens.OPAREN;
        }
      case 149: break;
      case 23:
        { return HaskellLexerTokens.CPAREN;
        }
      case 150: break;
      case 24:
        { return HaskellLexerTokens.SEMI;
        }
      case 151: break;
      case 25:
        { return HaskellLexerTokens.DOT;
        }
      case 152: break;
      case 26:
        { return HaskellLexerTokens.COMMA;
        }
      case 153: break;
      case 27:
        { return HaskellLexerTokens.EQUAL;
        }
      case 154: break;
      case 28:
        { return HaskellLexerTokens.RARROW;
        }
      case 155: break;
      case 29:
        { return HaskellLexerTokens.TILDE;
        }
      case 156: break;
      case 30:
        { return HaskellLexerTokens.BANG;
        }
      case 157: break;
      case 31:
        // Intentionally empty action: the matched text is consumed without
        // emitting a token (scanning continues).
        {
        }
      case 158: break;
      case 32:
        // Lambda state fallback: not "\case" — give the character back and
        // return to the initial state.
        { yypushback(1); yybegin(YYINITIAL);
        }
      case 159: break;
      case 33:
        { return HaskellLexerTokens.STRING;
        }
      case 160: break;
      case 34:
        { return HaskellLexerTokens.OF;
        }
      case 161: break;
      case 35:
        { return HaskellLexerTokens.CONSYM;
        }
      case 162: break;
      case 36:
        { return HaskellLexerTokens.DCOLON;
        }
      case 163: break;
      case 37:
        { return HaskellLexerTokens.CPABRACK;
        }
      case 164: break;
      case 38:
        { return HaskellLexerTokens.TYQUOTE;
        }
      case 165: break;
      case 39:
        { return HaskellLexerTokens.PARENESCAPE;
        }
      case 166: break;
      case 40:
        { return HaskellLexerTokens.IF;
        }
      case 167: break;
      case 41:
        { return HaskellLexerTokens.IN;
        }
      case 168: break;
      case 42:
        { return HaskellLexerTokens.DO;
        }
      case 169: break;
      case 43:
        { return HaskellLexerTokens.OPABRACK;
        }
      case 170: break;
      case 44:
        { return HaskellLexerTokens.OPENEXPQUOTE;
        }
      case 171: break;
      case 45:
        { return HaskellLexerTokens.AS;
        }
      case 172: break;
      case 46:
        { return HaskellLexerTokensKt.getEND_OF_LINE_COMMENT();
        }
      case 173: break;
      case 47:
        // "{-" seen: enter the block-comment state and remember where the
        // comment token starts.
        { yybegin(BLOCK_COMMENT); commentDepth = 0; commentStart = getTokenStart();
        }
      case 174: break;
      case 48:
        { return HaskellLexerTokens.CLOSEQUOTE;
        }
      case 175: break;
      case 49:
        { return HaskellLexerTokens.CPARENBAR;
        }
      case 176: break;
      case 50:
        { return HaskellLexerTokens.OUBXPAREN;
        }
      case 177: break;
      case 51:
        { return HaskellLexerTokens.CUBXPAREN;
        }
      case 178: break;
      case 52:
        { return HaskellLexerTokens.DOTDOT;
        }
      case 179: break;
      case 53:
        { return HaskellLexerTokens.DARROW;
        }
      case 180: break;
      case 54:
        { return HaskellLexerTokens.LARROW;
        }
      case 181: break;
      case 55:
        { return HaskellLexerTokens.DUPIPVARID;
        }
      case 182: break;
      case 56:
        // "-}" inside a block comment: pop one nesting level, or — at depth 0 —
        // emit the whole comment as a single token spanning from commentStart.
        { if (commentDepth > 0) {
             commentDepth--;
          } else {
               // NOTE(review): 'state' is unused — looks like a generated/leftover
               // local; kept byte-identical here.
               int state = yystate();
               yybegin(YYINITIAL);
               zzStartRead = commentStart;
               return HaskellLexerTokensKt.getBLOCK_COMMENT();
          }
        }
      case 183: break;
      case 57:
        // Nested "{-" inside a block comment.
        { commentDepth++;
        }
      case 184: break;
      case 58:
        // End of a quasi-quote: emit the whole quote as one token.
        { yybegin(YYINITIAL);
          zzStartRead = qouteStart;
          return HaskellLexerTokens.QUASIQUOTE;
        }
      case 185: break;
      case 59:
        { return HaskellLexerTokens.LET;
        }
      case 186: break;
      case 60:
        { return HaskellLexerTokens.QVARID;
        }
      case 187: break;
      case 61:
        { return HaskellLexerTokens.QCONID;
        }
      case 188: break;
      case 62:
        { return HaskellLexerTokens.CHAR;
        }
      case 189: break;
      case 63:
        { return HaskellLexerTokens.PARENTYESCAPE;
        }
      case 190: break;
      case 64:
        // Start of a quasi-quote: enter the quote state and remember where
        // the quote token starts.
        { yybegin(QUASI_QUOTE); qouteStart = getTokenStart();
        }
      case 191: break;
      case 65:
        { return HaskellLexerTokens.OPENTEXPQUOTE;
        }
      case 192: break;
      case 66:
        { return HaskellLexerTokens.CLOSETEXPQUOTE;
        }
      case 193: break;
      case 67:
        { return CPPTokens.IF;
        }
      case 194: break;
      case 68:
        { return HaskellLexerTokens.CLOSE_PRAG;
        }
      case 195: break;
      case 69:
        { return HaskellLexerTokens.SAFE;
        }
      case 196: break;
      case 70:
        { return HaskellLexerTokens.CASE;
        }
      case 197: break;
      case 71:
        { return HaskellLexerTokens.CAPICONV;
        }
      case 198: break;
      case 72:
        { return HaskellLexerTokens.DATA;
        }
      case 199: break;
      case 73:
        { return HaskellLexerTokens.ROLE;
        }
      case 200: break;
      case 74:
        { return HaskellLexerTokens.ELSE;
        }
      case 201: break;
      case 75:
        { return HaskellLexerTokens.TYPE;
        }
      case 202: break;
      case 76:
        { return HaskellLexerTokens.THEN;
        }
      case 203: break;
      case 77:
        { return HaskellLexerTokens.PRIMCALLCONV;
        }
      case 204: break;
      case 78:
        // "\case" recognized while in the LAMBDA state.
        { yybegin(YYINITIAL); return HaskellLexerTokens.LCASE;
        }
      case 205: break;
      case 79:
        { return HaskellLexerTokens.CLASS;
        }
      case 206: break;
      case 80:
        { return HaskellLexerTokens.CCALLCONV;
        }
      case 207: break;
      case 81:
        { return HaskellLexerTokens.INFIX;
        }
      case 208: break;
      case 82:
        { return CPPTokens.ELSE;
        }
      case 209: break;
      case 83:
        { return HaskellLexerTokens.WHERE;
        }
      case 210: break;
      case 84:
        { return HaskellLexerTokens.MODULE;
        }
      case 211: break;
      case 85:
        { return HaskellLexerTokens.IMPORT;
        }
      case 212: break;
      case 86:
        { return HaskellLexerTokens.INFIXL;
        }
      case 213: break;
      case 87:
        { return HaskellLexerTokens.INFIXR;
        }
      case 214: break;
      case 88:
        { return HaskellLexerTokens.FORALL;
        }
      case 215: break;
      case 89:
        { return HaskellLexerTokens.FAMILY;
        }
      case 216: break;
      case 90:
        { return HaskellLexerTokens.EXPORT;
        }
      case 217: break;
      case 91:
        { return HaskellLexerTokensKt.getPRAGMA();
        }
      case 218: break;
      case 92:
        { return CPPTokens.ENDIF;
        }
      case 219: break;
      case 93:
        { return HaskellLexerTokens.UNSAFE;
        }
      case 220: break;
      case 94:
        { return HaskellLexerTokens.HIDING;
        }
      case 221: break;
      case 95:
        { return HaskellLexerTokens.STDCALLCONV;
        }
      case 222: break;
      case 96:
        { return HaskellLexerTokens.DEFAULT;
        }
      case 223: break;
      case 97:
        { return HaskellLexerTokens.FOREIGN;
        }
      case 224: break;
      case 98:
        { return HaskellLexerTokens.NEWTYPE;
        }
      case 225: break;
      case 99:
        { return HaskellLexerTokens.SCC_PRAG;
        }
      case 226: break;
      case 100:
        { return HaskellLexerTokens.ANN_PRAG;
        }
      case 227: break;
      case 101:
        { return HaskellLexerTokens.INSTANCE;
        }
      case 228: break;
      case 102:
        { return HaskellLexerTokens.DERIVING;
        }
      case 229: break;
      case 103:
        { return HaskellLexerTokens.CORE_PRAG;
        }
      case 230: break;
      case 104:
        { return HaskellLexerTokens.CTYPE;
        }
      case 231: break;
      case 105:
        { return HaskellLexerTokens.RULES_PRAG;
        }
      case 232: break;
      case 106:
        { return HaskellLexerTokens.QUALIFIED;
        }
      case 233: break;
      case 107:
        // Switch into TeX-style literate mode; the matched text is a comment.
        { yybegin(TEX); return HaskellLexerTokensKt.getBLOCK_COMMENT();
        }
      case 234: break;
      case 108:
        { return HaskellLexerTokens.UNPACK_PRAG;
        }
      case 235: break;
      case 109:
        { return HaskellLexerTokens.SOURCE_PRAG;
        }
      case 236: break;
      case 110:
        { return HaskellLexerTokens.INLINE_PRAG;
        }
      case 237: break;
      case 111:
        // "(|" followed by one extra matched char: give that char back.
        { yypushback(1); return HaskellLexerTokens.OPARENBAR;
        }
      case 238: break;
      case 112:
        { return HaskellLexerTokens.JAVASCRIPTCALLCONV;
        }
      case 239: break;
      case 113:
        { return HaskellLexerTokens.MINIMAL_PRAG;
        }
      case 240: break;
      case 114:
        { return HaskellLexerTokens.WARNING_PRAG;
        }
      case 241: break;
      case 115:
        { return HaskellLexerTokens.NOUNPACK_PRAG;
        }
      case 242: break;
      case 116:
        { return HaskellLexerTokens.OVERLAPS;
        }
      case 243: break;
      case 117:
        // Leave literate/TeX mode; the matched text is a comment.
        { yybegin(YYINITIAL); return HaskellLexerTokensKt.getBLOCK_COMMENT();
        }
      case 244: break;
      case 118:
        { return HaskellLexerTokens.VECT_PRAG;
        }
      case 245: break;
      case 119:
        { return HaskellLexerTokens.GENERATED_PRAG;
        }
      case 246: break;
      case 120:
        { return HaskellLexerTokens.DEPRECATED_PRAG;
        }
      case 247: break;
      case 121:
        { return HaskellLexerTokens.SPEC_PRAG;
        }
      case 248: break;
      case 122:
        { return HaskellLexerTokens.INCOHERENT;
        }
      case 249: break;
      case 123:
        { return HaskellLexerTokens.NOVECT_PRAG;
        }
      case 250: break;
      case 124:
        { return HaskellLexerTokens.OVERLAPPING;
        }
      case 251: break;
      case 125:
        { return HaskellLexerTokens.OVERLAPPABLE;
        }
      case 252: break;
      case 126:
        { return HaskellLexerTokens.VECT_SCALAR_PRAG;
        }
      case 253: break;
      case 127:
        { return HaskellLexerTokens.SPEC_INLINE_PRAG;
        }
      case 254: break;
      default:
        // End of input: close any token left open by an unterminated
        // construct (block comment / quasi-quote), otherwise signal EOF
        // to the caller by returning null.
        if (zzInput == YYEOF && zzStartRead == zzCurrentPos) {
          zzAtEOF = true;
            switch (zzLexicalState) {
            case BLOCK_COMMENT: {
              // NOTE(review): 'state' is unused — generated/leftover local.
              int state = yystate();
              yybegin(YYINITIAL);
              zzStartRead = commentStart;
              return HaskellLexerTokensKt.getBLOCK_COMMENT();
            }
            case 504: break;
            case QUASI_QUOTE: {
              yybegin(YYINITIAL);
              zzStartRead = qouteStart;
              return HaskellLexerTokens.QUASIQUOTE;
            }
            case 505: break;
            default:
              return null;
            }
        }
        else {
          zzScanError(ZZ_NO_MATCH);
        }
    }
  }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.cluster.routing.allocation; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.cluster.ESAllocationTestCase; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING; import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; import static org.elasticsearch.cluster.routing.allocation.RoutingNodesUtils.numberOfShardsOfType; import static org.hamcrest.Matchers.equalTo; /** */ public class ShardsLimitAllocationTests 
extends ESAllocationTestCase {
    private final Logger logger = Loggers.getLogger(ShardsLimitAllocationTests.class);

    /**
     * An index with 4 shards + 1 replica (8 copies) and
     * index.routing.allocation.total_shards_per_node=2 on two nodes: only
     * 2 shards may sit on each node, so 4 copies stay unassigned even after
     * the primaries start.
     */
    public void testIndexLevelShardsLimitAllocate() {
        AllocationService strategy = createAllocationService(Settings.builder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build());

        logger.info("Building initial routing table");

        MetaData metaData = MetaData.builder()
                .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)
                        .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 4)
                        .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
                        .put(ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE_SETTING.getKey(), 2)))
                .build();

        RoutingTable routingTable = RoutingTable.builder()
                .addAsNew(metaData.index("test"))
                .build();

        ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)).metaData(metaData).routingTable(routingTable).build();
        logger.info("Adding two nodes and performing rerouting");
        clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().add(newNode("node1")).add(newNode("node2"))).build();
        RoutingAllocation.Result routingResult = strategy.reroute(clusterState, "reroute");
        clusterState = ClusterState.builder(clusterState).routingResult(routingResult).build();

        // Per-index limit of 2 caps each node at 2 initializing shards.
        assertThat(clusterState.getRoutingNodes().node("node1").numberOfShardsWithState(ShardRoutingState.INITIALIZING), equalTo(2));
        assertThat(clusterState.getRoutingNodes().node("node2").numberOfShardsWithState(ShardRoutingState.INITIALIZING), equalTo(2));

        logger.info("Start the primary shards");
        RoutingNodes routingNodes = clusterState.getRoutingNodes();
        routingResult = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING));
        clusterState = ClusterState.builder(clusterState).routingResult(routingResult).build();

        assertThat(clusterState.getRoutingNodes().node("node1").numberOfShardsWithState(ShardRoutingState.STARTED), equalTo(2));
        assertThat(clusterState.getRoutingNodes().node("node1").numberOfShardsWithState(ShardRoutingState.INITIALIZING), equalTo(0));
        assertThat(clusterState.getRoutingNodes().node("node2").numberOfShardsWithState(ShardRoutingState.STARTED), equalTo(2));
        assertThat(clusterState.getRoutingNodes().node("node2").numberOfShardsWithState(ShardRoutingState.INITIALIZING), equalTo(0));
        // 8 copies total, 2 per node allowed -> 4 remain unassigned.
        assertThat(clusterState.getRoutingNodes().unassigned().size(), equalTo(4));

        logger.info("Do another reroute, make sure its still not allocated");
        // No assertions follow: applying started shards again must simply not
        // throw / not change the picture — the limit keeps the rest unassigned.
        routingNodes = clusterState.getRoutingNodes();
        routingResult = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING));
        clusterState = ClusterState.builder(clusterState).routingResult(routingResult).build();
    }

    /**
     * Cluster-wide limit: with cluster.routing.allocation.total_shards_per_node=1
     * only one of 4 primaries fits on each of the two nodes; raising the limit
     * to 2 (new AllocationService) lets the remaining two allocate.
     */
    public void testClusterLevelShardsLimitAllocate() {
        AllocationService strategy = createAllocationService(Settings.builder()
                .put("cluster.routing.allocation.node_concurrent_recoveries", 10)
                .put(ShardsLimitAllocationDecider.CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING.getKey(), 1)
                .build());

        logger.info("Building initial routing table");

        MetaData metaData = MetaData.builder()
                .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)
                        .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 4)
                        .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)))
                .build();

        RoutingTable routingTable = RoutingTable.builder()
                .addAsNew(metaData.index("test"))
                .build();

        ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)).metaData(metaData).routingTable(routingTable).build();
        logger.info("Adding two nodes and performing rerouting");
        clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().add(newNode("node1")).add(newNode("node2"))).build();
        RoutingAllocation.Result routingResult = strategy.reroute(clusterState, "reroute");
        clusterState = ClusterState.builder(clusterState).routingResult(routingResult).build();

        // Cluster-wide limit of 1 -> exactly one shard per node.
        assertThat(clusterState.getRoutingNodes().node("node1").numberOfShardsWithState(ShardRoutingState.INITIALIZING), equalTo(1));
        assertThat(clusterState.getRoutingNodes().node("node2").numberOfShardsWithState(ShardRoutingState.INITIALIZING), equalTo(1));

        logger.info("Start the primary shards");
        RoutingNodes routingNodes = clusterState.getRoutingNodes();
        routingResult = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING));
        clusterState = ClusterState.builder(clusterState).routingResult(routingResult).build();

        assertThat(clusterState.getRoutingNodes().node("node1").numberOfShardsWithState(ShardRoutingState.STARTED), equalTo(1));
        assertThat(clusterState.getRoutingNodes().node("node2").numberOfShardsWithState(ShardRoutingState.STARTED), equalTo(1));
        assertThat(clusterState.getRoutingNodes().unassigned().size(), equalTo(2));

        // Bump the cluster total shards to 2
        strategy = createAllocationService(Settings.builder()
                .put("cluster.routing.allocation.node_concurrent_recoveries", 10)
                .put(ShardsLimitAllocationDecider.CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING.getKey(), 2)
                .build());

        logger.info("Do another reroute, make sure shards are now allocated");
        routingResult = strategy.reroute(clusterState, "reroute");
        clusterState = ClusterState.builder(clusterState).routingResult(routingResult).build();

        assertThat(clusterState.getRoutingNodes().node("node1").numberOfShardsWithState(ShardRoutingState.INITIALIZING), equalTo(1));
        assertThat(clusterState.getRoutingNodes().node("node2").numberOfShardsWithState(ShardRoutingState.INITIALIZING), equalTo(1));

        routingNodes = clusterState.getRoutingNodes();
        routingResult = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING));
        clusterState = ClusterState.builder(clusterState).routingResult(routingResult).build();

        assertThat(clusterState.getRoutingNodes().node("node1").numberOfShardsWithState(ShardRoutingState.STARTED), equalTo(2));
        assertThat(clusterState.getRoutingNodes().node("node2").numberOfShardsWithState(ShardRoutingState.STARTED), equalTo(2));
        assertThat(clusterState.getRoutingNodes().unassigned().size(), equalTo(0));
    }

    /**
     * Applying index.routing.allocation.total_shards_per_node=3 to an index
     * whose 5 shards already sit on one node forces 2 of them to relocate;
     * the balancer (replica-only weighting configured below) then evens the
     * two indices out at 5 shards per node.
     */
    public void testIndexLevelShardsLimitRemain() {
        AllocationService strategy = createAllocationService(Settings.builder()
                .put("cluster.routing.allocation.node_concurrent_recoveries", 10)
                .put("cluster.routing.allocation.node_initial_primaries_recoveries", 10)
                .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1)
                // Weight balancing purely by replica count so shard placement
                // in this test is deterministic per index.
                .put("cluster.routing.allocation.balance.index", 0.0f)
                .put("cluster.routing.allocation.balance.replica", 1.0f)
                .put("cluster.routing.allocation.balance.primary", 0.0f)
                .build());

        logger.info("Building initial routing table");

        MetaData metaData = MetaData.builder()
                .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)
                        .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 5)
                        .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
                ))
                .build();

        RoutingTable initialRoutingTable = RoutingTable.builder()
                .addAsNew(metaData.index("test"))
                .build();

        ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)).metaData(metaData).routingTable(initialRoutingTable).build();
        logger.info("Adding one node and reroute");
        clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().add(newNode("node1"))).build();
        RoutingAllocation.Result routingResult = strategy.reroute(clusterState, "reroute");
        clusterState = ClusterState.builder(clusterState).routingResult(routingResult).build();

        logger.info("Start the primary shards");
        RoutingNodes routingNodes = clusterState.getRoutingNodes();
        routingResult = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING));
        clusterState = ClusterState.builder(clusterState).routingResult(routingResult).build();

        // All 5 shards of "test" started on the only node.
        assertThat(numberOfShardsOfType(clusterState.getRoutingNodes(), STARTED), equalTo(5));

        logger.info("add another index with 5 shards");
        metaData = MetaData.builder(clusterState.metaData())
                .put(IndexMetaData.builder("test1").settings(settings(Version.CURRENT)
                        .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 5)
                        .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
                ))
                .build();
        RoutingTable updatedRoutingTable = RoutingTable.builder(clusterState.routingTable())
                .addAsNew(metaData.index("test1"))
                .build();
        clusterState = ClusterState.builder(clusterState).metaData(metaData).routingTable(updatedRoutingTable).build();

        logger.info("Add another one node and reroute");
        clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()).add(newNode("node2"))).build();
        routingResult = strategy.reroute(clusterState, "reroute");
        clusterState = ClusterState.builder(clusterState).routingResult(routingResult).build();

        routingNodes = clusterState.getRoutingNodes();
        routingResult = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING));
        clusterState = ClusterState.builder(clusterState).routingResult(routingResult).build();

        assertThat(numberOfShardsOfType(clusterState.getRoutingNodes(), STARTED), equalTo(10));

        // "test" stays put on node1, so the new "test1" lands entirely on node2.
        for (ShardRouting shardRouting : clusterState.getRoutingNodes().node("node1")) {
            assertThat(shardRouting.getIndexName(), equalTo("test"));
        }
        for (ShardRouting shardRouting : clusterState.getRoutingNodes().node("node2")) {
            assertThat(shardRouting.getIndexName(), equalTo("test1"));
        }

        logger.info("update {} for test, see that things move", ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE_SETTING.getKey());
        metaData = MetaData.builder(clusterState.metaData())
                .put(IndexMetaData.builder(clusterState.metaData().index("test")).settings(settings(Version.CURRENT)
                        .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 5)
                        .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
                        .put(ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE_SETTING.getKey(), 3)
                ))
                .build();
        clusterState = ClusterState.builder(clusterState).metaData(metaData).build();

        logger.info("reroute after setting");
        routingResult = strategy.reroute(clusterState, "reroute");
        clusterState = ClusterState.builder(clusterState).routingResult(routingResult).build();

        // Limit of 3 for "test" forces 2 of its 5 shards off node1; the
        // balancer relocates 2 "test1" shards to node1 in exchange.
        assertThat(clusterState.getRoutingNodes().node("node1").numberOfShardsWithState(STARTED), equalTo(3));
        assertThat(clusterState.getRoutingNodes().node("node1").numberOfShardsWithState(RELOCATING), equalTo(2));
        assertThat(clusterState.getRoutingNodes().node("node2").numberOfShardsWithState(RELOCATING), equalTo(2));
        assertThat(clusterState.getRoutingNodes().node("node2").numberOfShardsWithState(STARTED), equalTo(3));
        // the first move will destroy the balance and the balancer will move 2 shards from node2 to node1 right after
        // moving the shards to node2, since we consider INITIALIZING nodes during rebalance
        routingNodes = clusterState.getRoutingNodes();
        routingResult = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING));
        clusterState = ClusterState.builder(clusterState).routingResult(routingResult).build();
        // now we are done, compared to EvenShardCountAllocator, since the Balancer is not solely based on the average
        assertThat(clusterState.getRoutingNodes().node("node1").numberOfShardsWithState(STARTED), equalTo(5));
        assertThat(clusterState.getRoutingNodes().node("node2").numberOfShardsWithState(STARTED), equalTo(5));
    }
}
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.8-b130911.1802 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2016.10.09 at 10:10:23 AM CST // package elong; import java.math.BigDecimal; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import java.util.List; import com.alibaba.fastjson.annotation.JSONField; import javax.xml.bind.annotation.XmlSchemaType; import javax.xml.bind.annotation.XmlType; /** * <p>Java class for ListRatePlan complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType name="ListRatePlan"> * &lt;complexContent> * &lt;extension base="{}BaseRatePlan"> * &lt;sequence> * &lt;element name="Status" type="{http://www.w3.org/2001/XMLSchema}boolean"/> * &lt;element name="CustomerType" type="{}EnumGuestTypeCode"/> * &lt;element name="CurrentAlloment" type="{http://www.w3.org/2001/XMLSchema}int"/> * &lt;element name="InstantConfirmation" type="{http://www.w3.org/2001/XMLSchema}boolean"/> * &lt;element name="ProductTypes" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/> * &lt;element name="IsLastMinuteSale" type="{http://www.w3.org/2001/XMLSchema}boolean"/> * &lt;element name="StartTime" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/> * &lt;element name="EndTime" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/> * &lt;element name="TotalRate" type="{http://www.w3.org/2001/XMLSchema}decimal"/> * &lt;element name="AverageRate" type="{http://www.w3.org/2001/XMLSchema}decimal"/> * &lt;element name="AverageBaseRate" type="{http://www.w3.org/2001/XMLSchema}decimal"/> * &lt;element name="CurrencyCode" type="{}EnumCurrencyCode"/> * &lt;element 
name="Coupon" type="{http://www.w3.org/2001/XMLSchema}decimal"/> * &lt;element name="NightlyRates" type="{}ArrayOfNightlyRate" minOccurs="0"/> * &lt;element name="BookingRuleIds" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/> * &lt;element name="GuaranteeRuleIds" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/> * &lt;element name="PrepayRuleIds" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/> * &lt;element name="DrrRuleIds" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/> * &lt;element name="ValueAddIds" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/> * &lt;element name="GiftIds" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/> * &lt;element name="HAvailPolicyIds" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/> * &lt;element name="RoomTypeId" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/> * &lt;element name="SuffixName" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/> * &lt;element name="HotelCode" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/> * &lt;element name="InvoiceMode" type="{}EnumInvoiceMode"/> * &lt;element name="BookingChannels" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/> * &lt;/sequence> * &lt;/extension> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "ListRatePlan", propOrder = { "status", "customerType", "currentAlloment", "instantConfirmation", "productTypes", "isLastMinuteSale", "startTime", "endTime", "totalRate", "averageRate", "averageBaseRate", "currencyCode", "coupon", "nightlyRates", "bookingRuleIds", "guaranteeRuleIds", "prepayRuleIds", "drrRuleIds", "valueAddIds", "giftIds", "hAvailPolicyIds", "roomTypeId", "suffixName", "hotelCode", "invoiceMode", "bookingChannels" }) public class ListRatePlan extends BaseRatePlan { @JSONField(name = "Status") protected boolean status; @JSONField(name = 
"CustomerType")
    @XmlSchemaType(name = "string")
    protected EnumGuestTypeCode customerType;
    // NOTE(review): spelling "Alloment" (not "Allotment") is kept as-is — it is
    // part of the serialized field name ("CurrentAlloment") and must match the wire format.
    @JSONField(name = "CurrentAlloment")
    protected int currentAlloment;
    @JSONField(name = "InstantConfirmation")
    protected boolean instantConfirmation;
    @JSONField(name = "ProductTypes")
    protected String productTypes;
    @JSONField(name = "IsLastMinuteSale")
    protected boolean isLastMinuteSale;
    @JSONField(name = "StartTime")
    protected String startTime;
    @JSONField(name = "EndTime")
    protected String endTime;
    @JSONField(name = "TotalRate")
    protected BigDecimal totalRate;
    @JSONField(name = "AverageRate")
    protected BigDecimal averageRate;
    @JSONField(name = "AverageBaseRate")
    protected BigDecimal averageBaseRate;
    @JSONField(name = "CurrencyCode")
    @XmlSchemaType(name = "string")
    protected EnumCurrencyCode currencyCode;
    @JSONField(name = "Coupon")
    protected BigDecimal coupon;
    @JSONField(name = "NightlyRates")
    protected List<NightlyRate> nightlyRates;
    @JSONField(name = "BookingRuleIds")
    protected String bookingRuleIds;
    @JSONField(name = "GuaranteeRuleIds")
    protected String guaranteeRuleIds;
    @JSONField(name = "PrepayRuleIds")
    protected String prepayRuleIds;
    @JSONField(name = "DrrRuleIds")
    protected String drrRuleIds;
    @JSONField(name = "ValueAddIds")
    protected String valueAddIds;
    @JSONField(name = "GiftIds")
    protected String giftIds;
    @JSONField(name = "HAvailPolicyIds")
    protected String hAvailPolicyIds;
    @JSONField(name = "RoomTypeId")
    protected String roomTypeId;
    @JSONField(name = "SuffixName")
    protected String suffixName;
    @JSONField(name = "HotelCode")
    protected String hotelCode;
    @JSONField(name = "InvoiceMode")
    @XmlSchemaType(name = "string")
    protected EnumInvoiceMode invoiceMode;
    @JSONField(name = "BookingChannels")
    protected String bookingChannels;

    // NOTE(review): the accessors below look machine-generated (JAXB/fastjson
    // binding boilerplate). If a generator produced this class, prefer changing
    // the generator over hand-editing — TODO confirm.

    /** Gets the status property. */
    public boolean isStatus() {
        return status;
    }

    /** Sets the status property. */
    public void setStatus(boolean value) {
        this.status = value;
    }

    /** Gets the customerType property; may be null. */
    public EnumGuestTypeCode getCustomerType() { return customerType; }

    /** Sets the customerType property. */
    public void setCustomerType(EnumGuestTypeCode value) { this.customerType = value; }

    /** Gets the currentAlloment property. */
    public int getCurrentAlloment() { return currentAlloment; }

    /** Sets the currentAlloment property. */
    public void setCurrentAlloment(int value) { this.currentAlloment = value; }

    /** Gets the instantConfirmation property. */
    public boolean isInstantConfirmation() { return instantConfirmation; }

    /** Sets the instantConfirmation property. */
    public void setInstantConfirmation(boolean value) { this.instantConfirmation = value; }

    /** Gets the productTypes property; may be null. */
    public String getProductTypes() { return productTypes; }

    /** Sets the productTypes property. */
    public void setProductTypes(String value) { this.productTypes = value; }

    // "isIs" prefix is generator naming for a boolean field itself named "isLastMinuteSale".
    /** Gets the isLastMinuteSale property. */
    public boolean isIsLastMinuteSale() { return isLastMinuteSale; }

    /** Sets the isLastMinuteSale property. */
    public void setIsLastMinuteSale(boolean value) { this.isLastMinuteSale = value; }

    /** Gets the startTime property; may be null. */
    public String getStartTime() { return startTime; }

    /** Sets the startTime property. */
    public void setStartTime(String value) { this.startTime = value; }

    /** Gets the endTime property; may be null. */
    public String getEndTime() { return endTime; }

    /** Sets the endTime property. */
    public void setEndTime(String value) { this.endTime = value; }

    /** Gets the totalRate property; may be null. */
    public BigDecimal getTotalRate() { return totalRate; }

    /** Sets the totalRate property. */
    public void setTotalRate(BigDecimal value) { this.totalRate = value; }

    /** Gets the averageRate property; may be null. */
    public BigDecimal getAverageRate() { return averageRate; }

    /** Sets the averageRate property. */
    public void setAverageRate(BigDecimal value) { this.averageRate = value; }

    /** Gets the averageBaseRate property; may be null. */
    public BigDecimal getAverageBaseRate() { return averageBaseRate; }

    /** Sets the averageBaseRate property. */
    public void setAverageBaseRate(BigDecimal value) { this.averageBaseRate = value; }

    /** Gets the currencyCode property; may be null. */
    public EnumCurrencyCode getCurrencyCode() { return currencyCode; }

    /** Sets the currencyCode property. */
    public void setCurrencyCode(EnumCurrencyCode value) { this.currencyCode = value; }

    /** Gets the coupon property; may be null. */
    public BigDecimal getCoupon() { return coupon; }

    /** Sets the coupon property. */
    public void setCoupon(BigDecimal value) { this.coupon = value; }

    /** Gets the nightlyRates property; may be null. */
    public List<NightlyRate> getNightlyRates() { return nightlyRates; }

    /** Sets the nightlyRates property. */
    public void setNightlyRates(List<NightlyRate> value) { this.nightlyRates = value; }

    /** Gets the bookingRuleIds property; may be null. */
    public String getBookingRuleIds() { return bookingRuleIds; }

    /** Sets the bookingRuleIds property. */
    public void setBookingRuleIds(String value) { this.bookingRuleIds = value; }

    /** Gets the guaranteeRuleIds property; may be null. */
    public String getGuaranteeRuleIds() { return guaranteeRuleIds; }

    /** Sets the guaranteeRuleIds property. */
    public void setGuaranteeRuleIds(String value) { this.guaranteeRuleIds = value; }

    /** Gets the prepayRuleIds property; may be null. */
    public String getPrepayRuleIds() { return prepayRuleIds; }

    /** Sets the prepayRuleIds property. */
    public void setPrepayRuleIds(String value) { this.prepayRuleIds = value; }

    /** Gets the drrRuleIds property; may be null. */
    public String getDrrRuleIds() { return drrRuleIds; }

    /** Sets the drrRuleIds property. */
    public void setDrrRuleIds(String value) { this.drrRuleIds = value; }

    /** Gets the valueAddIds property; may be null. */
    public String getValueAddIds() { return valueAddIds; }

    /** Sets the valueAddIds property. */
    public void setValueAddIds(String value) { this.valueAddIds = value; }

    /** Gets the giftIds property; may be null. */
    public String getGiftIds() { return giftIds; }

    /** Sets the giftIds property. */
    public void setGiftIds(String value) { this.giftIds = value; }

    /** Gets the hAvailPolicyIds property; may be null. */
    public String getHAvailPolicyIds() { return hAvailPolicyIds; }

    /** Sets the hAvailPolicyIds property. */
    public void setHAvailPolicyIds(String value) { this.hAvailPolicyIds = value; }

    /** Gets the roomTypeId property; may be null. */
    public String getRoomTypeId() { return roomTypeId; }

    /** Sets the roomTypeId property. */
    public void setRoomTypeId(String value) { this.roomTypeId = value; }

    /** Gets the suffixName property; may be null. */
    public String getSuffixName() { return suffixName; }

    /** Sets the suffixName property. */
    public void setSuffixName(String value) { this.suffixName = value; }

    /** Gets the hotelCode property; may be null. */
    public String getHotelCode() { return hotelCode; }

    /** Sets the hotelCode property. */
    public void setHotelCode(String value) { this.hotelCode = value; }

    /** Gets the invoiceMode property; may be null. */
    public EnumInvoiceMode getInvoiceMode() { return invoiceMode; }

    /** Sets the invoiceMode property. */
    public void setInvoiceMode(EnumInvoiceMode value) { this.invoiceMode = value; }

    /** Gets the bookingChannels property; may be null. */
    public String getBookingChannels() { return bookingChannels; }

    /** Sets the bookingChannels property. */
    public void setBookingChannels(String value) { this.bookingChannels = value; }
}
package com.github.andlyticsproject.console.v2;

import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import android.support.v4.app.NotificationCompat;
import android.support.v4.app.NotificationCompat.Builder;
import android.util.Log;

import com.github.andlyticsproject.AndlyticsApp;
import com.github.andlyticsproject.R;
import com.github.andlyticsproject.console.AppAccessBlockedException;
import com.github.andlyticsproject.console.AuthenticationException;
import com.github.andlyticsproject.console.DevConsoleException;
import com.github.andlyticsproject.model.DeveloperConsoleAccount;
import com.github.andlyticsproject.util.FileUtils;

import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.http.cookie.Cookie;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Base class for Dev Console authenticators. Knows how to mine the
 * {@code startupData} JSON blob embedded in the console's HTML response for
 * the pieces needed to build a {@link SessionCredentials}: XSRF token,
 * developer accounts, whitelisted features and the preferred currency.
 *
 * NOTE(review): the numeric keys ("1", "2", ...) used below mirror the
 * console's undocumented wire format — presumably protobuf-style field
 * numbers; they can change without notice whenever Google updates the console.
 */
public abstract class BaseAuthenticator implements DevConsoleAuthenticator {

	private static final String TAG = BaseAuthenticator.class.getSimpleName();

	// Matches the inline "startupData = {...};" assignment in the console HTML.
	// Reluctant quantifier stops at the first "};" terminator.
	protected static final Pattern STARTUP_DATA_PATTERN = Pattern
			.compile("startupData = (\\{.+?\\});");

	// Google account (email) this authenticator logs in as.
	protected String accountName;

	protected BaseAuthenticator(String accountName) {
		this.accountName = accountName;
	}

	/**
	 * Extracts the XSRF token from {@code startupData}. The "XsrfToken" value
	 * is itself a JSON string whose field "1" holds the actual token.
	 *
	 * @throws DevConsoleException wrapping any {@link JSONException} if the
	 *         expected structure is missing
	 */
	protected String findXsrfToken(JSONObject startupData) {
		try {
			return new JSONObject(startupData.getString("XsrfToken")).getString("1");
		} catch (JSONException e) {
			throw new DevConsoleException(e);
		}
	}

	/**
	 * Extracts the developer accounts from {@code startupData}.
	 *
	 * @return the accounts, or {@code null} if none were found
	 * @throws DevConsoleException wrapping any {@link JSONException}
	 */
	protected DeveloperConsoleAccount[] findDeveloperAccounts(JSONObject startupData) {
		List<DeveloperConsoleAccount> devAccounts = new ArrayList<DeveloperConsoleAccount>();
		try {
			JSONObject devConsoleAccountsObj = new JSONObject(
					startupData.getString("DeveloperConsoleAccounts"));
			JSONArray devConsoleAccountsArr = devConsoleAccountsObj.getJSONArray("1");
			for (int i = 0; i < devConsoleAccountsArr.length(); i++) {
				JSONObject accountObj = devConsoleAccountsArr.getJSONObject(i);
				String developerId = accountObj.getString("1");
				// Name may contain Java-style escapes; decode them.
				String developerName = StringEscapeUtils.unescapeJava(accountObj.getString("2"));
				// Cannot access apps if e.g. a developer agreement needs to be accepted
				// XXX seems to be always false? Disable check for now
				// boolean canAccessApps = accountObj.getBoolean("3");
				boolean canAccessApps = true;
				devAccounts.add(new DeveloperConsoleAccount(developerId, developerName,
						canAccessApps));
			}
			return devAccounts.isEmpty() ? null : devAccounts
					.toArray(new DeveloperConsoleAccount[devAccounts.size()]);
		} catch (JSONException e) {
			throw new DevConsoleException(e);
		}
	}

	/**
	 * Extracts the list of whitelisted console features from {@code startupData}.
	 *
	 * @return an unmodifiable list (possibly empty)
	 * @throws DevConsoleException wrapping any {@link JSONException}
	 */
	protected List<String> findWhitelistedFeatures(JSONObject startupData) {
		List<String> result = new ArrayList<String>();
		try {
			JSONArray featuresArr = new JSONObject(startupData.getString("WhitelistedFeatures"))
					.getJSONArray("1");
			for (int i = 0; i < featuresArr.length(); i++) {
				result.add(featuresArr.getString(i));
			}
			return Collections.unmodifiableList(result);
		} catch (JSONException e) {
			throw new DevConsoleException(e);
		}
	}

	/**
	 * Pulls the {@code startupData} JSON object out of a raw console HTML
	 * response.
	 *
	 * @return the parsed object, or {@code null} if the response contains no
	 *         {@code startupData} assignment
	 * @throws DevConsoleException wrapping any {@link JSONException}
	 */
	public JSONObject getStartupData(String responseStr) {
		try {
			Matcher m = STARTUP_DATA_PATTERN.matcher(responseStr);
			if (m.find()) {
				String startupDataStr = m.group(1);
				return new JSONObject(startupDataStr);
			}
			return null;
		} catch (JSONException e) {
			throw new DevConsoleException(e);
		}
	}

	/**
	 * Extracts the user's preferred currency from {@code startupData}.
	 *
	 * @return the currency code from UserDetails field "2", or "USD" if absent
	 * @throws DevConsoleException wrapping any {@link JSONException}
	 */
	protected String findPreferredCurrency(JSONObject startupData) {
		// fallback
		String result = "USD";
		try {
			JSONObject userDetails = new JSONObject(startupData.getString("UserDetails"));
			if (userDetails.has("2")) {
				result = userDetails.getString("2");
			}
			return result;
		} catch (JSONException e) {
			throw new DevConsoleException(e);
		}
	}

	public String getAccountName() {
		return accountName;
	}

	/**
	 * Debug aid for auth failures: dumps the raw response to the andlytics
	 * directory and lets the user resolve the problem in the browser.
	 */
	protected void debugAuthFailure(String responseStr, String webloginUrl) {
		FileUtils.writeToAndlyticsDir("console-response.html", responseStr);
		openAuthUrlInBrowser(webloginUrl);
	}

	/**
	 * Posts a notification that opens {@code webloginUrl} in the browser when
	 * tapped. No-op (with a log message) if the URL is null.
	 */
	protected void openAuthUrlInBrowser(String webloginUrl) {
		if (webloginUrl == null) {
			Log.d(TAG, "Null webloginUrl?");
			return;
		}

		Log.d(TAG, "Opening login URL in browser: " + webloginUrl);
		Intent viewInBrowser = new Intent(Intent.ACTION_VIEW);
		viewInBrowser.setData(Uri.parse(webloginUrl));

		// Always show the notification.
		// Failures like this often occur in batches, e.g. if the user also
		// clicks to view comments, which would otherwise result in multiple dev
		// consoles opening in their browser without an explanation. This is
		// even worse if they have multiple accounts and/or are currently
		// signed in via a different account.
		Context ctx = AndlyticsApp.getInstance();
		Builder builder = new NotificationCompat.Builder(ctx);
		builder.setSmallIcon(R.drawable.statusbar_andlytics);
		builder.setContentTitle(ctx.getResources().getString(R.string.auth_error, accountName));
		builder.setContentText(ctx.getResources().getString(R.string.auth_error_open_browser,
				accountName));
		builder.setAutoCancel(true);
		// accountName.hashCode() keys both the PendingIntent and the
		// notification, so each account gets (at most) one notification.
		PendingIntent contentIntent = PendingIntent.getActivity(ctx, accountName.hashCode(),
				viewInBrowser, PendingIntent.FLAG_UPDATE_CURRENT);
		builder.setContentIntent(contentIntent);

		NotificationManager nm = (NotificationManager) ctx
				.getSystemService(Context.NOTIFICATION_SERVICE);
		nm.notify(accountName.hashCode(), builder.build());
	}

	/**
	 * Builds {@link SessionCredentials} from a successful login response.
	 *
	 * @throws AuthenticationException if startupData, the developer account ID
	 *         or the XSRF token cannot be extracted (the raw response is dumped
	 *         via {@link #debugAuthFailure} first)
	 * @throws AppAccessBlockedException if no account is allowed to fetch app
	 *         info
	 */
	protected SessionCredentials createSessionCredentials(String accountName, String webloginUrl,
			String responseStr, List<Cookie> cookies) {
		JSONObject startupData = getStartupData(responseStr);
		if (startupData == null) {
			debugAuthFailure(responseStr, webloginUrl);
			throw new AuthenticationException("Couldn't find StartupData JSON object.");
		}

		DeveloperConsoleAccount[] developerAccounts = findDeveloperAccounts(startupData);
		if (developerAccounts == null) {
			debugAuthFailure(responseStr, webloginUrl);
			throw new AuthenticationException("Couldn't get developer account ID.");
		}

		boolean allowedToAccessAppsForSomeAccounts = false;
		for (DeveloperConsoleAccount account : developerAccounts) {
			if (account.getCanAccessApps()) {
				allowedToAccessAppsForSomeAccounts = true;
			} else {
				// TODO Report this to the user properly, but don't spam them because they may
				// never be able to resolve the problem e.g. the account owner needs to agree to new terms
				Log.w(TAG, "Not allowed to fetch app info for " + account.getDeveloperId() + ". "
						+ "Log into the account via your browser to resolve the problem.");
			}
		}
		if (!allowedToAccessAppsForSomeAccounts) {
			throw new AppAccessBlockedException("Not allowed to fetch app info for any account.");
		}

		String xsrfToken = findXsrfToken(startupData);
		if (xsrfToken == null) {
			debugAuthFailure(responseStr, webloginUrl);
			throw new AuthenticationException("Couldn't get XSRF token.");
		}

		List<String> whitelistedFeatures = findWhitelistedFeatures(startupData);
		String preferredCurrency = findPreferredCurrency(startupData);

		SessionCredentials result = new SessionCredentials(accountName, xsrfToken,
				developerAccounts);
		result.addCookies(cookies);
		result.addWhitelistedFeatures(whitelistedFeatures);
		result.setPreferredCurrency(preferredCurrency);

		return result;
	}
}
/*
 * Copyright (c) 2001, 2002, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

/*
 * @test
 * @bug 4474255
 * @summary Can no longer obtain a com.sun.net.ssl.HttpsURLConnection
 * @author Brad Wetmore
 */

import java.io.*;
import java.net.*;
import java.security.cert.*;
import javax.net.ssl.*;

/**
 * See if we can obtain a javax.net.ssl.HttpsURLConnection,
 * and then play with it a bit.
 */
public class JavaxHTTPSConnection {

    /*
     * =============================================================
     * Set the various variables needed for the tests, then
     * specify what tests to run on each side.
     */

    /*
     * Should we run the client or server in a separate thread?
     * Both sides can throw exceptions, but do you have a preference
     * as to which side should be the main thread.
     */
    static boolean separateServerThread = true;

    /*
     * Where do we find the keystores?
     */
    static String pathToStores = "../../../../../../etc";
    static String keyStoreFile = "keystore";
    static String trustStoreFile = "truststore";
    static String passwd = "passphrase";

    /*
     * Is the server ready to serve?
     */
    volatile static boolean serverReady = false;

    /*
     * Turn on SSL debugging?
     */
    static boolean debug = false;

    /*
     * If the client or server is doing some kind of object creation
     * that the other side depends on, and that thread prematurely
     * exits, you may experience a hang.  The test harness will
     * terminate all hung threads after its timeout has expired,
     * currently 3 minutes by default, but you might try to be
     * smart about it....
     */

    /**
     * Returns the path to the file obtained from
     * parsing the HTML header.
     *
     * @param in stream positioned at the start of an HTTP request
     * @throws IOException if no path could be extracted ("Malformed Header")
     */
    private static String getPath(DataInputStream in)
            throws IOException {
        String line = in.readLine();
        String path = "";
        // extract class from GET line
        if (line.startsWith("GET /")) {
            line = line.substring(5, line.length()-1).trim();
            int index = line.indexOf(' ');
            if (index != -1) {
                path = line.substring(0, index);
            }
        }

        // eat the rest of header
        // (null guard added: readLine() returns null at end-of-stream; a
        // truncated request used to trigger a NullPointerException here
        // instead of the intended "Malformed Header" IOException)
        do {
            line = in.readLine();
        } while (line != null && (line.length() != 0) &&
                (line.charAt(0) != '\r') && (line.charAt(0) != '\n'));

        if (path.length() != 0) {
            return path;
        } else {
            throw new IOException("Malformed Header");
        }
    }

    /**
     * Returns an array of bytes containing the bytes for
     * the file represented by the argument <b>path</b>.
     *
     * In our case, we just pretend to send something back.
     *
     * @return the bytes for the file
     * @exception FileNotFoundException if the file corresponding
     * to <b>path</b> could not be loaded.
     */
    private byte[] getBytes(String path)
            throws IOException {
        return "Hello world, I am here".getBytes();
    }

    /*
     * Define the server side of the test.
     *
     * If the server prematurely exits, serverReady will be set to true
     * to avoid infinite hangs.
     */
    void doServerSide() throws Exception {
        SSLServerSocketFactory sslssf =
            (SSLServerSocketFactory) SSLServerSocketFactory.getDefault();
        SSLServerSocket sslServerSocket =
            (SSLServerSocket) sslssf.createServerSocket(serverPort);
        serverPort = sslServerSocket.getLocalPort();

        /*
         * Signal Client, we're ready for his connect.
         */
        serverReady = true;

        SSLSocket sslSocket = (SSLSocket) sslServerSocket.accept();
        DataOutputStream out =
            new DataOutputStream(sslSocket.getOutputStream());

        try {
            // get path to class file from header
            DataInputStream in =
                new DataInputStream(sslSocket.getInputStream());
            String path = getPath(in);
            // retrieve bytecodes
            byte[] bytecodes = getBytes(path);
            // send bytecodes in response (assumes HTTP/1.0 or later)
            try {
                out.writeBytes("HTTP/1.0 200 OK\r\n");
                out.writeBytes("Content-Length: " + bytecodes.length + "\r\n");
                out.writeBytes("Content-Type: text/html\r\n\r\n");
                out.write(bytecodes);
                out.flush();
            } catch (IOException ie) {
                ie.printStackTrace();
                return;
            }
        } catch (Exception e) {
            e.printStackTrace();
            // write out error response
            out.writeBytes("HTTP/1.0 400 " + e.getMessage() + "\r\n");
            out.writeBytes("Content-Type: text/html\r\n\r\n");
            out.flush();
        } finally {
            // close the socket
            System.out.println("Server closing socket");
            sslSocket.close();
            serverReady = false;
        }
    }

    /*
     * Define the client side of the test.
     *
     * If the server prematurely exits, serverReady will be set to true
     * to avoid infinite hangs.
     */
    void doClientSide() throws Exception {
        /*
         * Wait for server to get started.
         */
        while (!serverReady) {
            Thread.sleep(50);
        }

        HttpsURLConnection.setDefaultHostnameVerifier(new NameVerifier());
        URL url = new URL("https://" + "localhost:" + serverPort +
                "/etc/hosts");
        URLConnection urlc = url.openConnection();

        if (!(urlc instanceof javax.net.ssl.HttpsURLConnection)) {
            throw new Exception(
                "URLConnection ! instanceof javax.net.ssl.HttpsURLConnection");
        }

        BufferedReader in = null;
        try {
            in = new BufferedReader(new InputStreamReader(
                               urlc.getInputStream()));
            String inputLine;
            System.out.print("Client reading... ");
            while ((inputLine = in.readLine()) != null)
                System.out.println(inputLine);

            System.out.println("Cipher Suite: " +
                ((HttpsURLConnection) urlc).getCipherSuite());
            Certificate[] certs =
                ((HttpsURLConnection) urlc).getServerCertificates();
            for (int i = 0; i < certs.length; i++) {
                // BUG FIX: was certs[0], which printed only the first
                // certificate of the chain on every iteration.
                System.out.println(certs[i]);
            }

            in.close();
        } catch (SSLException e) {
            if (in != null)
                in.close();
            throw e;
        }
        System.out.println("Client reports:  SUCCESS");
    }

    /** Accepts every hostname; fine for a loopback-only test. */
    static class NameVerifier implements HostnameVerifier {
        public boolean verify(String hostname, SSLSession session) {
            System.out.println(
                "HostnameVerifier: returning true");
            return true;
        }
    }

    /*
     * =============================================================
     * The remainder is just support stuff
     */

    // use any free port by default
    volatile int serverPort = 0;

    volatile Exception serverException = null;
    volatile Exception clientException = null;

    public static void main(String[] args) throws Exception {
        String keyFilename =
            System.getProperty("test.src", "./") + "/" + pathToStores +
                "/" + keyStoreFile;
        String trustFilename =
            System.getProperty("test.src", "./") + "/" + pathToStores +
                "/" + trustStoreFile;

        System.setProperty("javax.net.ssl.keyStore", keyFilename);
        System.setProperty("javax.net.ssl.keyStorePassword", passwd);
        System.setProperty("javax.net.ssl.trustStore", trustFilename);
        System.setProperty("javax.net.ssl.trustStorePassword", passwd);

        if (debug)
            System.setProperty("javax.net.debug", "all");

        /*
         * Start the tests.
         */
        new JavaxHTTPSConnection();
    }

    Thread clientThread = null;
    Thread serverThread = null;

    /*
     * Primary constructor, used to drive remainder of the test.
     *
     * Fork off the other side, then do your work.
     */
    JavaxHTTPSConnection() throws Exception {
        if (separateServerThread) {
            startServer(true);
            startClient(false);
        } else {
            startClient(true);
            startServer(false);
        }

        /*
         * Wait for other side to close down.
         */
        if (separateServerThread) {
            serverThread.join();
        } else {
            clientThread.join();
        }

        /*
         * When we get here, the test is pretty much over.
         *
         * If the main thread excepted, that propagates back
         * immediately.  If the other thread threw an exception, we
         * should report back.
         */
        if (serverException != null) {
            System.out.print("Server Exception:");
            throw serverException;
        }
        if (clientException != null) {
            System.out.print("Client Exception:");
            throw clientException;
        }
    }

    void startServer(boolean newThread) throws Exception {
        if (newThread) {
            serverThread = new Thread() {
                public void run() {
                    try {
                        doServerSide();
                    } catch (Exception e) {
                        /*
                         * Our server thread just died.
                         *
                         * Release the client, if not active already...
                         */
                        System.err.println("Server died...");
                        serverReady = true;
                        serverException = e;
                    }
                }
            };
            serverThread.start();
        } else {
            doServerSide();
        }
    }

    void startClient(boolean newThread) throws Exception {
        if (newThread) {
            clientThread = new Thread() {
                public void run() {
                    try {
                        doClientSide();
                    } catch (Exception e) {
                        /*
                         * Our client thread just died.
                         */
                        System.err.println("Client died...");
                        clientException = e;
                    }
                }
            };
            clientThread.start();
        } else {
            doClientSide();
        }
    }
}