gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*
 * Copyright 2000-2013 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jetbrains.io;

import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ex.ApplicationInfoEx;
import com.intellij.openapi.util.text.StringUtil;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.ChannelBuffers;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelFuture;
import org.jboss.netty.channel.ChannelFutureListener;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.handler.codec.http.*;
import org.jboss.netty.util.CharsetUtil;
import org.jetbrains.annotations.Nullable;

import java.nio.charset.Charset;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.Locale;
import java.util.TimeZone;

import static org.jboss.netty.handler.codec.http.HttpHeaders.Names.*;
import static org.jboss.netty.handler.codec.http.HttpHeaders.isKeepAlive;
import static org.jboss.netty.handler.codec.http.HttpHeaders.setContentLength;
import static org.jboss.netty.handler.codec.http.HttpResponseStatus.OK;
import static org.jboss.netty.handler.codec.http.HttpVersion.HTTP_1_1;

/**
 * Static helpers for building and writing Netty 3.x {@link HttpResponse}s:
 * common headers (Date, Server, CORS), keep-alive handling, and convenience
 * overloads for sending content, status pages and OPTIONS responses.
 */
public final class Responses {
  // SimpleDateFormat is not thread-safe, so each thread gets its own instance
  // via this ThreadLocal. The pattern produces RFC 1123-style HTTP dates in GMT.
  static final ThreadLocal<DateFormat> DATE_FORMAT = new ThreadLocal<DateFormat>() {
    @Override
    protected DateFormat initialValue() {
      //noinspection SpellCheckingInspection
      SimpleDateFormat format = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss zzz", Locale.US);
      format.setTimeZone(TimeZone.getTimeZone("GMT"));
      return format;
    }
  };

  // Lazily computed full application name used for the Server header.
  // NOTE(review): lazy init is unsynchronized — benign for an immutable String
  // (worst case the value is computed more than once), but not strictly thread-safe.
  private static String SERVER_HEADER_VALUE;

  /** Adds {@code Access-Control-Allow-Origin: *}, permitting cross-origin requests from any origin. */
  public static void addAllowAnyOrigin(HttpResponse response) {
    response.setHeader(ACCESS_CONTROL_ALLOW_ORIGIN, "*");
  }

  /** Adds a {@code Date} header with the current time. */
  public static void addDate(HttpResponse response) {
    addDate(response, Calendar.getInstance().getTime());
  }

  /** Adds a {@code Date} header formatted as an HTTP date in GMT. */
  public static void addDate(HttpResponse response, Date date) {
    response.setHeader(DATE, DATE_FORMAT.get().format(date));
  }

  /**
   * Returns the value used for the {@code Server} header (the full application name),
   * or null if the IDE application is not available (e.g. not yet initialized or disposed).
   */
  @Nullable
  public static String getServerHeaderValue() {
    if (SERVER_HEADER_VALUE == null) {
      Application app = ApplicationManager.getApplication();
      if (app != null && !app.isDisposed()) {
        SERVER_HEADER_VALUE = ApplicationInfoEx.getInstanceEx().getFullApplicationName();
      }
    }
    return SERVER_HEADER_VALUE;
  }

  /** Adds a {@code Server} header when the application name is available. */
  public static void addServer(HttpResponse response) {
    if (getServerHeaderValue() != null) {
      response.setHeader(SERVER, getServerHeaderValue());
    }
  }

  /** Sends a 200 response with the given content type and UTF-8 encoded content. */
  public static void send(String contentType, CharSequence content, HttpRequest request, ChannelHandlerContext context) {
    HttpResponse response = create(contentType);
    response.setContent(ChannelBuffers.copiedBuffer(content, CharsetUtil.UTF_8));
    send(response, request, context);
  }

  /** Sends the response on the context's channel; see {@link #send(HttpResponse, Channel, HttpRequest)}. */
  public static void send(HttpResponse response, @Nullable HttpRequest request, ChannelHandlerContext context) {
    send(response, context.getChannel(), request);
  }

  /**
   * Sets Content-Length, honors keep-alive from the request (when given), adds the
   * common headers, and writes the response. The connection is closed unless
   * keep-alive was requested.
   */
  public static void send(HttpResponse response, Channel channel, @Nullable HttpRequest request) {
    ChannelBuffer content = response.getContent();
    setContentLength(response, content == ChannelBuffers.EMPTY_BUFFER ? 0 : content.readableBytes());
    boolean keepAlive = request != null && addKeepAliveIfNeed(response, request);
    addCommonHeaders(response);
    send(response, channel, !keepAlive);
  }

  /**
   * Adds {@code Connection: keep-alive} when the request asks for it.
   *
   * @return true if keep-alive was requested and the header was added
   */
  public static boolean addKeepAliveIfNeed(HttpResponse response, HttpRequest request) {
    if (isKeepAlive(request)) {
      response.setHeader(CONNECTION, HttpHeaders.Values.KEEP_ALIVE);
      return true;
    }
    return false;
  }

  /** Adds the headers every response gets: Server, Date and the permissive CORS header. */
  public static void addCommonHeaders(HttpResponse response) {
    addServer(response);
    addDate(response);
    addAllowAnyOrigin(response);
  }

  /** Creates a 200 OK response with the given {@code Content-Type}. */
  public static HttpResponse create(String contentType) {
    HttpResponse response = new DefaultHttpResponse(HTTP_1_1, OK);
    response.setHeader(CONTENT_TYPE, contentType);
    return response;
  }

  /** Sends a 200 response with the content encoded as US-ASCII. */
  public static void send(CharSequence content, HttpRequest request, ChannelHandlerContext context) {
    send(content, CharsetUtil.US_ASCII, request, context);
  }

  /** Sends a 200 response with the content encoded using the given charset. */
  public static void send(CharSequence content, Charset charset, HttpRequest request, ChannelHandlerContext context) {
    DefaultHttpResponse response = new DefaultHttpResponse(HTTP_1_1, OK);
    response.setContent(ChannelBuffers.copiedBuffer(content, charset));
    send(response, request, context);
  }

  /** Sets the given bytes as the response body (wrapped, not copied) and sends it. */
  public static void send(byte[] bytes, HttpResponse response, HttpRequest request, ChannelHandlerContext context) {
    response.setContent(ChannelBuffers.wrappedBuffer(bytes));
    send(response, request, context);
  }

  /** Writes the response on the context's channel and closes the connection afterwards. */
  public static void send(HttpResponse response, ChannelHandlerContext context) {
    send(response, context.getChannel(), true);
  }

  /** Sends a bare response with the given status and closes the connection. */
  public static void send(HttpResponseStatus status, ChannelHandlerContext context) {
    send(new DefaultHttpResponse(HTTP_1_1, status), context);
  }

  // Low-level write: no-op on a closed channel; optionally closes after the write completes.
  private static void send(HttpResponse response, Channel channel, boolean close) {
    if (!channel.isOpen()) {
      return;
    }
    ChannelFuture future = channel.write(response);
    if (close) {
      future.addListener(ChannelFutureListener.CLOSE);
    }
  }

  /** Sends a status page for {@code responseStatus} with no extra description. */
  public static void sendStatus(HttpRequest request, ChannelHandlerContext context, HttpResponseStatus responseStatus) {
    sendStatus(request, context, responseStatus, null);
  }

  /** Sends a status page for {@code responseStatus} with an optional HTML description. */
  public static void sendStatus(HttpRequest request, ChannelHandlerContext context, HttpResponseStatus responseStatus, @Nullable String description) {
    sendStatus(new DefaultHttpResponse(HTTP_1_1, responseStatus), request, context.getChannel(), description);
  }

  /** Sends the given response as a status page on the context's channel. */
  public static void sendStatus(HttpResponse response, HttpRequest request, ChannelHandlerContext context) {
    sendStatus(response, request, context.getChannel(), null);
  }

  /** Sends a status page directly on a channel, without an originating request. */
  public static void sendStatus(HttpResponseStatus responseStatus, Channel channel) {
    sendStatus(new DefaultHttpResponse(HTTP_1_1, responseStatus), null, channel, null);
  }

  /**
   * Builds a minimal HTML status page (status line as title/heading, optional description,
   * server name footer) and sends it. The body is omitted for HEAD requests, per HTTP.
   * NOTE(review): {@code description} is inserted into the HTML without escaping —
   * callers must not pass untrusted input here; verify call sites.
   */
  private static void sendStatus(HttpResponse response, @Nullable HttpRequest request, Channel channel, @Nullable String description) {
    response.setHeader(CONTENT_TYPE, "text/html");
    if (request == null || request.getMethod() != HttpMethod.HEAD) {
      String message = response.getStatus().toString();
      StringBuilder builder = new StringBuilder();
      builder.append("<!doctype html><title>").append(message).append("</title>").append("<h1 style=\"text-align: center\">").append(message).append("</h1>");
      if (description != null) {
        builder.append("<p>").append(description).append("</p>");
      }
      builder.append("<hr/><p style=\"text-align: center\">").append(StringUtil.notNullize(getServerHeaderValue(), "")).append("</p>");
      response.setContent(ChannelBuffers.copiedBuffer(builder, CharsetUtil.UTF_8));
    }
    send(response, channel, request);
  }

  /**
   * Answers a CORS preflight / OPTIONS request. The {@code allowHeaders} value is used
   * for both {@code Access-Control-Allow-Methods} and {@code Allow} — despite the
   * parameter name, callers pass a method list here.
   */
  public static void sendOptionsResponse(String allowHeaders, HttpRequest request, ChannelHandlerContext context) {
    HttpResponse response = new DefaultHttpResponse(HTTP_1_1, OK);
    response.setHeader(ACCESS_CONTROL_ALLOW_ORIGIN, "*");
    response.setHeader(ACCESS_CONTROL_ALLOW_METHODS, allowHeaders);
    response.setHeader(ALLOW, allowHeaders);
    send(response, request, context);
  }
}
package com.fsck.k9.preferences;

import java.io.File;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;

import android.content.Context;
import android.os.Environment;

import com.fsck.k9.Account;
import com.fsck.k9.Account.SortType;
import com.fsck.k9.DI;
import com.fsck.k9.FontSizes;
import com.fsck.k9.K9;
import com.fsck.k9.K9.NotificationHideSubject;
import com.fsck.k9.K9.NotificationQuickDelete;
import com.fsck.k9.K9.SplitViewMode;
import com.fsck.k9.K9.Theme;
import com.fsck.k9.core.R;
import com.fsck.k9.preferences.Settings.BooleanSetting;
import com.fsck.k9.preferences.Settings.ColorSetting;
import com.fsck.k9.preferences.Settings.EnumSetting;
import com.fsck.k9.preferences.Settings.FontSizeSetting;
import com.fsck.k9.preferences.Settings.IntegerRangeSetting;
import com.fsck.k9.preferences.Settings.InvalidSettingValueException;
import com.fsck.k9.preferences.Settings.PseudoEnumSetting;
import com.fsck.k9.preferences.Settings.SettingsDescription;
import com.fsck.k9.preferences.Settings.SettingsUpgrader;
import com.fsck.k9.preferences.Settings.V;
import com.fsck.k9.preferences.Settings.WebFontSizeSetting;

import static com.fsck.k9.K9.LockScreenNotificationVisibility;

/**
 * Describes all global (non-account) settings: for each setting key, the versioned
 * {@link SettingsDescription}s that validate/convert its value, plus the upgraders
 * that migrate values between settings-file versions during import.
 */
public class GlobalSettings {
    /** Setting key -> (settings version -> description). Insertion-ordered, immutable. */
    static final Map<String, TreeMap<Integer, SettingsDescription>> SETTINGS;
    /** Settings version -> upgrader to run when crossing that version. Immutable. */
    private static final Map<Integer, SettingsUpgrader> UPGRADERS;

    static {
        Map<String, TreeMap<Integer, SettingsDescription>> s = new LinkedHashMap<>();

        /*
         * When adding new settings here, be sure to increment {@link Settings.VERSION}
         * and use that for whatever you add here.
         */
        s.put("animations", Settings.versions(
                new V(1, new BooleanSetting(false))
        ));
        s.put("attachmentdefaultpath", Settings.versions(
                new V(1, new DirectorySetting(Environment.getExternalStorageDirectory())),
                new V(41, new DirectorySetting(Environment.getExternalStoragePublicDirectory(
                        Environment.DIRECTORY_DOWNLOADS)))
        ));
        s.put("backgroundOperations", Settings.versions(
                new V(1, new EnumSetting<>(K9.BACKGROUND_OPS.class, K9.BACKGROUND_OPS.WHEN_CHECKED_AUTO_SYNC))
        ));
        s.put("changeRegisteredNameColor", Settings.versions(
                new V(1, new BooleanSetting(false))
        ));
        s.put("confirmDelete", Settings.versions(
                new V(1, new BooleanSetting(false))
        ));
        s.put("confirmDeleteStarred", Settings.versions(
                new V(2, new BooleanSetting(false))
        ));
        s.put("confirmSpam", Settings.versions(
                new V(1, new BooleanSetting(false))
        ));
        s.put("confirmMarkAllRead", Settings.versions(
                new V(44, new BooleanSetting(true))
        ));
        s.put("countSearchMessages", Settings.versions(
                new V(1, new BooleanSetting(false))
        ));
        s.put("enableDebugLogging", Settings.versions(
                new V(1, new BooleanSetting(false))
        ));
        s.put("enableSensitiveLogging", Settings.versions(
                new V(1, new BooleanSetting(false))
        ));
        s.put("fontSizeAccountDescription", Settings.versions(
                new V(1, new FontSizeSetting(FontSizes.FONT_DEFAULT))
        ));
        s.put("fontSizeAccountName", Settings.versions(
                new V(1, new FontSizeSetting(FontSizes.FONT_DEFAULT))
        ));
        s.put("fontSizeFolderName", Settings.versions(
                new V(1, new FontSizeSetting(FontSizes.FONT_DEFAULT))
        ));
        s.put("fontSizeFolderStatus", Settings.versions(
                new V(1, new FontSizeSetting(FontSizes.FONT_DEFAULT))
        ));
        s.put("fontSizeMessageComposeInput", Settings.versions(
                new V(5, new FontSizeSetting(FontSizes.FONT_DEFAULT))
        ));
        s.put("fontSizeMessageListDate", Settings.versions(
                new V(1, new FontSizeSetting(FontSizes.FONT_DEFAULT))
        ));
        s.put("fontSizeMessageListPreview", Settings.versions(
                new V(1, new FontSizeSetting(FontSizes.FONT_DEFAULT))
        ));
        s.put("fontSizeMessageListSender", Settings.versions(
                new V(1, new FontSizeSetting(FontSizes.FONT_DEFAULT))
        ));
        s.put("fontSizeMessageListSubject", Settings.versions(
                new V(1, new FontSizeSetting(FontSizes.FONT_DEFAULT))
        ));
        s.put("fontSizeMessageViewAdditionalHeaders", Settings.versions(
                new V(1, new FontSizeSetting(FontSizes.FONT_DEFAULT))
        ));
        s.put("fontSizeMessageViewCC", Settings.versions(
                new V(1, new FontSizeSetting(FontSizes.FONT_DEFAULT))
        ));
        // Removed in version 31; replaced by fontSizeMessageViewContentPercent (see SettingsUpgraderV31).
        s.put("fontSizeMessageViewContent", Settings.versions(
                new V(1, new WebFontSizeSetting(3)),
                new V(31, null)
        ));
        s.put("fontSizeMessageViewDate", Settings.versions(
                new V(1, new FontSizeSetting(FontSizes.FONT_DEFAULT))
        ));
        s.put("fontSizeMessageViewSender", Settings.versions(
                new V(1, new FontSizeSetting(FontSizes.FONT_DEFAULT))
        ));
        s.put("fontSizeMessageViewSubject", Settings.versions(
                new V(1, new FontSizeSetting(FontSizes.FONT_DEFAULT))
        ));
        s.put("fontSizeMessageViewTime", Settings.versions(
                new V(1, new FontSizeSetting(FontSizes.FONT_DEFAULT))
        ));
        s.put("fontSizeMessageViewTo", Settings.versions(
                new V(1, new FontSizeSetting(FontSizes.FONT_DEFAULT))
        ));
        s.put("gesturesEnabled", Settings.versions(
                new V(1, new BooleanSetting(true)),
                new V(4, new BooleanSetting(false))
        ));
        s.put("hideSpecialAccounts", Settings.versions(
                new V(1, new BooleanSetting(false))
        ));
        // Removed in version 12; migrated to notificationHideSubject (see SettingsUpgraderV12).
        s.put("keyguardPrivacy", Settings.versions(
                new V(1, new BooleanSetting(false)),
                new V(12, null)
        ));
        s.put("language", Settings.versions(
                new V(1, new LanguageSetting())
        ));
        s.put("measureAccounts", Settings.versions(
                new V(1, new BooleanSetting(true))
        ));
        s.put("messageListCheckboxes", Settings.versions(
                new V(1, new BooleanSetting(false))
        ));
        s.put("messageListPreviewLines", Settings.versions(
                new V(1, new IntegerRangeSetting(1, 100, 2))
        ));
        s.put("messageListStars", Settings.versions(
                new V(1, new BooleanSetting(true))
        ));
        s.put("messageViewFixedWidthFont", Settings.versions(
                new V(1, new BooleanSetting(false))
        ));
        s.put("messageViewReturnToList", Settings.versions(
                new V(1, new BooleanSetting(false))
        ));
        s.put("messageViewShowNext", Settings.versions(
                new V(1, new BooleanSetting(false))
        ));
        s.put("quietTimeEnabled", Settings.versions(
                new V(1, new BooleanSetting(false))
        ));
        s.put("quietTimeEnds", Settings.versions(
                new V(1, new TimeSetting("7:00"))
        ));
        s.put("quietTimeStarts", Settings.versions(
                new V(1, new TimeSetting("21:00"))
        ));
        s.put("registeredNameColor", Settings.versions(
                new V(1, new ColorSetting(0xFF00008F))
        ));
        s.put("showContactName", Settings.versions(
                new V(1, new BooleanSetting(false))
        ));
        s.put("showCorrespondentNames", Settings.versions(
                new V(1, new BooleanSetting(true))
        ));
        s.put("sortTypeEnum", Settings.versions(
                new V(10, new EnumSetting<>(SortType.class, Account.DEFAULT_SORT_TYPE))
        ));
        s.put("sortAscending", Settings.versions(
                new V(10, new BooleanSetting(Account.DEFAULT_SORT_ASCENDING))
        ));
        s.put("startIntegratedInbox", Settings.versions(
                new V(1, new BooleanSetting(false))
        ));
        s.put("theme", Settings.versions(
                new V(1, new ThemeSetting(K9.Theme.LIGHT))
        ));
        s.put("messageViewTheme", Settings.versions(
                new V(16, new ThemeSetting(K9.Theme.LIGHT)),
                new V(24, new SubThemeSetting(K9.Theme.USE_GLOBAL))
        ));
        s.put("useVolumeKeysForListNavigation", Settings.versions(
                new V(1, new BooleanSetting(false))
        ));
        s.put("useVolumeKeysForNavigation", Settings.versions(
                new V(1, new BooleanSetting(false))
        ));
        s.put("wrapFolderNames", Settings.versions(
                new V(22, new BooleanSetting(false))
        ));
        s.put("notificationHideSubject", Settings.versions(
                new V(12, new EnumSetting<>(NotificationHideSubject.class, NotificationHideSubject.NEVER))
        ));
        s.put("useBackgroundAsUnreadIndicator", Settings.versions(
                new V(19, new BooleanSetting(true))
        ));
        s.put("threadedView", Settings.versions(
                new V(20, new BooleanSetting(true))
        ));
        s.put("splitViewMode", Settings.versions(
                new V(23, new EnumSetting<>(SplitViewMode.class, SplitViewMode.NEVER))
        ));
        s.put("messageComposeTheme", Settings.versions(
                new V(24, new SubThemeSetting(K9.Theme.USE_GLOBAL))
        ));
        s.put("fixedMessageViewTheme", Settings.versions(
                new V(24, new BooleanSetting(true))
        ));
        s.put("showContactPicture", Settings.versions(
                new V(25, new BooleanSetting(true))
        ));
        s.put("autofitWidth", Settings.versions(
                new V(28, new BooleanSetting(true))
        ));
        s.put("colorizeMissingContactPictures", Settings.versions(
                new V(29, new BooleanSetting(true))
        ));
        s.put("messageViewDeleteActionVisible", Settings.versions(
                new V(30, new BooleanSetting(true))
        ));
        s.put("messageViewArchiveActionVisible", Settings.versions(
                new V(30, new BooleanSetting(false))
        ));
        s.put("messageViewMoveActionVisible", Settings.versions(
                new V(30, new BooleanSetting(false))
        ));
        s.put("messageViewCopyActionVisible", Settings.versions(
                new V(30, new BooleanSetting(false))
        ));
        s.put("messageViewSpamActionVisible", Settings.versions(
                new V(30, new BooleanSetting(false))
        ));
        s.put("fontSizeMessageViewContentPercent", Settings.versions(
                new V(31, new IntegerRangeSetting(40, 250, 100))
        ));
        s.put("hideUserAgent", Settings.versions(
                new V(32, new BooleanSetting(false))
        ));
        s.put("hideTimeZone", Settings.versions(
                new V(32, new BooleanSetting(false))
        ));
        s.put("lockScreenNotificationVisibility", Settings.versions(
                new V(37, new EnumSetting<>(LockScreenNotificationVisibility.class,
                        LockScreenNotificationVisibility.MESSAGE_COUNT))
        ));
        s.put("confirmDeleteFromNotification", Settings.versions(
                new V(38, new BooleanSetting(true))
        ));
        s.put("messageListSenderAboveSubject", Settings.versions(
                new V(38, new BooleanSetting(false))
        ));
        s.put("notificationQuickDelete", Settings.versions(
                new V(38, new EnumSetting<>(NotificationQuickDelete.class, NotificationQuickDelete.NEVER))
        ));
        s.put("notificationDuringQuietTimeEnabled", Settings.versions(
                new V(39, new BooleanSetting(true))
        ));
        s.put("confirmDiscardMessage", Settings.versions(
                new V(40, new BooleanSetting(true))
        ));
        s.put("pgpInlineDialogCounter", Settings.versions(
                new V(43, new IntegerRangeSetting(0, Integer.MAX_VALUE, 0))
        ));
        s.put("pgpSignOnlyDialogCounter", Settings.versions(
                new V(45, new IntegerRangeSetting(0, Integer.MAX_VALUE, 0))
        ));
        s.put("fontSizeMessageViewBCC", Settings.versions(
                new V(48, new FontSizeSetting(FontSizes.FONT_DEFAULT))
        ));
        s.put("hideHostnameWhenConnecting", Settings.versions(
                new V(49, new BooleanSetting(false))
        ));

        SETTINGS = Collections.unmodifiableMap(s);

        Map<Integer, SettingsUpgrader> u = new HashMap<>();
        u.put(12, new SettingsUpgraderV12());
        u.put(24, new SettingsUpgraderV24());
        u.put(31, new SettingsUpgraderV31());
        UPGRADERS = Collections.unmodifiableMap(u);
    }

    /** Validates imported settings of the given version against {@link #SETTINGS}. */
    static Map<String, Object> validate(int version, Map<String, String> importedSettings) {
        return Settings.validate(version, SETTINGS, importedSettings, false);
    }

    /**
     * Upgrades settings from {@code version} to the current version, running the
     * registered {@link #UPGRADERS} along the way.
     *
     * @return the keys that were removed during the upgrade
     */
    public static Set<String> upgrade(int version, Map<String, Object> validatedSettings) {
        return Settings.upgrade(version, UPGRADERS, SETTINGS, validatedSettings);
    }

    /** Converts internal setting values back to their string representation. */
    public static Map<String, String> convert(Map<String, Object> settings) {
        return Settings.convert(settings, SETTINGS);
    }

    /** Reads the current string value of every known global setting from storage (absent keys skipped). */
    static Map<String, String> getGlobalSettings(Storage storage) {
        Map<String, String> result = new HashMap<>();
        for (String key : SETTINGS.keySet()) {
            String value = storage.getString(key, null);
            if (value != null) {
                result.put(key, value);
            }
        }
        return result;
    }

    /**
     * Upgrades the settings from version 11 to 12
     *
     * Map the 'keyguardPrivacy' value to the new NotificationHideSubject enum.
     */
    private static class SettingsUpgraderV12 implements SettingsUpgrader {
        @Override
        public Set<String> upgrade(Map<String, Object> settings) {
            Boolean keyguardPrivacy = (Boolean) settings.get("keyguardPrivacy");
            if (keyguardPrivacy != null && keyguardPrivacy) {
                // current setting: only show subject when unlocked
                settings.put("notificationHideSubject", NotificationHideSubject.WHEN_LOCKED);
            } else {
                // always show subject [old default]
                settings.put("notificationHideSubject", NotificationHideSubject.NEVER);
            }
            return new HashSet<>(Collections.singletonList("keyguardPrivacy"));
        }
    }

    /**
     * Upgrades the settings from version 23 to 24.
     *
     * <p>
     * Set <em>messageViewTheme</em> to {@link K9.Theme#USE_GLOBAL} if <em>messageViewTheme</em> has
     * the same value as <em>theme</em>.
     * </p>
     */
    private static class SettingsUpgraderV24 implements SettingsUpgrader {
        @Override
        public Set<String> upgrade(Map<String, Object> settings) {
            K9.Theme messageViewTheme = (K9.Theme) settings.get("messageViewTheme");
            K9.Theme theme = (K9.Theme) settings.get("theme");
            if (theme != null && messageViewTheme != null && theme == messageViewTheme) {
                settings.put("messageViewTheme", K9.Theme.USE_GLOBAL);
            }
            return null;
        }
    }

    /**
     * Upgrades the settings from version 30 to 31.
     *
     * <p>
     * Convert value from <em>fontSizeMessageViewContent</em> to
     * <em>fontSizeMessageViewContentPercent</em>.
     * </p>
     */
    public static class SettingsUpgraderV31 implements SettingsUpgrader {
        @Override
        public Set<String> upgrade(Map<String, Object> settings) {
            // NOTE(review): unboxing NPEs if the key is absent — presumably validation
            // guarantees a value (the setting has a default); verify against Settings.upgrade.
            int oldSize = (Integer) settings.get("fontSizeMessageViewContent");
            int newSize = convertFromOldSize(oldSize);
            settings.put("fontSizeMessageViewContentPercent", newSize);
            return new HashSet<>(Collections.singletonList("fontSizeMessageViewContent"));
        }

        /** Maps the old 1-5 WebView font-size step to a zoom percentage (default 100). */
        public static int convertFromOldSize(int oldSize) {
            switch (oldSize) {
                case 1: {
                    return 40;
                }
                case 2: {
                    return 75;
                }
                case 4: {
                    return 175;
                }
                case 5: {
                    return 250;
                }
                case 3:
                default: {
                    return 100;
                }
            }
        }
    }

    /**
     * The language setting: valid values are the entries of the
     * {@code language_values} resource array; the empty string means "system default".
     */
    private static class LanguageSetting extends PseudoEnumSetting<String> {
        private final Context context = DI.get(Context.class);
        private final Map<String, String> mapping;

        LanguageSetting() {
            super("");

            Map<String, String> mapping = new HashMap<>();
            String[] values = context.getResources().getStringArray(R.array.language_values);
            for (String value : values) {
                if (value.length() == 0) {
                    mapping.put("", "default");
                } else {
                    mapping.put(value, value);
                }
            }
            this.mapping = Collections.unmodifiableMap(mapping);
        }

        @Override
        protected Map<String, String> getMapping() {
            return mapping;
        }

        @Override
        public String fromString(String value) throws InvalidSettingValueException {
            if (mapping.containsKey(value)) {
                return value;
            }
            throw new InvalidSettingValueException();
        }
    }

    /** The theme setting: stored as the enum ordinal, pretty-printed as "light"/"dark". */
    static class ThemeSetting extends SettingsDescription<K9.Theme> {
        private static final String THEME_LIGHT = "light";
        private static final String THEME_DARK = "dark";

        ThemeSetting(K9.Theme defaultValue) {
            super(defaultValue);
        }

        @Override
        public K9.Theme fromString(String value) throws InvalidSettingValueException {
            try {
                Integer theme = Integer.parseInt(value);
                if (theme == K9.Theme.LIGHT.ordinal() ||
                        // We used to store the resource ID of the theme in the preference storage,
                        // but don't use the database upgrade mechanism to update the values. So
                        // we have to deal with the old format here.
                        theme == android.R.style.Theme_Light) {
                    return K9.Theme.LIGHT;
                } else if (theme == K9.Theme.DARK.ordinal() || theme == android.R.style.Theme) {
                    return K9.Theme.DARK;
                }
            } catch (NumberFormatException e) {
                /* do nothing */
            }

            throw new InvalidSettingValueException();
        }

        @Override
        public K9.Theme fromPrettyString(String value) throws InvalidSettingValueException {
            if (THEME_LIGHT.equals(value)) {
                return K9.Theme.LIGHT;
            } else if (THEME_DARK.equals(value)) {
                return K9.Theme.DARK;
            }

            throw new InvalidSettingValueException();
        }

        @Override
        public String toPrettyString(K9.Theme value) {
            switch (value) {
                case DARK: {
                    return THEME_DARK;
                }
                default: {
                    return THEME_LIGHT;
                }
            }
        }

        @Override
        public String toString(K9.Theme value) {
            return Integer.toString(value.ordinal());
        }
    }

    /** A per-screen theme that additionally allows "use the global theme". */
    private static class SubThemeSetting extends ThemeSetting {
        private static final String THEME_USE_GLOBAL = "use_global";

        SubThemeSetting(Theme defaultValue) {
            super(defaultValue);
        }

        @Override
        public K9.Theme fromString(String value) throws InvalidSettingValueException {
            try {
                Integer theme = Integer.parseInt(value);
                if (theme == K9.Theme.USE_GLOBAL.ordinal()) {
                    return K9.Theme.USE_GLOBAL;
                }
                return super.fromString(value);
            } catch (NumberFormatException e) {
                throw new InvalidSettingValueException();
            }
        }

        @Override
        public K9.Theme fromPrettyString(String value) throws InvalidSettingValueException {
            if (THEME_USE_GLOBAL.equals(value)) {
                return K9.Theme.USE_GLOBAL;
            }
            return super.fromPrettyString(value);
        }

        @Override
        public String toPrettyString(K9.Theme value) {
            if (value == K9.Theme.USE_GLOBAL) {
                return THEME_USE_GLOBAL;
            }
            return super.toPrettyString(value);
        }
    }

    /** A time-of-day setting stored as "H:mm" / "HH:mm" (e.g. "7:00", "21:00"). */
    private static class TimeSetting extends SettingsDescription<String> {
        // Hour 0-23 (optional leading 0/1, or 20-23), minute 0-59 (optional leading digit).
        // The previous pattern "[0-2]*[0-9]:[0-5]*[0-9]" accepted invalid times such as
        // "29:59" or "222:9" because of the '*' quantifiers on the leading digits.
        private static final String VALIDATION_EXPRESSION = "([01]?[0-9]|2[0-3]):[0-5]?[0-9]";

        TimeSetting(String defaultValue) {
            super(defaultValue);
        }

        @Override
        public String fromString(String value) throws InvalidSettingValueException {
            if (!value.matches(VALIDATION_EXPRESSION)) {
                throw new InvalidSettingValueException();
            }
            return value;
        }
    }

    /** A directory path setting; only accepts paths that exist and are directories. */
    private static class DirectorySetting extends SettingsDescription<String> {
        DirectorySetting(File defaultPath) {
            super(defaultPath.toString());
        }

        @Override
        public String fromString(String value) throws InvalidSettingValueException {
            try {
                if (new File(value).isDirectory()) {
                    return value;
                }
            } catch (Exception e) {
                /* do nothing */
            }

            throw new InvalidSettingValueException();
        }
    }
}
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.bazel.rules.android; import com.android.repository.Revision; import com.google.common.base.Function; import com.google.common.base.Predicate; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSortedSet; import com.google.common.collect.Iterables; import com.google.common.collect.Maps; import com.google.common.collect.Maps.EntryTransformer; import com.google.devtools.build.lib.analysis.BlazeDirectories; import com.google.devtools.build.lib.analysis.RuleDefinition; import com.google.devtools.build.lib.packages.Rule; import com.google.devtools.build.lib.rules.repository.RepositoryDirectoryValue; import com.google.devtools.build.lib.rules.repository.RepositoryFunction; import com.google.devtools.build.lib.rules.repository.WorkspaceAttributeMapper; import com.google.devtools.build.lib.skyframe.DirectoryListingValue; import com.google.devtools.build.lib.skyframe.Dirents; import com.google.devtools.build.lib.skyframe.FileSymlinkException; import com.google.devtools.build.lib.skyframe.FileValue; import com.google.devtools.build.lib.skyframe.InconsistentFilesystemException; import com.google.devtools.build.lib.syntax.EvalException; import com.google.devtools.build.lib.syntax.Type; import com.google.devtools.build.lib.util.ResourceFileLoader; import 
com.google.devtools.build.lib.vfs.Dirent; import com.google.devtools.build.lib.vfs.FileSystem; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.RootedPath; import com.google.devtools.build.skyframe.SkyFunction.Environment; import com.google.devtools.build.skyframe.SkyFunctionException; import com.google.devtools.build.skyframe.SkyFunctionException.Transience; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import com.google.devtools.build.skyframe.ValueOrException; import java.io.IOException; import java.util.Map; import java.util.Properties; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.annotation.Nullable; /** * Implementation of the {@code android_sdk_repository} rule. */ public class AndroidSdkRepositoryFunction extends RepositoryFunction { private static final PathFragment BUILD_TOOLS_DIR = new PathFragment("build-tools"); private static final PathFragment PLATFORMS_DIR = new PathFragment("platforms"); private static final PathFragment SYSTEM_IMAGES_DIR = new PathFragment("system-images"); private static final Pattern PLATFORMS_API_LEVEL_PATTERN = Pattern.compile("android-(\\d+)"); private static final Revision MIN_BUILD_TOOLS_REVISION = new Revision(24, 0, 3); private static final String PATH_ENV_VAR = "ANDROID_HOME"; @Override public boolean isLocal(Rule rule) { return true; } @Override public RepositoryDirectoryValue.Builder fetch(Rule rule, Path outputDirectory, BlazeDirectories directories, Environment env, Map<String, String> markerData) throws SkyFunctionException, InterruptedException { prepareLocalRepositorySymlinkTree(rule, outputDirectory); WorkspaceAttributeMapper attributes = WorkspaceAttributeMapper.of(rule); FileSystem fs = directories.getOutputBase().getFileSystem(); Path androidSdkPath; if (attributes.isAttributeValueExplicitlySpecified("path")) { androidSdkPath = 
fs.getPath(getTargetPath(rule, directories.getWorkspace())); } else if (clientEnvironment.containsKey(PATH_ENV_VAR)){ androidSdkPath = fs.getPath(getAndroidHomeEnvironmentVar(directories.getWorkspace(), clientEnvironment)); } else { throw new RepositoryFunctionException( new EvalException( rule.getLocation(), "Either the path attribute of android_sdk_repository or the ANDROID_HOME environment " + " variable must be set."), Transience.PERSISTENT); } if (!symlinkLocalRepositoryContents(outputDirectory, androidSdkPath)) { return null; } DirectoryListingValue platformsDirectoryValue = getDirectoryListing(androidSdkPath, PLATFORMS_DIR, env); if (platformsDirectoryValue == null) { return null; } ImmutableSortedSet<Integer> apiLevels = getApiLevels(platformsDirectoryValue.getDirents()); if (apiLevels.isEmpty()) { throw new RepositoryFunctionException( new EvalException( rule.getLocation(), "android_sdk_repository requires that at least one Android SDK Platform is installed " + "in the Android SDK. Please install an Android SDK Platform through the " + "Android SDK manager."), Transience.PERSISTENT); } String defaultApiLevel; if (attributes.isAttributeValueExplicitlySpecified("api_level")) { try { defaultApiLevel = attributes.get("api_level", Type.INTEGER).toString(); } catch (EvalException e) { throw new RepositoryFunctionException(e, Transience.PERSISTENT); } } else { // If the api_level attribute is not explicitly set, we select the highest api level that is // available in the SDK. defaultApiLevel = String.valueOf(apiLevels.first()); } String buildToolsDirectory; if (attributes.isAttributeValueExplicitlySpecified("build_tools_version")) { try { buildToolsDirectory = attributes.get("build_tools_version", Type.STRING); } catch (EvalException e) { throw new RepositoryFunctionException(e, Transience.PERSISTENT); } } else { // If the build_tools_version attribute is not explicitly set, we select the highest version // installed in the SDK. 
DirectoryListingValue directoryValue = getDirectoryListing(androidSdkPath, BUILD_TOOLS_DIR, env); if (directoryValue == null) { return null; } buildToolsDirectory = getNewestBuildToolsDirectory(rule, directoryValue.getDirents()); } // android_sdk_repository.build_tools_version is technically actually the name of the // directory in $sdk/build-tools. Most of the time this is just the actual build tools // version, but for preview build tools, the directory is something like 24.0.0-preview, and // the actual version is something like "24 rc3". The android_sdk rule in the template needs // the real version. String buildToolsVersion; if (buildToolsDirectory.contains("-preview")) { Properties sourceProperties = getBuildToolsSourceProperties(outputDirectory, buildToolsDirectory, env); if (env.valuesMissing()) { return null; } buildToolsVersion = sourceProperties.getProperty("Pkg.Revision"); } else { buildToolsVersion = buildToolsDirectory; } try { assertValidBuildToolsVersion(rule, buildToolsVersion); } catch (EvalException e) { throw new RepositoryFunctionException(e, Transience.PERSISTENT); } ImmutableSortedSet<PathFragment> androidDeviceSystemImageDirs = getAndroidDeviceSystemImageDirs(androidSdkPath, env); if (androidDeviceSystemImageDirs == null) { return null; } StringBuilder systemImageDirsList = new StringBuilder(); for (PathFragment systemImageDir : androidDeviceSystemImageDirs) { systemImageDirsList.append(String.format(" \"%s\",\n", systemImageDir)); } String template = getStringResource("android_sdk_repository_template.txt"); String buildFile = template .replace("%repository_name%", rule.getName()) .replace("%build_tools_version%", buildToolsVersion) .replace("%build_tools_directory%", buildToolsDirectory) .replace("%api_levels%", Iterables.toString(apiLevels)) .replace("%default_api_level%", defaultApiLevel) .replace("%system_image_dirs%", systemImageDirsList); // All local maven repositories that are shipped in the Android SDK. 
// Tail of fetch(): exports the SDK's bundled maven repositories and writes the generated
// BUILD file for the android_sdk_repository.  (The start of fetch() is above this block.)
// TODO(ajmichael): Create SkyKeys so that if the SDK changes, this function will get rerun.
Iterable<Path> localMavenRepositories = ImmutableList.of(
    outputDirectory.getRelative("extras/android/m2repository"),
    outputDirectory.getRelative("extras/google/m2repository"));
try {
  // Only the extras repositories that actually exist on disk are exported.
  SdkMavenRepository sdkExtrasRepository =
      SdkMavenRepository.create(Iterables.filter(localMavenRepositories,
          new Predicate<Path>() {
            @Override
            public boolean apply(@Nullable Path path) {
              return path.isDirectory();
            }
          }));
  sdkExtrasRepository.writeBuildFiles(outputDirectory);
  buildFile = buildFile.replace(
      "%exported_files%", sdkExtrasRepository.getExportsFiles(outputDirectory));
} catch (IOException e) {
  // Disk errors are transient: re-running the repository fetch may succeed.
  throw new RepositoryFunctionException(e, Transience.TRANSIENT);
}
writeBuildFile(outputDirectory, buildFile);
return RepositoryDirectoryValue.builder().setPath(outputDirectory);
}

@Override
public Class<? extends RuleDefinition> getRuleDefinition() {
  return AndroidSdkRepositoryRule.class;
}

/** Resolves the SDK path environment variable against the workspace root. */
private static PathFragment getAndroidHomeEnvironmentVar(
    Path workspace, Map<String, String> env) {
  // NOTE(review): assumes env contains PATH_ENV_VAR -- the caller in fetch() checks
  // containsKey(PATH_ENV_VAR) before calling this.
  return workspace.getRelative(new PathFragment(env.get(PATH_ENV_VAR))).asFragment();
}

/** Loads a classpath resource as a string; a missing resource is a packaging bug. */
private static String getStringResource(String name) {
  try {
    return ResourceFileLoader.loadResource(
        AndroidSdkRepositoryFunction.class, name);
  } catch (IOException e) {
    throw new IllegalStateException(e);
  }
}

/** Gets a DirectoryListingValue for {@code dirPath} or returns null.
*/
private static DirectoryListingValue getDirectoryListing(
    Path root, PathFragment dirPath, Environment env)
    throws RepositoryFunctionException, InterruptedException {
  try {
    // Returns null when the Skyframe value is not yet available; callers must then
    // return null themselves so the whole function restarts.
    return (DirectoryListingValue) env.getValueOrThrow(
        DirectoryListingValue.key(RootedPath.toRootedPath(root, dirPath)),
        InconsistentFilesystemException.class);
  } catch (InconsistentFilesystemException e) {
    throw new RepositoryFunctionException(new IOException(e), Transience.PERSISTENT);
  }
}

/**
 * Gets the numeric api levels from the contents of the platforms directory in descending order.
 */
private static ImmutableSortedSet<Integer> getApiLevels(Dirents platformsDirectories) {
  // reverseOrder() gives the descending iteration order promised above.
  ImmutableSortedSet.Builder<Integer> apiLevels = ImmutableSortedSet.reverseOrder();
  for (Dirent platformDirectory : platformsDirectories) {
    if (platformDirectory.getType() != Dirent.Type.DIRECTORY) {
      continue;
    }
    // PLATFORMS_API_LEVEL_PATTERN is declared elsewhere in this class; presumably it
    // matches names like "android-23" with group(1) being the numeric level -- confirm.
    Matcher matcher = PLATFORMS_API_LEVEL_PATTERN.matcher(platformDirectory.getName());
    if (matcher.matches()) {
      apiLevels.add(Integer.parseInt(matcher.group(1)));
    }
  }
  return apiLevels.build();
}

/**
 * Gets the newest build tools directory according to {@link Revision}.
 *
 * @throws RepositoryFunctionException if none of the buildToolsDirectories are directories and
 *     have names that are parsable as build tools version.
*/
private static String getNewestBuildToolsDirectory(Rule rule, Dirents buildToolsDirectories)
    throws RepositoryFunctionException {
  String newestBuildToolsDirectory = null;
  Revision newestBuildToolsRevision = null;
  for (Dirent buildToolsDirectory : buildToolsDirectories) {
    if (buildToolsDirectory.getType() != Dirent.Type.DIRECTORY) {
      continue;
    }
    try {
      Revision buildToolsRevision = Revision.parseRevision(buildToolsDirectory.getName());
      if (newestBuildToolsRevision == null
          || buildToolsRevision.compareTo(newestBuildToolsRevision) > 0) {
        newestBuildToolsDirectory = buildToolsDirectory.getName();
        newestBuildToolsRevision = buildToolsRevision;
      }
    } catch (NumberFormatException e) {
      // Ignore unparsable build tools directories.
    }
  }
  if (newestBuildToolsDirectory == null) {
    throw new RepositoryFunctionException(
        new EvalException(
            rule.getLocation(),
            String.format(
                "Bazel requires Android build tools version %s or newer but none are installed. "
                    + "Please install a recent version through the Android SDK manager.",
                MIN_BUILD_TOOLS_REVISION)),
        Transience.PERSISTENT);
  }
  return newestBuildToolsDirectory;
}

/**
 * Reads build-tools/&lt;dir&gt;/source.properties, registering a Skyframe dependency on the
 * file so this function reruns if it changes.
 */
private static Properties getBuildToolsSourceProperties(
    Path directory, String buildToolsDirectory, Environment env)
    throws RepositoryFunctionException, InterruptedException {
  Path sourcePropertiesFilePath = directory.getRelative(
      "build-tools/" + buildToolsDirectory + "/source.properties");
  SkyKey releaseFileKey = FileValue.key(
      RootedPath.toRootedPath(directory, sourcePropertiesFilePath));
  try {
    // The value is requested only to register the dependency; the file itself is read
    // directly below.  Caller checks env.valuesMissing() afterwards.
    env.getValueOrThrow(releaseFileKey,
        IOException.class,
        FileSymlinkException.class,
        InconsistentFilesystemException.class);
    Properties properties = new Properties();
    // NOTE(review): the InputStream is never closed -- consider try-with-resources.
    properties.load(sourcePropertiesFilePath.getInputStream());
    return properties;
  } catch (IOException | FileSymlinkException | InconsistentFilesystemException e) {
    String error = String.format(
        "Could not read %s in Android SDK: %s", sourcePropertiesFilePath, e.getMessage());
    throw new RepositoryFunctionException(new IOException(error), Transience.PERSISTENT);
  }
}

/** Validates that buildToolsVersion parses as a Revision no older than MIN_BUILD_TOOLS_REVISION. */
private static void assertValidBuildToolsVersion(Rule rule, String buildToolsVersion)
    throws EvalException {
  try {
    Revision buildToolsRevision = Revision.parseRevision(buildToolsVersion);
    if (buildToolsRevision.compareTo(MIN_BUILD_TOOLS_REVISION) < 0) {
      throw new EvalException(
          rule.getAttributeLocation("build_tools_version"),
          String.format(
              "Bazel requires Android build tools version %s or newer, %s was provided",
              MIN_BUILD_TOOLS_REVISION,
              buildToolsRevision));
    }
  } catch (NumberFormatException e) {
    throw new EvalException(
        rule.getAttributeLocation("build_tools_version"),
        String.format(
            "Bazel does not recognize Android build tools version %s", buildToolsVersion),
        e);
  }
}

/**
 * Gets PathFragments for /sdk/system-images/*&#47;*&#47;*, which are the directories in the
 * SDK that contain system images needed for android_device.
 *
 * If the sdk/system-images directory does not exist, an empty set is returned.
 */
private static ImmutableSortedSet<PathFragment> getAndroidDeviceSystemImageDirs(
    Path androidSdkPath, Environment env)
    throws RepositoryFunctionException, InterruptedException {
  if (!androidSdkPath.getRelative(SYSTEM_IMAGES_DIR).exists()) {
    return ImmutableSortedSet.of();
  }
  DirectoryListingValue systemImagesDirectoryValue =
      getDirectoryListing(androidSdkPath, SYSTEM_IMAGES_DIR, env);
  if (systemImagesDirectoryValue == null) {
    // Skyframe restart: listing not available yet.
    return null;
  }
  // First level: system-images/<api level>/
  ImmutableMap<PathFragment, DirectoryListingValue> apiLevelSystemImageDirs =
      getSubdirectoryListingValues(
          androidSdkPath, SYSTEM_IMAGES_DIR, systemImagesDirectoryValue, env);
  if (apiLevelSystemImageDirs == null) {
    return null;
  }
  ImmutableSortedSet.Builder<PathFragment> pathFragments = ImmutableSortedSet.naturalOrder();
  for (PathFragment apiLevelDir : apiLevelSystemImageDirs.keySet()) {
    // Second level: system-images/<api level>/<api type>/
    ImmutableMap<PathFragment, DirectoryListingValue> apiTypeSystemImageDirs =
        getSubdirectoryListingValues(
            androidSdkPath, apiLevelDir, apiLevelSystemImageDirs.get(apiLevelDir), env);
    if (apiTypeSystemImageDirs == null) {
      return null;
    }
    for (PathFragment apiTypeDir : apiTypeSystemImageDirs.keySet()) {
      // Third level: each architecture directory becomes one result entry.
      for (Dirent architectureSystemImageDir :
          apiTypeSystemImageDirs.get(apiTypeDir).getDirents()) {
        pathFragments.add(apiTypeDir.getRelative(architectureSystemImageDir.getName()));
      }
    }
  }
  return pathFragments.build();
}

/** Gets DirectoryListingValues for subdirectories of the directory or returns null. */
private static ImmutableMap<PathFragment, DirectoryListingValue> getSubdirectoryListingValues(
    final Path root, final PathFragment path, DirectoryListingValue directory, Environment env)
    throws RepositoryFunctionException, InterruptedException {
  // Map each dirent to its SkyKey so all subdirectory listings are requested in one batch.
  Map<PathFragment, SkyKey> skyKeysForSubdirectoryLookups =
      Maps.transformEntries(
          Maps.uniqueIndex(
              directory.getDirents(),
              new Function<Dirent, PathFragment>() {
                @Override
                public PathFragment apply(Dirent input) {
                  return path.getRelative(input.getName());
                }
              }),
          new EntryTransformer<PathFragment, Dirent, SkyKey>() {
            @Override
            public SkyKey transformEntry(PathFragment key, Dirent value) {
              return DirectoryListingValue.key(
                  RootedPath.toRootedPath(root, root.getRelative(key)));
            }
          });
  Map<SkyKey, ValueOrException<InconsistentFilesystemException>> values =
      env.getValuesOrThrow(
          skyKeysForSubdirectoryLookups.values(), InconsistentFilesystemException.class);
  ImmutableMap.Builder<PathFragment, DirectoryListingValue> directoryListingValues =
      new ImmutableMap.Builder<>();
  for (PathFragment pathFragment : skyKeysForSubdirectoryLookups.keySet()) {
    try {
      SkyValue skyValue = values.get(skyKeysForSubdirectoryLookups.get(pathFragment)).get();
      if (skyValue == null) {
        // Skyframe restart: at least one listing is missing.
        return null;
      }
      directoryListingValues.put(pathFragment, (DirectoryListingValue) skyValue);
    } catch (InconsistentFilesystemException e) {
      throw new RepositoryFunctionException(new IOException(e), Transience.PERSISTENT);
    }
  }
  return directoryListingValues.build();
}
}
/***
    Copyright 2021 by Sean Luke
    Licensed under the Apache License version 2.0
*/

package edisyn.synth.rolandjv880;

import edisyn.*;
import edisyn.gui.*;
import java.awt.*;
import java.awt.geom.*;
import javax.swing.border.*;
import javax.swing.*;
import java.awt.event.*;
import java.util.*;
import java.io.*;
import javax.sound.midi.*;
import edisyn.util.*;

/**
   A patch editor for the Roland JV-880 and JV-80 (Drums)

   @author Sean Luke
*/

public class RolandJV880Drum extends Synth {

    // Banks the user can select as a read/write target.
    public static final String[] BANKS = new String[] { "Internal", "Card" };
    // Banks that can appear in the patch display; the two presets are read-only.
    public static final String[] DISPLAYABLE_BANKS = new String[] { "Internal", "Card", "Preset A", "Preset B" };
    public static final String[] WRITABLE_BANKS = new String[] { "Internal", "Card" };

    // Display names for the drum waves, indexed by wave number - 1 (see addWave()).
    // NOTE(review): "Synth Pulse 2" appears twice below -- the first occurrence is
    // presumably meant to be "Synth Pulse 1"; confirm against the JV-880 wave list.
    public static final String[] WAVE_NAMES = new String[] {
        "Ac Piano 1", "SA Rhodes 1", "SA Rhodes 2", "E. Piano 1", "E. Piano 2", "Clav 1", "Organ 1", "Jazz Organ",
        "Pipe Organ", "Nylon GTR", "6STR GTR", "GTR HARM", "Mute GTR 1", "Pop Strat", "Stratus", "SYN GTR",
        "Harp 1", "SYN Bass", "Pick Bass", "E. Bass", "Fretless 1", "Upright BS", "Slap Bass 1", "Slap & Pop",
        "Slap Bass 2", "Slap Bass 3", "Flute 1", "Trumpet 1", "Trombone 1", "Harmon Mute 1", "Alto Sax 1", "Tenor Sax 1",
        "French 1", "Blow Pipe", "Bottle", "Trumpet SECT", "ST. Strings-R", "ST. Strings-L", "Mono Strings", "Pizz",
        "SYN VOX 1", "SYN VOX 2", "Male Ooh", "ORG VOX", "VOX Noise", "Soft Pad", "JP Strings", "Pop Voice",
        "Fine Wine", "Fantasynth", "Fanta Bell", "ORG Bell", "Agogo", "Bottle Hit", "Vibes", "Marimba wave",
        "Log Drum", "DIGI Bell 1", "DIGI Chime", "Steel Drums", "MMM VOX", "Spark VOX", "Wave Scan", "Wire String",
        "Lead Wave", "Synth Saw 1", "Synth Saw 2", "Synth Saw 3", "Synth Square", "Synth Pulse 2", "Synth Pulse 2", "Triangle",
        "Sine", "ORG Click", "White Noise", "Wind Agogo", "Metal Wind", "Feedbackwave", "Anklungs", "Wind Chimes",
        "Rattles", "Tin Wave", "Spectrum 1", "808 SNR 1", "90's Snare", "Piccolo SN", "LA Snare", "Whack Snare",
        "Rim Shot", "Bright Kick", "Verb Kick", "Round Kick", "808 Kick", "Closed HAT 1", "Closed HAT 2", "Open HAT 1",
        "Crash 1", "Ride 1", "Ride Bell 1", "Power Tom Hi", "Power Tom Lo", "Cross Stick 1", "808 Claps", "Cowbell 1",
        "Tambourine", "Timbale", "CGA Mute Hi", "CGA Mute Lo", "CGA Slap", "Conga Hi", "Conga Lo", "Maracas",
        "Cabasa Cut", "Cabasa Up", "Cabasa Down", "REV Steel DR", "REV Tin Wave", "REV SN 1", "REV SN 2", "REV SN 3",
        "REV SN 4", "REV Kick 1", "REV Cup", "REV Tom", "REV Cow Bell", "REV TAMB", "REV Conga", "REV Maracas",
        "REV Crash 1" };

    // Note names within an octave, used to render MIDI note numbers as text.
    public static final String[] KEYS = new String[] { "C", "C#", "D", "D#", "E", "F", "F#", "G", "G#", "A", "A#", "B" };
    public static final String[] OUTPUT_SELECTS = new String[] { "Main", "Sub" };
    // Display strings for the 16 random pitch depth settings (presumably cents -- confirm).
    public static final String[] RANDOM_PITCH_DEPTHS = new String[] { "0", "5", "10", "20", "30", "40", "50", "70",
        "100", "200", "300", "400", "500", "600", "800", "1200" };
    // Display strings for the 15 velocity time-sensitivity settings (symmetric around "0").
    public static final String[] TIME_SENSES = new String[] { "-100", "-70", "-50", "-40", "-30", "-20", "-10", "0",
        "10", "20", "30", "40", "50", "70", "100" };
    public static final String[] FILTER_MODES = new String[] { "Off", "LPF", "HPF" };
    public static final String[] RESONANCE_MODES = new String[] { "Soft", "Hard" };
    public static final String[] WAVE_GROUPS = new String[] { "Internal",
"Expansion", "PCM" }; public static final int MAX_WAVE_NUMBER = 256; public RolandJV880Drum() { for(int i = 0; i < drumParameters.length; i++) { drumParametersToIndex.put(drumParameters[i], Integer.valueOf(i)); } JComponent soundPanel = new SynthPanel(this); VBox vbox = new VBox(); HBox hbox = new HBox(); hbox.add(addNameGlobal(Style.COLOR_GLOBAL())); //hbox.add(addGlobal(Style.COLOR_A())); hbox.add(addKeys(Style.COLOR_B())); vbox.add(hbox); vbox.add(drumDisplay); soundPanel.add(vbox); addTab("Drum", soundPanel); model.set("bank", 0); // internal model.setStatus("note", model.STATUS_IMMUTABLE); loadDefaults(); } /** Add the global patch category (name, id, number, etc.) */ public JComponent addNameGlobal(Color color) { Category globalCategory = new Category(this, getSynthName(), color); //globalCategory.makeUnresettable(); JComponent comp; String[] params; HBox hbox = new HBox(); VBox vbox = new VBox(); comp = new PatchDisplay(this, 8); vbox.add(comp); hbox.add(vbox); // Not enough space to show the title hbox.addLast(Strut.makeHorizontalStrut(140)); globalCategory.add(hbox, BorderLayout.WEST); return globalCategory; } public JComponent addWave(int note, Color color) { Category category = new Category(this, "Wave", color); JComponent comp; String[] params; HBox hbox = new HBox(); VBox vbox = new VBox(); params = WAVE_GROUPS; comp = new Chooser("Wave Group", this, "note" + note + "wavegroup", params); vbox.add(comp); params = new String[MAX_WAVE_NUMBER]; for(int i = 0 ; i < WAVE_NAMES.length; i++) params[i] = "" + (i + 1) + " " + WAVE_NAMES[i]; for(int i = WAVE_NAMES.length; i < MAX_WAVE_NUMBER; i++) params[i] = "" + (i + 1); comp = new Chooser("Wave Number", this, "note" + note + "wavenumber", params); vbox.add(comp); hbox.add(vbox); vbox = new VBox(); comp = new CheckBox("Tone Switch", this, "note" + note + "toneswitch"); vbox.add(comp); comp = new LabelledDial("Mute Group", this, "note" + note + "mutegroup", color, 0, 31) { public String map(int val) { if (val == 
0) return "Off"; else return "" + val; } }; hbox.add(comp); comp = new CheckBox("Envelope Sustain", this, "note" + note + "envelopemode"); vbox.add(comp); hbox.add(vbox); category.add(hbox, BorderLayout.CENTER); return category; } public JComponent addEffects(int note, Color color) { Category category = new Category(this, "Effects", color); // category.makePasteable("note" + note); category.makePasteable("tone"); JComponent comp; String[] params; HBox hbox = new HBox(); VBox vbox = new VBox(); params = OUTPUT_SELECTS; comp = new Chooser("Output Select", this, "note" + note + "outputselect", params); vbox.add(comp); hbox.add(vbox); comp = new LabelledDial("Dry Level", this, "note" + note + "drylevel", color, 0, 127); hbox.add(comp); comp = new LabelledDial("Reverb", this, "note" + note + "reverbsendlevel", color, 0, 127); ((LabelledDial)comp).addAdditionalLabel("Send Level"); hbox.add(comp); comp = new LabelledDial("Chorus", this, "note" + note + "chorussendlevel", color, 0, 127); ((LabelledDial)comp).addAdditionalLabel("Send Level"); hbox.add(comp); category.add(hbox, BorderLayout.CENTER); return category; } public JComponent addPitch(int note, Color color) { Category category = new Category(this, "Pitch", color); // category.makePasteable("note" + note); category.makePasteable("tone"); JComponent comp; String[] params; HBox hbox = new HBox(); comp = new LabelledDial("Coarse", this, "note" + note + "coarsetune", color, 0, 127) { public String map(int val) { return KEYS[val % 12] + (val / 12 + 1); // note integer division } }; hbox.add(comp); comp = new LabelledDial("Fine", this, "note" + note + "pitchfine", color, 14, 114, 64); hbox.add(comp); comp = new LabelledDial("Random", this, "note" + note + "randompitchdepth", color, 0, 15) { public String map(int value) { return RANDOM_PITCH_DEPTHS[value]; } }; ((LabelledDial)comp).addAdditionalLabel("Pitch Depth"); hbox.add(comp); comp = new LabelledDial("Pitch Bend Range", this, "note" + note + "pitchbendrange", color, 
            0, 12);
        hbox.add(comp);

        category.add(hbox, BorderLayout.CENTER);
        return category;
    }

    /** Builds the Pitch Envelope category: 4 times / 4 levels plus velocity sensitivities. */
    public JComponent addPitchEnvelope(int note, Color color) {
        Category category = new Category(this, "Pitch Envelope", color);
        // category.makePasteable("note" + note);
        category.makePasteable("tone");

        JComponent comp;
        String[] params;
        HBox hbox = new HBox();

        comp = new LabelledDial("Velocity", this, "note" + note + "penvvelocitylevelsense", color, 1, 127, 64);
        ((LabelledDial)comp).addAdditionalLabel("Level Sense");
        hbox.add(comp);

        /// IMPORTANT: Note that this is velocity time sense, not velocity ON time sense (as it is in the JV880 editor)
        comp = new LabelledDial("Velocity", this, "note" + note + "penvvelocitytimesense", color, 0, 14) {
            public boolean isSymmetric() { return true; }
            public String map(int value) {
                return TIME_SENSES[value];
            }
        };
        ((LabelledDial)comp).addAdditionalLabel("Time Sense");
        hbox.add(comp);

        comp = new LabelledDial("Depth", this, "note" + note + "penvdepth", color, 52, 76, 64);
        hbox.add(comp);

        comp = new LabelledDial("Time 1", this, "note" + note + "penvtime1", color, 0, 127);
        hbox.add(comp);
        comp = new LabelledDial("Level 1", this, "note" + note + "penvlevel1", color, 1, 127, 64);
        hbox.add(comp);
        comp = new LabelledDial("Time 2", this, "note" + note + "penvtime2", color, 0, 127);
        hbox.add(comp);
        comp = new LabelledDial("Level 2", this, "note" + note + "penvlevel2", color, 1, 127, 64);
        hbox.add(comp);
        comp = new LabelledDial("Time 3", this, "note" + note + "penvtime3", color, 0, 127);
        hbox.add(comp);
        comp = new LabelledDial("Level 3", this, "note" + note + "penvlevel3", color, 1, 127, 64);
        hbox.add(comp);
        comp = new LabelledDial("Time 4", this, "note" + note + "penvtime4", color, 0, 127);
        hbox.add(comp);
        comp = new LabelledDial("Level 4", this, "note" + note + "penvlevel4", color, 1, 127, 64);
        hbox.add(comp);

        // Level 3 appears twice so the sustain segment draws flat before the release stage.
        comp = new EnvelopeDisplay(this, Style.ENVELOPE_COLOR(),
            new String[] { null, "note" + note + "penvtime1", "note" + note + "penvtime2", "note" + note + "penvtime3", null, "note" + note + "penvtime4" },
            new String[] { null, "note" + note + "penvlevel1", "note" + note + "penvlevel2", "note" + note + "penvlevel3", "note" + note + "penvlevel3", "note" + note + "penvlevel4" },
            new double[] { 0, 0.2 / 127.0, 0.2 / 127.0, 0.2 / 127.0, 0.2, 0.2 / 127.0 },
            new double[] { 0, 1.0 / 127.0, 1.0 / 127.0, 1.0 / 127.0, 1.0 / 127.0, 1.0 / 127.0 });
        ((EnvelopeDisplay)comp).setAxis(1.0 / 127.0 * 64.0);        // is this centered right?
        hbox.addLast(comp);

        category.add(hbox, BorderLayout.CENTER);
        return category;
    }

    /** Builds the Filter (TVF) category: filter mode, resonance mode, cutoff, resonance. */
    public JComponent addFilter(int note, Color color) {
        Category category = new Category(this, "Filter (TVF)", color);
        // category.makePasteable("note" + note);
        category.makePasteable("tone");

        JComponent comp;
        String[] params;
        HBox hbox = new HBox();
        VBox vbox = new VBox();

        params = FILTER_MODES;
        comp = new Chooser("Filter Mode", this, "note" + note + "filtermode", params);
        vbox.add(comp);

        params = RESONANCE_MODES;
        comp = new Chooser("Resonance Mode", this, "note" + note + "resonancemode", params);
        vbox.add(comp);
        hbox.add(vbox);

        comp = new LabelledDial("Cutoff", this, "note" + note + "cutoff", color, 0, 127);
        hbox.add(comp);

        comp = new LabelledDial("Resonance", this, "note" + note + "resonance", color, 0, 127);
        hbox.add(comp);

        category.add(hbox, BorderLayout.CENTER);
        return category;
    }

    // These dials are remembered so addAmplifierEnvelope() can size struts to match,
    // keeping the TVA envelope row aligned with the TVF envelope row.
    JComponent tvfenvdepth = null;
    JComponent tvfenvlevel4 = null;

    /** Builds the Filter (TVF) Envelope category. */
    public JComponent addFilterEnvelope(int note, Color color) {
        Category category = new Category(this, "Filter (TVF) Envelope", color);
        // category.makePasteable("note" + note);
        category.makePasteable("tone");

        JComponent comp;
        String[] params;
        HBox hbox = new HBox();

        comp = new LabelledDial("Velocity", this, "note" + note + "tvfenvvelocitylevelsense", color, 1, 127, 64);
        ((LabelledDial)comp).addAdditionalLabel("Level Sense");
        hbox.add(comp);

        /// IMPORTANT: Note that this is velocity time sense, not velocity ON time sense (as it is in the JV880 editor)
        comp = new LabelledDial("Velocity",
this, "note" + note + "tvfenvvelocitytimesense", color, 0, 14) { public boolean isSymmetric() { return true; } public String map(int value) { /// FIXME if (value < TIME_SENSES.length) return TIME_SENSES[value]; else return "??"; } }; ((LabelledDial)comp).addAdditionalLabel("Time Sense"); hbox.add(comp); comp = new LabelledDial("Depth", this, "note" + note + "tvfenvdepth", color, 1, 127, 64); tvfenvdepth = comp; hbox.add(comp); comp = new LabelledDial("Time 1", this, "note" + note + "tvfenvtime1", color, 0, 127); hbox.add(comp); comp = new LabelledDial("Level 1", this, "note" + note + "tvfenvlevel1", color, 0, 127); hbox.add(comp); comp = new LabelledDial("Time 2", this, "note" + note + "tvfenvtime2", color, 0, 127); hbox.add(comp); comp = new LabelledDial("Level 2", this, "note" + note + "tvfenvlevel2", color, 0, 127); hbox.add(comp); comp = new LabelledDial("Time 3", this, "note" + note + "tvfenvtime3", color, 0, 127); hbox.add(comp); comp = new LabelledDial("Level 3", this, "note" + note + "tvfenvlevel3", color, 0, 127); hbox.add(comp); comp = new LabelledDial("Time 4", this, "note" + note + "tvfenvtime4", color, 0, 127); hbox.add(comp); comp = new LabelledDial("Level 4", this, "note" + note + "tvfenvlevel4", color, 0, 127); tvfenvlevel4 = comp; hbox.add(comp); comp = new EnvelopeDisplay(this, Style.ENVELOPE_COLOR(), new String[] { null, "note" + note + "tvfenvtime1", "note" + note + "tvfenvtime2", "note" + note + "tvfenvtime3", null, "note" + note + "tvfenvtime4" }, new String[] { null, "note" + note + "tvfenvlevel1", "note" + note + "tvfenvlevel2", "note" + note + "tvfenvlevel3", "note" + note + "tvfenvlevel3", "note" + note + "tvfenvlevel4" }, new double[] { 0, 0.2 / 127.0, 0.2 / 127.0, 0.2 / 127.0, 0.2, 0.2 / 127.0 }, new double[] { 0, 1.0 / 127.0, 1.0 / 127.0, 1.0 / 127.0, 1.0 / 127.0, 1.0 / 127.0 }); hbox.add(comp); category.add(hbox, BorderLayout.CENTER); return category; } public JComponent addAmplifier(int note, Color color) { Category category = new 
Category(this, "Amplifier (TVA)", color); // category.makePasteable("note" + note); category.makePasteable("tone"); JComponent comp; String[] params; HBox hbox = new HBox(); comp = new LabelledDial("Level", this, "note" + note + "level", color, 0, 127); hbox.add(comp); comp = new LabelledDial("Pan", this, "note" + note + "pan", color, 0, 128) { public boolean isSymmetric() { return true; } public String map(int value) { if (value < 64) { return "L" + (64 - value); } else if (value == 64) return "--"; else if (value < 128) { return "R" + (value - 64); } else return "Rand"; } }; hbox.add(comp); category.add(hbox, BorderLayout.CENTER); return category; } public JComponent addAmplifierEnvelope(int note, Color color) { Category category = new Category(this, "Amplifier (TVA) Envelope", color); // category.makePasteable("note" + note); category.makePasteable("tone"); JComponent comp; String[] params; HBox hbox = new HBox(); comp = new LabelledDial("Velocity", this, "note" + note + "tvaenvvelocitylevelsense", color, 1, 127, 64); ((LabelledDial)comp).addAdditionalLabel("Level Sense"); hbox.add(comp); /// IMPORTANT: Note that this is velocity time sense, not velocity ON time sense (as it is in the JV880 editor) comp = new LabelledDial("Velocity", this, "note" + note + "tvaenvvelocitytimesense", color, 0, 14) { public boolean isSymmetric() { return true; } public String map(int value) { return TIME_SENSES[value]; } }; ((LabelledDial)comp).addAdditionalLabel("Time Sense"); hbox.add(comp); //comp = new LabelledDial("Depth", this, "note" + note + "tvaenvdepth", color, 1, 127, 64); comp = Strut.makeStrut(tvfenvdepth); hbox.add(comp); comp = new LabelledDial("Time 1", this, "note" + note + "tvaenvtime1", color, 0, 127); hbox.add(comp); comp = new LabelledDial("Level 1", this, "note" + note + "tvaenvlevel1", color, 0, 127); hbox.add(comp); comp = new LabelledDial("Time 2", this, "note" + note + "tvaenvtime2", color, 0, 127); hbox.add(comp); comp = new LabelledDial("Level 2", this, 
"note" + note + "tvaenvlevel2", color, 0, 127); hbox.add(comp); comp = new LabelledDial("Time 3", this, "note" + note + "tvaenvtime3", color, 0, 127); hbox.add(comp); comp = new LabelledDial("Level 3", this, "note" + note + "tvaenvlevel3", color, 0, 127); hbox.add(comp); comp = new LabelledDial("Time 4", this, "note" + note + "tvaenvtime4", color, 0, 127); hbox.add(comp); //comp = new LabelledDial("Level 4", this, "note" + note + "tvaenvlevel4", color, 0, 127); comp = Strut.makeStrut(tvfenvlevel4); hbox.add(comp); comp = new EnvelopeDisplay(this, Style.ENVELOPE_COLOR(), new String[] { null, "note" + note + "tvaenvtime1", "note" + note + "tvaenvtime2", "note" + note + "tvaenvtime3", null, "note" + note + "tvaenvtime4" }, new String[] { null, "note" + note + "tvaenvlevel1", "note" + note + "tvaenvlevel2", "note" + note + "tvaenvlevel3", "note" + note + "tvaenvlevel3", null }, new double[] { 0, 0.2 / 127.0, 0.2 / 127.0, 0.2 / 127.0, 0.2, 0.2 / 127.0 }, new double[] { 0, 1.0 / 127.0, 1.0 / 127.0, 1.0 / 127.0, 1.0 / 127.0, 0 }); hbox.add(comp); category.add(hbox, BorderLayout.CENTER); return category; } public VBox buildKey(final int key, Color color) { JComponent comp; String[] params; HBox hbox = new HBox(); VBox vbox = new VBox(); hbox.add(addWave(key, Style.COLOR_A())); hbox.addLast(addEffects(key, Style.COLOR_A())); vbox.add(hbox); hbox = new HBox(); hbox.add(addPitch(key, Style.COLOR_A())); hbox.add(addFilter(key, Style.COLOR_C())); hbox.addLast(addAmplifier(key, Style.COLOR_A())); vbox.add(hbox); vbox.add(addPitchEnvelope(key, Style.COLOR_B())); vbox.add(addFilterEnvelope(key, Style.COLOR_C())); vbox.add(addAmplifierEnvelope(key, Style.COLOR_A())); return vbox; } VBox[] keys = new VBox[61]; VBox drumDisplay = new VBox(); public JComponent addKeys(Color color) { final Category category = new Category(this, "Drum Key", color); // category.makePasteable("key1"); // category.makeDistributable("key1"); // pretty useless category.makePasteable("key"); 
category.makeDistributable("key"); // pretty useless JComponent comp; String[] params; HBox hbox = new HBox(); for(int i = 0; i < keys.length; i++) { keys[i] = buildKey(i, color); } comp = new LabelledDial("Note", this, "note", color, 36, 96) { public String map(int val) { return KEYS[val % 12] + ((val / 12) - 1); // note integer division } }; hbox.add(comp); comp = new KeyDisplay("Note", this, "note", color, 36, 96, 0) { public void userPressed(int key) { doSendTestNote(key); } }; ((KeyDisplay)comp).setDynamicUpdate(true); ((KeyDisplay)comp).setOctavesBelowZero(KeyDisplay.OCTAVES_BELOW_ZERO_SPN); hbox.addLast(comp); model.register("note", new Updatable() { public void update(String key, Model model) { drumDisplay.removeAll(); drumDisplay.add(keys[model.get(key, 36) - 36]); drumDisplay.revalidate(); drumDisplay.repaint(); } }); // set it once model.set("note", model.get("note", 36)); category.add(hbox, BorderLayout.CENTER); return category; } public boolean gatherPatchInfo(String title, Model change, boolean writing) { String[] banks = BANKS; JComboBox bank = new JComboBox(banks); int b = model.get("bank"); if (b >= banks.length) // as in "preset a" or "preset b" b = 0; bank.setSelectedIndex(b); while(true) { boolean result = showMultiOption(this, new String[] { "Bank" }, new JComponent[] { bank }, title, "Enter the Bank"); if (result == false) return false; change.set("bank", bank.getSelectedIndex()); return true; } } public int parse(byte[] data, boolean fromFile) { // I have set it up here so that we could load ONE or SEVERAL drum sounds from the data int offset = 0; int note = -1; for(offset = 0; offset <= data.length - 63 && data[offset] == (byte)0xF0; offset += 63) { note = data[offset + 7] - 0x40; // this better be between 0x00 and 0x3C if (note < 0 || note > 60) { note = -1; break; } int pos = offset + 9; for(int i = 0; i < drumParameters.length; i++) { if (drumParameters[i].equals("-")) { pos++; } else if (drumParameters[i].equals("wavenumber")) { 
model.set("note" + note + drumParameters[i], (data[pos] << 4) + data[pos + 1]); pos++; } else if (drumParameters[i].equals("pan")) { model.set("note" + note + drumParameters[i], (data[pos] << 4) + data[pos + 1]); pos++; } else { model.set("note" + note + drumParameters[i], data[pos]); pos++; } } } revise(); if (note == -1) { return PARSE_FAILED; } else if (note == 0x3C) { return PARSE_SUCCEEDED; } else { return PARSE_INCOMPLETE; } } public static String getSynthName() { return "Roland JV-80/880 [Drum]"; } String defaultResourceFileName = null; public String getDefaultResourceFileName() { // See the Menu (preset options) if (defaultResourceFileName != null) return defaultResourceFileName; else return "RolandJV880Drum.init"; } public String getHTMLResourceFileName() { return "RolandJV880Drum.html"; } public byte getID() { try { byte b = (byte)(Byte.parseByte(tuple.id)); if (b >= 17) return (byte)(b - 1); } catch (NullPointerException e) { } // expected. Happens when tuple's not built yet catch (NumberFormatException e) { Synth.handleException(e); } return (byte)16; // IDs start at 17 } /** Map of parameter -> index in the drumParameters array. */ HashMap drumParametersToIndex = new HashMap(); public byte[] emit(String key) { if (key.equals("bank")) return new byte[0]; // this is not emittable if (key.equals("note")) return new byte[0]; // this is not emittable int note = StringUtility.getFirstInt(key); // n, o, t, e, + digits int param = ((Integer)(drumParametersToIndex.get(key.substring(note > 9 ? 
            6 : 5)))).intValue();

        int dataSize = 12;
        byte dataVal1 = 0;
        byte dataVal2 = 0;
        if (drumParameters[param].equals("-")) {
            System.err.println("This shouldn't be able to happen");
            return new byte[0];  // can never happen
        }
        else if (drumParameters[param].equals("wavenumber") || drumParameters[param].equals("pan")) {
            // Two-nibble parameters carry an extra data byte (13-byte message).
            dataSize = 13;
            dataVal1 = (byte)(model.get(key) / 16);
            dataVal2 = (byte)(model.get(key) % 16);
        }
        else {
            dataVal1 = (byte)model.get(key);
        }

        // Roland data-set message (command 0x12) addressed at 00 07 (0x40 + note) param;
        // checksum covers the address and data bytes only.
        byte[] data = new byte[dataSize];
        data[0] = (byte)0xF0;
        data[1] = (byte)0x41;
        data[2] = (byte)getID();
        data[3] = (byte)0x46;
        data[4] = (byte)0x12;
        data[5] = (byte)0x00;
        data[6] = (byte)0x07;
        data[7] = (byte)(0x40 + note);
        data[8] = (byte)param;
        if (dataSize == 12) {
            data[9] = (byte)dataVal1;
            data[10] = produceChecksum(data, 5, 10);
            data[11] = (byte) 0xF7;
        }
        else {
            data[9] = (byte)dataVal1;
            data[10] = (byte)dataVal2;
            data[11] = produceChecksum(data, 5, 11);
            data[12] = (byte) 0xF7;
        }
        return data;
    }

    /** Follows incoming note-ons from the controller to select the played drum key. */
    public void messageFromController(MidiMessage message, boolean interceptedForInternalUse, boolean routedToSynth) {
        if (message instanceof ShortMessage) {
            ShortMessage s = (ShortMessage)message;
            int status = s.getStatus();
            // NOTE_ON has a status from 0x90 to 0x9F (for all 16 channels)
            // and also cannot be velocity=0, since that would be equivalent to a NOTE OFF
            if (status >= ShortMessage.NOTE_ON && status <= ShortMessage.NOTE_ON + 15 && s.getData2() > 0)  // 0x90 to 0x9F
            {
                int key = s.getData1();
                if (key >= 36 && key <= 96) {
                    model.set("note", key);
                }
            }
        }
    }

    /** When we load Internal, the JV880 doesn't change the patch because there's no notion of
        "changing the patch".  So we have to explicitly send the information back. */
    // public boolean getSendsParametersAfterNonMergeParse() { return true; }

    public byte produceChecksum(byte[] data) {
        return produceChecksum(data, 0, data.length);
    }

    /** The checksum is computed on all the ADDRESS and DATA data.
        Just add up the data, mod 128, and subtract the result from 128.
        Return that, unless it is 128, in which case return 0. */
    public byte produceChecksum(byte[] data, int start, int end) {
        // The checksum works as follows:
        // 1. Add all the data
        // 2. mod by 128 (that is, & 127)
        // 3. Subtract from 128
        // 4. If the result is 128, return 0
        // 5. Else return the result
        int check = 0;
        for(int i = start; i < end; i++) {
            check += data[i];
        }
        check = check & 0x7F;
        check = 0x80 - check;
        if (check == 0x80) check = 0;
        return (byte) check;
    }

    /** Emits the whole kit: one 63-byte message per drum key (9 header bytes +
        52 data bytes + checksum + F7). */
    public Object[] emitAll(Model tempModel, boolean toWorkingMemory, boolean toFile) {
        if (tempModel == null)
            tempModel = getModel();

        byte[][] data = new byte[61][52 + 11];
        for(int i = 0; i < 61; i++) {
            data[i][0] = (byte)0xF0;
            data[i][1] = (byte)0x41;
            data[i][2] = (byte)getID();
            data[i][3] = (byte)0x46;
            data[i][4] = (byte)0x12;
            // The address selects the destination: 00 07 = working memory,
            // 01 7F = internal bank, 02 7F = card bank.
            if (toWorkingMemory) {
                data[i][5] = (byte)0x00;
                data[i][6] = (byte)0x07;
                data[i][7] = (byte)(0x40 + i);
                data[i][8] = (byte)0x00;
            }
            else if (tempModel.get("bank", 0) == 0)  // internal
            {
                data[i][5] = (byte)0x01;
                data[i][6] = (byte)0x7F;
                data[i][7] = (byte)(0x40 + i);
                data[i][8] = (byte)0x00;
            }
            else  // card
            {
                data[i][5] = (byte)0x02;
                data[i][6] = (byte)0x7F;
                data[i][7] = (byte)(0x40 + i);
                data[i][8] = (byte)0x00;
            }

            int pos = 9;
            for(int j = 0; j < drumParameters.length; j++) {
                int val = model.get("note" + i + drumParameters[j]);
                if (drumParameters[j].equals("-")) {
                    // Placeholder slot: skip one byte (see the matching logic in parse()).
                    pos++;
                }
                else if (drumParameters[j].equals("wavenumber")) {
                    // Two-nibble value: high nibble then low nibble.
                    data[i][pos] = (byte)((val >> 4) & 0x0F);
                    data[i][pos + 1] = (byte)(val & 0x0F);
                    pos++;
                }
                else if (drumParameters[j].equals("pan")) {
                    data[i][pos] = (byte)((val >> 4) & 0x0F);
                    data[i][pos + 1] = (byte)(val & 0x0F);
                    pos++;
                }
                else {
                    data[i][pos] = (byte)val;
                    pos++;
                }
            }
            data[i][data[i].length - 2] = produceChecksum(data[i], 5, data[i].length - 2);
            data[i][data[i].length - 1] = (byte) 0xF7;
        }
        return (Object[])data;
    }

    //// I CAN GET ALL 4 PATCH SEGMENTS WITH
    //// F0 41 10 46 11 01 48 20 00 00 00 0C 00 0B F7

    /** Requests a dump of the given bank (command 0x11 = data request). */
    public byte[] requestDump(Model tempModel) {
        // Change the patch/performance button to "performance" -- this is parameter 0 in system
        tryToSendSysex(new byte[] { (byte)0xF0, 0x41, getID(), 0x46, 0x12, 0x00, 0x00, 0x00, 0x00, 0x00,
            produceChecksum(new byte[] { 0x00, 0x00, 0x00, 0x00, 0x00 }), (byte)0xF7 });
        // It takes a second for this to take effect
        simplePause(200);

        // Internal
        byte AA = (byte)(0x01);
        byte BB = (byte)(0x7F);
        byte CC = (byte)(0x40);
        byte DD = (byte)(0x00);
        if (tempModel.get("bank") == 1)  // card
        {
            AA = 0x02;
        }

        // 24 * 128 + 100 = 3172 = 61 * 52
        byte checksum = produceChecksum(new byte[] { AA, BB, CC, DD, (byte)0x00, (byte)0x00, (byte)61, (byte)00 });
        byte[] b = new byte[] { (byte)0xF0, (byte)0x41, getID(), (byte)0x46, (byte)0x11, AA, BB, CC , DD,
            (byte)0x00, (byte)0x00, (byte)61, (byte)0, checksum, (byte)0xF7 };
        return b;
    }

    /** Requests a dump of working memory (address 00 07, same as emitAll's working-memory case). */
    public byte[] requestCurrentDump() {
        // Change the patch/performance button to "performance" -- this is parameter 0 in system
        tryToSendSysex(new byte[] { (byte)0xF0, 0x41, getID(), 0x46, 0x12, 0x00, 0x00, 0x00, 0x00, 0x00,
            produceChecksum(new byte[] { 0x00, 0x00, 0x00, 0x00, 0x00 }), (byte)0xF7 });
        // It takes a second for this to take effect
        simplePause(200);

        byte AA = (byte)(0x00);
        byte BB = (byte)(0x07);
        byte CC = (byte)(0x40);
        byte DD = (byte)(0x00);

        // 24 * 128 + 100 = 3172 = 61 * 52
        byte checksum = produceChecksum(new byte[] { AA, BB, CC, DD, (byte)0x00, (byte)0x00, (byte)61, (byte)00 });
        byte[] b = new byte[] { (byte)0xF0, (byte)0x41, getID(), (byte)0x46, (byte)0x11, AA, BB, CC , DD,
            (byte)0x00, (byte)0x00, (byte)61, (byte)0, checksum, (byte)0xF7 };
        return b;
    }

    ////// YOU MAY WANT TO IMPLEMENT SOME OF THE FOLLOWING

    public int getTestNotePitch() { return model.get("note"); }

    // public int getTestNoteChannel() { return model.get("channel"); }

    public JFrame sprout() {
        JFrame frame = super.sprout();
        // It doesn't make sense to send to current patch
        transmitTo.setEnabled(false);
        addJV880DrumMenu();
        return frame;
    }

    /** Loads the Preset A init file and marks the bank as Preset A (read-only). */
    public void resetToA() {
        defaultResourceFileName = "DrumPresetA.init";
doReset(); defaultResourceFileName = null; model.set("bank", 2); } public void resetToB() { defaultResourceFileName = "DrumPresetB.init"; doReset(); defaultResourceFileName = null; model.set("bank", 3); } public void addJV880DrumMenu() { JMenu menu = new JMenu("JV-80/880"); menubar.add(menu); JMenuItem preseta = new JMenuItem("Load Preset A"); menu.add(preseta); preseta.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent evt) { resetToA(); } }); JMenuItem presetb = new JMenuItem("Load Preset B"); menu.add(presetb); presetb.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent evt) { resetToB(); } }); menu.addSeparator(); JMenuItem swap = new JMenuItem("Swap With Note..."); menu.add(swap); swap.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent evt) { int current = model.get("note"); String[] notes = new String[61]; for(int i = 0; i < notes.length; i++) { notes[i] = KEYS[i % 12] + (i / 12 + 2); } JComboBox combo = new JComboBox(notes); combo.setSelectedIndex(current - 36); int res = showMultiOption(RolandJV880Drum.this, new String[] { "Note" }, new JComponent[] { combo }, new String[] { "Swap", "Cancel" }, 0, "Swap With Note...", new JLabel("Enter Note to Swap with.")); int select = combo.getSelectedIndex(); if (res == 0 && select != (current - 36)) // swap with someone relevant { setSendMIDI(false); undo.setWillPush(false); Model backup = (Model)(model.clone()); for(int i = 0; i < drumParameters.length; i++) { if (!drumParameters[i].equals("-")) { String swap = "note" + (current - 36) + drumParameters[i]; String with = "note" + select + drumParameters[i]; int temp = model.get(swap); model.set(swap, model.get(with)); model.set(with, temp); } } setSendMIDI(true); undo.setWillPush(true); if (!backup.keyEquals(getModel())) // it's changed, do an undo push { undo.push(backup); sendAllParameters(); // FIXME we could update the two notes in question } } } }); JMenuItem copy = new 
JMenuItem("Copy To Note..."); menu.add(copy); copy.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent evt) { int current = model.get("note"); String[] notes = new String[61]; for(int i = 0; i < notes.length; i++) { notes[i] = KEYS[i % 12] + (i / 12 + 2); } JComboBox combo = new JComboBox(notes); combo.setSelectedIndex(current - 36); int res = showMultiOption(RolandJV880Drum.this, new String[] { "Note" }, new JComponent[] { combo }, new String[] { "Swap", "Cancel" }, 0, "Copy To Note...", new JLabel("Enter Note to Copy to.")); int select = combo.getSelectedIndex(); if (res == 0 && select != (current - 36)) // copy to someone relevant { setSendMIDI(false); undo.setWillPush(false); Model backup = (Model)(model.clone()); for(int i = 0; i < drumParameters.length; i++) { if (!drumParameters[i].equals("-")) { String swap = "note" + (current - 36) + drumParameters[i]; String with = "note" + select + drumParameters[i]; model.set(with, model.get(swap)); } } setSendMIDI(true); undo.setWillPush(true); if (!backup.keyEquals(getModel())) // it's changed, do an undo push { undo.push(backup); sendAllParameters(); // FIXME we could update the two notes in question } } } }); } // The problem with this is that if we pick a sound that's not a drum sound, // like an organ or whatnot, it will play forever // public boolean getClearsTestNotes() { return false; } public int getPauseAfterChangePatch() { return 100; } public int getPauseAfterSendAllParameters() { return 100; } public String getPatchName(Model model) { return "Drum"; } public Model getNextPatchLocation(Model model) { int bank = model.get("bank"); bank++; if (bank >= 4) bank = 0; Model newModel = buildModel(); newModel.set("bank", bank); return newModel; } public String getPatchLocationName(Model model) { // getPatchLocationName() is called from sprout() as a test to see if we should enable // batch downloading. 
If we haven't yet created an .init file, then parameters won't exist // yet and this method will bomb badly. So we return null in this case. if (!model.exists("bank")) return null; return DISPLAYABLE_BANKS[model.get("bank")]; } public static final String[] drumParameters = { "wavegroup", "wavenumber", "-", // also wavenumber "toneswitch", "coarsetune", "mutegroup", "envelopemode", "pitchfine", "randompitchdepth", "pitchbendrange", "penvvelocitylevelsense", "penvvelocitytimesense", "penvdepth", "penvtime1", "penvlevel1", "penvtime2", "penvlevel2", "penvtime3", "penvlevel3", "penvtime4", "penvlevel4", "filtermode", "cutoff", "resonance", "resonancemode", "tvfenvvelocitylevelsense", "tvfenvvelocitytimesense", "tvfenvdepth", "tvfenvtime1", "tvfenvlevel1", "tvfenvtime2", "tvfenvlevel2", "tvfenvtime3", "tvfenvlevel3", "tvfenvtime4", "tvfenvlevel4", "level", "pan", "-", // also pan "tvaenvvelocitylevelsense", "tvaenvvelocitytimesense", "tvaenvtime1", "tvaenvlevel1", "tvaenvtime2", "tvaenvlevel2", "tvaenvtime3", "tvaenvlevel3", "tvaenvtime4", "drylevel", "reverbsendlevel", "chorussendlevel", "outputselect" }; public boolean librarianTested() { return true; } }
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jetbrains.idea.svn.history;

import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.FixedSizeButton;
import com.intellij.openapi.ui.TextFieldWithBrowseButton;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.ui.JBColor;
import com.intellij.util.NotNullFunction;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.idea.svn.SvnBundle;
import org.jetbrains.idea.svn.SvnUtil;
import org.jetbrains.idea.svn.branchConfig.SelectBranchPopup;
import org.jetbrains.idea.svn.branchConfig.SvnBranchConfigurationNew;
import org.jetbrains.idea.svn.branchConfig.SvnBranchMapperManager;
import org.jetbrains.idea.svn.dialogs.WCInfoWithBranches;
import org.jetbrains.idea.svn.integrate.IntegratedSelectedOptionsDialog;
import org.jetbrains.idea.svn.integrate.WorkingCopyInfo;
import org.tmatesoft.svn.core.SVNURL;

import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.Map;
import java.util.Set;

/**
 * A single working-copy-root row of the merge-info panel: shows the root's URL,
 * lets the user pick a source branch and the matching local path for it, and
 * exposes an "include" checkbox when more than one root is present.
 */
public class SvnMergeInfoRootPanelManual {
  private JCheckBox myInclude;
  private TextFieldWithBrowseButton myBranchField;
  private FixedSizeButton myFixedSelectLocal;
  private JPanel myContentPanel;
  private JTextArea myUrlText;
  private JTextArea myLocalArea;
  private JTextArea myMixedRevisions;

  @NotNull private final Project myProject;
  // Re-resolves branch info for the working copy when a chosen branch is missing from it
  @NotNull private final NotNullFunction<WCInfoWithBranches, WCInfoWithBranches> myRefresher;
  // Invoked after any user change so the enclosing panel can react
  @NotNull private final Runnable myListener;
  private boolean myOnlyOneRoot;
  @NotNull private WCInfoWithBranches myInfo;
  // Cache: branch URL -> last chosen local path for it
  @NotNull private final Map<String, String> myBranchToLocal;
  private WCInfoWithBranches.Branch mySelectedBranch;

  public SvnMergeInfoRootPanelManual(@NotNull Project project,
                                    @NotNull NotNullFunction<WCInfoWithBranches, WCInfoWithBranches> refresher,
                                    @NotNull Runnable listener,
                                    boolean onlyOneRoot,
                                    @NotNull WCInfoWithBranches info) {
    myOnlyOneRoot = onlyOneRoot;
    myInfo = info;
    myProject = project;
    myRefresher = refresher;
    myListener = listener;
    myBranchToLocal = ContainerUtil.newHashMap();

    init();
    // With a single root there is nothing to include/exclude
    myInclude.setVisible(!onlyOneRoot);
    initWithData();
  }

  // Wires listeners and seeds the UI from myInfo (first branch selected by default)
  private void initWithData() {
    myInclude.addActionListener(new ActionListener() {
      public void actionPerformed(final ActionEvent e) {
        myListener.run();
      }
    });

    myUrlText.setText(myInfo.getUrl().toString());
    myFixedSelectLocal.addActionListener(new ActionListener() {
      public void actionPerformed(final ActionEvent e) {
        if (mySelectedBranch != null) {
          Pair<WorkingCopyInfo, SVNURL> info =
            IntegratedSelectedOptionsDialog.selectWorkingCopy(myProject, myInfo.getUrl(), mySelectedBranch.getUrl(), false, null, null);
          if (info != null) {
            calculateBranchPathByBranch(mySelectedBranch.getUrl(), info.getFirst().getLocalPath());
          }

          myListener.run();
        }
      }
    });

    myBranchField.getTextField().setEditable(false);
    myBranchField.addActionListener(new ActionListener() {
      public void actionPerformed(final ActionEvent e) {
        final VirtualFile vf = SvnUtil.getVirtualFile(myInfo.getPath());
        if (vf != null) {
          SelectBranchPopup.show(myProject, vf, new SelectBranchPopup.BranchSelectedCallback() {
            public void branchSelected(final Project project, final SvnBranchConfigurationNew configuration, final String url, final long revision) {
              refreshSelectedBranch(new WCInfoWithBranches.Branch(url));
              // NOTE(review): mySelectedBranch may still be null here if the branch was
              // not found even after refreshSelectedBranch() re-queried -- verify that
              // refreshSelectedBranch() always ends with a non-null selection.
              calculateBranchPathByBranch(mySelectedBranch.getUrl(), null);
              myListener.run();
            }
          }, SvnBundle.message("select.branch.popup.general.title"));
        }
      }
    });

    if (myInfo.getBranches().isEmpty()) {
      calculateBranchPathByBranch(null, null);
    }
    else {
      refreshSelectedBranch(myInfo.getBranches().get(0));
      calculateBranchPathByBranch(mySelectedBranch.getUrl(), null);
    }
  }

  // Builds the GridBag layout: [include] From: <url> / To: <branch> / <local path> [...]
  private void init() {
    myContentPanel = new JPanel(new GridBagLayout()) {
      // NOTE(review): this override merely delegates to super -- presumably a leftover
      // debugging hook; confirm before removing.
      @Override
      public void setBounds(final Rectangle r) {
        super.setBounds(r);
      }
    };
    myContentPanel.setMinimumSize(new Dimension(200, 100));

    // One shared constraints object, mutated between component adds -- order matters below
    final GridBagConstraints gb =
      new GridBagConstraints(0, 0, 1, 1, 1, 0, GridBagConstraints.NORTHWEST, GridBagConstraints.HORIZONTAL, JBUI.insets(1), 0, 0);

    myInclude = new JCheckBox();
    gb.fill = GridBagConstraints.NONE;
    gb.weightx = 0;
    myContentPanel.add(myInclude, gb);

    // newline
    JLabel hereLabel = new JLabel("From:");
    ++ gb.gridy;
    gb.gridx = 0;
    myContentPanel.add(hereLabel, gb);

    myUrlText = new JTextArea();
    myUrlText.setLineWrap(true);
    myUrlText.setBackground(UIUtil.getLabelBackground());
    myUrlText.setWrapStyleWord(true);
    gb.weightx = 1;
    ++ gb.gridx;
    gb.gridwidth = 2;
    gb.fill = GridBagConstraints.HORIZONTAL;
    myContentPanel.add(myUrlText, gb);

    // newline
    gb.fill = GridBagConstraints.NONE;
    JLabel thereLabel = new JLabel("To:");
    gb.weightx = 0;
    gb.gridwidth = 1;
    ++ gb.gridy;
    gb.gridx = 0;
    myContentPanel.add(thereLabel, gb);

    myBranchField = new TextFieldWithBrowseButton();
    gb.weightx = 1;
    ++ gb.gridx;
    gb.gridwidth = 2;
    gb.fill = GridBagConstraints.HORIZONTAL;
    myContentPanel.add(myBranchField, gb);

    // newline
    gb.gridx = 1;
    ++ gb.gridy;
    gb.gridwidth = 1;
    myLocalArea = new JTextArea();
    myLocalArea.setBackground(UIUtil.getLabelBackground());
    myLocalArea.setLineWrap(true);
    myLocalArea.setWrapStyleWord(true);
    myContentPanel.add(myLocalArea, gb);

    ++ gb.gridx;
    gb.weightx = 0;
    gb.fill = GridBagConstraints.NONE;
    myFixedSelectLocal = new FixedSizeButton(20);
    myContentPanel.add(myFixedSelectLocal, gb);

    ++ gb.gridy;
    gb.gridx = 0;
    gb.gridwidth = 2;
    myMixedRevisions = new JTextArea("Mixed Revision Working Copy");
    myMixedRevisions.setForeground(JBColor.RED);
    myMixedRevisions.setBackground(myContentPanel.getBackground());
    myContentPanel.add(myMixedRevisions, gb);

    myMixedRevisions.setVisible(false);
  }

  public void setMixedRevisions(final boolean value) {
    myMixedRevisions.setVisible(value);
  }

  /** Returns a known local path mapped to {@code url}: the given {@code localPath} if it is
      among the mapped paths, otherwise the first mapped path in sorted order; null if none. */
  @Nullable
  private static String getLocal(@NotNull String url, @Nullable String localPath) {
    String result = null;
    Set<String> paths = SvnBranchMapperManager.getInstance().get(url);
    if (!ContainerUtil.isEmpty(paths)) {
      result = localPath != null ? ContainerUtil.find(paths, localPath) : ContainerUtil.getFirstItem(ContainerUtil.sorted(paths));
    }
    return result;
  }

  // always assign to local area here
  private void calculateBranchPathByBranch(@Nullable String url, @Nullable String localPath) {
    final String local = url == null ? null : getLocal(url, localPath == null ? myBranchToLocal.get(url) : localPath);
    if (local == null) {
      // No known mapping: prompt the user in red to select a local path
      myLocalArea.setForeground(JBColor.RED);
      myLocalArea.setText(SvnBundle.message("tab.repository.merge.panel.root.panel.select.local"));
    }
    else {
      myLocalArea.setForeground(UIUtil.getInactiveTextColor());
      myLocalArea.setText(local);
      myBranchToLocal.put(url, local);
    }
  }

  // always assign to selected branch here
  private void refreshSelectedBranch(@NotNull WCInfoWithBranches.Branch branch) {
    myBranchField.setText(branch.getName());

    if (!initSelectedBranch(branch)) {
      // Branch not known to the current info: refresh it once and retry
      myInfo = myRefresher.fun(myInfo);
      initSelectedBranch(branch);
    }
  }

  // Selects the branch only if it is present in myInfo; returns whether it was found
  private boolean initSelectedBranch(@NotNull WCInfoWithBranches.Branch branch) {
    boolean found = myInfo.getBranches().contains(branch);

    if (found) {
      mySelectedBranch = branch;
    }

    return found;
  }

  public void setOnlyOneRoot(final boolean onlyOneRoot) {
    myOnlyOneRoot = onlyOneRoot;
    myInclude.setEnabled(!myOnlyOneRoot);
    myInclude.setSelected(true);
  }

  public JPanel getContentPanel() {
    return myContentPanel;
  }

  // UI-designer factory hook for the non-default FixedSizeButton component
  private void createUIComponents() {
    myFixedSelectLocal = new FixedSizeButton(20);
  }

  /** Snapshot of the panel state for persistence/restoration. */
  @NotNull
  public InfoHolder getInfo() {
    return new InfoHolder(mySelectedBranch, getLocalBranch(), myInclude.isSelected());
  }

  /** Restores a previously captured state (see {@link #getInfo()}). */
  public void initSelection(@NotNull InfoHolder holder) {
    myInclude.setSelected(holder.isEnabled());
    if (holder.getBranch() != null) {
      refreshSelectedBranch(holder.getBranch());
      calculateBranchPathByBranch(mySelectedBranch.getUrl(), holder.getLocal());
    }
  }

  /** Immutable value object: selected branch, its local path, and the include flag. */
  public static class InfoHolder {
    @Nullable private final WCInfoWithBranches.Branch myBranch;
    @Nullable private final String myLocal;
    private final boolean myEnabled;

    public InfoHolder(@Nullable WCInfoWithBranches.Branch branch, @Nullable String local, boolean enabled) {
      myBranch = branch;
      myLocal = local;
      myEnabled = enabled;
    }

    @Nullable
    public WCInfoWithBranches.Branch getBranch() {
      return myBranch;
    }

    @Nullable
    public String getLocal() {
      return myLocal;
    }

    public boolean isEnabled() {
      return myEnabled;
    }
  }

  @NotNull
  public WCInfoWithBranches getWcInfo() {
    return myInfo;
  }

  @Nullable
  public WCInfoWithBranches.Branch getBranch() {
    return mySelectedBranch;
  }

  @Nullable
  public String getLocalBranch() {
    return mySelectedBranch != null ? myBranchToLocal.get(mySelectedBranch.getUrl()) : null;
  }

  // A lone root is always implicitly included; otherwise follow the checkbox
  public boolean isEnabled() {
    return myOnlyOneRoot || myInclude.isSelected();
  }
}
/* * DBeaver - Universal Database Manager * Copyright (C) 2010-2021 DBeaver Corp and others * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jkiss.dbeaver.ui.controls.resultset; import org.eclipse.jface.dialogs.ControlEnableState; import org.eclipse.jface.dialogs.IDialogConstants; import org.eclipse.jface.dialogs.IDialogSettings; import org.eclipse.jface.viewers.*; import org.eclipse.swt.SWT; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.graphics.Image; import org.eclipse.swt.layout.FillLayout; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.*; import org.eclipse.ui.ISharedImages; import org.eclipse.ui.dialogs.FilteredTree; import org.jkiss.code.NotNull; import org.jkiss.code.Nullable; import org.jkiss.dbeaver.model.DBIcon; import org.jkiss.dbeaver.model.DBValueFormatting; import org.jkiss.dbeaver.model.data.DBDAttributeBinding; import org.jkiss.dbeaver.model.data.DBDAttributeConstraint; import org.jkiss.dbeaver.model.data.DBDDataFilter; import org.jkiss.dbeaver.model.exec.DBCExecutionContext; import org.jkiss.dbeaver.model.sql.SQLUtils; import org.jkiss.dbeaver.ui.*; import org.jkiss.dbeaver.ui.controls.*; import org.jkiss.dbeaver.ui.controls.resultset.internal.ResultSetMessages; import org.jkiss.dbeaver.ui.controls.resultset.spreadsheet.SpreadsheetPresentation; import org.jkiss.dbeaver.ui.dialogs.HelpEnabledDialog; import 
org.jkiss.dbeaver.utils.RuntimeUtils; import org.jkiss.utils.CommonUtils; import java.util.List; import java.util.*; class FilterSettingsDialog extends HelpEnabledDialog { private static final String DIALOG_ID = "DBeaver.FilterSettingsDialog";//$NON-NLS-1$ private final Comparator<DBDAttributeBinding> POSITION_SORTER = (o1, o2) -> { final DBDAttributeConstraint c1 = getBindingConstraint(o1); final DBDAttributeConstraint c2 = getBindingConstraint(o2); return c1.getVisualPosition() - c2.getVisualPosition(); }; private final Comparator<DBDAttributeBinding> ALPHA_SORTER = Comparator.comparing(DBDAttributeBinding::getName); private final ResultSetViewer resultSetViewer; private final List<DBDAttributeBinding> attributes; private TreeViewer columnsViewer; private ViewerColumnController<Object, Object> columnsController; private DBDDataFilter dataFilter; private Text whereText; private Text orderText; // Keep constraints in a copy because we use this list as table viewer model private java.util.List<DBDAttributeConstraint> constraints; private ToolItem moveTopButton; private ToolItem moveUpButton; private ToolItem moveDownButton; private ToolItem moveBottomButton; private Comparator<DBDAttributeBinding> activeSorter = POSITION_SORTER; private FilterSettingsTreeEditor treeEditor; FilterSettingsDialog(ResultSetViewer resultSetViewer) { super(resultSetViewer.getControl().getShell(), IHelpContextIds.CTX_DATA_FILTER); this.resultSetViewer = resultSetViewer; this.dataFilter = new DBDDataFilter(resultSetViewer.getModel().getDataFilter()); this.constraints = new ArrayList<>(dataFilter.getConstraints()); DBDAttributeBinding[] modelAttrs = resultSetViewer.getModel().getAttributes(); this.attributes = new ArrayList<>(modelAttrs.length); Collections.addAll(this.attributes, modelAttrs); } @Override protected IDialogSettings getDialogBoundsSettings() { return UIUtils.getDialogSettings(DIALOG_ID); } @Override protected Control createDialogArea(Composite parent) { 
getShell().setText(ResultSetMessages.controls_resultset_filter_title); getShell().setImage(DBeaverIcons.getImage(UIIcon.FILTER)); Composite composite = (Composite) super.createDialogArea(parent); TabFolder tabFolder = new TabFolder(composite, SWT.NONE); tabFolder.setLayoutData(new GridData(GridData.FILL_BOTH)); { Composite columnsGroup = UIUtils.createPlaceholder(tabFolder, 1); new FilteredTree(columnsGroup, SWT.SINGLE | SWT.FULL_SELECTION, new NamedObjectPatternFilter(), true, false) { @Override protected TreeViewer doCreateTreeViewer(Composite parent, int style) { columnsViewer = new TreeViewer(parent, style); columnsController = new ViewerColumnController<>(getClass().getSimpleName(), columnsViewer); return columnsViewer; } }; columnsController.addColumn(ResultSetMessages.controls_resultset_filter_column_name, null, SWT.LEFT, true, false, new CellLabelProvider() { @Override public void update(ViewerCell cell) { final DBDAttributeBinding binding = (DBDAttributeBinding) cell.getElement(); final DBDAttributeConstraint constraint = getBindingConstraint(binding); cell.setText(constraint.getAttribute().getName()); cell.setImage(DBeaverIcons.getImage(DBValueFormatting.getObjectImage(binding.getMetaAttribute()))); } }); columnsController.addColumn("#", null, SWT.LEFT, true, false, new CellLabelProvider() { @Override public void update(ViewerCell cell) { final DBDAttributeBinding binding = (DBDAttributeBinding) cell.getElement(); final DBDAttributeConstraint constraint = getBindingConstraint(binding); cell.setText(String.valueOf(constraint.getOriginalVisualPosition() + 1)); } }); columnsController.addBooleanColumn(ResultSetMessages.controls_resultset_filter_column_visible, null, SWT.LEFT, true, false, item -> { final DBDAttributeBinding binding = (DBDAttributeBinding) item; final DBDAttributeConstraint constraint = getBindingConstraint(binding); return constraint.isVisible(); }, new EditingSupport(columnsViewer) { @Override protected CellEditor getCellEditor(Object 
element) { return new CustomCheckboxCellEditor(((TreeViewer) getViewer()).getTree()); } @Override protected boolean canEdit(Object element) { return true; } @Override protected Object getValue(Object element) { final DBDAttributeBinding binding = (DBDAttributeBinding) element; final DBDAttributeConstraint constraint = getBindingConstraint(binding); return constraint.isVisible(); } @Override protected void setValue(Object element, Object value) { final DBDAttributeBinding binding = (DBDAttributeBinding) element; final DBDAttributeConstraint constraint = getBindingConstraint(binding); constraint.setVisible((Boolean) value); } }); columnsController.addBooleanColumn(ResultSetMessages.controls_resultset_filter_column_pinned, null, SWT.LEFT, true, false, item -> { final DBDAttributeBinding binding = (DBDAttributeBinding) item; if (binding.getTopParent() != binding) { return null; } final DBDAttributeConstraint constraint = getBindingConstraint(binding); return constraint.hasOption(SpreadsheetPresentation.ATTR_OPTION_PINNED); }, new EditingSupport(columnsViewer) { @Override protected CellEditor getCellEditor(Object element) { return new CustomCheckboxCellEditor(((TreeViewer) getViewer()).getTree()); } @Override protected boolean canEdit(Object element) { final DBDAttributeBinding binding = (DBDAttributeBinding) element; return binding == binding.getTopParent(); } @Override protected Object getValue(Object element) { final DBDAttributeBinding binding = (DBDAttributeBinding) element; final DBDAttributeConstraint constraint = getBindingConstraint(binding); return constraint.hasOption(SpreadsheetPresentation.ATTR_OPTION_PINNED); } @Override protected void setValue(Object element, Object value) { final DBDAttributeBinding binding = (DBDAttributeBinding) element; final DBDAttributeConstraint constraint = getBindingConstraint(binding); if (CommonUtils.getBoolean(value, false)) { constraint.setOption(SpreadsheetPresentation.ATTR_OPTION_PINNED, 
SpreadsheetPresentation.getNextPinIndex(dataFilter)); } else { constraint.removeOption(SpreadsheetPresentation.ATTR_OPTION_PINNED); } } }); columnsController.addColumn(ResultSetMessages.controls_resultset_filter_column_order, null, SWT.LEFT, true, false, new CellLabelProvider() { @Override public void update(ViewerCell cell) { final DBDAttributeBinding binding = (DBDAttributeBinding) cell.getElement(); final DBDAttributeConstraint constraint = getBindingConstraint(binding); if (constraint.getOrderPosition() > 0) { cell.setText(" " + constraint.getOrderPosition()); cell.setImage(DBeaverIcons.getImage(constraint.isOrderDescending() ? UIIcon.SORT_INCREASE : UIIcon.SORT_DECREASE)); } else { cell.setText(null); cell.setImage(null); } } }); columnsController.addColumn(ResultSetMessages.controls_resultset_filter_column_criteria, null, SWT.LEFT, true, false, new CellLabelProvider() { @Override public void update(ViewerCell cell) { final DBDAttributeBinding binding = (DBDAttributeBinding) cell.getElement(); final DBDAttributeConstraint constraint = getBindingConstraint(binding); final DBCExecutionContext executionContext = resultSetViewer.getExecutionContext(); if (executionContext != null) { cell.setText(SQLUtils.getConstraintCondition(executionContext.getDataSource(), constraint, null, true)); } else { cell.setText(null); } } }); columnsController.createColumns(false); columnsViewer.setContentProvider(new TreeContentProvider() { @Override public Object[] getChildren(Object parentElement) { final List<DBDAttributeBinding> nestedBindings = ((DBDAttributeBinding) parentElement).getNestedBindings(); if (nestedBindings == null || nestedBindings.isEmpty()) { return null; } final DBDAttributeBinding[] res = nestedBindings.toArray(new DBDAttributeBinding[0]); Arrays.sort(res, activeSorter); return res; } @Override public boolean hasChildren(Object element) { final List<DBDAttributeBinding> nestedBindings = ((DBDAttributeBinding) element).getNestedBindings(); return nestedBindings 
!= null && !nestedBindings.isEmpty(); } }); final Tree columnsTree = columnsViewer.getTree(); GridData gd = new GridData(GridData.FILL_BOTH); gd.heightHint = 300; columnsTree.setLayoutData(gd); columnsTree.setHeaderVisible(true); columnsTree.setLinesVisible(true); treeEditor = new FilterSettingsTreeEditor(columnsTree); { ToolBar toolbar = new ToolBar(columnsGroup, SWT.HORIZONTAL | SWT.RIGHT); gd = new GridData(GridData.FILL_HORIZONTAL); gd.verticalIndent = 3; toolbar.setLayoutData(gd); toolbar.setLayout(new FillLayout()); moveTopButton = createToolItem(toolbar, ResultSetMessages.dialog_toolbar_move_to_top, UIIcon.ARROW_TOP, () -> { int selectionIndex = getSelectionIndex(columnsViewer.getTree()); moveColumns(selectionIndex, 0); }); moveTopButton.setEnabled(false); moveUpButton = createToolItem(toolbar, ResultSetMessages.dialog_toolbar_move_up, UIIcon.ARROW_UP, () -> { int selectionIndex = getSelectionIndex(columnsViewer.getTree()); swapColumns(selectionIndex, selectionIndex - 1); }); moveUpButton.setEnabled(false); moveDownButton = createToolItem(toolbar, ResultSetMessages.dialog_toolbar_move_down, UIIcon.ARROW_DOWN, () -> { int selectionIndex = getSelectionIndex(columnsViewer.getTree()); swapColumns(selectionIndex, selectionIndex + 1); }); moveDownButton.setEnabled(false); moveBottomButton = createToolItem(toolbar, ResultSetMessages.dialog_toolbar_move_to_bottom, UIIcon.ARROW_BOTTOM, () -> { int selectionIndex = getSelectionIndex(columnsViewer.getTree()); moveColumns(selectionIndex, getItemsCount() - 1); }); moveBottomButton.setEnabled(false); UIUtils.createToolBarSeparator(toolbar, SWT.VERTICAL); createToolItem(toolbar, ResultSetMessages.dialog_toolbar_sort, UIIcon.SORT, () -> { attributes.sort(ALPHA_SORTER); for (int i = 0; i < attributes.size(); i++) { final DBDAttributeConstraint constraint = getBindingConstraint(attributes.get(i)); constraint.setVisualPosition(i); } refreshData(); }); UIUtils.createToolBarSeparator(toolbar, SWT.VERTICAL); ToolItem 
showAllButton = createToolItem(toolbar, ResultSetMessages.dialog_toolbar_show_all, null, () -> { for (DBDAttributeConstraint constraint : constraints) { constraint.setVisible(true); } refreshData(); }); showAllButton.setImage(UIUtils.getShardImage(ISharedImages.IMG_ETOOL_DEF_PERSPECTIVE)); ToolItem showNoneButton = createToolItem(toolbar, ResultSetMessages.dialog_toolbar_show_none, null, () -> { for (DBDAttributeConstraint constraint : constraints) { constraint.setVisible(false); } refreshData(); }); showNoneButton.setImage(UIUtils.getShardImage(ISharedImages.IMG_ELCL_REMOVEALL)); createToolItem(toolbar, ResultSetMessages.dialog_toolbar_reset, UIIcon.REFRESH, () -> { dataFilter.reset(); constraints = new ArrayList<>(dataFilter.getConstraints()); refreshData(); //columnsViewer.refresh(); orderText.setText(""); //$NON-NLS-1$ whereText.setText(""); //$NON-NLS-1$ }); columnsViewer.addSelectionChangedListener(event -> { int selectionIndex = getSelectionIndex(columnsViewer.getTree()); moveTopButton.setEnabled(selectionIndex > 0); moveUpButton.setEnabled(selectionIndex > 0); moveDownButton.setEnabled(selectionIndex >= 0 && selectionIndex < getItemsCount() - 1); moveBottomButton.setEnabled(selectionIndex >= 0 && selectionIndex < getItemsCount() - 1); }); } TabItem libsTab = new TabItem(tabFolder, SWT.NONE); libsTab.setText(ResultSetMessages.controls_resultset_filter_group_columns); libsTab.setToolTipText("Set criteria and order for individual column(s)"); libsTab.setControl(columnsGroup); } createCustomFilters(tabFolder); // Fill columns columnsViewer.setInput(attributes); refreshData(); // Pack UI UIUtils.asyncExec(() -> UIUtils.packColumns(columnsViewer.getTree(), true, new float[] { 0.45f, 0.05f, 0.05f, 0.05f, 0.05f, 0.35f})); //UIUtils.packColumns(filterViewer.getTable()); if (!resultSetViewer.supportsDataFilter()) { Label warnLabel = new Label(composite, SWT.NONE); warnLabel.setText(ResultSetMessages.controls_resultset_filter_warning_custom_order_disabled); 
warnLabel.setForeground(parent.getDisplay().getSystemColor(SWT.COLOR_RED)); } return parent; } private int getItemsCount() { return columnsViewer.getTree().getItemCount(); } private void refreshData() { attributes.sort(activeSorter); columnsViewer.refresh(); columnsViewer.expandAll(); } private int getSelectionIndex(Tree tree) { final TreeItem[] selection = tree.getSelection(); if (selection.length == 0) { return 0; } return tree.indexOf(selection[0]); } private void swapColumns(int curIndex, int newIndex) { final DBDAttributeConstraint c1 = getBindingConstraint((DBDAttributeBinding) columnsViewer.getTree().getItem(curIndex).getData()); final DBDAttributeConstraint c2 = getBindingConstraint((DBDAttributeBinding) columnsViewer.getTree().getItem(newIndex).getData()); final int vp2 = c2.getVisualPosition(); c2.setVisualPosition(c1.getVisualPosition()); c1.setVisualPosition(vp2); refreshData(); moveTopButton.setEnabled(newIndex > 0); moveUpButton.setEnabled(newIndex > 0); moveDownButton.setEnabled(newIndex < getItemsCount() - 1); moveBottomButton.setEnabled(newIndex < getItemsCount() - 1); } private void moveColumns(int curIndex, int newIndex) { if (curIndex == newIndex) { return; } final DBDAttributeConstraint curAttr = getBindingConstraint((DBDAttributeBinding) columnsViewer.getTree().getItem(curIndex).getData()); // Update other constraints indexes for (DBDAttributeConstraint c : constraints) { if (newIndex < curIndex) { if (c.getVisualPosition() >= newIndex && c.getVisualPosition() < curIndex) { c.setVisualPosition(c.getVisualPosition() + 1); } } else { if (c.getVisualPosition() > curIndex && c.getVisualPosition() <= newIndex) { c.setVisualPosition(c.getVisualPosition() - 1); } } } curAttr.setVisualPosition(newIndex); refreshData(); moveTopButton.setEnabled(newIndex > 0); moveUpButton.setEnabled(newIndex > 0); moveDownButton.setEnabled(newIndex < getItemsCount() - 1); moveBottomButton.setEnabled(newIndex < getItemsCount() - 1); } private void 
createCustomFilters(TabFolder tabFolder) { Composite filterGroup = new Composite(tabFolder, SWT.NONE); filterGroup.setLayoutData(new GridData(GridData.FILL_BOTH)); filterGroup.setLayout(new GridLayout(1, false)); UIUtils.createControlLabel(filterGroup, ResultSetMessages.controls_resultset_filter_label_where); whereText = new Text(filterGroup, SWT.BORDER | SWT.MULTI | SWT.V_SCROLL | SWT.H_SCROLL); whereText.setLayoutData(new GridData(GridData.FILL_BOTH)); if (dataFilter.getWhere() != null) { whereText.setText(dataFilter.getWhere()); } UIUtils.createControlLabel(filterGroup, ResultSetMessages.controls_resultset_filter_label_orderby); orderText = new Text(filterGroup, SWT.BORDER | SWT.MULTI | SWT.V_SCROLL | SWT.H_SCROLL); orderText.setLayoutData(new GridData(GridData.FILL_BOTH)); if (dataFilter.getOrder() != null) { orderText.setText(dataFilter.getOrder()); } if (!resultSetViewer.supportsDataFilter()) { filterGroup.setEnabled(false); ControlEnableState.disable(filterGroup); } TabItem libsTab = new TabItem(tabFolder, SWT.NONE); libsTab.setText(ResultSetMessages.controls_resultset_filter_group_custom); libsTab.setToolTipText("Set custom criteria and order for whole query"); libsTab.setControl(filterGroup); } @Override public int open() { return super.open(); } @Override protected void createButtonsForButtonBar(Composite parent) { createButton(parent, IDialogConstants.OK_ID, IDialogConstants.OK_LABEL, true); createButton(parent, IDialogConstants.CANCEL_ID, IDialogConstants.CANCEL_LABEL, false); } @Override protected void buttonPressed(int buttonId) { super.buttonPressed(buttonId); } @Override protected void okPressed() { treeEditor.okPressed(); boolean hasVisibleColumns = false; for (DBDAttributeConstraint constraint : dataFilter.getConstraints()) { // Set correct visible position // constraint.setVisualPosition(this.constraints.indexOf(constraint)); if (constraint.isVisible()) { hasVisibleColumns = true; } } if (!hasVisibleColumns) { UIUtils.showMessageBox(getShell(), 
"Bad filter", "You have to set at least one column visible", SWT.ICON_WARNING); return; } if (!CommonUtils.isEmpty(orderText.getText())) { dataFilter.setOrder(orderText.getText()); } else { dataFilter.setOrder(null); } if (!CommonUtils.isEmpty(whereText.getText())) { dataFilter.setWhere(whereText.getText()); } else { dataFilter.setWhere(null); } boolean filtersChanged = true; if (dataFilter.equalFilters(resultSetViewer.getModel().getDataFilter(), true)) { // Only attribute visibility was changed filtersChanged = false; } resultSetViewer.setDataFilter( dataFilter, filtersChanged); super.okPressed(); } class ColumnLabelProvider extends LabelProvider implements ITableLabelProvider { @Nullable @Override public Image getColumnImage(Object element, int columnIndex) { DBDAttributeBinding binding = (DBDAttributeBinding) element; if (columnIndex == 0) { return DBeaverIcons.getImage( DBValueFormatting.getObjectImage(binding.getMetaAttribute())); } if (columnIndex == 2) { DBDAttributeConstraint constraint = getBindingConstraint(binding); if (constraint.getOrderPosition() > 0) { return DBeaverIcons.getImage(constraint.isOrderDescending() ? 
UIIcon.SORT_INCREASE : UIIcon.SORT_DECREASE); } } return null; } @Override public String getColumnText(Object element, int columnIndex) { DBDAttributeBinding binding = (DBDAttributeBinding) element; DBDAttributeConstraint constraint = getBindingConstraint(binding); switch (columnIndex) { case 0: return constraint.getAttribute().getName(); case 1: return String.valueOf(constraint.getOriginalVisualPosition() + 1); case 2: { int orderPosition = constraint.getOrderPosition(); if (orderPosition > 0) { return " " + String.valueOf(orderPosition); } return ""; //$NON-NLS-1$ } case 3: { DBCExecutionContext executionContext = resultSetViewer.getExecutionContext(); if (executionContext != null) { String condition = SQLUtils.getConstraintCondition(executionContext.getDataSource(), constraint, null, true); if (condition != null) { return condition; } } return ""; //$NON-NLS-1$ } default: return ""; //$NON-NLS-1$ } } } @NotNull private DBDAttributeConstraint getBindingConstraint(DBDAttributeBinding binding) { for (DBDAttributeConstraint constraint : constraints) { if (constraint.matches(binding, true)) { return constraint; } } throw new IllegalStateException("Can't find constraint for binding " + binding); } class CheckStateProvider implements ICheckStateProvider { @Override public boolean isChecked(Object element) { return getBindingConstraint(((DBDAttributeBinding)element)).isVisible(); } @Override public boolean isGrayed(Object element) { return false; } } private static ToolItem createToolItem(ToolBar toolBar, String text, DBIcon icon, final Runnable action) { ToolItem item = new ToolItem(toolBar, SWT.PUSH); if (icon != null) { item.setImage(DBeaverIcons.getImage(icon)); } if (text != null) { //item.setText(text); item.setToolTipText(text); } item.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { action.run(); } }); return item; } /** * This class was introduced exclusively to bypass the issue with * macos buttons not 
getting focus when dialog closes. * * See https://github.com/dbeaver/dbeaver/issues/10346 * See org.jkiss.dbeaver.ui.properties.PropertyTreeViewer.saveEditorValues() */ private class FilterSettingsTreeEditor extends CustomTreeEditor { private static final int COLUMN_ORDER_INDEX = 4; private static final int COLUMN_CRITERIA_INDEX = 5; private final Tree columnsTree; @Nullable private TreeItem lastTreeItem; @Nullable private Control lastEditor; public FilterSettingsTreeEditor(Tree columnsTree) { super(columnsTree); firstTraverseIndex = COLUMN_CRITERIA_INDEX; lastTraverseIndex = COLUMN_CRITERIA_INDEX; this.columnsTree = columnsTree; } @Override protected Control createEditor(Tree tree, int index, TreeItem item) { if (index == COLUMN_ORDER_INDEX) { toggleColumnOrder(item); return null; } else if (index == COLUMN_CRITERIA_INDEX && resultSetViewer.supportsDataFilter()) { Text text = new Text(columnsTree, SWT.BORDER); text.setText(item.getText(index)); text.selectAll(); lastEditor = text; lastTreeItem = item; return text; } return null; } @Override protected void saveEditorValue(Control control, int index, TreeItem item) { Text text = (Text) control; String criteria = text.getText().trim(); DBDAttributeConstraint constraint = getBindingConstraint((DBDAttributeBinding) item.getData()); if (CommonUtils.isEmpty(criteria)) { constraint.setCriteria(null); } else { constraint.setCriteria(criteria); } item.setText(COLUMN_CRITERIA_INDEX, criteria); } public void okPressed() { if (RuntimeUtils.isMacOS() && lastTreeItem != null && lastEditor != null) { saveEditorValue(lastEditor, COLUMN_CRITERIA_INDEX, lastTreeItem); } } private void toggleColumnOrder(TreeItem item) { DBDAttributeConstraint constraint = getBindingConstraint((DBDAttributeBinding) item.getData()); if (constraint.getOrderPosition() == 0) { // Add new ordered column constraint.setOrderPosition(dataFilter.getMaxOrderingPosition() + 1); constraint.setOrderDescending(false); } else if (!constraint.isOrderDescending()) { 
constraint.setOrderDescending(true); } else { constraint.setOrderPosition(0); constraint.setOrderDescending(false); } columnsViewer.refresh(); } } }
/*
 * This file is generated by jOOQ.
 */
package com.epam.ta.reportportal.jooq.tables.records;

import com.epam.ta.reportportal.jooq.tables.JAclObjectIdentity;

import javax.annotation.processing.Generated;

import org.jooq.Field;
import org.jooq.Record1;
import org.jooq.Record6;
import org.jooq.Row6;
import org.jooq.impl.UpdatableRecordImpl;

/**
 * This class is generated by jOOQ.
 */
// NOTE: generated source — do not edit by hand; changes are lost on the next jOOQ codegen run.
// Updatable record for table public.acl_object_identity (Spring Security ACL object identity).
// Values are stored by 0-based field index: 0=id, 1=object_id_class, 2=object_id_identity,
// 3=parent_object, 4=owner_sid, 5=entries_inheriting.
@Generated(
    value = {
        "http://www.jooq.org",
        "jOOQ version:3.12.4"
    },
    comments = "This class is generated by jOOQ"
)
@SuppressWarnings({ "all", "unchecked", "rawtypes" })
public class JAclObjectIdentityRecord extends UpdatableRecordImpl<JAclObjectIdentityRecord> implements Record6<Long, Long, String, Long, Long, Boolean> {

    private static final long serialVersionUID = 1370439583;

    /**
     * Setter for <code>public.acl_object_identity.id</code>.
     */
    public void setId(Long value) {
        set(0, value);
    }

    /**
     * Getter for <code>public.acl_object_identity.id</code>.
     */
    public Long getId() {
        return (Long) get(0);
    }

    /**
     * Setter for <code>public.acl_object_identity.object_id_class</code>.
     */
    public void setObjectIdClass(Long value) {
        set(1, value);
    }

    /**
     * Getter for <code>public.acl_object_identity.object_id_class</code>.
     */
    public Long getObjectIdClass() {
        return (Long) get(1);
    }

    /**
     * Setter for <code>public.acl_object_identity.object_id_identity</code>.
     */
    public void setObjectIdIdentity(String value) {
        set(2, value);
    }

    /**
     * Getter for <code>public.acl_object_identity.object_id_identity</code>.
     */
    public String getObjectIdIdentity() {
        return (String) get(2);
    }

    /**
     * Setter for <code>public.acl_object_identity.parent_object</code>.
     */
    public void setParentObject(Long value) {
        set(3, value);
    }

    /**
     * Getter for <code>public.acl_object_identity.parent_object</code>.
     */
    public Long getParentObject() {
        return (Long) get(3);
    }

    /**
     * Setter for <code>public.acl_object_identity.owner_sid</code>.
     */
    public void setOwnerSid(Long value) {
        set(4, value);
    }

    /**
     * Getter for <code>public.acl_object_identity.owner_sid</code>.
     */
    public Long getOwnerSid() {
        return (Long) get(4);
    }

    /**
     * Setter for <code>public.acl_object_identity.entries_inheriting</code>.
     */
    public void setEntriesInheriting(Boolean value) {
        set(5, value);
    }

    /**
     * Getter for <code>public.acl_object_identity.entries_inheriting</code>.
     */
    public Boolean getEntriesInheriting() {
        return (Boolean) get(5);
    }

    // -------------------------------------------------------------------------
    // Primary key information
    // -------------------------------------------------------------------------

    @Override
    public Record1<Long> key() {
        return (Record1) super.key();
    }

    // -------------------------------------------------------------------------
    // Record6 type implementation
    // -------------------------------------------------------------------------

    @Override
    public Row6<Long, Long, String, Long, Long, Boolean> fieldsRow() {
        return (Row6) super.fieldsRow();
    }

    @Override
    public Row6<Long, Long, String, Long, Long, Boolean> valuesRow() {
        return (Row6) super.valuesRow();
    }

    @Override
    public Field<Long> field1() {
        return JAclObjectIdentity.ACL_OBJECT_IDENTITY.ID;
    }

    @Override
    public Field<Long> field2() {
        return JAclObjectIdentity.ACL_OBJECT_IDENTITY.OBJECT_ID_CLASS;
    }

    @Override
    public Field<String> field3() {
        return JAclObjectIdentity.ACL_OBJECT_IDENTITY.OBJECT_ID_IDENTITY;
    }

    @Override
    public Field<Long> field4() {
        return JAclObjectIdentity.ACL_OBJECT_IDENTITY.PARENT_OBJECT;
    }

    @Override
    public Field<Long> field5() {
        return JAclObjectIdentity.ACL_OBJECT_IDENTITY.OWNER_SID;
    }

    @Override
    public Field<Boolean> field6() {
        return JAclObjectIdentity.ACL_OBJECT_IDENTITY.ENTRIES_INHERITING;
    }

    @Override
    public Long component1() {
        return getId();
    }

    @Override
    public Long component2() {
        return getObjectIdClass();
    }

    @Override
    public String component3() {
        return getObjectIdIdentity();
    }

    @Override
    public Long component4() {
        return getParentObject();
    }

    @Override
    public Long component5() {
        return getOwnerSid();
    }

    @Override
    public Boolean component6() {
        return getEntriesInheriting();
    }

    @Override
    public Long value1() {
        return getId();
    }

    @Override
    public Long value2() {
        return getObjectIdClass();
    }

    @Override
    public String value3() {
        return getObjectIdIdentity();
    }

    @Override
    public Long value4() {
        return getParentObject();
    }

    @Override
    public Long value5() {
        return getOwnerSid();
    }

    @Override
    public Boolean value6() {
        return getEntriesInheriting();
    }

    @Override
    public JAclObjectIdentityRecord value1(Long value) {
        setId(value);
        return this;
    }

    @Override
    public JAclObjectIdentityRecord value2(Long value) {
        setObjectIdClass(value);
        return this;
    }

    @Override
    public JAclObjectIdentityRecord value3(String value) {
        setObjectIdIdentity(value);
        return this;
    }

    @Override
    public JAclObjectIdentityRecord value4(Long value) {
        setParentObject(value);
        return this;
    }

    @Override
    public JAclObjectIdentityRecord value5(Long value) {
        setOwnerSid(value);
        return this;
    }

    @Override
    public JAclObjectIdentityRecord value6(Boolean value) {
        setEntriesInheriting(value);
        return this;
    }

    @Override
    public JAclObjectIdentityRecord values(Long value1, Long value2, String value3, Long value4, Long value5, Boolean value6) {
        value1(value1);
        value2(value2);
        value3(value3);
        value4(value4);
        value5(value5);
        value6(value6);
        return this;
    }

    // -------------------------------------------------------------------------
    // Constructors
    // -------------------------------------------------------------------------

    /**
     * Create a detached JAclObjectIdentityRecord
     */
    public JAclObjectIdentityRecord() {
        super(JAclObjectIdentity.ACL_OBJECT_IDENTITY);
    }

    /**
     * Create a detached, initialised JAclObjectIdentityRecord
     */
    public JAclObjectIdentityRecord(Long id, Long objectIdClass, String objectIdIdentity, Long parentObject, Long ownerSid, Boolean entriesInheriting) {
        super(JAclObjectIdentity.ACL_OBJECT_IDENTITY);

        set(0, id);
        set(1, objectIdClass);
        set(2, objectIdIdentity);
        set(3, parentObject);
        set(4, ownerSid);
        set(5, entriesInheriting);
    }
}
package com.pgmacdesign.pgmactips.mapzen;

import com.google.gson.annotations.SerializedName;

import java.util.List;

/**
 * Gson POJO for deserializing Mapzen geocoding/search responses.
 * <p>
 * Coordinate convention throughout this payload: longitude comes FIRST
 * (lng, lat), matching the GeoJSON ordering used by Mapzen.
 * <p>
 * Created by pmacdowell on 2017-02-17.
 */
public class MapzenPOJO {

    @SerializedName("type")
    private String type;
    @SerializedName("geocoding")
    private Geocoding geocoding;
    @SerializedName("features")
    private List<MapzenFeatures> features;
    //2 sets of coords, first set of lat, lng == bottom left. 2nd set lat, lng == top right.
    //LONGITUDE IS FIRST (lng, lat, lng, lat)
    @SerializedName("bbox")
    private double[] bbox;

    public double[] getBbox() { return bbox; }

    public void setBbox(double[] bbox) { this.bbox = bbox; }

    public String getType() { return type; }

    public void setType(String type) { this.type = type; }

    public Geocoding getGeocoding() { return geocoding; }

    public void setGeocoding(Geocoding geocoding) { this.geocoding = geocoding; }

    public List<MapzenFeatures> getFeatures() { return features; }

    public void setFeatures(List<MapzenFeatures> features) { this.features = features; }

    /** Top-level "geocoding" envelope: the echoed query plus API version and timestamp. */
    public static class Geocoding {

        @SerializedName("query")
        private GeocodingQuery query;
        @SerializedName("version")
        private String version;
        @SerializedName("timeStamp")
        private long timeStamp;

        public GeocodingQuery getQuery() { return query; }

        public void setQuery(GeocodingQuery query) { this.query = query; }

        public String getVersion() { return version; }

        public void setVersion(String version) { this.version = version; }

        public long getTimeStamp() { return timeStamp; }

        public void setTimeStamp(long timeStamp) { this.timeStamp = timeStamp; }
    }

    /** The query parameters the API echoes back ("size", "private", focus point). */
    public static class GeocodingQuery {

        //Size of the returned results array
        @SerializedName("size")
        private int size;
        @SerializedName("private")
        private boolean isThisPrivate;
        @SerializedName("focus.point.lat")
        private double focusPointLat;
        // FIX: the longitude of the focus point is a coordinate. It was previously declared
        // boolean, which would make Gson fail on the numeric "focus.point.lon" values.
        @SerializedName("focus.point.lon")
        private double focusPointLng;

        public int getSize() { return size; }

        public void setSize(int size) { this.size = size; }

        public boolean isThisPrivate() { return isThisPrivate; }

        public void setThisPrivate(boolean thisPrivate) { isThisPrivate = thisPrivate; }

        public double getFocusPointLat() { return focusPointLat; }

        public void setFocusPointLat(double focusPointLat) { this.focusPointLat = focusPointLat; }

        /** Longitude of the focus point ("focus.point.lon"). */
        public double getFocusPointLng() { return focusPointLng; }

        /** Sets the longitude of the focus point ("focus.point.lon"). */
        public void setFocusPointLng(double focusPointLng) { this.focusPointLng = focusPointLng; }

        /**
         * @deprecated retained only for source compatibility with the old, incorrectly
         * boolean-typed accessor. Returns {@code true} when the longitude is non-zero.
         * Use {@link #getFocusPointLng()} instead.
         */
        @Deprecated
        public boolean isFocusPointLng() { return focusPointLng != 0; }

        /**
         * @deprecated retained only for source compatibility with the old, incorrectly
         * boolean-typed mutator; maps {@code true} to 1.0 and {@code false} to 0.0.
         * Use {@link #setFocusPointLng(double)} instead.
         */
        @Deprecated
        public void setFocusPointLng(boolean focusPointLng) { this.focusPointLng = focusPointLng ? 1d : 0d; }
    }

    /** One GeoJSON-style feature: geometry + descriptive properties + optional bbox. */
    public static class MapzenFeatures {

        @SerializedName("type")
        private String type;
        @SerializedName("geometry")
        private MapzenGeometry geometry;
        @SerializedName("properties")
        private MapzenProperties properties;
        //2 sets of coords, first set of lat, lng == bottom left. 2nd set lat, lng == top right.
        //LONGITUDE IS FIRST (lng, lat, lng, lat)
        @SerializedName("bbox")
        private double[] bbox;

        public double[] getBbox() { return bbox; }

        public void setBbox(double[] bbox) { this.bbox = bbox; }

        public String getType() { return type; }

        public void setType(String type) { this.type = type; }

        public MapzenGeometry getGeometry() { return geometry; }

        public void setGeometry(MapzenGeometry geometry) { this.geometry = geometry; }

        public MapzenProperties getProperties() { return properties; }

        public void setProperties(MapzenProperties properties) { this.properties = properties; }
    }

    /** GeoJSON geometry: a type string plus a coordinate pair. */
    public static class MapzenGeometry {

        @SerializedName("type")
        private String type;
        //Structured with lat, lng separated by array positions where LONGITUDE IS FIRST,
        // IE [-117.222, 33.999]
        @SerializedName("coordinates")
        private double[] coordinates;

        public String getType() { return type; }

        public void setType(String type) { this.type = type; }

        public double[] getCoordinates() { return coordinates; }

        public void setCoordinates(double[] coordinates) { this.coordinates = coordinates; }
    }

    /** Descriptive attributes of a feature (address components, labels, ids). */
    public static class MapzenProperties {

        @SerializedName("id")
        private String id;
        @SerializedName("gid")
        private String gid;
        @SerializedName("layer")
        private String layer;
        @SerializedName("source")
        private String source;
        @SerializedName("source_id")
        private String source_id;
        @SerializedName("name")
        private String name;
        @SerializedName("distance")
        private double distance;
        @SerializedName("accuracy")
        private String accuracy;
        @SerializedName("country")
        private String country;
        //Region == state (IE California)
        @SerializedName("region")
        private String state;
        //State abbreviation (IE California == CA)
        @SerializedName("region_a")
        private String stateAbbreviation;
        @SerializedName("county")
        private String county;
        //locality == city
        @SerializedName("locality")
        private String city;
        //Zip code
        @SerializedName("postalcode")
        private String postalcode;
        @SerializedName("neighbourhood")
        private String neighbourhood;
        //String structured like this: "Saint George Parish School, Ontario, CA, USA"
        @SerializedName("label")
        private String label;

        public String getPostalcode() { return postalcode; }

        public void setPostalcode(String postalcode) { this.postalcode = postalcode; }

        public String getId() { return id; }

        public void setId(String id) { this.id = id; }

        public String getGid() { return gid; }

        public void setGid(String gid) { this.gid = gid; }

        public String getLayer() { return layer; }

        public void setLayer(String layer) { this.layer = layer; }

        public String getSource() { return source; }

        public void setSource(String source) { this.source = source; }

        public String getSource_id() { return source_id; }

        public void setSource_id(String source_id) { this.source_id = source_id; }

        public String getName() { return name; }

        public void setName(String name) { this.name = name; }

        public double getDistance() { return distance; }

        public void setDistance(double distance) { this.distance = distance; }

        public String getAccuracy() { return accuracy; }

        public void setAccuracy(String accuracy) { this.accuracy = accuracy; }

        public String getCountry() { return country; }

        public void setCountry(String country) { this.country = country; }

        public String getState() { return state; }

        public void setState(String state) { this.state = state; }

        public String getStateAbbreviation() { return stateAbbreviation; }

        public void setStateAbbreviation(String stateAbbreviation) { this.stateAbbreviation = stateAbbreviation; }

        public String getCounty() { return county; }

        public void setCounty(String county) { this.county = county; }

        public String getCity() { return city; }

        public void setCity(String city) { this.city = city; }

        public String getNeighbourhood() { return neighbourhood; }

        public void setNeighbourhood(String neighbourhood) { this.neighbourhood = neighbourhood; }

        public String getLabel() { return label; }

        public void setLabel(String label) { this.label = label; }
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.get;

import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.IgnoredFieldMapper;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.search.lookup.SourceLookup;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;

import static java.util.Collections.emptyMap;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;

/**
 * Result of fetching a single document: its identity (index/type/id), version,
 * an existence flag, the (possibly compressed) source bytes, and any requested
 * stored fields. Supports both wire serialization ({@link Streamable}) and
 * XContent (JSON) round-trips. Iterating over a {@code GetResult} iterates its
 * {@link DocumentField}s.
 */
public class GetResult implements Streamable, Iterable<DocumentField>, ToXContentObject {

    public static final String _INDEX = "_index";
    public static final String _TYPE = "_type";
    public static final String _ID = "_id";
    private static final String _VERSION = "_version";
    private static final String FOUND = "found";
    private static final String FIELDS = "fields";

    private String index;
    private String type;
    private String id;
    private long version;
    private boolean exists;
    private Map<String, DocumentField> fields;
    // Lazily-computed views of `source`; both are memoized after first access.
    private Map<String, Object> sourceAsMap;
    private BytesReference source;
    private byte[] sourceAsBytes;

    // No-arg constructor only for deserialization via readFrom(StreamInput).
    GetResult() {
    }

    public GetResult(String index, String type, String id, long version, boolean exists, BytesReference source,
                     Map<String, DocumentField> fields) {
        this.index = index;
        this.type = type;
        this.id = id;
        this.version = version;
        this.exists = exists;
        this.source = source;
        this.fields = fields;
        // Normalize: callers may pass null fields, but internally the map is never null
        // (except transiently after the package-private constructor, see iterator()).
        if (this.fields == null) {
            this.fields = emptyMap();
        }
    }

    /**
     * Does the document exist.
     */
    public boolean isExists() {
        return exists;
    }

    /**
     * The index the document was fetched from.
     */
    public String getIndex() {
        return index;
    }

    /**
     * The type of the document.
     */
    public String getType() {
        return type;
    }

    /**
     * The id of the document.
     */
    public String getId() {
        return id;
    }

    /**
     * The version of the doc.
     */
    public long getVersion() {
        return version;
    }

    /**
     * The source of the document if exists.
     * Returns {@code null} when there is no source; the byte[] form is memoized.
     */
    public byte[] source() {
        if (source == null) {
            return null;
        }
        if (sourceAsBytes != null) {
            return sourceAsBytes;
        }
        this.sourceAsBytes = BytesReference.toBytes(sourceRef());
        return this.sourceAsBytes;
    }

    /**
     * Returns bytes reference, also un compress the source if needed.
     */
    public BytesReference sourceRef() {
        if (source == null) {
            return null;
        }

        try {
            // Replaces `source` with the uncompressed form so decompression happens at most once.
            this.source = CompressorFactory.uncompressIfNeeded(this.source);
            return this.source;
        } catch (IOException e) {
            throw new ElasticsearchParseException("failed to decompress source", e);
        }
    }

    /**
     * Internal source representation, might be compressed....
     */
    public BytesReference internalSourceRef() {
        return source;
    }

    /**
     * Is the source empty (not available) or not.
     */
    public boolean isSourceEmpty() {
        return source == null;
    }

    /**
     * The source of the document (as a string).
     */
    public String sourceAsString() {
        if (source == null) {
            return null;
        }
        BytesReference source = sourceRef();
        try {
            return XContentHelper.convertToJson(source, false);
        } catch (IOException e) {
            // NOTE(review): the IOException cause is dropped here (only a message is thrown) —
            // consider chaining `e` if this is ever revisited.
            throw new ElasticsearchParseException("failed to convert source to a json string");
        }
    }

    /**
     * The source of the document (As a map).
     */
    public Map<String, Object> sourceAsMap() throws ElasticsearchParseException {
        if (source == null) {
            return null;
        }
        if (sourceAsMap != null) {
            return sourceAsMap;
        }

        sourceAsMap = SourceLookup.sourceAsMap(source);
        return sourceAsMap;
    }

    public Map<String, Object> getSource() {
        return sourceAsMap();
    }

    public Map<String, DocumentField> getFields() {
        return fields;
    }

    public DocumentField field(String name) {
        return fields.get(name);
    }

    @Override
    public Iterator<DocumentField> iterator() {
        // `fields` can only be null for an instance created via the package-private
        // constructor that has not been populated yet.
        if (fields == null) {
            return Collections.emptyIterator();
        }
        return fields.values().iterator();
    }

    /**
     * Renders the body of a get response (everything except the identity keys):
     * metadata fields inline, then "found", the raw "_source", and non-metadata
     * fields under a "fields" object. Fields with no values are skipped entirely.
     */
    public XContentBuilder toXContentEmbedded(XContentBuilder builder, Params params) throws IOException {
        List<DocumentField> metaFields = new ArrayList<>();
        List<DocumentField> otherFields = new ArrayList<>();
        if (fields != null && !fields.isEmpty()) {
            for (DocumentField field : fields.values()) {
                if (field.getValues().isEmpty()) {
                    continue;
                }
                if (field.isMetadataField()) {
                    metaFields.add(field);
                } else {
                    otherFields.add(field);
                }
            }
        }

        for (DocumentField field : metaFields) {
            // TODO: can we avoid having an exception here?
            if (field.getName().equals(IgnoredFieldMapper.NAME)) {
                // _ignored is rendered as the full list of values; other metadata
                // fields are rendered as their single value.
                builder.field(field.getName(), field.getValues());
            } else {
                builder.field(field.getName(), field.<Object>getValue());
            }
        }

        builder.field(FOUND, exists);

        if (source != null) {
            XContentHelper.writeRawField(SourceFieldMapper.NAME, source, builder, params);
        }

        if (!otherFields.isEmpty()) {
            builder.startObject(FIELDS);
            for (DocumentField field : otherFields) {
                field.toXContent(builder, params);
            }
            builder.endObject();
        }
        return builder;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(_INDEX, index);
        // "_type" is only emitted when the include_type_name param is set (defaults to true).
        if (params.paramAsBoolean("include_type_name", true)) {
            builder.field(_TYPE, type);
        }
        builder.field(_ID, id);
        if (isExists()) {
            // A version of -1 means "unknown" and is omitted from the output.
            if (version != -1) {
                builder.field(_VERSION, version);
            }
            toXContentEmbedded(builder, params);
        } else {
            builder.field(FOUND, false);
        }
        builder.endObject();
        return builder;
    }

    /**
     * Parses an embedded get result whose parser is positioned just before the
     * first field name (advances one token, then delegates).
     */
    public static GetResult fromXContentEmbedded(XContentParser parser) throws IOException {
        XContentParser.Token token = parser.nextToken();
        ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
        return fromXContentEmbedded(parser, null, null, null);
    }

    /**
     * Parses an embedded get result; the index/type/id arguments act as defaults
     * and are overwritten if the corresponding keys appear in the content.
     * Unknown objects/arrays are skipped for forward compatibility.
     */
    public static GetResult fromXContentEmbedded(XContentParser parser, String index, String type, String id) throws IOException {
        XContentParser.Token token = parser.currentToken();
        ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);

        String currentFieldName = parser.currentName();
        long version = -1;
        Boolean found = null;
        BytesReference source = null;
        Map<String, DocumentField> fields = new HashMap<>();
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (token.isValue()) {
                if (_INDEX.equals(currentFieldName)) {
                    index = parser.text();
                } else if (_TYPE.equals(currentFieldName)) {
                    type = parser.text();
                } else if (_ID.equals(currentFieldName)) {
                    id = parser.text();
                } else if (_VERSION.equals(currentFieldName)) {
                    version = parser.longValue();
                } else if (FOUND.equals(currentFieldName)) {
                    found = parser.booleanValue();
                } else {
                    // Any other scalar field is treated as a single-valued document field.
                    fields.put(currentFieldName, new DocumentField(currentFieldName,
                            Collections.singletonList(parser.objectText())));
                }
            } else if (token == XContentParser.Token.START_OBJECT) {
                if (SourceFieldMapper.NAME.equals(currentFieldName)) {
                    try (XContentBuilder builder = XContentBuilder.builder(parser.contentType().xContent())) {
                        //the original document gets slightly modified: whitespaces or pretty printing are not preserved,
                        //it all depends on the current builder settings
                        builder.copyCurrentStructure(parser);
                        source = BytesReference.bytes(builder);
                    }
                } else if (FIELDS.equals(currentFieldName)) {
                    while(parser.nextToken() != XContentParser.Token.END_OBJECT) {
                        DocumentField getField = DocumentField.fromXContent(parser);
                        fields.put(getField.getName(), getField);
                    }
                } else {
                    parser.skipChildren(); // skip potential inner objects for forward compatibility
                }
            } else if (token == XContentParser.Token.START_ARRAY) {
                if (IgnoredFieldMapper.NAME.equals(currentFieldName)) {
                    fields.put(currentFieldName, new DocumentField(currentFieldName, parser.list()));
                } else {
                    parser.skipChildren(); // skip potential inner arrays for forward compatibility
                }
            }
        }
        // NOTE(review): `found` is auto-unboxed here; content without a "found" field would
        // throw NPE — presumably the response format guarantees its presence.
        return new GetResult(index, type, id, version, found, source, fields);
    }

    /**
     * Parses a full (top-level object) get result.
     */
    public static GetResult fromXContent(XContentParser parser) throws IOException {
        XContentParser.Token token = parser.nextToken();
        ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser::getTokenLocation);
        return fromXContentEmbedded(parser);
    }

    /**
     * Reads a GetResult from the wire (factory counterpart of {@link #readFrom}).
     */
    public static GetResult readGetResult(StreamInput in) throws IOException {
        GetResult result = new GetResult();
        result.readFrom(in);
        return result;
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        index = in.readString();
        type = in.readOptionalString();
        id = in.readString();
        version = in.readLong();
        exists = in.readBoolean();
        // Source and fields are only on the wire when the document exists — mirrors writeTo.
        if (exists) {
            source = in.readBytesReference();
            if (source.length() == 0) {
                // Zero-length bytes mean "no source".
                source = null;
            }
            int size = in.readVInt();
            if (size == 0) {
                fields = emptyMap();
            } else {
                fields = new HashMap<>(size);
                for (int i = 0; i < size; i++) {
                    DocumentField field = DocumentField.readDocumentField(in);
                    fields.put(field.getName(), field);
                }
            }
        }
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(index);
        out.writeOptionalString(type);
        out.writeString(id);
        out.writeLong(version);
        out.writeBoolean(exists);
        if (exists) {
            out.writeBytesReference(source);
            if (fields == null) {
                out.writeVInt(0);
            } else {
                out.writeVInt(fields.size());
                for (DocumentField field : fields.values()) {
                    field.writeTo(out);
                }
            }
        }
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        GetResult getResult = (GetResult) o;
        // Source equality is compared via the parsed map, not raw bytes, so formatting
        // and compression differences do not affect equality.
        return version == getResult.version &&
                exists == getResult.exists &&
                Objects.equals(index, getResult.index) &&
                Objects.equals(type, getResult.type) &&
                Objects.equals(id, getResult.id) &&
                Objects.equals(fields, getResult.fields) &&
                Objects.equals(sourceAsMap(), getResult.sourceAsMap());
    }

    @Override
    public int hashCode() {
        return Objects.hash(version, exists, index, type, id, fields, sourceAsMap());
    }

    @Override
    public String toString() {
        return Strings.toString(this, true, true);
    }
}
/******************************************************************************* * Copyright (c) 2015 * * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), * to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, * and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER * DEALINGS IN THE SOFTWARE. *******************************************************************************/ package jsettlers.graphics.map; import java.util.Iterator; import go.graphics.GLDrawContext; import go.graphics.UIPoint; import jsettlers.common.Color; import jsettlers.common.landscape.ELandscapeType; import jsettlers.common.map.IGraphicsGrid; import jsettlers.common.map.shapes.IMapArea; import jsettlers.common.map.shapes.MapNeighboursArea; import jsettlers.common.map.shapes.MapRectangle; import jsettlers.common.position.FloatRectangle; import jsettlers.common.position.ShortPoint2D; import jsettlers.graphics.map.draw.DrawBuffer; import jsettlers.graphics.map.draw.DrawConstants; import jsettlers.graphics.map.geometry.MapCoordinateConverter; /** * This is the drawing context for a map. 
 * It is used to translate the visible screen space to local coordinate space and holds the current gl context.
 * <p>
 * This class uses 3 coordinate systems and provides conversion methods between them.
 * <ul>
 * <li>The map space. It is the map coordinate system (x, y)
 * <li>The draw space (used by all draw stuff).
 * <li>The screen space (visible space on which is drawn).
 * </ul>
 * <h2>gl setup</h2> With {@link #begin(GLDrawContext)}, the gl state machine is initialized for drawing the map. The draw coordinates can then be
 * given in draw space.
 * <h2>Draw buffer</h2> We hold a draw buffer everyone drawing with the map draw context can use. The buffer should be flushed when drawing one
 * component finished. When the draw buffer is used after a call to end(), the buffer is invalid.
 *
 * @author Michael Zangl
 */
public final class MapDrawContext implements IGLProvider {

	// Current GL context. Non-null only between begin() and end().
	private GLDrawContext gl = null;

	// The grid this context draws; used for bounds, heights and landscape lookups.
	private final IGraphicsGrid map;

	// Tracks the visible screen window (position, zoom) over the draw space.
	private final ScreenPosition screen;

	// Converts between map coordinates and draw/view coordinates.
	private final MapCoordinateConverter converter;

	/**
	 * Those are the colors used for the players. The first color is used for the first player. If there are more players than colors, colors are
	 * re-used (see {@link #getPlayerColor(byte)}, which wraps with modulo).
	 * <p>
	 * The first 20 colors are copied from an original settler 3 editor screenshot, the other 12 colors are chosen so they are unique.
	 */
	private static final Color[] PLAYER_COLORS = new Color[] {
			// Color 0 .. 19 Original settler colors
			new Color(0xFFF71000),
			new Color(0xFF108CF7),
			new Color(0xFFF7F700),
			new Color(0xFF29B552),
			new Color(0xFFF78C00),
			new Color(0xFF00F7F7),
			new Color(0xFFF700F7),
			new Color(0xFF292929),
			new Color(0xFFF7F7F7),
			new Color(0xFF0010F7),
			new Color(0xFFCE4A10),
			new Color(0xFF8C8C8C),
			new Color(0xFFAD08DE),
			new Color(0xFF006B00),
			new Color(0xFFF7BDBD),
			new Color(0xFF84EFA5),
			new Color(0xFF9C0831),
			new Color(0xFFCE8CE7),
			new Color(0xFFF7CE94),
			new Color(0xFF8CBDEF),
			// Additional 12 Colors
			new Color(0xFFBAFF45),
			new Color(0xFFCD0973),
			new Color(0xFFD9F1AF),
			new Color(0xFF6E005F),
			new Color(0xFFA3C503),
			new Color(0xFF64B3B9),
			new Color(0xFFB3F6FB),
			new Color(0xFF8E592B),
			new Color(0xFF8E882B),
			new Color(0xFFD9E0FF),
			new Color(0xFFD4D4D4),
			new Color(0xFFFF578F)
	};

	// NOTE(review): public mutable toggle; its readers are outside this excerpt — confirm before changing visibility.
	public boolean ENABLE_ORIGINAL = true;

	/**
	 * The basic draw buffer we use.
	 */
	private final DrawBuffer buffer;

	/**
	 * Creates a new map context for a given map.
	 *
	 * @param map
	 *            The map.
	 */
	public MapDrawContext(IGraphicsGrid map) {
		this.map = map;
		// Incline of the parallelogram map projection: horizontal shift per vertical step.
		float incline = DrawConstants.DISTANCE_X / 2.0f / DrawConstants.DISTANCE_Y;
		int mapHeight = map.getHeight() * DrawConstants.DISTANCE_Y;
		int mapWidth = map.getWidth() * DrawConstants.DISTANCE_X;
		this.screen = new ScreenPosition(mapWidth, mapHeight, incline);

		this.converter = MapCoordinateConverter.get(DrawConstants.DISTANCE_X,
				DrawConstants.DISTANCE_Y, map.getWidth(), map.getHeight());

		buffer = new DrawBuffer(this);
	}

	/**
	 * Sets the size of the context to width/height.
	 *
	 * @param windowWidth
	 *            The width.
	 * @param windowHeight
	 *            The height.
	 */
	public void setSize(float windowWidth, float windowHeight) {
		this.screen.setSize(windowWidth, windowHeight);
	}

	/**
	 * Begin a new draw session (=> draw a new image). Sets up the gl screen assuming the current viewport is set to (0,0,width,height)
	 *
	 * @param gl2
	 *            The gl context to use.
	 * @see #end()
	 */
	public void begin(GLDrawContext gl2) {
		this.gl = gl2;

		// beginTime = System.nanoTime();

		gl2.glPushMatrix();
		float zoom = screen.getZoom();
		gl2.glScalef(zoom, zoom, 1);
		// Truncate to whole pixels and add .5f to center on pixel boundaries.
		gl2.glTranslatef((int) -this.screen.getLeft() + .5f,
				(int) -this.screen.getBottom() + .5f, 0);
	}

	/**
	 * Ends a drawing session. Pops the matrix pushed by {@link #begin(GLDrawContext)} and clears the stored gl context.
	 */
	public void end() {
		this.gl.glPopMatrix();
		this.gl = null;
	}

	/**
	 * Gets the current gl context, or <code>null</code> if it is called outside a gl drawing session.
	 *
	 * @return The gl context that was given to {@link #begin(GLDrawContext)}
	 */
	@Override
	public GLDrawContext getGl() {
		return this.gl;
	}

	/**
	 * Gets the current draw buffer used for this context. You can add draws to this buffer instead of directly calling OpenGL since this object
	 * buffers the calls.
	 *
	 * @return The buffer.
	 */
	public DrawBuffer getDrawBuffer() {
		return buffer;
	}

	/**
	 * Gets the region of the draw space that is drawn on the screen and therefore rendered.
	 *
	 * @return The region displayed on the screen as Rectangle.
	 */
	public ScreenPosition getScreen() {
		return this.screen;
	}

	/**
	 * Finds the map position under a point in draw space, respecting terrain heights.
	 * <p>
	 * Starts from the flat-projection guess and hill-climbs over neighbouring tiles, keeping whichever neighbour projects closest to the requested
	 * point, until no neighbour improves the distance.
	 *
	 * @param screenx
	 *            The x coordinate in draw space
	 * @param screeny
	 *            The y coordinate in draw space.
	 * @return The map position under the point.
	 */
	public ShortPoint2D getPositionUnder(float screenx, float screeny) {
		ShortPoint2D currentPoint = converter.getMap(screenx, screeny);
		UIPoint desiredOnScreen = new UIPoint(screenx, screeny);

		UIPoint onscreen = converter.getView(currentPoint.x, currentPoint.y, getHeight(currentPoint.x, currentPoint.y));
		double currentbest = onscreen.distance(desiredOnScreen);

		boolean couldBeImproved;
		do {
			couldBeImproved = false;

			for (ShortPoint2D p : new MapNeighboursArea(currentPoint)) {
				onscreen = converter.getView(p.x, p.y, getHeight(p.x, p.y));
				double newDistance = onscreen.distance(desiredOnScreen);
				if (newDistance < currentbest) {
					currentbest = newDistance;
					currentPoint = p;
					couldBeImproved = true;
				}
			}
		} while (couldBeImproved);

		return currentPoint;
	}

	/**
	 * @param x
	 *            The x coordinate in screen space
	 * @param y
	 *            The y coordinate in screen space.
	 * @return The map position under the screen point.
	 */
	public ShortPoint2D getPositionOnScreen(float x, float y) {
		// Screen -> draw space: undo zoom, then offset by the screen origin.
		return getPositionUnder(x / this.screen.getZoom() + this.screen.getLeft(),
				y / this.screen.getZoom() + this.screen.getBottom());
	}

	/**
	 * Checks two map coordinates if they are on the map.
	 *
	 * @param x
	 *            The x coordinate in map space.
	 * @param y
	 *            The y coordinate in map space.
	 * @return If the map coordinates are on the map.
	 */
	public boolean checkMapCoordinates(int x, int y) {
		return x >= 0 && x < this.map.getWidth() && y >= 0 && y < this.map.getHeight();
	}

	/**
	 * Gets the color for a given player. Colors wrap around when the player index exceeds the palette size; negative indices map to black.
	 *
	 * @param player
	 *            The player to get the color for.
	 * @return The color.
	 */
	public Color getPlayerColor(byte player) {
		if (player >= 0) {
			return PLAYER_COLORS[player % PLAYER_COLORS.length];
		} else {
			return Color.BLACK;
		}
	}

	/**
	 * Gets the converter for the map coordinate system to screen coordinates.
	 *
	 * @return The map coordinate converter.
	 */
	public MapCoordinateConverter getConverter() {
		return this.converter;
	}

	/**
	 * Sets up the gl drawing context to draw a given tile. Must be paired with {@link #endTileContext()}.
	 *
	 * @param x
	 *            The x coordinate of the tile to draw.
	 * @param y
	 *            The y coordinate of the tile to draw.
	 */
	public void beginTileContext(int x, int y) {
		this.gl.glPushMatrix();
		int height = getHeight(x, y);
		this.gl.glTranslatef(this.converter.getViewX(x, y, height),
				this.converter.getViewY(x, y, height), 0);
	}

	/**
	 * Assumes that the user begun drawing a tile recently, and ends drawing the tile. This also resets the view matrix to the one before starting to
	 * draw.
	 */
	public void endTileContext() {
		this.gl.glPopMatrix();
	}

	/**
	 * Sets up drawing between two tiles. This is e.g. used to draw walking settlers. The translation is linearly interpolated between the projected
	 * positions of the two tiles (heights included). Pops are done via {@link #endTileContext()}.
	 *
	 * @param startx
	 *            The x coordinate of the start tile
	 * @param starty
	 *            The y coordinate of the start tile
	 * @param destinationx
	 *            The x coordinate of the second tile
	 * @param destinationy
	 *            The y coordinate of the second tile
	 * @param progress
	 *            The progress between those two tiles (0 = start, 1 = destination).
	 */
	public void beginBetweenTileContext(int startx, int starty,
			int destinationx, int destinationy, float progress) {
		this.gl.glPushMatrix();
		float theight = getHeight(startx, starty);
		float dheight = getHeight(destinationx, destinationy);
		float x = (1 - progress) * this.converter.getViewX(startx, starty, theight)
				+ progress * this.converter.getViewX(destinationx, destinationy, dheight);
		float y = (1 - progress) * this.converter.getViewY(startx, starty, theight)
				+ progress * this.converter.getViewY(destinationx, destinationy, dheight);
		this.gl.glTranslatef(x, y, 0);
	}

	/**
	 * Converts a screen rectangle to an area on the map. Map heights are respected.
	 *
	 * @param x1
	 *            one x (not ordered)
	 * @param y1
	 *            one y
	 * @param x2
	 *            an other x
	 * @param y2
	 *            an other y
	 * @return The rectangle on the map
	 */
	public IMapArea getRectangleOnScreen(int x1, int y1, int x2, int y2) {
		// Convert all four screen coordinates into draw space first.
		float drawx1 = x1 / this.screen.getZoom() + this.screen.getLeft();
		float drawx2 = x2 / this.screen.getZoom() + this.screen.getLeft();
		float drawy1 = y1 / this.screen.getZoom() + this.screen.getBottom();
		float drawy2 = y2 / this.screen.getZoom() + this.screen.getBottom();
		return new HeightedMapRectangle(new FloatRectangle(drawx1, drawy1, drawx2, drawy2));
	}

	/**
	 * This class represents an area of the map that looks rectangular on the screen. Due to height differences, this is not a rectangle on the map.
	 *
	 * @author Michael Zangl
	 *
	 */
	private class HeightedMapRectangle implements IMapArea {
		/**
		 * Note: This class is not serializable.
		 */
		private static final long serialVersionUID = 5868822981883722458L;

		/**
		 * Helper rectangle: the flat-projection map bounds used as search space by the iterator.
		 */
		private final MapRectangle base;
		// The rectangle in draw space that defines membership.
		private final FloatRectangle drawRect;

		/**
		 * Creates a new IMapArea that contains the points that are in the rectangle on the screen.
		 *
		 * @param drawRect
		 *            The rectangle in draw space
		 */
		HeightedMapRectangle(FloatRectangle drawRect) {
			this.drawRect = drawRect;
			base = converter.getMapForScreen(drawRect);
		}

		@Override
		public boolean contains(ShortPoint2D point) {
			// Project the tile (with its height) back to draw space and test the rectangle.
			int height = getHeight(point.x, point.y);
			float x = converter.getViewX(point.x, point.y, height);
			float y = converter.getViewY(point.x, point.y, height);
			return drawRect.contains(x, y);
		}

		@Override
		public Iterator<ShortPoint2D> iterator() {
			return new ScreenIterator();
		}

		/**
		 * This class iterates over a {@link HeightedMapRectangle}, scanning the base rectangle line by line and yielding only contained points.
		 *
		 * @author Michael Zangl
		 *
		 */
		private final class ScreenIterator implements Iterator<ShortPoint2D> {
			/**
			 * How many lines to search at least.
			 */
			private static final int MIN_SEARCH_LINES = 20;
			// Next point to return; null once exhausted.
			private ShortPoint2D next;
			private int currentLine = 0;
			private int currentX;

			private ScreenIterator() {
				currentX = base.getLineStartX(0);
				next = new ShortPoint2D(currentX, base.getLineY(0));
				if (!contains(next)) {
					next = searchNext();
				}
			}

			private ShortPoint2D searchNext() {
				// Scan forward; give up a few lines past the line we started on
				// (but always scan at least MIN_SEARCH_LINES lines).
				int startLine = currentLine;

				while (startLine >= currentLine - 2 || currentLine < MIN_SEARCH_LINES) {
					currentX++;
					if (currentX > base.getLineEndX(currentLine)) {
						// Line exhausted; wrap to the start of the next one.
						currentLine++;
						currentX = base.getLineStartX(currentLine);
					}
					ShortPoint2D point = new ShortPoint2D(currentX, base.getLineY(currentLine));
					if (contains(point)) {
						return point;
					}
				}
				return null;
			}

			@Override
			public boolean hasNext() {
				return next != null;
			}

			// NOTE(review): returns null instead of throwing NoSuchElementException when
			// exhausted — callers must check hasNext() first.
			@Override
			public ShortPoint2D next() {
				ShortPoint2D ret = next;
				next = searchNext();
				return ret;
			}

			@Override
			public void remove() {
				throw new UnsupportedOperationException();
			}
		}
	}

	/**
	 * Gets the area of the screen.
	 *
	 * @return An rectangle of the map in which the screen lies completely. The rectangle can be bigger than the screen.
	 */
	public MapRectangle getScreenArea() {
		return this.converter.getMapForScreen(this.screen.getPosition());
	}

	/**
	 * Move the view center to a given point, accounting for the tile's height.
	 *
	 * @param point
	 *            The point to move the view to.
	 */
	public void scrollTo(ShortPoint2D point) {
		int height = getHeight(point.x, point.y);
		float x = converter.getViewX(point.x, point.y, height);
		float y = converter.getViewY(point.x, point.y, height);
		screen.setScreenCenter(x, y);
	}

	/**
	 * Gets the landscape at a given position.
	 *
	 * @param x
	 *            x
	 * @param y
	 *            y
	 * @return The landscape type.
	 */
	public ELandscapeType getLandscape(int x, int y) {
		return map.getLandscapeTypeAt(x, y);
	}

	/**
	 * Gets the height of a tile, or 0 for coordinates outside the map bounds.
	 *
	 * @param x
	 *            x
	 * @param y
	 *            y
	 * @return The height at (x, y), 0 if out of bounds.
	 */
	public int getHeight(int x, int y) {
		if (x >= 0 && x < map.getWidth() && y >= 0 && y < map.getHeight()) {
			return map.getHeightAt(x, y);
		} else {
			return 0;
		}
	}

	// Delegates visibility lookup (fog of war) to the underlying grid.
	public byte getVisibleStatus(int x, int y) {
		return map.getVisibleStatus(x, y);
	}

	// Exposes the underlying grid this context draws.
	public IGraphicsGrid getMap() {
		return map;
	}
}
/* * Copyright 2010-2013 Ning, Inc. * * Ning licenses this file to you under the Apache License, version 2.0 * (the "License"); you may not use this file except in compliance with the * License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package org.killbill.billing.payment.dao; import java.math.BigDecimal; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.UUID; import org.joda.time.DateTime; import org.killbill.billing.callcontext.InternalCallContext; import org.killbill.billing.catalog.api.Currency; import org.killbill.billing.payment.PaymentTestSuiteWithEmbeddedDB; import org.killbill.billing.payment.api.PluginProperty; import org.killbill.billing.payment.api.TransactionStatus; import org.killbill.billing.payment.api.TransactionType; import org.killbill.billing.payment.dao.PluginPropertySerializer.PluginPropertySerializerException; import org.killbill.billing.util.callcontext.CallOrigin; import org.killbill.billing.util.callcontext.InternalCallContextFactory; import org.killbill.billing.util.callcontext.UserType; import org.killbill.billing.util.entity.Pagination; import org.testng.Assert; import org.testng.annotations.Test; import com.google.common.base.Predicate; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertFalse; import static org.testng.Assert.assertNotNull; import static org.testng.Assert.assertNull; public class TestPaymentDao extends PaymentTestSuiteWithEmbeddedDB { @Test(groups = "slow") public void testPaymentAttempt() 
throws PluginPropertySerializerException { final UUID transactionId = UUID.randomUUID(); final String paymentExternalKey = "vraiment?"; final String transactionExternalKey = "tduteuqweq"; final String stateName = "INIT"; final TransactionType transactionType = TransactionType.AUTHORIZE; final String pluginName = "superPlugin"; final UUID accountId = UUID.randomUUID(); final List<PluginProperty> properties = new ArrayList<PluginProperty>(); properties.add(new PluginProperty("key1", "value1", false)); properties.add(new PluginProperty("key2", "value2", false)); final byte[] serialized = PluginPropertySerializer.serialize(properties); final PaymentAttemptModelDao attempt = new PaymentAttemptModelDao(UUID.randomUUID(), UUID.randomUUID(), clock.getUTCNow(), clock.getUTCNow(), paymentExternalKey, transactionId, transactionExternalKey, transactionType, stateName, BigDecimal.ZERO, Currency.ALL, ImmutableList.<String>of(pluginName), serialized); PaymentAttemptModelDao savedAttempt = paymentDao.insertPaymentAttemptWithProperties(attempt, internalCallContext); assertEquals(savedAttempt.getTransactionExternalKey(), transactionExternalKey); assertEquals(savedAttempt.getTransactionType(), transactionType); assertEquals(savedAttempt.getStateName(), stateName); assertEquals(savedAttempt.getPluginName(), pluginName); final Iterable<PluginProperty> deserialized = PluginPropertySerializer.deserialize(savedAttempt.getPluginProperties()); int i = 0; for (PluginProperty cur : deserialized) { Assert.assertEquals(cur, properties.get(i++)); } final PaymentAttemptModelDao retrievedAttempt1 = paymentDao.getPaymentAttempt(attempt.getId(), internalCallContext); assertEquals(retrievedAttempt1.getTransactionExternalKey(), transactionExternalKey); assertEquals(retrievedAttempt1.getTransactionType(), transactionType); assertEquals(retrievedAttempt1.getStateName(), stateName); assertEquals(retrievedAttempt1.getPluginName(), pluginName); final List<PaymentAttemptModelDao> retrievedAttempts = 
paymentDao.getPaymentAttemptByTransactionExternalKey(transactionExternalKey, internalCallContext); assertEquals(retrievedAttempts.size(), 1); assertEquals(retrievedAttempts.get(0).getTransactionExternalKey(), transactionExternalKey); assertEquals(retrievedAttempts.get(0).getTransactionType(), transactionType); assertEquals(retrievedAttempts.get(0).getStateName(), stateName); assertEquals(retrievedAttempts.get(0).getPluginName(), pluginName); } @Test(groups = "slow") public void testPaymentAndTransactions() { final UUID paymentMethodId = UUID.randomUUID(); final UUID accountId = UUID.randomUUID(); final String externalKey = "hhhhooo"; final String transactionExternalKey = "grrrrrr"; final String transactionExternalKey2 = "hahahaha"; final DateTime utcNow = clock.getUTCNow(); final PaymentModelDao paymentModelDao = new PaymentModelDao(utcNow, utcNow, accountId, paymentMethodId, externalKey); final PaymentTransactionModelDao transactionModelDao = new PaymentTransactionModelDao(utcNow, utcNow, null, transactionExternalKey, paymentModelDao.getId(), TransactionType.AUTHORIZE, utcNow, TransactionStatus.SUCCESS, BigDecimal.TEN, Currency.AED, "success", ""); final PaymentModelDao savedPayment = paymentDao.insertPaymentWithFirstTransaction(paymentModelDao, transactionModelDao, internalCallContext); assertEquals(savedPayment.getId(), paymentModelDao.getId()); assertEquals(savedPayment.getAccountId(), paymentModelDao.getAccountId()); assertEquals(savedPayment.getExternalKey(), paymentModelDao.getExternalKey()); assertEquals(savedPayment.getPaymentMethodId(), paymentModelDao.getPaymentMethodId()); assertNull(savedPayment.getStateName()); final PaymentModelDao savedPayment2 = paymentDao.getPayment(savedPayment.getId(), internalCallContext); assertEquals(savedPayment2.getId(), paymentModelDao.getId()); assertEquals(savedPayment2.getAccountId(), paymentModelDao.getAccountId()); assertEquals(savedPayment2.getExternalKey(), paymentModelDao.getExternalKey()); 
assertEquals(savedPayment2.getPaymentMethodId(), paymentModelDao.getPaymentMethodId()); assertNull(savedPayment2.getStateName()); final PaymentModelDao savedPayment3 = paymentDao.getPaymentByExternalKey(externalKey, internalCallContext); assertEquals(savedPayment3.getId(), paymentModelDao.getId()); assertEquals(savedPayment3.getAccountId(), paymentModelDao.getAccountId()); assertEquals(savedPayment3.getExternalKey(), paymentModelDao.getExternalKey()); assertEquals(savedPayment3.getPaymentMethodId(), paymentModelDao.getPaymentMethodId()); assertNull(savedPayment3.getStateName()); final PaymentTransactionModelDao savedTransaction = paymentDao.getPaymentTransaction(transactionModelDao.getId(), internalCallContext); assertEquals(savedTransaction.getTransactionExternalKey(), transactionExternalKey); assertEquals(savedTransaction.getPaymentId(), paymentModelDao.getId()); assertEquals(savedTransaction.getTransactionType(), TransactionType.AUTHORIZE); assertEquals(savedTransaction.getTransactionStatus(), TransactionStatus.SUCCESS); assertEquals(savedTransaction.getAmount().compareTo(BigDecimal.TEN), 0); assertEquals(savedTransaction.getCurrency(), Currency.AED); final List<PaymentTransactionModelDao> savedTransactions = paymentDao.getPaymentTransactionsByExternalKey(transactionExternalKey, internalCallContext); assertEquals(savedTransactions.size(), 1); final PaymentTransactionModelDao savedTransaction2 = savedTransactions.get(0); assertEquals(savedTransaction2.getTransactionExternalKey(), transactionExternalKey); assertEquals(savedTransaction2.getPaymentId(), paymentModelDao.getId()); assertEquals(savedTransaction2.getTransactionType(), TransactionType.AUTHORIZE); assertEquals(savedTransaction2.getTransactionStatus(), TransactionStatus.SUCCESS); assertEquals(savedTransaction2.getAmount().compareTo(BigDecimal.TEN), 0); assertEquals(savedTransaction2.getCurrency(), Currency.AED); final PaymentTransactionModelDao transactionModelDao2 = new PaymentTransactionModelDao(utcNow, 
utcNow, null, transactionExternalKey2, paymentModelDao.getId(), TransactionType.AUTHORIZE, utcNow, TransactionStatus.UNKNOWN, BigDecimal.TEN, Currency.AED, "success", ""); final PaymentTransactionModelDao savedTransactionModelDao2 = paymentDao.updatePaymentWithNewTransaction(savedPayment.getId(), transactionModelDao2, internalCallContext); assertEquals(savedTransactionModelDao2.getTransactionExternalKey(), transactionExternalKey2); assertEquals(savedTransactionModelDao2.getPaymentId(), paymentModelDao.getId()); assertEquals(savedTransactionModelDao2.getTransactionType(), TransactionType.AUTHORIZE); assertEquals(savedTransactionModelDao2.getTransactionStatus(), TransactionStatus.UNKNOWN); assertEquals(savedTransactionModelDao2.getAmount().compareTo(BigDecimal.TEN), 0); assertEquals(savedTransactionModelDao2.getCurrency(), Currency.AED); final List<PaymentTransactionModelDao> transactions = paymentDao.getTransactionsForPayment(savedPayment.getId(), internalCallContext); assertEquals(transactions.size(), 2); paymentDao.updatePaymentAndTransactionOnCompletion(accountId, savedPayment.getId(), savedTransactionModelDao2.getTransactionType(), "AUTH_ABORTED", "AUTH_SUCCESS", transactionModelDao2.getId(), TransactionStatus.SUCCESS, BigDecimal.ONE, Currency.USD, null, "nothing", internalCallContext); final PaymentModelDao savedPayment4 = paymentDao.getPayment(savedPayment.getId(), internalCallContext); assertEquals(savedPayment4.getId(), paymentModelDao.getId()); assertEquals(savedPayment4.getAccountId(), paymentModelDao.getAccountId()); assertEquals(savedPayment4.getExternalKey(), paymentModelDao.getExternalKey()); assertEquals(savedPayment4.getPaymentMethodId(), paymentModelDao.getPaymentMethodId()); assertEquals(savedPayment4.getStateName(), "AUTH_ABORTED"); assertEquals(savedPayment4.getLastSuccessStateName(), "AUTH_SUCCESS"); final PaymentTransactionModelDao savedTransactionModelDao4 = paymentDao.getPaymentTransaction(savedTransactionModelDao2.getId(), 
internalCallContext); assertEquals(savedTransactionModelDao4.getTransactionExternalKey(), transactionExternalKey2); assertEquals(savedTransactionModelDao4.getPaymentId(), paymentModelDao.getId()); assertEquals(savedTransactionModelDao4.getTransactionType(), TransactionType.AUTHORIZE); assertEquals(savedTransactionModelDao4.getTransactionStatus(), TransactionStatus.SUCCESS); assertEquals(savedTransactionModelDao4.getAmount().compareTo(BigDecimal.TEN), 0); assertEquals(savedTransactionModelDao4.getCurrency(), Currency.AED); assertEquals(savedTransactionModelDao4.getProcessedAmount().compareTo(BigDecimal.ONE), 0); assertEquals(savedTransactionModelDao4.getProcessedCurrency(), Currency.USD); assertNull(savedTransactionModelDao4.getGatewayErrorCode()); assertEquals(savedTransactionModelDao4.getGatewayErrorMsg(), "nothing"); paymentDao.updatePaymentAndTransactionOnCompletion(accountId, savedPayment.getId(), savedTransactionModelDao2.getTransactionType(), "AUTH_ABORTED", null, transactionModelDao2.getId(), TransactionStatus.SUCCESS, BigDecimal.ONE, Currency.USD, null, "nothing", internalCallContext); final PaymentModelDao savedPayment4Again = paymentDao.getPayment(savedPayment.getId(), internalCallContext); assertEquals(savedPayment4Again.getId(), paymentModelDao.getId()); assertEquals(savedPayment4Again.getStateName(), "AUTH_ABORTED"); assertEquals(savedPayment4Again.getLastSuccessStateName(), "AUTH_SUCCESS"); paymentDao.updatePaymentAndTransactionOnCompletion(accountId, savedPayment.getId(), savedTransactionModelDao2.getTransactionType(), "AUTH_ABORTED", "AUTH_SUCCESS", transactionModelDao2.getId(), TransactionStatus.SUCCESS, BigDecimal.ONE, Currency.USD, null, "nothing", internalCallContext); final PaymentModelDao savedPayment4Final = paymentDao.getPayment(savedPayment.getId(), internalCallContext); assertEquals(savedPayment4Final.getId(), paymentModelDao.getId()); assertEquals(savedPayment4Final.getStateName(), "AUTH_ABORTED"); 
assertEquals(savedPayment4Final.getLastSuccessStateName(), "AUTH_SUCCESS"); final List<PaymentModelDao> payments = paymentDao.getPaymentsForAccount(accountId, internalCallContext); assertEquals(payments.size(), 1); final List<PaymentTransactionModelDao> transactions2 = paymentDao.getTransactionsForAccount(accountId, internalCallContext); assertEquals(transactions2.size(), 2); } @Test(groups = "slow") public void testPaymentMethod() { final UUID paymentMethodId = UUID.randomUUID(); final UUID accountId = UUID.randomUUID(); final String pluginName = "nobody"; final Boolean isActive = Boolean.TRUE; final PaymentMethodModelDao method = new PaymentMethodModelDao(paymentMethodId, UUID.randomUUID().toString(), null, null, accountId, pluginName, isActive); PaymentMethodModelDao savedMethod = paymentDao.insertPaymentMethod(method, internalCallContext); assertEquals(savedMethod.getId(), paymentMethodId); assertEquals(savedMethod.getAccountId(), accountId); assertEquals(savedMethod.getPluginName(), pluginName); assertEquals(savedMethod.isActive(), isActive); final List<PaymentMethodModelDao> result = paymentDao.getPaymentMethods(accountId, internalCallContext); assertEquals(result.size(), 1); savedMethod = result.get(0); assertEquals(savedMethod.getId(), paymentMethodId); assertEquals(savedMethod.getAccountId(), accountId); assertEquals(savedMethod.getPluginName(), pluginName); assertEquals(savedMethod.isActive(), isActive); paymentDao.deletedPaymentMethod(paymentMethodId, internalCallContext); PaymentMethodModelDao deletedPaymentMethod = paymentDao.getPaymentMethod(paymentMethodId, internalCallContext); assertNull(deletedPaymentMethod); deletedPaymentMethod = paymentDao.getPaymentMethodIncludedDeleted(paymentMethodId, internalCallContext); assertNotNull(deletedPaymentMethod); assertFalse(deletedPaymentMethod.isActive()); assertEquals(deletedPaymentMethod.getAccountId(), accountId); assertEquals(deletedPaymentMethod.getId(), paymentMethodId); 
assertEquals(deletedPaymentMethod.getPluginName(), pluginName); } @Test(groups = "slow") public void testPendingTransactions() { final UUID paymentMethodId = UUID.randomUUID(); final UUID accountId = UUID.randomUUID(); final String externalKey = "hhhhooo"; final String transactionExternalKey1 = "transaction1"; final String transactionExternalKey2 = "transaction2"; final String transactionExternalKey3 = "transaction3"; final String transactionExternalKey4 = "transaction4"; final DateTime initialTime = clock.getUTCNow().minusMinutes(1); final PaymentModelDao paymentModelDao = new PaymentModelDao(initialTime, initialTime, accountId, paymentMethodId, externalKey); final PaymentTransactionModelDao transaction1 = new PaymentTransactionModelDao(initialTime, initialTime, null, transactionExternalKey1, paymentModelDao.getId(), TransactionType.AUTHORIZE, initialTime, TransactionStatus.PENDING, BigDecimal.TEN, Currency.AED, "pending", ""); final PaymentModelDao payment = paymentDao.insertPaymentWithFirstTransaction(paymentModelDao, transaction1, internalCallContext); final PaymentTransactionModelDao transaction2 = new PaymentTransactionModelDao(initialTime, initialTime, null, transactionExternalKey2, paymentModelDao.getId(), TransactionType.AUTHORIZE, initialTime, TransactionStatus.PENDING, BigDecimal.TEN, Currency.AED, "pending", ""); paymentDao.updatePaymentWithNewTransaction(paymentModelDao.getId(), transaction2, internalCallContext); final PaymentTransactionModelDao transaction3 = new PaymentTransactionModelDao(initialTime, initialTime, null, transactionExternalKey3, paymentModelDao.getId(), TransactionType.AUTHORIZE, initialTime, TransactionStatus.SUCCESS, BigDecimal.TEN, Currency.AED, "success", ""); paymentDao.updatePaymentWithNewTransaction(paymentModelDao.getId(), transaction3, internalCallContext); clock.addDays(1); final DateTime newTime = clock.getUTCNow(); final InternalCallContext internalCallContextWithNewTime = new 
InternalCallContext(InternalCallContextFactory.INTERNAL_TENANT_RECORD_ID, 1687L, UUID.randomUUID(), UUID.randomUUID().toString(), CallOrigin.TEST, UserType.TEST, "Testing", "This is a test", newTime, newTime); final PaymentTransactionModelDao transaction4 = new PaymentTransactionModelDao(initialTime, initialTime, null, transactionExternalKey4, paymentModelDao.getId(), TransactionType.AUTHORIZE, newTime, TransactionStatus.PENDING, BigDecimal.TEN, Currency.AED, "pending", ""); paymentDao.updatePaymentWithNewTransaction(paymentModelDao.getId(), transaction4, internalCallContextWithNewTime); final List<PaymentTransactionModelDao> result = getPendingTransactions(paymentModelDao.getId()); Assert.assertEquals(result.size(), 3); final Iterable<PaymentTransactionModelDao> transactions1 = paymentDao.getByTransactionStatusAcrossTenants(ImmutableList.of(TransactionStatus.PENDING), newTime, initialTime, 0L, 3L); for (PaymentTransactionModelDao paymentTransaction : transactions1) { final String newPaymentState = "XXX_FAILED"; paymentDao.updatePaymentAndTransactionOnCompletion(payment.getAccountId(), payment.getId(), paymentTransaction.getTransactionType(), newPaymentState, payment.getLastSuccessStateName(), paymentTransaction.getId(), TransactionStatus.PAYMENT_FAILURE, paymentTransaction.getProcessedAmount(), paymentTransaction.getProcessedCurrency(), paymentTransaction.getGatewayErrorCode(), paymentTransaction.getGatewayErrorMsg(), internalCallContext); } final List<PaymentTransactionModelDao> result2 = getPendingTransactions(paymentModelDao.getId()); Assert.assertEquals(result2.size(), 1); // Just to guarantee that next clock.getUTCNow() > newTime try { Thread.sleep(1000); } catch (InterruptedException e) { } ; final Iterable<PaymentTransactionModelDao> transactions2 = paymentDao.getByTransactionStatusAcrossTenants(ImmutableList.of(TransactionStatus.PENDING), clock.getUTCNow(), initialTime, 0L, 1L); for (PaymentTransactionModelDao paymentTransaction : transactions2) { final 
String newPaymentState = "XXX_FAILED"; paymentDao.updatePaymentAndTransactionOnCompletion(payment.getAccountId(), payment.getId(), paymentTransaction.getTransactionType(), newPaymentState, payment.getLastSuccessStateName(), paymentTransaction.getId(), TransactionStatus.PAYMENT_FAILURE, paymentTransaction.getProcessedAmount(), paymentTransaction.getProcessedCurrency(), paymentTransaction.getGatewayErrorCode(), paymentTransaction.getGatewayErrorMsg(), internalCallContext); } final List<PaymentTransactionModelDao> result3 = getPendingTransactions(paymentModelDao.getId()); Assert.assertEquals(result3.size(), 0); } @Test(groups = "slow") public void testPaymentByStatesAcrossTenants() { final UUID paymentMethodId = UUID.randomUUID(); final UUID accountId = UUID.randomUUID(); final String externalKey1 = "XXhhhhooo1"; final String transactionExternalKey1 = "transactionXX1"; final String externalKey2 = "XXhhhhooo2"; final String transactionExternalKey2 = "transactionXX2"; final String externalKey3 = "XXhhhhooo3"; final String transactionExternalKey3 = "transactionXX3"; final String externalKey4 = "XXhhhhooo4"; final String transactionExternalKey4 = "transactionXX4"; final String externalKey5 = "XXhhhhooo5"; final String transactionExternalKey5 = "transactionXX5"; final DateTime createdAfterDate = clock.getUTCNow().minusDays(10); final DateTime createdBeforeDate = clock.getUTCNow().minusDays(1); // Right before createdAfterDate, so should not be returned final DateTime createdDate1 = createdAfterDate.minusHours(1); final PaymentModelDao paymentModelDao1 = new PaymentModelDao(createdDate1, createdDate1, accountId, paymentMethodId, externalKey1); paymentModelDao1.setStateName("AUTH_ERRORED"); final PaymentTransactionModelDao transaction1 = new PaymentTransactionModelDao(createdDate1, createdDate1, null, transactionExternalKey1, paymentModelDao1.getId(), TransactionType.AUTHORIZE, createdDate1, TransactionStatus.UNKNOWN, BigDecimal.TEN, Currency.AED, "unknown", ""); final 
InternalCallContext context1 = new InternalCallContext(1L, 1L, internalCallContext.getUserToken(), internalCallContext.getCreatedBy(), internalCallContext.getCallOrigin(), internalCallContext.getContextUserType(), internalCallContext.getReasonCode(), internalCallContext.getComments(), createdDate1, createdDate1); paymentDao.insertPaymentWithFirstTransaction(paymentModelDao1, transaction1, context1); // Right after createdAfterDate, so it should be returned final DateTime createdDate2 = createdAfterDate.plusHours(1); final PaymentModelDao paymentModelDao2 = new PaymentModelDao(createdDate2, createdDate2, accountId, paymentMethodId, externalKey2); paymentModelDao2.setStateName("CAPTURE_ERRORED"); final PaymentTransactionModelDao transaction2 = new PaymentTransactionModelDao(createdDate2, createdDate2, null, transactionExternalKey2, paymentModelDao2.getId(), TransactionType.AUTHORIZE, createdDate2, TransactionStatus.UNKNOWN, BigDecimal.TEN, Currency.AED, "unknown", ""); final InternalCallContext context2 = new InternalCallContext(2L, 2L, internalCallContext.getUserToken(), internalCallContext.getCreatedBy(), internalCallContext.getCallOrigin(), internalCallContext.getContextUserType(), internalCallContext.getReasonCode(), internalCallContext.getComments(), createdDate2, createdDate2); paymentDao.insertPaymentWithFirstTransaction(paymentModelDao2, transaction2, context2); // Right before createdBeforeDate, so it should be returned final DateTime createdDate3 = createdBeforeDate.minusDays(1); final PaymentModelDao paymentModelDao3 = new PaymentModelDao(createdDate3, createdDate3, accountId, paymentMethodId, externalKey3); paymentModelDao3.setStateName("CAPTURE_ERRORED"); final PaymentTransactionModelDao transaction3 = new PaymentTransactionModelDao(createdDate3, createdDate3, null, transactionExternalKey3, paymentModelDao3.getId(), TransactionType.AUTHORIZE, createdDate3, TransactionStatus.UNKNOWN, BigDecimal.TEN, Currency.AED, "unknown", ""); final InternalCallContext 
context3 = new InternalCallContext(3L, 3L, internalCallContext.getUserToken(), internalCallContext.getCreatedBy(), internalCallContext.getCallOrigin(), internalCallContext.getContextUserType(), internalCallContext.getReasonCode(), internalCallContext.getComments(), createdDate3, createdDate3); paymentDao.insertPaymentWithFirstTransaction(paymentModelDao3, transaction3, context3); // Right before createdBeforeDate but with a SUCCESS state so it should NOT be returned final DateTime createdDate4 = createdBeforeDate.minusDays(1); final PaymentModelDao paymentModelDao4 = new PaymentModelDao(createdDate4, createdDate4, accountId, paymentMethodId, externalKey4); paymentModelDao4.setStateName("CAPTURE_SUCCESS"); final PaymentTransactionModelDao transaction4 = new PaymentTransactionModelDao(createdDate4, createdDate4, null, transactionExternalKey4, paymentModelDao4.getId(), TransactionType.AUTHORIZE, createdDate4, TransactionStatus.UNKNOWN, BigDecimal.TEN, Currency.AED, "unknown", ""); final InternalCallContext context4 = new InternalCallContext(4L, 4L, internalCallContext.getUserToken(), internalCallContext.getCreatedBy(), internalCallContext.getCallOrigin(), internalCallContext.getContextUserType(), internalCallContext.getReasonCode(), internalCallContext.getComments(), createdDate4, createdDate4); paymentDao.insertPaymentWithFirstTransaction(paymentModelDao4, transaction4, context4); // Right after createdBeforeDate, so it should NOT be returned final DateTime createdDate5 = createdBeforeDate.plusDays(1); final PaymentModelDao paymentModelDao5 = new PaymentModelDao(createdDate5, createdDate5, accountId, paymentMethodId, externalKey5); paymentModelDao5.setStateName("CAPTURE_ERRORED"); final PaymentTransactionModelDao transaction5 = new PaymentTransactionModelDao(createdDate5, createdDate5, null, transactionExternalKey5, paymentModelDao5.getId(), TransactionType.AUTHORIZE, createdDate5, TransactionStatus.UNKNOWN, BigDecimal.TEN, Currency.AED, "unknown", ""); final 
InternalCallContext context5 = new InternalCallContext(5L, 5L, internalCallContext.getUserToken(), internalCallContext.getCreatedBy(), internalCallContext.getCallOrigin(), internalCallContext.getContextUserType(), internalCallContext.getReasonCode(), internalCallContext.getComments(), createdDate5, createdDate5);
        paymentDao.insertPaymentWithFirstTransaction(paymentModelDao5, transaction5, context5);
        // Query across tenants: only payments in an *_ERRORED state created inside the
        // (createdAfterDate, createdBeforeDate) window should match — i.e. payments #2 and #3 above.
        final String[] errorStates = {"AUTH_ERRORED", "CAPTURE_ERRORED", "REFUND_ERRORED", "CREDIT_ERRORED"};
        final List<PaymentModelDao> result = paymentDao.getPaymentsByStatesAcrossTenants(errorStates, createdBeforeDate, createdAfterDate, 10);
        assertEquals(result.size(), 2);
    }

    // Inserts NB_ENTRIES payments (each with one UNKNOWN-status transaction) under the same
    // account record id and verifies that getByTransactionStatusAcrossTenants pages through
    // all of them in a single page of size NB_ENTRIES.
    @Test(groups = "slow")
    public void testPaginationForPaymentByStatesAcrossTenants() {
        // Right before createdAfterDate, so should not be returned
        final DateTime createdDate1 = clock.getUTCNow().minusHours(1);
        final int NB_ENTRIES = 30;
        for (int i = 0; i < NB_ENTRIES; i++) {
            final PaymentModelDao paymentModelDao1 = new PaymentModelDao(createdDate1, createdDate1, UUID.randomUUID(), UUID.randomUUID(), UUID.randomUUID().toString());
            final PaymentTransactionModelDao transaction1 = new PaymentTransactionModelDao(createdDate1, createdDate1, null, UUID.randomUUID().toString(), paymentModelDao1.getId(), TransactionType.AUTHORIZE, createdDate1, TransactionStatus.UNKNOWN, BigDecimal.TEN, Currency.AED, "unknown", "");
            final InternalCallContext context1 = new InternalCallContext(1L, 1L, internalCallContext.getUserToken(), internalCallContext.getCreatedBy(), internalCallContext.getCallOrigin(), internalCallContext.getContextUserType(), internalCallContext.getReasonCode(), internalCallContext.getComments(), createdDate1, createdDate1);
            paymentDao.insertPaymentWithFirstTransaction(paymentModelDao1, transaction1, context1);
        }
        // NOTE(review): 'new Long(...)' is a deprecated boxing constructor — Long.valueOf would be
        // preferred; kept as-is here because the assertion below compares boxed values.
        final Pagination<PaymentTransactionModelDao> result = paymentDao.getByTransactionStatusAcrossTenants(ImmutableList.of(TransactionStatus.UNKNOWN), clock.getUTCNow(), createdDate1, 0L, new Long(NB_ENTRIES));
        Assert.assertEquals(result.getTotalNbRecords(), new Long(NB_ENTRIES));
        final Iterator<PaymentTransactionModelDao> iterator = result.iterator();
        for (int i = 0; i < NB_ENTRIES; i++) {
            System.out.println("i = " + i);
            Assert.assertTrue(iterator.hasNext());
            final PaymentTransactionModelDao nextEntry = iterator.next();
            Assert.assertEquals(nextEntry.getTransactionStatus(), TransactionStatus.UNKNOWN);
        }
    }

    // Two attempts in the same state "FOO", inserted under two different tenant record ids
    // (1L and 2L): the cross-tenant query by state must see both.
    @Test(groups = "slow")
    public void testPaymentAttemptsByStateAcrossTenants() {
        final UUID paymentMethodId = UUID.randomUUID();
        final UUID accountId = UUID.randomUUID();
        final String externalKey1 = "gfhfg";
        final String transactionExternalKey1 = "sadas";
        final String externalKey2 = "asdwqeqw";
        final String transactionExternalKey2 = "fghfg";
        final DateTime createdAfterDate = clock.getUTCNow().minusDays(10);
        final DateTime createdBeforeDate = clock.getUTCNow().minusDays(1);
        final String stateName = "FOO";
        final String pluginName = "miraculous";
        final PaymentAttemptModelDao attempt1 = new PaymentAttemptModelDao(accountId, paymentMethodId, createdAfterDate, createdAfterDate, externalKey1, UUID.randomUUID(), transactionExternalKey1, TransactionType.AUTHORIZE, stateName, BigDecimal.ONE, Currency.USD, ImmutableList.<String>of(pluginName), null);
        final PaymentAttemptModelDao attempt2 = new PaymentAttemptModelDao(accountId, paymentMethodId, createdAfterDate, createdAfterDate, externalKey2, UUID.randomUUID(), transactionExternalKey2, TransactionType.AUTHORIZE, stateName, BigDecimal.ONE, Currency.USD, ImmutableList.<String>of(pluginName), null);
        final InternalCallContext context1 = new InternalCallContext(1L, 1L, internalCallContext.getUserToken(), internalCallContext.getCreatedBy(), internalCallContext.getCallOrigin(), internalCallContext.getContextUserType(), internalCallContext.getReasonCode(), internalCallContext.getComments(), createdAfterDate, createdAfterDate);
        paymentDao.insertPaymentAttemptWithProperties(attempt1, context1);
        final InternalCallContext context2 = new InternalCallContext(2L, 2L, internalCallContext.getUserToken(), internalCallContext.getCreatedBy(), internalCallContext.getCallOrigin(), internalCallContext.getContextUserType(), internalCallContext.getReasonCode(), internalCallContext.getComments(), createdAfterDate, createdAfterDate);
        paymentDao.insertPaymentAttemptWithProperties(attempt2, context2);
        final Pagination<PaymentAttemptModelDao> result = paymentDao.getPaymentAttemptsByStateAcrossTenants(stateName, createdBeforeDate, 0L, 2L);
        Assert.assertEquals(result.getTotalNbRecords().longValue(), 2L);
    }

    // Exercises both updatePaymentAttempt (state + transaction id) and
    // updatePaymentAttemptWithProperties (additionally persists serialized plugin properties),
    // re-reading the row after each update to verify the persisted values.
    @Test(groups = "slow")
    public void testUpdatePaymentAttempt() throws PluginPropertySerializerException {
        final UUID paymentMethodId = UUID.randomUUID();
        final UUID accountId = UUID.randomUUID();
        final String externalKey1 = "2354";
        final String transactionExternalKey1 = "jkjkjk";
        final DateTime createdAfterDate = clock.getUTCNow().minusDays(10);
        final String stateName = "RRRRR";
        final String pluginName = "elated";
        final PaymentAttemptModelDao attempt = new PaymentAttemptModelDao(accountId, paymentMethodId, createdAfterDate, createdAfterDate, externalKey1, UUID.randomUUID(), transactionExternalKey1, TransactionType.AUTHORIZE, stateName, BigDecimal.ONE, Currency.USD, ImmutableList.<String>of(pluginName), null);
        final PaymentAttemptModelDao rehydratedAttempt = paymentDao.insertPaymentAttemptWithProperties(attempt, internalCallContext);
        final UUID transactionId = UUID.randomUUID();
        final String newStateName = "YYYYYYY";
        // First update: state name + transaction id only.
        paymentDao.updatePaymentAttempt(rehydratedAttempt.getId(), transactionId, newStateName, internalCallContext);
        final PaymentAttemptModelDao attempt1 = paymentDao.getPaymentAttempt(rehydratedAttempt.getId(), internalCallContext);
        assertEquals(attempt1.getStateName(), newStateName);
        assertEquals(attempt1.getTransactionId(), transactionId);
        // Second update: same state/transaction, plus serialized plugin properties.
        final List<PluginProperty> properties = new ArrayList<PluginProperty>();
        properties.add(new PluginProperty("prop1", "value1", false));
        properties.add(new PluginProperty("prop2", "value2", false));
        final byte [] serializedProperties = PluginPropertySerializer.serialize(properties);
        paymentDao.updatePaymentAttemptWithProperties(rehydratedAttempt.getId(), transactionId, newStateName, serializedProperties, internalCallContext);
        final PaymentAttemptModelDao attempt2 = paymentDao.getPaymentAttempt(rehydratedAttempt.getId(), internalCallContext);
        assertEquals(attempt2.getStateName(), newStateName);
        assertEquals(attempt2.getTransactionId(), transactionId);
        final Iterable<PluginProperty> properties2 = PluginPropertySerializer.deserialize(attempt2.getPluginProperties());
        checkProperty(properties2, new PluginProperty("prop1", "value1", false));
        checkProperty(properties2, new PluginProperty("prop2", "value2", false));
    }

    // Asserts that 'properties' contains an entry whose key matches 'expected' and whose
    // value equals the expected value; fails the test otherwise.
    private void checkProperty(final Iterable<PluginProperty> properties, final PluginProperty expected) {
        final PluginProperty found = Iterables.tryFind(properties, new Predicate<PluginProperty>() {
            @Override
            public boolean apply(final PluginProperty input) {
                return input.getKey().equals(expected.getKey());
            }
        }).orNull();
        assertNotNull(found, "Did not find property key = " + expected.getKey());
        assertEquals(found.getValue(), expected.getValue());
    }

    // Returns the subset of the payment's transactions that are in PENDING status.
    private List<PaymentTransactionModelDao> getPendingTransactions(final UUID paymentId) {
        final List<PaymentTransactionModelDao> total = paymentDao.getTransactionsForPayment(paymentId, internalCallContext);
        return ImmutableList.copyOf(Iterables.filter(total, new Predicate<PaymentTransactionModelDao>() {
            @Override
            public boolean apply(final PaymentTransactionModelDao input) {
                return input.getTransactionStatus() == TransactionStatus.PENDING;
            }
        }));
    }
}
/* Licensed to Diennea S.r.l. under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. Diennea S.r.l. licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package herddb.client; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.logging.Level; import java.util.logging.Logger; import java.util.stream.Collectors; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import herddb.backup.BackupFileConstants; import herddb.backup.DumpedLogEntry; import herddb.backup.DumpedTableMetadata; import herddb.client.impl.LeaderChangedException; import herddb.client.impl.RetryRequestException; import herddb.log.LogSequenceNumber; import herddb.model.Index; import herddb.model.Record; import herddb.model.Table; import herddb.model.Transaction; import herddb.network.Channel; import herddb.network.ChannelEventListener; import herddb.utils.KeyValue; import herddb.network.ServerHostData; import herddb.proto.Pdu; import herddb.proto.PduCodec; import herddb.proto.PduCodec.ErrorResponse; import herddb.security.sasl.SaslNettyClient; import herddb.security.sasl.SaslUtils; import herddb.storage.DataStorageManagerException; import herddb.utils.Bytes; import 
herddb.utils.DataAccessor;
import herddb.utils.RawString;
import herddb.utils.RecordsBatch;
import io.netty.buffer.ByteBuf;
import java.util.HashMap;
import java.util.concurrent.CompletableFuture;

/**
 * A real connection to a server
 *
 * @author enrico.olivelli
 */
public class RoutedClientSideConnection implements ChannelEventListener {

    private static final Logger LOGGER = Logger.getLogger(RoutedClientSideConnection.class.getName());
    // Map key under which the server reports the primary key of a returned record.
    private static final RawString RAWSTRING_KEY = RawString.of("_key");
    private final HDBConnection connection;
    private final String nodeId;
    // Per-call network timeout (from client configuration) passed to every sendMessageWithPduReply.
    private final long timeout;
    private final ServerHostData server;
    private final String clientId;
    // Guards the 'channel' lifecycle; fair mode so close/reconnect writers are not starved by readers.
    private final ReentrantReadWriteLock connectionLock = new ReentrantReadWriteLock(true);
    private volatile Channel channel;
    // Generates ids for scanners and (in dumpTableSpace) unique dump ids.
    private final AtomicLong scannerIdGenerator = new AtomicLong();
    // Client-side cache query text -> server-assigned prepared-statement id.
    private final ClientSideQueryCache preparedStatements = new ClientSideQueryCache();
    // Active tablespace-dump receivers, keyed by dumpId; populated by dumpTableSpace,
    // consumed by requestReceived when TYPE_TABLESPACE_DUMP_DATA messages arrive.
    private final Map<String, TableSpaceDumpReceiver> dumpReceivers = new ConcurrentHashMap<>();

    public RoutedClientSideConnection(HDBConnection connection, String nodeId, ServerHostData server) {
        this.connection = connection;
        this.nodeId = nodeId;
        this.server = server;
        this.timeout = connection.getClient().getConfiguration().getLong(ClientConfiguration.PROPERTY_TIMEOUT, ClientConfiguration.PROPERTY_TIMEOUT_DEFAULT);
        this.clientId = connection.getClient().getConfiguration().getString(ClientConfiguration.PROPERTY_CLIENTID, ClientConfiguration.PROPERTY_CLIENTID_DEFAULT);
    }

    /**
     * Runs the SASL (DIGEST-MD5) challenge/response handshake on a freshly opened channel.
     * Credentials come from the client configuration. Throws on authentication failure,
     * on any unexpected server PDU, or if the negotiation does not complete within 100 rounds.
     */
    private void performAuthentication(Channel _channel, String serverHostname) throws Exception {
        SaslNettyClient saslNettyClient = new SaslNettyClient(
                connection.getClient().getConfiguration().getString(ClientConfiguration.PROPERTY_CLIENT_USERNAME, ClientConfiguration.PROPERTY_CLIENT_USERNAME_DEFAULT),
                connection.getClient().getConfiguration().getString(ClientConfiguration.PROPERTY_CLIENT_PASSWORD, ClientConfiguration.PROPERTY_CLIENT_PASSWORD_DEFAULT),
                serverHostname
        );
        byte[] firstToken = new byte[0];
        if (saslNettyClient.hasInitialResponse()) {
            firstToken = saslNettyClient.evaluateChallenge(new byte[0]);
        }
        long requestId = _channel.generateRequestId();
        Pdu saslResponse = _channel.sendMessageWithPduReply(requestId, PduCodec.SaslTokenMessageRequest.write(requestId, SaslUtils.AUTH_DIGEST_MD5, firstToken), timeout);
        try {
            // Bounded loop (100 rounds) so a misbehaving server cannot keep us negotiating forever.
            for (int i = 0; i < 100; i++) {
                byte[] responseToSendToServer;
                switch (saslResponse.type) {
                    case Pdu.TYPE_SASL_TOKEN_SERVER_RESPONSE:
                        byte[] token = PduCodec.SaslTokenServerResponse.readToken(saslResponse);
                        responseToSendToServer = saslNettyClient.evaluateChallenge(token);
                        requestId = _channel.generateRequestId();
                        // Release the consumed PDU before overwriting the reference;
                        // the finally block below releases whichever PDU is current.
                        saslResponse.close();
                        saslResponse = _channel.sendMessageWithPduReply(requestId, PduCodec.SaslTokenMessageToken.write(requestId, responseToSendToServer), timeout);
                        if (saslNettyClient.isComplete()) {
                            LOGGER.finest("SASL auth completed with success");
                            return;
                        }
                        break;
                    case Pdu.TYPE_ERROR:
                        throw new Exception("Server returned ERROR during SASL negotiation, Maybe authentication failure (" + PduCodec.ErrorResponse.readError(saslResponse) + ")");
                    default:
                        throw new Exception("Unexpected server response during SASL negotiation (" + saslResponse + ")");
                }
            }
        } finally {
            saslResponse.close();
        }
        throw new Exception("SASL negotiation took too many steps");
    }

    /**
     * Server-push entry point: handles TYPE_TABLESPACE_DUMP_DATA messages by routing the
     * payload to the TableSpaceDumpReceiver registered under the message's dumpId.
     * Replies with an error PDU if no receiver is registered. The PDU is always closed
     * in the finally at the end of this method.
     */
    @Override
    @SuppressFBWarnings(value = "SF_SWITCH_NO_DEFAULT")
    @SuppressWarnings("empty-statement")
    public void requestReceived(Pdu message, Channel _channel) {
        try {
            switch (message.type) {
                case Pdu.TYPE_TABLESPACE_DUMP_DATA: {
                    String dumpId = PduCodec.TablespaceDumpData.readDumpId(message);
                    TableSpaceDumpReceiver receiver = dumpReceivers.get(dumpId);
                    LOGGER.log(Level.FINE, "receiver for {0}: {1}", new Object[]{dumpId, receiver});
                    if (receiver == null) {
                        if (_channel != null) {
                            ByteBuf resp = PduCodec.ErrorResponse.write(message.messageId, "no such dump receiver " + dumpId);
                            _channel.sendReplyMessage(message.messageId, resp);
                        }
                        return;
                    }
                    try {
                        String command = 
PduCodec.TablespaceDumpData.readCommand(message);
                        // Dump protocol state machine: start -> (beginTable -> data* -> endTable)* -> [txlog*, transactions*] -> finish
                        boolean sendAck = true;
                        switch (command) {
                            case "start": {
                                long ledgerId = PduCodec.TablespaceDumpData.readLedgerId(message);
                                long offset = PduCodec.TablespaceDumpData.readOffset(message);
                                receiver.start(new LogSequenceNumber(ledgerId, offset));
                                break;
                            }
                            case "beginTable": {
                                byte[] tableDefinition = PduCodec.TablespaceDumpData.readTableDefinition(message);
                                Table table = Table.deserialize(tableDefinition);
                                long estimatedSize = PduCodec.TablespaceDumpData.readEstimatedSize(message);
                                long dumpLedgerId = PduCodec.TablespaceDumpData.readLedgerId(message);
                                long dumpOffset = PduCodec.TablespaceDumpData.readOffset(message);
                                List<byte[]> indexesDef = PduCodec.TablespaceDumpData.readIndexesDefinition(message);
                                List<Index> indexes = indexesDef
                                        .stream()
                                        .map(Index::deserialize)
                                        .collect(Collectors.toList());
                                Map<String, Object> stats = new HashMap<>();
                                stats.put("estimatedSize", estimatedSize);
                                stats.put("dumpLedgerId", dumpLedgerId);
                                stats.put("dumpOffset", dumpOffset);
                                receiver.beginTable(new DumpedTableMetadata(table, new LogSequenceNumber(dumpLedgerId, dumpOffset), indexes), stats);
                                break;
                            }
                            case "endTable": {
                                receiver.endTable();
                                break;
                            }
                            case "finish": {
                                long ledgerId = PduCodec.TablespaceDumpData.readLedgerId(message);
                                long offset = PduCodec.TablespaceDumpData.readOffset(message);
                                receiver.finish(new LogSequenceNumber(ledgerId, offset));
                                // 'finish' is terminal: no ack is sent back for it.
                                sendAck = false;
                                break;
                            }
                            case "data": {
                                List<Record> records = new ArrayList<>();
                                PduCodec.TablespaceDumpData.readRecords(message, (key, value) -> {
                                    records.add(new Record(new Bytes(key), new Bytes(value)));
                                });
                                receiver.receiveTableDataChunk(records);
                                break;
                            }
                            case "txlog": {
                                List<DumpedLogEntry> records = new ArrayList<>();
                                PduCodec.TablespaceDumpData.readRecords(message, (key, value) -> {
                                    records.add(new DumpedLogEntry(LogSequenceNumber.deserialize(key), value));
                                });
                                receiver.receiveTransactionLogChunk(records);
                                break;
                            }
                            case "transactions": {
                                List<Transaction> transactions = new ArrayList<>();
                                PduCodec.TablespaceDumpData.readRecords(message, (key, value) -> {
                                    transactions.add(Transaction.deserialize(null, value));
                                });
                                receiver.receiveTransactionsAtDump(transactions);
                                break;
                            }
                            default:
                                throw new DataStorageManagerException("invalid dump command:" + command);
                        }
                        // Ack every chunk (except 'finish') so the server can keep streaming.
                        if (_channel != null && sendAck) {
                            ByteBuf res = PduCodec.AckResponse.write(message.messageId);
                            _channel.sendReplyMessage(message.messageId, res);
                        }
                    } catch (DataStorageManagerException error) {
                        LOGGER.log(Level.SEVERE, "error while handling dump data", error);
                        if (_channel != null) {
                            ByteBuf res = PduCodec.ErrorResponse.write(message.messageId, error);
                            _channel.sendReplyMessage(message.messageId, res);
                        }
                    }
                }
                break;
            }
        } finally {
            // Always release the incoming PDU, whatever branch was taken.
            message.close();
        }
    }

    /**
     * Channel listener callback: drops our reference only if the closed channel
     * is still the current one (a newer channel may already have replaced it).
     */
    @Override
    public void channelClosed(Channel channel) {
        if (channel == this.channel) {
            this.channel = null;
        }
    }

    // Closes the current channel (if any) under the write lock and clears the reference.
    public void close() {
        LOGGER.log(Level.SEVERE, "{0} - close", this);
        connectionLock.writeLock().lock();
        try {
            if (channel != null) {
                channel.close();
            }
        } finally {
            channel = null;
            // NOTE(review): stray second ';' below is a harmless empty statement.
            connectionLock.writeLock().unlock();;
        }
    }

    /**
     * Returns the current channel, connecting and authenticating lazily if needed.
     * Lock protocol: try under read lock first; if no channel, release read lock,
     * take write lock, re-check, connect, then downgrade back to read lock in the
     * inner finally so the outer finally's readLock().unlock() stays balanced.
     */
    private Channel ensureOpen() throws HDBException {
        connectionLock.readLock().lock();
        try {
            if (channel != null) {
                return channel;
            }
            connectionLock.readLock().unlock();
            connectionLock.writeLock().lock();
            try {
                // Re-check: another thread may have connected while we waited for the write lock.
                if (channel != null) {
                    return channel;
                }
                LOGGER.log(Level.FINE, "{0} - connect to {1}:{2} ssh:{3}", new Object[]{this, server.getHost(), server.getPort(), server.isSsl()});
                Channel _channel = this.connection.getClient().createChannelTo(server, this);
                try {
                    performAuthentication(_channel, server.getHost());
                    channel = _channel;
                    return channel;
                } catch (Exception err) {
                    // Authentication failed: close the half-open channel and propagate.
                    LOGGER.log(Level.SEVERE, "Error", err);
                    if (_channel != null) {
                        _channel.close();
                    }
                    throw err;
                }
            } finally {
                connectionLock.writeLock().unlock();
                connectionLock.readLock().lock();
            }
        } catch (Exception err) {
            throw new HDBException(err);
        } finally {
            connectionLock.readLock().unlock();
        }
    }

    /**
     * Resolves (or creates) the server-side prepared-statement id for the given query,
     * caching the id client-side in 'preparedStatements'. Returns the non-zero statement id.
     */
    long prepareQuery(String tableSpace, String query) throws 
HDBException, ClientSideMetadataProviderException {
        // Fast path: already prepared on this connection.
        long existing = preparedStatements.getQueryId(tableSpace, query);
        if (existing != 0) {
            return existing;
        }
        Channel _channel = ensureOpen();
        try {
            long requestId = _channel.generateRequestId();
            ByteBuf message = PduCodec.PrepareStatement.write(requestId, tableSpace, query);
            try (Pdu reply = _channel.sendMessageWithPduReply(requestId, message, timeout);) {
                if (reply.type == Pdu.TYPE_ERROR) {
                    handleGenericError(reply, 0);
                } else if (reply.type != Pdu.TYPE_PREPARE_STATEMENT_RESULT) {
                    throw new HDBException(reply);
                }
                long statementId = PduCodec.PrepareStatementResult.readStatementId(reply);
                preparedStatements.registerQueryId(tableSpace, query, statementId);
                return statementId;
            }
        } catch (InterruptedException | TimeoutException err) {
            throw new HDBException(err);
        }
    }

    /**
     * Executes a single DML statement, optionally as a server-side prepared statement.
     * When a statement id is used the query text is blanked out to avoid resending it.
     * Returns update count, transaction id and (when returnValues is set and the server
     * sends a record) the new record values plus its "_key".
     */
    DMLResult executeUpdate(String tableSpace, String query, long tx, boolean returnValues, boolean usePreparedStatement, List<Object> params) throws HDBException, ClientSideMetadataProviderException {
        Channel _channel = ensureOpen();
        try {
            long requestId = _channel.generateRequestId();
            long statementId = usePreparedStatement ? prepareQuery(tableSpace, query) : 0;
            query = statementId > 0 ? "" : query;
            ByteBuf message = PduCodec.ExecuteStatement.write(requestId, tableSpace, query, tx, returnValues, statementId, params);
            try (Pdu reply = _channel.sendMessageWithPduReply(requestId, message, timeout);) {
                if (reply.type == Pdu.TYPE_ERROR) {
                    // Throws (LeaderChanged/RetryRequest/HDBException) — never falls through.
                    handleGenericError(reply, statementId);
                } else if (reply.type != Pdu.TYPE_EXECUTE_STATEMENT_RESULT) {
                    throw new HDBException(reply);
                }
                long updateCount = PduCodec.ExecuteStatementResult.readUpdateCount(reply);
                long transactionId = PduCodec.ExecuteStatementResult.readTx(reply);
                boolean hasData = PduCodec.ExecuteStatementResult.hasRecord(reply);
                Object key = null;
                Map<RawString, Object> newvalue = null;
                if (hasData) {
                    PduCodec.ObjectListReader parametersReader = PduCodec.ExecuteStatementResult.readRecord(reply);
                    newvalue = readParametersListAsMap(parametersReader);
                    key = newvalue.get(RAWSTRING_KEY);
                }
                return new DMLResult(updateCount, key, newvalue, transactionId);
            }
        } catch (InterruptedException | TimeoutException err) {
            throw new HDBException(err);
        }
    }

    /**
     * Async variant of executeUpdate: completes the returned future with a DMLResult,
     * or exceptionally with the transport/protocol error. Note that prepareQuery is
     * still performed synchronously before the async send.
     */
    CompletableFuture<DMLResult> executeUpdateAsync(String tableSpace, String query, long tx, boolean returnValues, boolean usePreparedStatement, List<Object> params) {
        CompletableFuture<DMLResult> res = new CompletableFuture<>();
        try {
            Channel _channel = ensureOpen();
            long requestId = _channel.generateRequestId();
            long statementId = usePreparedStatement ? prepareQuery(tableSpace, query) : 0;
            query = statementId > 0 ? 
"" : query; ByteBuf message = PduCodec.ExecuteStatement.write(requestId, tableSpace, query, tx, returnValues, statementId, params); _channel.sendRequestWithAsyncReply(requestId, message, timeout, (msg, error) -> { if (error != null) { res.completeExceptionally(error); return; } try (Pdu reply = msg) { if (reply.type == Pdu.TYPE_ERROR) { handleGenericError(reply, statementId); return; } else if (reply.type != Pdu.TYPE_EXECUTE_STATEMENT_RESULT) { throw new HDBException(reply); } long updateCount = PduCodec.ExecuteStatementResult.readUpdateCount(reply); long transactionId = PduCodec.ExecuteStatementResult.readTx(reply); boolean hasData = PduCodec.ExecuteStatementResult.hasRecord(reply); Object key = null; Map<RawString, Object> newvalue = null; if (hasData) { PduCodec.ObjectListReader parametersReader = PduCodec.ExecuteStatementResult.readRecord(reply); newvalue = readParametersListAsMap(parametersReader); key = newvalue.get(RAWSTRING_KEY); } res.complete(new DMLResult(updateCount, key, newvalue, transactionId)); } catch (HDBException | ClientSideMetadataProviderException err) { res.completeExceptionally(err); } }); } catch (HDBException | ClientSideMetadataProviderException err) { res.completeExceptionally(new HDBException(err)); } return res; } List<DMLResult> executeUpdates(String tableSpace, String query, long tx, boolean returnValues, boolean usePreparedStatement, List<List<Object>> batch) throws HDBException, ClientSideMetadataProviderException { try { Channel _channel = ensureOpen(); long requestId = _channel.generateRequestId(); long statementId = usePreparedStatement ? prepareQuery(tableSpace, query) : 0; query = statementId > 0 ? 
"" : query; ByteBuf message = PduCodec.ExecuteStatements.write(requestId, tableSpace, query, tx, returnValues, statementId, batch); try (Pdu reply = _channel.sendMessageWithPduReply(requestId, message, timeout);) { if (reply.type == Pdu.TYPE_ERROR) { handleGenericError(reply, statementId); return null; // not possible } else if (reply.type != Pdu.TYPE_EXECUTE_STATEMENTS_RESULT) { throw new HDBException(reply); } long transactionId = PduCodec.ExecuteStatementsResult.readTx(reply); List<Long> updateCounts = PduCodec.ExecuteStatementsResult.readUpdateCounts(reply); int numResults = updateCounts.size(); List<DMLResult> results = new ArrayList<>(numResults); PduCodec.ListOfListsReader resultRecords = PduCodec.ExecuteStatementsResult.startResultRecords(reply); int numResultRecords = resultRecords.getNumLists(); for (int i = 0; i < numResults; i++) { Map<RawString, Object> newvalue = null; Object key = null; if (numResultRecords > 0) { PduCodec.ObjectListReader list = resultRecords.nextList(); newvalue = readParametersListAsMap(list); if (newvalue != null) { key = newvalue.get(RAWSTRING_KEY); } } long updateCount = updateCounts.get(i); DMLResult res = new DMLResult(updateCount, key, newvalue, transactionId); results.add(res); } return results; } } catch (InterruptedException | TimeoutException err) { throw new HDBException(err); } } CompletableFuture<List<DMLResult>> executeUpdatesAsync(String tableSpace, String query, long tx, boolean returnValues, boolean usePreparedStatement, List<List<Object>> batch) { CompletableFuture<List<DMLResult>> res = new CompletableFuture<>(); try { Channel _channel = ensureOpen(); long requestId = _channel.generateRequestId(); long statementId = usePreparedStatement ? prepareQuery(tableSpace, query) : 0; query = statementId > 0 ? 
"" : query; ByteBuf message = PduCodec.ExecuteStatements.write(requestId, tableSpace, query, tx, returnValues, statementId, batch); _channel.sendRequestWithAsyncReply(requestId, message, timeout, (msg, error) -> { if (error != null) { res.completeExceptionally(error); return; } try (Pdu reply = msg) { if (reply.type == Pdu.TYPE_ERROR) { handleGenericError(reply, statementId); return; } else if (reply.type != Pdu.TYPE_EXECUTE_STATEMENTS_RESULT) { throw new HDBException(reply); } long transactionId = PduCodec.ExecuteStatementsResult.readTx(reply); List<Long> updateCounts = PduCodec.ExecuteStatementsResult.readUpdateCounts(reply); int numResults = updateCounts.size(); List<DMLResult> results = new ArrayList<>(numResults); PduCodec.ListOfListsReader resultRecords = PduCodec.ExecuteStatementsResult.startResultRecords(reply); int numResultRecords = resultRecords.getNumLists(); for (int i = 0; i < numResults; i++) { Map<RawString, Object> newvalue = null; Object key = null; if (numResultRecords > 0) { PduCodec.ObjectListReader list = resultRecords.nextList(); newvalue = readParametersListAsMap(list); if (newvalue != null) { key = newvalue.get(RAWSTRING_KEY); } } long updateCount = updateCounts.get(i); DMLResult _res = new DMLResult(updateCount, key, newvalue, transactionId); results.add(_res); } res.complete(results); } catch (HDBException | ClientSideMetadataProviderException err) { res.completeExceptionally(err); } }); } catch (HDBException | ClientSideMetadataProviderException err) { res.completeExceptionally(new HDBException(err)); } return res; } GetResult executeGet(String tableSpace, String query, long tx, boolean usePreparedStatement, List<Object> params) throws HDBException, ClientSideMetadataProviderException { Channel _channel = ensureOpen(); try { long requestId = _channel.generateRequestId(); long statementId = usePreparedStatement ? prepareQuery(tableSpace, query) : 0; query = statementId > 0 ? 
"" : query; ByteBuf message = PduCodec.ExecuteStatement.write(requestId, tableSpace, query, tx, true, statementId, params); try (Pdu reply = _channel.sendMessageWithPduReply(requestId, message, timeout);) { if (reply.type == Pdu.TYPE_ERROR) { handleGenericError(reply, statementId); } else if (reply.type != Pdu.TYPE_EXECUTE_STATEMENT_RESULT) { throw new HDBException(reply); } long updateCount = PduCodec.ExecuteStatementResult.readUpdateCount(reply); long transactionId = PduCodec.ExecuteStatementResult.readTx(reply); boolean hasData = PduCodec.ExecuteStatementResult.hasRecord(reply); Map<RawString, Object> data = null; if (hasData) { PduCodec.ObjectListReader parametersReader = PduCodec.ExecuteStatementResult.readRecord(reply); data = readParametersListAsMap(parametersReader); } if (updateCount <= 0) { return new GetResult(null, transactionId); } else { return new GetResult(data, transactionId); } } } catch (InterruptedException | TimeoutException err) { throw new HDBException(err); } } Map<RawString, Object> readParametersListAsMap(PduCodec.ObjectListReader parametersReader) { Map<RawString, Object> data = new HashMap<>(); for (int i = 0; i < parametersReader.getNumParams(); i += 2) { RawString _key = (RawString) parametersReader.nextObject(); Object _value = parametersReader.nextObject(); data.put(_key, _value); } return data; } long beginTransaction(String tableSpace) throws HDBException, ClientSideMetadataProviderException { Channel _channel = ensureOpen(); try { long requestId = _channel.generateRequestId(); ByteBuf message = PduCodec.TxCommand.write(requestId, PduCodec.TxCommand.TX_COMMAND_BEGIN_TRANSACTION, 0, tableSpace); try (Pdu reply = _channel.sendMessageWithPduReply(requestId, message, timeout);) { if (reply.type == Pdu.TYPE_ERROR) { handleGenericError(reply, 0); return -1; // not possible } else if (reply.type == Pdu.TYPE_TX_COMMAND_RESULT) { long tx = PduCodec.TxCommandResult.readTx(reply); if (tx <= 0) { throw new HDBException("Server did not create a 
new transaction");
            }
            return tx;
        } else {
            throw new HDBException(reply);
        }
    }
} catch (InterruptedException | TimeoutException err) {
    throw new HDBException(err);
}
}
// NOTE(review): the lines above are the tail of a method whose beginning is
// outside this chunk (it returns a transaction id 'tx'); left byte-identical.

/**
 * Commits a transaction on the server.
 *
 * @param tableSpace tablespace that owns the transaction
 * @param tx         server-assigned transaction id
 * @throws HDBException on protocol/server error, interruption or timeout
 * @throws ClientSideMetadataProviderException propagated by error handling
 */
void commitTransaction(String tableSpace, long tx) throws HDBException, ClientSideMetadataProviderException {
    Channel _channel = ensureOpen();
    try {
        long requestId = _channel.generateRequestId();
        ByteBuf message = PduCodec.TxCommand.write(requestId, PduCodec.TxCommand.TX_COMMAND_COMMIT_TRANSACTION, tx, tableSpace);
        try (Pdu reply = _channel.sendMessageWithPduReply(requestId, message, timeout);) {
            if (reply.type == Pdu.TYPE_ERROR) {
                // handleGenericError always throws (all of its branches throw),
                // so the following return is unreachable
                handleGenericError(reply, 0);
                return; // not possible
            } else if (reply.type != Pdu.TYPE_TX_COMMAND_RESULT) {
                throw new HDBException(reply);
            }
        }
    } catch (InterruptedException | TimeoutException err) {
        throw new HDBException(err);
    }
}

/**
 * Rolls back a transaction on the server. Mirrors {@link }commitTransaction
 * except for the ROLLBACK command code.
 *
 * @param tableSpace tablespace that owns the transaction
 * @param tx         server-assigned transaction id
 * @throws HDBException on protocol/server error, interruption or timeout
 * @throws ClientSideMetadataProviderException propagated by error handling
 */
void rollbackTransaction(String tableSpace, long tx) throws HDBException, ClientSideMetadataProviderException {
    Channel _channel = ensureOpen();
    try {
        long requestId = _channel.generateRequestId();
        ByteBuf message = PduCodec.TxCommand.write(requestId, PduCodec.TxCommand.TX_COMMAND_ROLLBACK_TRANSACTION, tx, tableSpace);
        try (Pdu reply = _channel.sendMessageWithPduReply(requestId, message, timeout);) {
            if (reply.type == Pdu.TYPE_ERROR) {
                handleGenericError(reply, 0);
                return; // not possible
            } else if (reply.type != Pdu.TYPE_TX_COMMAND_RESULT) {
                throw new HDBException(reply);
            }
        }
    } catch (InterruptedException | TimeoutException err) {
        throw new HDBException(err);
    }
}

/** Convenience overload: caller keeps ownership of {@code reply} (release=false). */
void handleGenericError(final Pdu reply, final long statementId) throws HDBException, ClientSideMetadataProviderException {
    handleGenericError(reply, statementId, false);
}

/**
 * Translates a TYPE_ERROR reply into the appropriate exception.
 * This method ALWAYS throws: LeaderChangedException if the server reports it
 * is no longer the leader (after asking the connection to refresh metadata),
 * RetryRequestException if a server-side prepared statement was evicted
 * (after invalidating the local cache entry for {@code statementId}),
 * HDBException otherwise.
 *
 * @param reply       the error PDU; closed here only when {@code release} is true
 * @param statementId statement to invalidate in the local prepared-statement cache
 * @param release     whether this method should close {@code reply}
 */
void handleGenericError(final Pdu reply, final long statementId, final boolean release) throws HDBException, ClientSideMetadataProviderException {
    boolean notLeader = PduCodec.ErrorResponse.readIsNotLeader(reply);
    boolean missingPreparedStatement = ErrorResponse.readIsMissingPreparedStatementError(reply);
    // read the message BEFORE closing the PDU
    String msg = PduCodec.ErrorResponse.readError(reply);
    if (release) {
        reply.close();
    }
    if (notLeader) {
        this.connection.requestMetadataRefresh();
        throw new LeaderChangedException(msg);
    } else if (missingPreparedStatement) {
        LOGGER.log(Level.INFO, "Statement was flushed from server side cache " + msg);
        preparedStatements.invalidate(statementId);
        throw new RetryRequestException(msg);
    } else {
        throw new HDBException(msg);
    }
}

/**
 * Opens a server-side scanner for {@code query} and returns a result set that
 * lazily fetches further chunks ({@link }ScanResultSetImpl).
 *
 * @param tableSpace           tablespace to scan
 * @param query                SQL text; replaced by "" when a server-side
 *                             prepared statement id is used instead
 * @param usePreparedStatement whether to prepare the query server-side first
 * @param params               positional query parameters
 * @param tx                   transaction id (0 for none)
 * @param maxRows              cap on total rows, server-enforced
 * @param fetchSize            rows per network chunk
 * @return an open ScanResultSet positioned before the first row
 * @throws HDBException on protocol/server error, interruption or timeout
 * @throws ClientSideMetadataProviderException propagated by error handling
 */
ScanResultSet executeScan(String tableSpace, String query, boolean usePreparedStatement, List<Object> params, long tx, int maxRows, int fetchSize) throws HDBException, ClientSideMetadataProviderException {
    Channel _channel = ensureOpen();
    Pdu reply = null;
    try {
        long scannerId = scannerIdGenerator.incrementAndGet();
        long requestId = _channel.generateRequestId();
        long statementId = usePreparedStatement ? prepareQuery(tableSpace, query) : 0;
        query = statementId > 0 ? "" : query;
        ByteBuf message = PduCodec.OpenScanner.write(requestId, tableSpace, query, scannerId, tx, params, statementId, fetchSize, maxRows);
        LOGGER.log(Level.FINEST, "open scanner {0} for query {1}, params {2}", new Object[]{scannerId, query, params});
        reply = _channel.sendMessageWithPduReply(requestId, message, timeout);
        if (reply.type == Pdu.TYPE_ERROR) {
            // release=true: handleGenericError closes the reply before throwing
            handleGenericError(reply, statementId, true);
            return null; // not possible
        } else if (reply.type != Pdu.TYPE_RESULTSET_CHUNK) {
            HDBException err = new HDBException(reply);
            reply.close();
            throw err;
        }
        boolean last = PduCodec.ResultSetChunk.readIsLast(reply);
        long transactionId = PduCodec.ResultSetChunk.readTx(reply);
        // ownership of the reply buffer passes to the RecordsBatch / result set
        RecordsBatch data = PduCodec.ResultSetChunk.startReadingData(reply);
        //LOGGER.log(Level.SEVERE, "received first " + initialFetchBuffer.size() + " records for query " + query);
        ScanResultSetImpl impl = new ScanResultSetImpl(scannerId, data, fetchSize, last, transactionId);
        return impl;
    } catch (InterruptedException | TimeoutException err) {
        if (reply != null) {
            reply.close();
        }
        throw new HDBException(err);
    }
}

/**
 * Requests a full dump of a tablespace. The server streams dump data back
 * asynchronously; {@code receiver} is registered under a unique dumpId before
 * the request is sent so no callback can be missed.
 *
 * @param tableSpace            tablespace to dump
 * @param fetchSize             rows per dump chunk
 * @param includeTransactionLog whether the transaction log is part of the dump
 * @param receiver              callback that consumes the streamed dump
 * @throws HDBException on protocol/server error, interruption or timeout
 * @throws ClientSideMetadataProviderException propagated by error handling
 */
void dumpTableSpace(String tableSpace, int fetchSize, boolean includeTransactionLog, TableSpaceDumpReceiver receiver) throws HDBException, ClientSideMetadataProviderException {
    Channel _channel = ensureOpen();
    try {
        String dumpId = this.clientId + ":" + scannerIdGenerator.incrementAndGet();
        long requestId = _channel.generateRequestId();
        ByteBuf message = PduCodec.RequestTablespaceDump.write(requestId, tableSpace, dumpId, fetchSize, includeTransactionLog);
        LOGGER.log(Level.SEVERE, "dumpTableSpace id {0} for tablespace {1}", new Object[]{dumpId, tableSpace});
        dumpReceivers.put(dumpId, receiver);
        try (Pdu reply = _channel.sendMessageWithPduReply(requestId, message, timeout);) {
            LOGGER.log(Level.SEVERE, "dumpTableSpace id {0} for tablespace {1}: first reply {2}", new Object[]{dumpId, tableSpace, reply});
            if (reply.type == Pdu.TYPE_ERROR) {
                handleGenericError(reply, 0);
            } else if (reply.type != Pdu.TYPE_ACK) {
                throw new HDBException(reply);
            }
        }
    } catch (InterruptedException | TimeoutException err) {
        throw new HDBException(err);
    }
}

/**
 * Replays a backup stream into a tablespace. Reads typed entries from
 * {@code source} and forwards them to the server: table definitions and
 * their data chunks, transaction-log chunks, pending-transaction blocks,
 * and finally a per-table "finished" message (which also creates indexes)
 * plus an overall "restore finished" message.
 *
 * @param tableSpace target tablespace
 * @param source     supplier of backup entries, consumed until ENTRY_TYPE_END
 * @throws HDBException on any server error, bad entry type, interruption,
 *                      timeout or storage error from the source
 */
void restoreTableSpace(String tableSpace, TableSpaceRestoreSource source) throws HDBException, ClientSideMetadataProviderException {
    List<DumpedTableMetadata> tables = new ArrayList<>();
    try {
        while (true) {
            String entryType = source.nextEntryType();
            LOGGER.log(Level.SEVERE, "restore, entryType:{0}", entryType);
            switch (entryType) {
                case BackupFileConstants.ENTRY_TYPE_START: {
                    break;
                }
                case BackupFileConstants.ENTRY_TYPE_TABLE: {
                    DumpedTableMetadata table = source.nextTable();
                    Channel _channel = ensureOpen();
                    long id = _channel.generateRequestId();
                    ByteBuf message_create_table = PduCodec.RequestTableRestore.write(id, tableSpace, table.table.serialize(), table.logSequenceNumber.ledgerId, table.logSequenceNumber.offset);
                    sendMessageAndCheckNoError(_channel, id, message_create_table);
                    // stream the table data chunk by chunk until exhausted
                    List<KeyValue> chunk = source.nextTableDataChunk();
                    while (chunk != null) {
                        id = _channel.generateRequestId();
                        ByteBuf message = PduCodec.PushTableData.write(id, tableSpace, table.table.name, chunk);
                        sendMessageAndCheckNoError(_channel, id, message);
                        chunk = source.nextTableDataChunk();
                    }
                    // remember the table so indexes can be created at ENTRY_TYPE_END
                    tables.add(table);
                    break;
                }
                case BackupFileConstants.ENTRY_TYPE_TXLOGCHUNK: {
                    Channel _channel = ensureOpen();
                    List<KeyValue> chunk = source.nextTransactionLogChunk();
                    long id = _channel.generateRequestId();
                    ByteBuf message = PduCodec.PushTxLogChunk.write(id, tableSpace, chunk);
                    sendMessageAndCheckNoError(_channel, id, message);
                    break;
                }
                case BackupFileConstants.ENTRY_TYPE_TRANSACTIONS: {
                    Channel _channel = ensureOpen();
                    List<byte[]> chunk = source.nextTransactionsBlock();
                    long id = _channel.generateRequestId();
                    ByteBuf message = PduCodec.PushTransactionsBlock.write(id, tableSpace, chunk);
                    sendMessageAndCheckNoError(_channel, id, message);
                    break;
                }
                case BackupFileConstants.ENTRY_TYPE_END: {
                    // send a 'table finished' event only at the end of the procedure
                    // the stream of transaction log entries is finished, so the data contained in the table is "final"
                    // we are going to create now all the indexes too
                    Channel _channel = ensureOpen();
                    for (DumpedTableMetadata table : tables) {
                        List<byte[]> indexes = table.indexes.stream().map(Index::serialize).collect(Collectors.toList());
                        long id = _channel.generateRequestId();
                        ByteBuf message_table_finished = PduCodec.TableRestoreFinished.write(id, tableSpace, table.table.name, indexes);
                        sendMessageAndCheckNoError(_channel, id, message_table_finished);
                    }
                    long id = _channel.generateRequestId();
                    ByteBuf message_restore_finished = PduCodec.RestoreFinished.write(id, tableSpace);
                    sendMessageAndCheckNoError(_channel, id, message_restore_finished);
                    return;
                }
                default:
                    throw new HDBException("bad entryType " + entryType);
            }
        }
    } catch (InterruptedException | TimeoutException | DataStorageManagerException err) {
        throw new HDBException(err);
    }
}

/**
 * Sends {@code message}, waits for the reply and converts a TYPE_ERROR reply
 * into an HDBException. The reply PDU is always closed.
 */
private void sendMessageAndCheckNoError(Channel _channel, long id, ByteBuf message) throws HDBException, InterruptedException, TimeoutException {
    try (Pdu reply = _channel.sendMessageWithPduReply(id, message, timeout);) {
        if (reply.type == Pdu.TYPE_ERROR) {
            throw new HDBException(reply);
        }
    }
}

/**
 * Client-side cursor over a server-side scanner. Holds one RecordsBatch
 * (network chunk) at a time and fetches the next chunk on demand from the
 * owning connection. Not thread-safe: single-consumer iteration only.
 */
private class ScanResultSetImpl extends ScanResultSet {

    private final long scannerId;
    private final ScanResultSetMetadata metadata;
    // current chunk; null once released
    RecordsBatch fetchBuffer;
    // next record, pre-read by ensureNext(); consumed by next()
    DataAccessor next;
    // true once the cursor is exhausted or closed
    boolean finished;
    // true when the server has no further chunks to deliver
    boolean noMoreData;
    int fetchSize;
    // true when the chunk in fetchBuffer is the last one for this scanner
    boolean lastChunk;

    private ScanResultSetImpl(long scannerId, RecordsBatch firstFetchBuffer, int fetchSize, boolean onlyOneChunk, long tx) {
        super(tx);
        this.scannerId = scannerId;
        this.metadata = new ScanResultSetMetadata(firstFetchBuffer.columnNames);
        this.fetchSize = fetchSize;
        this.fetchBuffer = firstFetchBuffer;
        if (firstFetchBuffer.isEmpty()) {
            // empty result set
            finished = true;
            noMoreData = true;
        }
        if (onlyOneChunk) {
            lastChunk = true;
        }
    }

    @Override
    public ScanResultSetMetadata getMetadata() {
        return metadata;
    }

    @Override
    public void close() {
        finished = true;
        releaseBuffer();
    }

    // releases the network buffer backing the current chunk (idempotent)
    private void releaseBuffer() {
        if (fetchBuffer != null) {
            fetchBuffer.release();
            fetchBuffer = null;
        }
    }

    @Override
    public boolean hasNext() throws HDBException {
        if (finished) {
            return false;
        }
        return ensureNext();
    }

    /**
     * Releases the current chunk and pulls the next one from the server,
     * or marks the stream exhausted when the last chunk was already seen.
     * NOTE(review): uses a hard-coded 10000 ms timeout here rather than the
     * connection-level 'timeout' used elsewhere — confirm this is intended.
     */
    private void fillBuffer() throws HDBException {
        releaseBuffer();
        if (lastChunk) {
            noMoreData = true;
            return;
        }
        Channel _channel = ensureOpen();
        Pdu result = null;
        try {
            long requestId = _channel.generateRequestId();
            ByteBuf message = PduCodec.FetchScannerData.write(requestId, scannerId, fetchSize);
            result = _channel.sendMessageWithPduReply(requestId, message, 10000);
            //LOGGER.log(Level.SEVERE, "fillBuffer result " + result);
            if (result.type == Pdu.TYPE_ERROR) {
                try {
                    throw new HDBException(result);
                } finally {
                    result.close();
                }
            }
            if (result.type != Pdu.TYPE_RESULTSET_CHUNK) {
                finished = true;
                try {
                    throw new HDBException("protocol error: " + result);
                } finally {
                    result.close();
                }
            }
            lastChunk = PduCodec.ResultSetChunk.readIsLast(result);
            fetchBuffer = PduCodec.ResultSetChunk.startReadingData(result);
            if (!fetchBuffer.hasNext()) {
                noMoreData = true;
            }
        } catch (InterruptedException | TimeoutException err) {
            if (result != null) {
                result.close();
            }
            throw new HDBException(err);
        }
    }

    // pre-reads the next record into 'next', fetching a new chunk if needed;
    // returns false (and flips 'finished') when the stream is exhausted
    private boolean ensureNext() throws HDBException {
        if (next != null) {
            return true;
        }
        if (!fetchBuffer.hasNext()) {
            fillBuffer();
            if (noMoreData) {
                finished = true;
                return false;
            }
        }
        next = fetchBuffer.next();
        return true;
    }

    @Override
    public DataAccessor next() throws HDBException {
        if (finished) {
            throw new HDBException("Scanner is exhausted");
        }
        DataAccessor _next = next;
        next = null;
        return _next;
    }
}
// end of the enclosing class (its declaration is outside this view)
}
/* * Autopsy Forensic Browser * * Copyright 2011-2018 Basis Technology Corp. * Contact: carrier <at> sleuthkit <dot> org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.autopsy.timeline.ui.countsview; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import java.util.List; import java.util.Map; import java.util.function.Function; import javafx.application.Platform; import javafx.beans.Observable; import javafx.beans.binding.BooleanBinding; import javafx.beans.property.SimpleObjectProperty; import javafx.collections.FXCollections; import javafx.concurrent.Task; import javafx.fxml.FXML; import javafx.geometry.Insets; import javafx.scene.Cursor; import javafx.scene.Node; import javafx.scene.chart.CategoryAxis; import javafx.scene.chart.NumberAxis; import javafx.scene.chart.XYChart; import javafx.scene.control.Label; import javafx.scene.control.RadioButton; import javafx.scene.control.ToggleGroup; import javafx.scene.control.Tooltip; import javafx.scene.image.Image; import javafx.scene.image.ImageView; import javafx.scene.layout.BorderPane; import javafx.scene.layout.HBox; import javafx.scene.layout.Pane; import javafx.scene.text.Font; import javafx.scene.text.FontPosture; import javafx.scene.text.FontWeight; import javafx.scene.text.Text; import javafx.scene.text.TextFlow; import org.controlsfx.control.PopOver; import org.joda.time.Interval; import org.openide.util.NbBundle; import org.sleuthkit.autopsy.coreutils.Logger; 
import org.sleuthkit.autopsy.coreutils.ThreadConfined;
import org.sleuthkit.autopsy.timeline.FXMLConstructor;
import org.sleuthkit.autopsy.timeline.EventsModel;
import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.autopsy.timeline.ViewMode;
import org.sleuthkit.autopsy.timeline.ui.AbstractTimelineChart;
import org.sleuthkit.autopsy.timeline.utils.RangeDivision;
import org.sleuthkit.datamodel.TimelineEventType;

/**
 * FXML Controller class for a StackedBarChart<String,Number> based
 * implementation of a TimeLineChart.
 *
 * This class listens to changes in the assigned EventsModel and updates
 * the internal EventCountsChart to reflect the currently requested events.
 *
 * This class captures input from the user in the form of mouse clicks on graph
 * bars, and forwards them to the assigned TimeLineController.
 *
 * Concurrency Policy: Access to the private members stackedBarChart, countAxis,
 * dateAxis, EventTypeMap, and dataSets affects the stackedBarChart so they all
 * must only be manipulated on the JavaFx thread (through
 * Platform.runLater(java.lang.Runnable)). The EventsModel should
 * encapsulate all needed synchronization internally.
*/
public class CountsViewPane extends AbstractTimelineChart<String, Number, Node, EventCountsChart> {

    private static final Logger logger = Logger.getLogger(CountsViewPane.class.getName());

    // vertical axis: event counts (possibly log-transformed, see Scale)
    private final NumberAxis countAxis = new NumberAxis();
    // horizontal axis: one category per time interval
    private final CategoryAxis dateAxis = new CategoryAxis(FXCollections.<String>observableArrayList());

    // currently selected vertical scale; defaults to logarithmic
    private final SimpleObjectProperty<Scale> scaleProp = new SimpleObjectProperty<>(Scale.LOGARITHMIC);

    @Override
    protected String getTickMarkLabel(String labelValueString) {
        // categories are already formatted strings, use them verbatim
        return labelValueString;
    }

    @Override
    protected Boolean isTickBold(String value) {
        // bold the tick if any series has a positive count at that category
        return dataSeries.stream().flatMap(series -> series.getData().stream())
                .anyMatch(data -> data.getXValue().equals(value) && data.getYValue().intValue() > 0);
    }

    @Override
    protected Task<Boolean> getNewUpdateTask() {
        return new CountsUpdateTask();
    }

    /**
     * Constructor
     *
     * @param controller The TimelineController for this view.
     */
    @NbBundle.Messages({
        "# {0} - scale name",
        "CountsViewPane.numberOfEvents=Number of Events ({0})"})
    public CountsViewPane(TimeLineController controller) {
        super(controller);
        setChart(new EventCountsChart(controller, dateAxis, countAxis, getSelectedNodes()));
        getChart().setData(dataSeries);
        Tooltip.install(getChart(), getDefaultTooltip());
        // relayout the date labels whenever the axis geometry changes
        dateAxis.getTickMarks().addListener((Observable tickMarks) -> layoutDateLabels());
        dateAxis.categorySpacingProperty().addListener((Observable spacing) -> layoutDateLabels());
        dateAxis.getCategories().addListener((Observable categories) -> layoutDateLabels());

        //bind tick visibility to scaleProp
        BooleanBinding scaleIsLinear = scaleProp.isEqualTo(Scale.LINEAR);
        countAxis.tickLabelsVisibleProperty().bind(scaleIsLinear);
        countAxis.tickMarkVisibleProperty().bind(scaleIsLinear);
        countAxis.minorTickVisibleProperty().bind(scaleIsLinear);
        // a scale change requires recomputing all bar heights
        scaleProp.addListener(scale -> {
            refresh();
            syncAxisScaleLabel();
        });
        syncAxisScaleLabel();
    }

    @Override
    final protected NumberAxis getYAxis() {
        return countAxis;
    }

    @Override
    final protected CategoryAxis getXAxis() {
        return dateAxis;
    }

    @Override
    protected double getTickSpacing() {
        return dateAxis.getCategorySpacing();
    }

    @Override
    protected void applySelectionEffect(Node c1, Boolean applied) {
        c1.setEffect(applied ? getChart().getSelectionEffect() : null);
    }

    @ThreadConfined(type = ThreadConfined.ThreadType.JFX)
    @Override
    protected void clearData() {
        for (XYChart.Series<String, Number> series : dataSeries) {
            series.getData().clear();
        }
        dataSeries.clear();
        eventTypeToSeriesMap.clear();
        createSeries();
    }

    @Override
    final protected ViewMode getViewMode() {
        return ViewMode.COUNTS;
    }

    @Override
    protected ImmutableList<Node> getSettingsControls() {
        return ImmutableList.copyOf(new CountsViewSettingsPane().getChildrenUnmodifiable());
    }

    @Override
    protected boolean hasCustomTimeNavigationControls() {
        return false;
    }

    @Override
    protected ImmutableList<Node> getTimeNavigationControls() {
        return ImmutableList.of();
    }

    /**
     * Set the appropriate label on the vertical axis, depending on the selected
     * scale.
     */
    private void syncAxisScaleLabel() {
        countAxis.setLabel(Bundle.CountsViewPane_numberOfEvents(scaleProp.get().getDisplayName()));
    }

    /**
     * Enum for the Scales available in the Counts View. Each value maps a raw
     * event count (Long) to the bar height (Double) to plot.
     */
    @NbBundle.Messages({
        "ScaleType.Linear=Linear",
        "ScaleType.Logarithmic=Logarithmic"})
    private static enum Scale implements Function<Long, Double> {

        LINEAR(Bundle.ScaleType_Linear()) {
            @Override
            public Double apply(Long inValue) {
                return inValue.doubleValue();
            }
        },
        LOGARITHMIC(Bundle.ScaleType_Logarithmic()) {
            @Override
            public Double apply(Long inValue) {
                // +1 keeps a count of 1 (log10 == 0) visible as a non-zero bar
                return Math.log10(inValue) + 1;
            }
        };

        private final String displayName;

        /**
         * Constructor
         *
         * @param displayName The display name for this Scale.
         */
        Scale(String displayName) {
            this.displayName = displayName;
        }

        /**
         * Get the display name of this ScaleType
         *
         * @return The display name.
         */
        private String getDisplayName() {
            return displayName;
        }
    }

    @Override
    protected double getAxisMargin() {
        return dateAxis.getStartMargin() + dateAxis.getEndMargin();
    }

    /*
     * A Pane that contains widgets to adjust settings specific to a
     * CountsViewPane
     */
    private class CountsViewSettingsPane extends HBox {

        @FXML
        private RadioButton logRadio;
        @FXML
        private RadioButton linearRadio;
        @FXML
        private ToggleGroup scaleGroup;
        @FXML
        private Label scaleLabel;
        @FXML
        private ImageView logImageView;
        @FXML
        private ImageView linearImageView;

        @FXML
        @NbBundle.Messages({
            "CountsViewPane.logRadio.text=Logarithmic",
            "CountsViewPane.scaleLabel.text=Scale:",
            "CountsViewPane.scaleHelp.label.text=Scales: ",
            "CountsViewPane.linearRadio.text=Linear",
            "CountsViewPane.scaleHelpLinear=The linear scale is good for many use cases. When this scale is selected, the height of the bars represents the counts in a linear, one-to-one fashion, and the y-axis is labeled with values. When the range of values is very large, time periods with low counts may have a bar that is too small to see. To help the user detect this, the labels for date ranges with events are bold. To see bars that are too small, there are three options: adjust the window size so that the timeline has more vertical space, adjust the time range shown so that time periods with larger bars are excluded, or adjust the scale setting to logarithmic.",
            "CountsViewPane.scaleHelpLog=The logarithmic scale represents the number of events in a non-linear way that compresses the difference between large and small numbers. Note that even with the logarithmic scale, an extremely large difference in counts may still produce bars too small to see. In this case the only option may be to filter events to reduce the difference in counts NOTE: Because the logarithmic scale is applied to each event type separately, the meaning of the height of the combined bar is not intuitive, and to emphasize this, no labels are shown on the y-axis with the logarithmic scale. The logarithmic scale should be used to quickly compare the counts ",
            "CountsViewPane.scaleHelpLog2=across time within a type, or across types for one time period, but not both.",
            "CountsViewPane.scaleHelpLog3= The actual counts (available in tooltips or the result viewer) should be used for absolute comparisons. Use the logarithmic scale with care."})
        void initialize() {
            assert logRadio != null : "fx:id=\"logRadio\" was not injected: check your FXML file 'CountsViewSettingsPane.fxml'."; // NON-NLS
            assert linearRadio != null : "fx:id=\"linearRadio\" was not injected: check your FXML file 'CountsViewSettingsPane.fxml'."; // NON-NLS
            scaleLabel.setText(Bundle.CountsViewPane_scaleLabel_text());
            linearRadio.setText(Bundle.CountsViewPane_linearRadio_text());
            logRadio.setText(Bundle.CountsViewPane_logRadio_text());

            // keep scaleProp in sync with the radio-button selection
            scaleGroup.selectedToggleProperty().addListener((observable, oldToggle, newToggle) -> {
                if (newToggle == linearRadio) {
                    scaleProp.set(Scale.LINEAR);
                } else if (newToggle == logRadio) {
                    scaleProp.set(Scale.LOGARITHMIC);
                }
            });
            logRadio.setSelected(true);

            //make a popup help "window" with a description of the log scale.
            logImageView.setCursor(Cursor.HAND);
            logImageView.setOnMouseClicked(clicked -> {
                Text text = new Text(Bundle.CountsViewPane_scaleHelpLog());
                Text text2 = new Text(Bundle.CountsViewPane_scaleHelpLog2());
                Font baseFont = text.getFont();
                text2.setFont(Font.font(baseFont.getFamily(), FontWeight.BOLD, FontPosture.ITALIC, baseFont.getSize()));
                Text text3 = new Text(Bundle.CountsViewPane_scaleHelpLog3());
                showPopoverHelp(logImageView,
                        Bundle.CountsViewPane_logRadio_text(),
                        logImageView.getImage(), new TextFlow(text, text2, text3));
            });

            //make a popup help "window" with a description of the linear scale.
            linearImageView.setCursor(Cursor.HAND);
            linearImageView.setOnMouseClicked(clicked -> {
                Text text = new Text(Bundle.CountsViewPane_scaleHelpLinear());
                text.setWrappingWidth(480);  //This is a hack to fix the layout.
                showPopoverHelp(linearImageView,
                        Bundle.CountsViewPane_linearRadio_text(),
                        linearImageView.getImage(), text);
            });
        }

        /**
         * Constructor
         */
        CountsViewSettingsPane() {
            FXMLConstructor.construct(this, "CountsViewSettingsPane.fxml"); // NON-NLS
        }
    }

    /**
     *
     * Static utility to to show a Popover with the given Node as owner.
     *
     * @param owner       The owner of the Popover
     * @param headerText  A short String that will be shown in the top-left
     *                    corner of the Popover.
     * @param headerImage An Image that will be shown at the top-right corner of
     *                    the Popover.
     * @param content     The main content of the Popover, shown in the
     *                    bottom-center
     *
     */
    private static void showPopoverHelp(final Node owner, final String headerText, final Image headerImage, final Node content) {
        Pane borderPane = new BorderPane(null, null, new ImageView(headerImage),
                content,
                new Label(headerText));
        borderPane.setPadding(new Insets(10));
        borderPane.setPrefWidth(500);

        PopOver popOver = new PopOver(borderPane);
        popOver.setDetachable(false);
        popOver.setArrowLocation(PopOver.ArrowLocation.TOP_CENTER);
        popOver.show(owner);
    }

    /**
     * Task that clears the Chart, fetches new data according to the current
     * ZoomState and loads it into the Chart
     *
     */
    @NbBundle.Messages({
        "CountsViewPane.loggedTask.name=Updating Counts View",
        "CountsViewPane.loggedTask.updatingCounts=Populating view"})
    private class CountsUpdateTask extends ViewRefreshTask<List<String>> {

        CountsUpdateTask() {
            super(Bundle.CountsViewPane_loggedTask_name(), true);
        }

        @Override
        protected void succeeded() {
            super.succeeded();
            layoutDateLabels();
        }

        @Override
        protected Boolean call() throws Exception {
            super.call();
            if (isCancelled()) {
                return null;
            }
            EventsModel eventsModel = getEventsModel();

            final RangeDivision rangeInfo = RangeDivision.getRangeDivision(eventsModel.getTimeRange(), TimeLineController.getJodaTimeZone());
            getChart().setRangeInfo(rangeInfo);  //do we need this. It seems like a hack.
            List<Interval> intervals = rangeInfo.getIntervals(TimeLineController.getJodaTimeZone());

            //clear old data, and reset ranges and series
            resetView(Lists.transform(intervals, interval -> interval.getStart().toString(rangeInfo.getTickFormatter())));

            updateMessage(Bundle.CountsViewPane_loggedTask_updatingCounts());
            int chartMax = 0;
            int numIntervals = intervals.size();
            Scale activeScale = scaleProp.get();

            /*
             * For each interval, query the database for event counts and add
             * the counts to the chart. Doing this in chunks might seem
             * inefficient but it lets us reuse more cached results as the user
             * navigates to overlapping views.
             */
            for (int i = 0; i < numIntervals; i++) {
                if (isCancelled()) {
                    return null;
                }
                updateProgress(i, numIntervals);
                final Interval interval = intervals.get(i);
                int maxPerInterval = 0;

                //query for current interval
                Map<TimelineEventType, Long> eventCounts = eventsModel.getEventCounts(interval);

                //for each type add data to graph
                for (final TimelineEventType eventType : eventCounts.keySet()) {
                    if (isCancelled()) {
                        return null;
                    }

                    final Long count = eventCounts.get(eventType);
                    if (count > 0) {
                        final String intervalCategory = interval.getStart().toString(rangeInfo.getTickFormatter());
                        final double adjustedCount = activeScale.apply(count);

                        // chart mutations must happen on the JavaFX thread
                        final XYChart.Data<String, Number> dataItem =
                                new XYChart.Data<>(intervalCategory, adjustedCount,
                                        new EventCountsChart.ExtraData(interval, eventType, count));
                        Platform.runLater(() -> getSeries(eventType).getData().add(dataItem));
                        maxPerInterval += adjustedCount;
                    }
                }
                chartMax = Math.max(chartMax, maxPerInterval);
            }

            //adjust vertical axis according to scale type and max counts
            double countAxisUpperbound = 1 + chartMax * 1.2;
            double tickUnit = Scale.LINEAR.equals(activeScale)
                    ? Math.pow(10, Math.max(0, Math.floor(Math.log10(chartMax)) - 1))
                    : Double.MAX_VALUE;
            Platform.runLater(() -> {
                countAxis.setTickUnit(tickUnit);
                countAxis.setUpperBound(countAxisUpperbound);
            });

            return chartMax > 0;  // are there events
        }

        @Override
        protected void setDateValues(List<String> categories) {
            dateAxis.getCategories().setAll(categories);
        }
    }
}
package org.bsworks.x2.resource.impl;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Deque;
import java.util.Iterator;
import java.util.Set;

import org.bsworks.x2.resource.AggregatePropertyHandler;
import org.bsworks.x2.resource.DependentRefPropertyHandler;
import org.bsworks.x2.resource.FilterCondition;
import org.bsworks.x2.resource.FilterConditionOperandType;
import org.bsworks.x2.resource.FilterConditionType;
import org.bsworks.x2.resource.InvalidResourceDataException;
import org.bsworks.x2.resource.InvalidSpecificationException;
import org.bsworks.x2.resource.PropertyValueFunction;
import org.bsworks.x2.resource.RefPropertyHandler;
import org.bsworks.x2.resource.ResourcePropertyHandler;
import org.bsworks.x2.resource.ResourcePropertyValueHandler;


/**
 * Filter condition implementation. Immutable once constructed: all validation
 * of the property path and operands happens in the constructor.
 *
 * @author Lev Himmelfarb
 */
class FilterConditionImpl
	implements FilterCondition {

	/**
	 * Condition type.
	 */
	private final FilterConditionType type;

	/**
	 * Tells if negated.
	 */
	private final boolean negated;

	/**
	 * Property path (with any trailing "/id" or "/key" suffix stripped off).
	 */
	private final String propPath;

	/**
	 * Property value type (ID, KEY or VALUE, derived from the path suffix).
	 */
	private final FilterConditionOperandType propValueType;

	/**
	 * Property value transformation function.
	 */
	private final PropertyValueFunction valueFunc;

	/**
	 * Property value transformation function parameters (never {@code null},
	 * empty array when the function takes no parameters).
	 */
	private final Object[] valueFuncParams;

	/**
	 * Property chain.
	 */
	private final Deque<? extends ResourcePropertyHandler> propChain;

	/**
	 * Condition operands (unmodifiable).
	 */
	private final Collection<FilterConditionOperandImpl> operands;


	/**
	 * Create new condition.
	 *
	 * @param resources Application resources manager.
	 * @param type Condition type.
	 * @param valueFunc Property value transformation function.
	 * @param valueFuncParams Property value transformation function parameters.
	 * May be {@code null} if the function takes no parameters.
	 * @param negated {@code true} if negated.
	 * @param prsrcHandler Root persistent resource handler.
	 * @param propPath Property path.
	 * @param operands Condition operands. Cannot be {@code null}, but can be
	 * empty.
	 * @param prsrcClasses Set, to which to add any participating persistent
	 * resource classes.
	 *
	 * @throws InvalidSpecificationException If condition specification is
	 * invalid.
	 */
	FilterConditionImpl(final ResourcesImpl resources,
			final FilterConditionType type,
			final PropertyValueFunction valueFunc,
			final Object[] valueFuncParams, final boolean negated,
			final PersistentResourceHandlerImpl<?> prsrcHandler,
			final String propPath, final Object[] operands,
			final Set<Class<?>> prsrcClasses) {

		this.type = type;
		this.valueFunc = valueFunc;
		this.valueFuncParams = (valueFuncParams != null ? valueFuncParams
				: new Object[0]);
		this.negated = negated;

		// get property path and tested value operand type
		// (a "/id" or "/key" suffix selects what part of the property is
		// tested and is removed from the stored path)
		if (propPath.endsWith("/id")) {
			this.propPath = propPath.substring(0,
					propPath.length() - "/id".length());
			this.propValueType = FilterConditionOperandType.ID;
		} else if (propPath.endsWith("/key")) {
			this.propPath = propPath.substring(0,
					propPath.length() - "/key".length());
			this.propValueType = FilterConditionOperandType.KEY;
		} else {
			this.propPath = propPath;
			this.propValueType = FilterConditionOperandType.VALUE;
		}

		// get property path chain
		this.propChain =
			prsrcHandler.getPersistentPropertyChain(this.propPath);

		// get handler of the property at the end of the chain
		final AbstractResourcePropertyHandlerImpl propHandler =
			(AbstractResourcePropertyHandlerImpl) this.propChain.getLast();

		// cannot use aggregates in filters
		if (propHandler instanceof AggregatePropertyHandler)
			throw new InvalidSpecificationException(
					"Cannot use aggregate properties in filters.");

		// get property value handlers (top and leaf)
		final AbstractResourcePropertyValueHandlerImpl propTopValueHandler =
			propHandler.getValueHandler();
		final AbstractResourcePropertyValueHandlerImpl propLeafValueHandler =
			propHandler.getValueHandler().getLastInChain();

		// determine if the test is a presence check, validate operands number
		final boolean presenceCheck =
			((this.type == FilterConditionType.EMPTY)
					|| (this.type == FilterConditionType.NOT_EMPTY));
		if (presenceCheck && (operands.length > 0))
			throw new InvalidSpecificationException("This type of filter"
					+ " condition does not use operands.");
		if (!presenceCheck && (operands.length == 0))
			throw new InvalidSpecificationException("This type of filter"
					+ " condition requires at least one operand.");

		// get value handler for the operand
		// (depends on whether the condition tests the property's id, its map
		// key, or its plain value)
		final ResourcePropertyValueHandler opValueHandler;
		switch (this.propValueType) {
		case ID:

			// make sure the property is a reference
			if (!propLeafValueHandler.isRef())
				throw new InvalidSpecificationException("Property " + propPath
						+ " is not a reference and its id cannot be tested.");

			// use target resource id property value handler for operand values
			opValueHandler = resources.getPersistentResourceHandler(
					propHandler.getValueHandler().getLastInChain()
						.getRefTargetClass())
				.getIdProperty().getValueHandler();

			break;

		case KEY:

			// make sure the property is a map
			if (propTopValueHandler.getType()
					!= ResourcePropertyValueType.MAP)
				throw new InvalidSpecificationException("Property " + propPath
						+ " is not a map and does not have a key.");

			// use key value handler for operand values
			opValueHandler = propHandler.getKeyValueHandler();

			break;

		default: // VALUE

			// make sure the property has simple value to test
			if (!(propLeafValueHandler
					instanceof SimpleResourcePropertyValueHandler))
				throw new InvalidSpecificationException("Property " + propPath
						+ " does not have simple value.");

			// use property value handler for operand values
			opValueHandler = propHandler.getValueHandler();
		}

		// gather all operands using appropriate value handler
		// (string operands are parsed via the value handler, anything else is
		// taken as-is)
		try {
			final Collection<FilterConditionOperandImpl> operandsCol =
				new ArrayList<>(operands.length > 10 ? operands.length : 10);
			for (final Object op : operands) {
				if (op == null)
					throw new InvalidSpecificationException(
							"Filter condition operands may not be null.");
				operandsCol.add(new FilterConditionOperandImpl(
						op instanceof String
							? opValueHandler.valueOf((String) op)
							: op));
			}
			this.operands = Collections.unmodifiableCollection(operandsCol);
		} catch (final InvalidResourceDataException e) {
			throw new InvalidSpecificationException("Invalid operand value.",
					e);
		}

		// save participating persistent resource classes from the chain
		// (a trailing plain reference is not followed, hence the hasNext()
		// check for RefPropertyHandler)
		for (final Iterator<? extends ResourcePropertyHandler> i =
				this.propChain.iterator(); i.hasNext();) {
			final ResourcePropertyHandler prop = i.next();
			if ((prop instanceof RefPropertyHandler) && i.hasNext())
				prsrcClasses.add(((RefPropertyHandler) prop)
						.getReferredResourceClass());
			else if (prop instanceof DependentRefPropertyHandler)
				prsrcClasses.add(((DependentRefPropertyHandler) prop)
						.getReferredResourceClass());
		}
	}


	/* (non-Javadoc)
	 * See overridden method.
	 */
	@Override
	public FilterConditionType getType() {

		return this.type;
	}

	/* (non-Javadoc)
	 * See overridden method.
	 */
	@Override
	public boolean isNegated() {

		return this.negated;
	}

	/* (non-Javadoc)
	 * See overridden method.
	 */
	@Override
	public String getPropertyPath() {

		return this.propPath;
	}

	/* (non-Javadoc)
	 * See overridden method.
	 */
	@Override
	public FilterConditionOperandType getPropertyValueType() {

		return this.propValueType;
	}

	/* (non-Javadoc)
	 * See overridden method.
	 */
	@Override
	public PropertyValueFunction getValueFunction() {

		return this.valueFunc;
	}

	/* (non-Javadoc)
	 * See overridden method.
	 */
	@Override
	public Object[] getValueFunctionParams() {

		return this.valueFuncParams;
	}

	/* (non-Javadoc)
	 * See overridden method.
	 */
	@Override
	public Deque<? extends ResourcePropertyHandler> getPropertyChain() {

		return this.propChain;
	}

	/* (non-Javadoc)
	 * See overridden method.
	 */
	@Override
	public Collection<FilterConditionOperandImpl> getOperands() {

		return this.operands;
	}
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.client; import com.google.common.collect.ImmutableMap; import io.airlift.units.Duration; import java.net.URI; import java.nio.charset.CharsetEncoder; import java.util.Locale; import java.util.Map; import java.util.Map.Entry; import static com.google.common.base.MoreObjects.toStringHelper; import static com.google.common.base.Preconditions.checkArgument; import static java.nio.charset.StandardCharsets.US_ASCII; import static java.util.Collections.emptyMap; import static java.util.Objects.requireNonNull; public class ClientSession { private final URI server; private final String user; private final String source; private final String catalog; private final String schema; private final String timeZoneId; private final Locale locale; private final Map<String, String> properties; private final Map<String, String> preparedStatements; private final String transactionId; private final boolean debug; private final Duration clientRequestTimeout; public static ClientSession withCatalogAndSchema(ClientSession session, String catalog, String schema) { return new ClientSession( session.getServer(), session.getUser(), session.getSource(), catalog, schema, session.getTimeZoneId(), session.getLocale(), session.getProperties(), session.getPreparedStatements(), session.getTransactionId(), session.isDebug(), session.getClientRequestTimeout()); } public static ClientSession withProperties(ClientSession session, Map<String, 
String> properties) { return new ClientSession( session.getServer(), session.getUser(), session.getSource(), session.getCatalog(), session.getSchema(), session.getTimeZoneId(), session.getLocale(), properties, session.getPreparedStatements(), session.getTransactionId(), session.isDebug(), session.getClientRequestTimeout()); } public static ClientSession withPreparedStatements(ClientSession session, Map<String, String> preparedStatements) { return new ClientSession( session.getServer(), session.getUser(), session.getSource(), session.getCatalog(), session.getSchema(), session.getTimeZoneId(), session.getLocale(), session.getProperties(), preparedStatements, session.getTransactionId(), session.isDebug(), session.getClientRequestTimeout()); } public static ClientSession withTransactionId(ClientSession session, String transactionId) { return new ClientSession( session.getServer(), session.getUser(), session.getSource(), session.getCatalog(), session.getSchema(), session.getTimeZoneId(), session.getLocale(), session.getProperties(), session.getPreparedStatements(), transactionId, session.isDebug(), session.getClientRequestTimeout()); } public static ClientSession stripTransactionId(ClientSession session) { return new ClientSession( session.getServer(), session.getUser(), session.getSource(), session.getCatalog(), session.getSchema(), session.getTimeZoneId(), session.getLocale(), session.getProperties(), session.getPreparedStatements(), null, session.isDebug(), session.getClientRequestTimeout()); } public ClientSession(URI server, String user, String source, String catalog, String schema, String timeZoneId, Locale locale, Map<String, String> properties, String transactionId, boolean debug, Duration clientRequestTimeout) { this(server, user, source, catalog, schema, timeZoneId, locale, properties, emptyMap(), transactionId, debug, clientRequestTimeout); } public ClientSession(URI server, String user, String source, String catalog, String schema, String timeZoneId, Locale 
locale, Map<String, String> properties, Map<String, String> preparedStatements, String transactionId, boolean debug, Duration clientRequestTimeout) { this.server = requireNonNull(server, "server is null"); this.user = user; this.source = source; this.catalog = catalog; this.schema = schema; this.locale = locale; this.timeZoneId = requireNonNull(timeZoneId, "timeZoneId is null"); this.transactionId = transactionId; this.debug = debug; this.properties = ImmutableMap.copyOf(requireNonNull(properties, "properties is null")); this.preparedStatements = ImmutableMap.copyOf(requireNonNull(preparedStatements, "preparedStatements is null")); this.clientRequestTimeout = clientRequestTimeout; // verify the properties are valid CharsetEncoder charsetEncoder = US_ASCII.newEncoder(); for (Entry<String, String> entry : properties.entrySet()) { checkArgument(!entry.getKey().isEmpty(), "Session property name is empty"); checkArgument(entry.getKey().indexOf('=') < 0, "Session property name must not contain '=': %s", entry.getKey()); checkArgument(charsetEncoder.canEncode(entry.getKey()), "Session property name is not US_ASCII: %s", entry.getKey()); checkArgument(charsetEncoder.canEncode(entry.getValue()), "Session property value is not US_ASCII: %s", entry.getValue()); } } public URI getServer() { return server; } public String getUser() { return user; } public String getSource() { return source; } public String getCatalog() { return catalog; } public String getSchema() { return schema; } public String getTimeZoneId() { return timeZoneId; } public Locale getLocale() { return locale; } public Map<String, String> getProperties() { return properties; } public Map<String, String> getPreparedStatements() { return preparedStatements; } public String getTransactionId() { return transactionId; } public boolean isDebug() { return debug; } public Duration getClientRequestTimeout() { return clientRequestTimeout; } @Override public String toString() { return toStringHelper(this) .add("server", 
server) .add("user", user) .add("catalog", catalog) .add("schema", schema) .add("timeZone", timeZoneId) .add("locale", locale) .add("properties", properties) .add("transactionId", transactionId) .add("debug", debug) .toString(); } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.server.resourcemanager.rmapp;

import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;

import org.apache.hadoop.yarn.MockApps;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationSubmissionContextPBImpl;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;

/**
 * Hand-rolled {@link RMApp} stub for tests. All application attributes are
 * plain package-visible fields so tests can poke them directly or through the
 * setters; queries that this mock does not model throw
 * {@link UnsupportedOperationException}.
 */
public class MockRMApp implements RMApp {
  // Window (in ms) used to randomize the mock start/submit timestamps.
  static final int DT = 1000000; // ms

  String user = MockApps.newUserName();
  String name = MockApps.newAppName();
  String queue = MockApps.newQueue();
  // Pick a start time somewhere in the last DT milliseconds, and a submit
  // time somewhere in the DT milliseconds before that, so each mock app
  // carries slightly different (but plausible) timestamps.
  long start = System.currentTimeMillis() - (int) (Math.random() * DT);
  long submit = start - (int) (Math.random() * DT);
  long finish = 0;
  RMAppState state = RMAppState.NEW;
  int failCount = 0;
  ApplicationId id;
  String url = null;
  StringBuilder diagnostics = new StringBuilder();
  RMAppAttempt attempt;
  int maxAppAttempts = 1;

  /** Creates a mock app with the given numeric id, finish time and state. */
  public MockRMApp(int newid, long time, RMAppState newState) {
    finish = time;
    id = MockApps.newAppID(newid);
    state = newState;
  }

  /** Same as {@link #MockRMApp(int, long, RMAppState)}, with an explicit user. */
  public MockRMApp(int newid, long time, RMAppState newState, String userName) {
    this(newid, time, newState);
    user = userName;
  }

  /** Same as above, additionally seeding the diagnostics message. */
  public MockRMApp(int newid, long time, RMAppState newState, String userName, String diag) {
    this(newid, time, newState, userName);
    this.diagnostics = new StringBuilder(diag);
  }

  @Override
  public ApplicationId getApplicationId() {
    return id;
  }

  @Override
  public ApplicationSubmissionContext getApplicationSubmissionContext() {
    // A fresh, empty context is returned on every call.
    return new ApplicationSubmissionContextPBImpl();
  }

  @Override
  public RMAppState getState() {
    return state;
  }

  public void setState(RMAppState state) {
    this.state = state;
  }

  @Override
  public String getUser() {
    return user;
  }

  public void setUser(String user) {
    this.user = user;
  }

  @Override
  public float getProgress() {
    // This mock never makes progress.
    return 0.0f;
  }

  @Override
  public RMAppAttempt getRMAppAttempt(ApplicationAttemptId appAttemptId) {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public String getQueue() {
    return queue;
  }

  public void setQueue(String queue) {
    this.queue = queue;
  }

  @Override
  public String getName() {
    return name;
  }

  public void setName(String name) {
    this.name = name;
  }

  @Override
  public Map<ApplicationAttemptId, RMAppAttempt> getAppAttempts() {
    // At most one attempt is modelled; return it keyed by its attempt id.
    Map<ApplicationAttemptId, RMAppAttempt> all =
        new LinkedHashMap<ApplicationAttemptId, RMAppAttempt>();
    if (attempt != null) {
      all.put(attempt.getAppAttemptId(), attempt);
    }
    return all;
  }

  @Override
  public RMAppAttempt getCurrentAppAttempt() {
    return attempt;
  }

  public void setCurrentAppAttempt(RMAppAttempt attempt) {
    this.attempt = attempt;
  }

  @Override
  public ApplicationReport createAndGetApplicationReport(
      String clientUserName, boolean allowAccess) {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public long getFinishTime() {
    return finish;
  }

  public void setFinishTime(long time) {
    this.finish = time;
  }

  @Override
  public long getStartTime() {
    return start;
  }

  @Override
  public long getSubmitTime() {
    return submit;
  }

  public void setStartTime(long time) {
    this.start = time;
  }

  @Override
  public String getTrackingUrl() {
    return url;
  }

  public void setTrackingUrl(String url) {
    this.url = url;
  }

  @Override
  public StringBuilder getDiagnostics() {
    return diagnostics;
  }

  public void setDiagnostics(String diag) {
    this.diagnostics = new StringBuilder(diag);
  }

  @Override
  public int getMaxAppAttempts() {
    return maxAppAttempts;
  }

  public void setNumMaxRetries(int maxAppAttempts) {
    this.maxAppAttempts = maxAppAttempts;
  }

  @Override
  public void handle(RMAppEvent event) {
    // Events are silently ignored by this mock.
  }

  @Override
  public FinalApplicationStatus getFinalApplicationStatus() {
    return FinalApplicationStatus.UNDEFINED;
  }

  @Override
  public int pullRMNodeUpdates(Collection<RMNode> updatedNodes) {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public String getApplicationType() {
    return YarnConfiguration.DEFAULT_APPLICATION_TYPE;
  }

  @Override
  public Set<String> getApplicationTags() {
    return null;
  }

  @Override
  public boolean isAppFinalStateStored() {
    return true;
  }

  @Override
  public YarnApplicationState createApplicationState() {
    return null;
  }

  @Override
  public Set<NodeId> getRanNodes() {
    return null;
  }
}
/* Copyright (c) 2009, University of Oslo, Norway
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *
 *  * Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *
 *  * Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 *  * Neither the name of the University of Oslo nor the names of its
 *    contributors may be used to endorse or promote products derived from
 *    this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
 * IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
 * PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
 * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package vtk.web.decorating.components.menu;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import javax.servlet.http.HttpServletRequest;

import vtk.repository.AuthorizationException;
import vtk.repository.Path;
import vtk.repository.Privilege;
import vtk.repository.Property;
import vtk.repository.PropertySet;
import vtk.repository.Repository;
import vtk.repository.Resource;
import vtk.repository.resourcetype.PropertyTypeDefinition;
import vtk.security.AuthenticationException;
import vtk.security.Principal;
import vtk.web.RequestContext;
import vtk.web.decorating.DecoratorRequest;
import vtk.web.decorating.DecoratorResponse;
import vtk.web.decorating.components.DecoratorComponentException;
import vtk.web.referencedata.provider.BreadCrumbProvider;
import vtk.web.referencedata.provider.BreadcrumbElement;
import vtk.web.service.URL;
import vtk.web.view.components.menu.MenuItem;

/**
 * XXX This is NOT a simple breadcrumb menu component, as the name would
 * suggest. It creates a parent/child menu.
 */
public class BreadcrumbMenuComponent extends ListMenuComponent {

    private static final int DEFAULT_NUMBER_OF_SIBLINGS = 3000;
    private static final String PARAMETER_MAX_NUMBER_OF_SIBLINGS = "max-number-of-siblings";
    private static final String PARAMETER_MAX_NUMBER_OF_SIBLINGS_DESC =
            "Defines the maximum number of siblings. When this limit is"
            + " reached no siblings are going to be displayed. Default limit is: "
            + DEFAULT_NUMBER_OF_SIBLINGS;
    private static final String BREAD_CRUMB_MENU_PARAMETER_DISPLAY_FROM_LEVEL_DESC =
            "Defines the starting URI level for the menu";

    /**
     * Populates {@code model} with three entries: {@code breadcrumb} (the
     * ancestor path), {@code children} (menu items for the children, or the
     * siblings when there are no visible children) and {@code markedurl}
     * (the URL of the collection the menu is anchored at).
     *
     * @param model the decorator model to populate
     * @param request the decorator request (supplies parameters and locale)
     * @param response the decorator response (unused here)
     * @throws Exception on repository or provider failures not handled below
     */
    @Override
    public void processModel(Map<String, Object> model, DecoratorRequest request, DecoratorResponse response)
            throws Exception {
        int displayFromLevel = getIntegerGreaterThanZero(PARAMETER_DISPLAY_FROM_LEVEL, request, -1);
        int maxSiblings = getIntegerGreaterThanZero(PARAMETER_MAX_NUMBER_OF_SIBLINGS, request,
                DEFAULT_NUMBER_OF_SIBLINGS);
        boolean ascendingSort = true;

        RequestContext requestContext = RequestContext.getRequestContext(request.getServletRequest());
        String token = requestContext.getSecurityToken();
        Repository repository = requestContext.getRepository();
        Path uri = requestContext.getResourceURI();
        Principal principal = requestContext.getPrincipal();

        List<BreadcrumbElement> breadCrumbElements = getBreadcrumbElements(request.getServletRequest());
        Resource currentResource = repository.retrieve(token, uri, true);

        // Nothing to render if the requested starting level lies beyond the
        // available breadcrumb depth (documents get one extra level of slack).
        if ((!currentResource.isCollection() && (displayFromLevel + 1) > breadCrumbElements.size())
                || (displayFromLevel > breadCrumbElements.size())) {
            return;
        }
        // Drop the leading crumbs up to the configured starting level.
        for (int i = 0; i < displayFromLevel; i++) {
            if (breadCrumbElements.size() > 0) {
                breadCrumbElements.remove(0);
            }
        }

        // From here on we typically load other resources than current resource
        // and must respect plainServiceMode.
        if (requestContext.isViewUnauthenticated()) {
            token = null;
            principal = null;
        }

        // For a document, the menu is anchored at its parent collection. If
        // the parent cannot be read, fall back to breadcrumb-only output.
        if (!currentResource.isCollection()) {
            try {
                currentResource = repository.retrieve(token, uri.getParent(), true);
            } catch (AuthorizationException | AuthenticationException e) {
                model.put("breadcrumb", breadCrumbElements);
                return;
            }
            if (breadCrumbElements.size() > 0) {
                breadCrumbElements.remove(breadCrumbElements.size() - 1);
            }
        }

        URL markedUrl = menuGenerator.getViewService().urlConstructor(requestContext.getRequestURL())
                .withURI(currentResource.getURI())
                .constructURL();
        breadCrumbElements.add(new BreadcrumbElement(markedUrl, getMenuTitle(currentResource)));

        // XXX: for this case currentResource will never be equal any of the
        // resources in list to generate menu from, so generateMenuItemList
        // need not check for this condition for hidden ones. However, it will
        // need to do that for the sibling case in the next call below.
        List<MenuItem<PropertySet>> menuItemList = generateMenuItemList(request,
                repository.listChildren(token, currentResource.getURI(), true), currentResource, principal,
                repository);

        // If menu is empty, i.e. current resource has no children or
        // all children were hidden, then generate menu based on siblings.
        if (menuItemList.isEmpty()) {
            Resource currentResourceParent = null;
            try {
                currentResourceParent = repository.retrieve(token, currentResource.getURI().getParent(), true);
            } catch (Exception e) {
                // Ignore: sibling menu is best-effort; without a readable
                // parent we simply render no children.
            }
            if (currentResourceParent != null) {
                menuItemList = generateMenuItemList(request,
                        repository.listChildren(token, currentResourceParent.getURI(), true), currentResource,
                        principal, repository);
                breadCrumbElements.remove(breadCrumbElements.size() - 1);
                // Too many siblings: collapse the menu to just the current
                // resource instead of an unwieldy list.
                if (menuItemList.size() > maxSiblings) {
                    menuItemList = new ArrayList<>();
                    menuItemList.add(buildItem(request, currentResource));
                }
            }
        }

        // Honor the collection's sort-descending property, if set.
        if (currentResource.getProperty(menuGenerator.getSortDescendingPropDef()) != null) {
            if (currentResource.getProperty(menuGenerator.getSortDescendingPropDef()).getBooleanValue()) {
                ascendingSort = false;
            }
        }
        menuItemList = sortByOrder(menuItemList, request.getLocale(), ascendingSort);

        model.put("breadcrumb", breadCrumbElements);
        model.put("children", menuItemList);
        model.put("markedurl", markedUrl);
    }

    /**
     * Builds the breadcrumb trail for the current request by delegating to a
     * locally configured {@link BreadCrumbProvider}.
     *
     * @param request the servlet request
     * @return mutable list of breadcrumb elements (current resource excluded)
     * @throws Exception if the provider fails
     * @throws IllegalStateException if the provider put something unexpected
     *         in the model
     */
    private List<BreadcrumbElement> getBreadcrumbElements(HttpServletRequest request) throws Exception {
        RequestContext requestContext = RequestContext.getRequestContext(request);
        // XXX NO! Reconsider this. Refactor BreadCrumbProvider and create
        // separate generic class for bread crumb creation. Use this separate
        // implementation in provider and here.
        String breadcrumbName = "breadcrumb";
        BreadCrumbProvider breadCrumbProvider = new BreadCrumbProvider();
        breadCrumbProvider.setSkipCurrentResource(true);
        breadCrumbProvider.setService(this.menuGenerator.getViewService());
        breadCrumbProvider.setBreadcrumbName(breadcrumbName);
        breadCrumbProvider.setSkipIndexFile(false);
        if (!requestContext.isPreviewUnpublished()) {
            breadCrumbProvider.setIgnoreProperty(menuGenerator.getUnpublishedCollectionPropDef());
        }
        // Navigation title wins over the plain title when both are present.
        PropertyTypeDefinition[] titleProp = new PropertyTypeDefinition[] {
                this.menuGenerator.getNavigationTitlePropDef(),
                this.menuGenerator.getTitlePropDef() };
        breadCrumbProvider.setTitleOverrideProperties(titleProp);
        breadCrumbProvider.afterPropertiesSet();
        Map<String, Object> map = new HashMap<>();
        breadCrumbProvider.referenceData(map, request);
        Object o = map.get(breadcrumbName);
        if (!(o instanceof BreadcrumbElement[])) {
            throw new IllegalStateException("Expected BreadCrumbElement[] in model, found " + o);
        }
        // Wrap in a fresh ArrayList: callers mutate this list.
        return new ArrayList<>(Arrays.asList((BreadcrumbElement[]) o));
    }

    /**
     * Filters {@code resources} down to the collections that should appear in
     * the menu and builds a menu item for each survivor.
     *
     * @param request the decorator request
     * @param resources candidate children/siblings
     * @param currentResource the collection the menu is anchored at
     * @param principal principal used for read authorization (may be null)
     * @param repository repository used for ACL checks
     * @return menu items for the visible collections (possibly empty)
     * @throws Exception if item construction fails
     */
    private List<MenuItem<PropertySet>> generateMenuItemList(DecoratorRequest request, Resource[] resources,
            Resource currentResource, Principal principal, Repository repository) throws Exception {
        RequestContext requestContext = RequestContext.getRequestContext(request.getServletRequest());
        List<MenuItem<PropertySet>> menuItems = new ArrayList<>();
        if (!requestContext.isPreviewUnpublished()) {
            if (currentResource.getProperty(menuGenerator.getUnpublishedCollectionPropDef()) != null) {
                return menuItems;
            }
        }
        for (Resource r : resources) {
            // Filtering: only collections are listed.
            if (!r.isCollection()) {
                continue;
            }
            // Hidden resources are skipped, unless it is the current one.
            if (r.getProperty(menuGenerator.getHiddenPropDef()) != null
                    && !r.getURI().equals(currentResource.getURI())) {
                continue;
            }
            if (!requestContext.isPreviewUnpublished()) {
                if (r.getProperty(menuGenerator.getUnpublishedCollectionPropDef()) != null) {
                    continue;
                }
            }
            Path uri = currentResource.getURI();
            if (!currentResource.isCollection()) {
                uri = uri.getParent();
            }
            if (!r.isPublished() && (!r.getURI().equals(uri) && requestContext.isPreviewUnpublished())) {
                continue;
            }
            // Remove resources that current principal is not allowed to access
            // (they may appear when using Repository.loadChildren).
            if (!repository.authorize(principal, r.getAcl(), Privilege.READ_PROCESSED)) {
                continue;
            }
            // Passed filtering, build menu item:
            menuItems.add(buildItem(request, r));
        }
        return menuItems;
    }

    /**
     * Returns the navigation title of {@code resource} when set, otherwise
     * its ordinary title.
     */
    private String getMenuTitle(Resource resource) {
        Property prop = resource.getProperty(this.menuGenerator.getNavigationTitlePropDef());
        if (prop != null) {
            return prop.getStringValue();
        }
        return resource.getTitle();
    }

    /**
     * Parses the named request parameter as a positive integer.
     *
     * @param parameter the request parameter name
     * @param request the decorator request
     * @param returnWhenParamNotFound value returned when the parameter is absent
     * @return the parsed value, or {@code returnWhenParamNotFound} if missing
     * @throws DecoratorComponentException if the parameter is present but is
     *         not an integer greater than zero
     */
    private int getIntegerGreaterThanZero(String parameter, DecoratorRequest request, int returnWhenParamNotFound) {
        int value = returnWhenParamNotFound;
        try {
            value = Integer.parseInt(request.getStringParameter(parameter));
            if (value < 1) {
                integerMustBeGreaterThanZeroException(parameter);
            }
        } catch (NumberFormatException e) {
            // Absent parameter also lands here (parseInt(null)); only complain
            // when the parameter was actually supplied but unparsable.
            if (request.getRawParameter(parameter) != null) {
                integerMustBeGreaterThanZeroException(parameter);
            }
        }
        return value;
    }

    /** Throws the standard "must be > 0" complaint for {@code parameter}. */
    private void integerMustBeGreaterThanZeroException(String parameter) {
        throw new DecoratorComponentException("Parameter '" + parameter + "' must be an integer > 0");
    }

    /** Describes the parameters this component accepts, in display order. */
    @Override
    protected Map<String, String> getParameterDescriptionsInternal() {
        Map<String, String> map = new LinkedHashMap<>();
        map.put(PARAMETER_DISPLAY_FROM_LEVEL, BREAD_CRUMB_MENU_PARAMETER_DISPLAY_FROM_LEVEL_DESC);
        map.put(PARAMETER_MAX_NUMBER_OF_SIBLINGS, PARAMETER_MAX_NUMBER_OF_SIBLINGS_DESC);
        return map;
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.catalog; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import java.io.IOException; import java.io.InterruptedIOException; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.NavigableMap; import java.util.Set; import java.util.TreeMap; /** * Reads region and assignment information from <code>hbase:meta</code>. */ @InterfaceAudience.Private public class MetaReader { // TODO: Strip CatalogTracker from this class. 
Its all over and in the end // its only used to get its Configuration so we can get associated // Connection. private static final Log LOG = LogFactory.getLog(MetaReader.class); static final byte [] META_REGION_PREFIX; static { // Copy the prefix from FIRST_META_REGIONINFO into META_REGION_PREFIX. // FIRST_META_REGIONINFO == 'hbase:meta,,1'. META_REGION_PREFIX == 'hbase:meta,' int len = HRegionInfo.FIRST_META_REGIONINFO.getRegionName().length - 2; META_REGION_PREFIX = new byte [len]; System.arraycopy(HRegionInfo.FIRST_META_REGIONINFO.getRegionName(), 0, META_REGION_PREFIX, 0, len); } /** * Performs a full scan of <code>hbase:meta</code>, skipping regions from any * tables in the specified set of disabled tables. * @param catalogTracker * @param disabledTables set of disabled tables that will not be returned * @return Returns a map of every region to it's currently assigned server, * according to META. If the region does not have an assignment it will have * a null value in the map. * @throws IOException */ public static Map<HRegionInfo, ServerName> fullScan( CatalogTracker catalogTracker, final Set<TableName> disabledTables) throws IOException { return fullScan(catalogTracker, disabledTables, false); } /** * Performs a full scan of <code>hbase:meta</code>, skipping regions from any * tables in the specified set of disabled tables. * @param catalogTracker * @param disabledTables set of disabled tables that will not be returned * @param excludeOfflinedSplitParents If true, do not include offlined split * parents in the return. * @return Returns a map of every region to it's currently assigned server, * according to META. If the region does not have an assignment it will have * a null value in the map. 
* @throws IOException */ public static Map<HRegionInfo, ServerName> fullScan( CatalogTracker catalogTracker, final Set<TableName> disabledTables, final boolean excludeOfflinedSplitParents) throws IOException { final Map<HRegionInfo, ServerName> regions = new TreeMap<HRegionInfo, ServerName>(); Visitor v = new Visitor() { @Override public boolean visit(Result r) throws IOException { if (r == null || r.isEmpty()) return true; Pair<HRegionInfo, ServerName> region = HRegionInfo.getHRegionInfoAndServerName(r); HRegionInfo hri = region.getFirst(); if (hri == null) return true; if (hri.getTable() == null) return true; if (disabledTables.contains( hri.getTable())) return true; // Are we to include split parents in the list? if (excludeOfflinedSplitParents && hri.isSplitParent()) return true; regions.put(hri, region.getSecond()); return true; } }; fullScan(catalogTracker, v); return regions; } /** * Performs a full scan of <code>hbase:meta</code>. * @return List of {@link Result} * @throws IOException */ public static List<Result> fullScan(CatalogTracker catalogTracker) throws IOException { CollectAllVisitor v = new CollectAllVisitor(); fullScan(catalogTracker, v, null); return v.getResults(); } /** * Performs a full scan of a <code>hbase:meta</code> table. * @return List of {@link Result} * @throws IOException */ public static List<Result> fullScanOfMeta(CatalogTracker catalogTracker) throws IOException { CollectAllVisitor v = new CollectAllVisitor(); fullScan(catalogTracker, v, null); return v.getResults(); } /** * Performs a full scan of <code>hbase:meta</code>. * @param catalogTracker * @param visitor Visitor invoked against each row. * @throws IOException */ public static void fullScan(CatalogTracker catalogTracker, final Visitor visitor) throws IOException { fullScan(catalogTracker, visitor, null); } /** * Callers should call close on the returned {@link HTable} instance. 
* @param catalogTracker We'll use this catalogtracker's connection * @param tableName Table to get an {@link HTable} against. * @return An {@link HTable} for <code>tableName</code> * @throws IOException */ private static HTable getHTable(final CatalogTracker catalogTracker, final TableName tableName) throws IOException { // Passing the CatalogTracker's connection ensures this // HTable instance uses the CatalogTracker's connection. org.apache.hadoop.hbase.client.HConnection c = catalogTracker.getConnection(); if (c == null) throw new NullPointerException("No connection"); return new HTable(tableName, c); } /** * Callers should call close on the returned {@link HTable} instance. * @param catalogTracker * @return An {@link HTable} for <code>hbase:meta</code> * @throws IOException */ static HTable getCatalogHTable(final CatalogTracker catalogTracker) throws IOException { return getMetaHTable(catalogTracker); } /** * Callers should call close on the returned {@link HTable} instance. * @param ct * @return An {@link HTable} for <code>hbase:meta</code> * @throws IOException */ static HTable getMetaHTable(final CatalogTracker ct) throws IOException { return getHTable(ct, TableName.META_TABLE_NAME); } /** * @param t Table to use (will be closed when done). * @param g Get to run * @throws IOException */ private static Result get(final HTable t, final Get g) throws IOException { try { return t.get(g); } finally { t.close(); } } /** * Reads the location of the specified region * @param catalogTracker * @param regionName region whose location we are after * @return location of region as a {@link ServerName} or null if not found * @throws IOException */ static ServerName readRegionLocation(CatalogTracker catalogTracker, byte [] regionName) throws IOException { Pair<HRegionInfo, ServerName> pair = getRegion(catalogTracker, regionName); return (pair == null || pair.getSecond() == null)? null: pair.getSecond(); } /** * Gets the region info and assignment for the specified region. 
* @param catalogTracker * @param regionName Region to lookup. * @return Location and HRegionInfo for <code>regionName</code> * @throws IOException */ public static Pair<HRegionInfo, ServerName> getRegion( CatalogTracker catalogTracker, byte [] regionName) throws IOException { Get get = new Get(regionName); get.addFamily(HConstants.CATALOG_FAMILY); Result r = get(getCatalogHTable(catalogTracker), get); return (r == null || r.isEmpty())? null: HRegionInfo.getHRegionInfoAndServerName(r); } /** * Gets the result in hbase:meta for the specified region. * @param catalogTracker * @param regionName * @return result of the specified region * @throws IOException */ public static Result getRegionResult(CatalogTracker catalogTracker, byte[] regionName) throws IOException { Get get = new Get(regionName); get.addFamily(HConstants.CATALOG_FAMILY); return get(getCatalogHTable(catalogTracker), get); } /** * Get regions from the merge qualifier of the specified merged region * @return null if it doesn't contain merge qualifier, else two merge regions * @throws IOException */ public static Pair<HRegionInfo, HRegionInfo> getRegionsFromMergeQualifier( CatalogTracker catalogTracker, byte[] regionName) throws IOException { Result result = getRegionResult(catalogTracker, regionName); HRegionInfo mergeA = HRegionInfo.getHRegionInfo(result, HConstants.MERGEA_QUALIFIER); HRegionInfo mergeB = HRegionInfo.getHRegionInfo(result, HConstants.MERGEB_QUALIFIER); if (mergeA == null && mergeB == null) { return null; } return new Pair<HRegionInfo, HRegionInfo>(mergeA, mergeB); } /** * Checks if the specified table exists. Looks at the hbase:meta table hosted on * the specified server. 
* @param catalogTracker * @param tableName table to check * @return true if the table exists in meta, false if not * @throws IOException */ public static boolean tableExists(CatalogTracker catalogTracker, final TableName tableName) throws IOException { if (tableName.equals(HTableDescriptor.META_TABLEDESC.getTableName())) { // Catalog tables always exist. return true; } // Make a version of ResultCollectingVisitor that only collects the first CollectingVisitor<HRegionInfo> visitor = new CollectingVisitor<HRegionInfo>() { private HRegionInfo current = null; @Override public boolean visit(Result r) throws IOException { this.current = HRegionInfo.getHRegionInfo(r, HConstants.REGIONINFO_QUALIFIER); if (this.current == null) { LOG.warn("No serialized HRegionInfo in " + r); return true; } if (!isInsideTable(this.current, tableName)) return false; // Else call super and add this Result to the collection. super.visit(r); // Stop collecting regions from table after we get one. return false; } @Override void add(Result r) { // Add the current HRI. this.results.add(this.current); } }; fullScan(catalogTracker, visitor, getTableStartRowForMeta(tableName)); // If visitor has results >= 1 then table exists. return visitor.getResults().size() >= 1; } /** * Gets all of the regions of the specified table. * @param catalogTracker * @param tableName * @return Ordered list of {@link HRegionInfo}. * @throws IOException */ public static List<HRegionInfo> getTableRegions(CatalogTracker catalogTracker, TableName tableName) throws IOException { return getTableRegions(catalogTracker, tableName, false); } /** * Gets all of the regions of the specified table. * @param catalogTracker * @param tableName * @param excludeOfflinedSplitParents If true, do not include offlined split * parents in the return. * @return Ordered list of {@link HRegionInfo}. 
* @throws IOException */ public static List<HRegionInfo> getTableRegions(CatalogTracker catalogTracker, TableName tableName, final boolean excludeOfflinedSplitParents) throws IOException { List<Pair<HRegionInfo, ServerName>> result = null; try { result = getTableRegionsAndLocations(catalogTracker, tableName, excludeOfflinedSplitParents); } catch (InterruptedException e) { throw (InterruptedIOException)new InterruptedIOException().initCause(e); } return getListOfHRegionInfos(result); } static List<HRegionInfo> getListOfHRegionInfos(final List<Pair<HRegionInfo, ServerName>> pairs) { if (pairs == null || pairs.isEmpty()) return null; List<HRegionInfo> result = new ArrayList<HRegionInfo>(pairs.size()); for (Pair<HRegionInfo, ServerName> pair: pairs) { result.add(pair.getFirst()); } return result; } /** * @param current * @param tableName * @return True if <code>current</code> tablename is equal to * <code>tableName</code> */ static boolean isInsideTable(final HRegionInfo current, final TableName tableName) { return tableName.equals(current.getTable()); } /** * @param tableName * @return Place to start Scan in <code>hbase:meta</code> when passed a * <code>tableName</code>; returns &lt;tableName&rt; &lt;,&rt; &lt;,&rt; */ static byte [] getTableStartRowForMeta(TableName tableName) { byte [] startRow = new byte[tableName.getName().length + 2]; System.arraycopy(tableName.getName(), 0, startRow, 0, tableName.getName().length); startRow[startRow.length - 2] = HConstants.DELIMITER; startRow[startRow.length - 1] = HConstants.DELIMITER; return startRow; } /** * This method creates a Scan object that will only scan catalog rows that * belong to the specified table. It doesn't specify any columns. * This is a better alternative to just using a start row and scan until * it hits a new table since that requires parsing the HRI to get the table * name. 
* @param tableName bytes of table's name * @return configured Scan object */ public static Scan getScanForTableName(TableName tableName) { String strName = tableName.getNameAsString(); // Start key is just the table name with delimiters byte[] startKey = Bytes.toBytes(strName + ",,"); // Stop key appends the smallest possible char to the table name byte[] stopKey = Bytes.toBytes(strName + " ,,"); Scan scan = new Scan(startKey); scan.setStopRow(stopKey); return scan; } /** * @param catalogTracker * @param tableName * @return Return list of regioninfos and server. * @throws IOException * @throws InterruptedException */ public static List<Pair<HRegionInfo, ServerName>> getTableRegionsAndLocations(CatalogTracker catalogTracker, TableName tableName) throws IOException, InterruptedException { return getTableRegionsAndLocations(catalogTracker, tableName, true); } /** * @param catalogTracker * @param tableName * @return Return list of regioninfos and server addresses. * @throws IOException * @throws InterruptedException */ public static List<Pair<HRegionInfo, ServerName>> getTableRegionsAndLocations(final CatalogTracker catalogTracker, final TableName tableName, final boolean excludeOfflinedSplitParents) throws IOException, InterruptedException { if (tableName.equals(TableName.META_TABLE_NAME)) { // If meta, do a bit of special handling. 
ServerName serverName = catalogTracker.getMetaLocation(); List<Pair<HRegionInfo, ServerName>> list = new ArrayList<Pair<HRegionInfo, ServerName>>(); list.add(new Pair<HRegionInfo, ServerName>(HRegionInfo.FIRST_META_REGIONINFO, serverName)); return list; } // Make a version of CollectingVisitor that collects HRegionInfo and ServerAddress CollectingVisitor<Pair<HRegionInfo, ServerName>> visitor = new CollectingVisitor<Pair<HRegionInfo, ServerName>>() { private Pair<HRegionInfo, ServerName> current = null; @Override public boolean visit(Result r) throws IOException { HRegionInfo hri = HRegionInfo.getHRegionInfo(r, HConstants.REGIONINFO_QUALIFIER); if (hri == null) { LOG.warn("No serialized HRegionInfo in " + r); return true; } if (!isInsideTable(hri, tableName)) return false; if (excludeOfflinedSplitParents && hri.isSplitParent()) return true; ServerName sn = HRegionInfo.getServerName(r); // Populate this.current so available when we call #add this.current = new Pair<HRegionInfo, ServerName>(hri, sn); // Else call super and add this Result to the collection. return super.visit(r); } @Override void add(Result r) { this.results.add(this.current); } }; fullScan(catalogTracker, visitor, getTableStartRowForMeta(tableName)); return visitor.getResults(); } /** * @param catalogTracker * @param serverName * @return List of user regions installed on this server (does not include * catalog regions). * @throws IOException */ public static NavigableMap<HRegionInfo, Result> getServerUserRegions(CatalogTracker catalogTracker, final ServerName serverName) throws IOException { final NavigableMap<HRegionInfo, Result> hris = new TreeMap<HRegionInfo, Result>(); // Fill the above hris map with entries from hbase:meta that have the passed // servername. 
CollectingVisitor<Result> v = new CollectingVisitor<Result>() { @Override void add(Result r) { if (r == null || r.isEmpty()) return; ServerName sn = HRegionInfo.getServerName(r); if (sn != null && sn.equals(serverName)) this.results.add(r); } }; fullScan(catalogTracker, v); List<Result> results = v.getResults(); if (results != null && !results.isEmpty()) { // Convert results to Map keyed by HRI for (Result r: results) { Pair<HRegionInfo, ServerName> p = HRegionInfo.getHRegionInfoAndServerName(r); if (p != null && p.getFirst() != null) hris.put(p.getFirst(), r); } } return hris; } public static void fullScanMetaAndPrint(final CatalogTracker catalogTracker) throws IOException { Visitor v = new Visitor() { @Override public boolean visit(Result r) throws IOException { if (r == null || r.isEmpty()) return true; LOG.info("fullScanMetaAndPrint.Current Meta Row: " + r); HRegionInfo hrim = HRegionInfo.getHRegionInfo(r); LOG.info("fullScanMetaAndPrint.HRI Print= " + hrim); return true; } }; fullScan(catalogTracker, v); } /** * Performs a full scan of a catalog table. * @param catalogTracker * @param visitor Visitor invoked against each row. * @param startrow Where to start the scan. Pass null if want to begin scan * at first row. * <code>hbase:meta</code>, the default (pass false to scan hbase:meta) * @throws IOException */ public static void fullScan(CatalogTracker catalogTracker, final Visitor visitor, final byte [] startrow) throws IOException { Scan scan = new Scan(); if (startrow != null) scan.setStartRow(startrow); if (startrow == null) { int caching = catalogTracker.getConnection().getConfiguration() .getInt(HConstants.HBASE_META_SCANNER_CACHING, 100); scan.setCaching(caching); } scan.addFamily(HConstants.CATALOG_FAMILY); HTable metaTable = getMetaHTable(catalogTracker); ResultScanner scanner = null; try { scanner = metaTable.getScanner(scan); Result data; while((data = scanner.next()) != null) { if (data.isEmpty()) continue; // Break if visit returns false. 
if (!visitor.visit(data)) break; } } finally { if (scanner != null) scanner.close(); metaTable.close(); } return; } /** * Implementations 'visit' a catalog table row. */ public interface Visitor { /** * Visit the catalog table row. * @param r A row from catalog table * @return True if we are to proceed scanning the table, else false if * we are to stop now. */ boolean visit(final Result r) throws IOException; } /** * A {@link Visitor} that collects content out of passed {@link Result}. */ static abstract class CollectingVisitor<T> implements Visitor { final List<T> results = new ArrayList<T>(); @Override public boolean visit(Result r) throws IOException { if (r == null || r.isEmpty()) return true; add(r); return true; } abstract void add(Result r); /** * @return Collected results; wait till visits complete to collect all * possible results */ List<T> getResults() { return this.results; } } /** * Collects all returned. */ static class CollectAllVisitor extends CollectingVisitor<Result> { @Override void add(Result r) { this.results.add(r); } } /** * Count regions in <code>hbase:meta</code> for passed table. * @param c * @param tableName * @return Count or regions in table <code>tableName</code> * @throws IOException */ public static int getRegionCount(final Configuration c, final String tableName) throws IOException { HTable t = new HTable(c, tableName); try { return t.getRegionLocations().size(); } finally { t.close(); } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package test.ins.URLTest;

//
// IDL:URLTest/IIOPAddress:1.0
//
/**
 * Generated server-side POA skeleton for the URLTest::IIOPAddress IDL
 * interface.  Unmarshals incoming requests and dispatches them to the
 * {@link IIOPAddressOperations} methods implemented by a concrete servant
 * subclass.  NOTE(review): this is ORB-generated code — do not hand-edit
 * the dispatch logic.
 */
public abstract class IIOPAddressPOA
    extends org.omg.PortableServer.Servant
    implements org.omg.CORBA.portable.InvokeHandler,
               IIOPAddressOperations
{
    // Repository IDs implemented by this servant.
    static final String[] _ob_ids_ =
    {
        "IDL:URLTest/IIOPAddress:1.0",
    };

    // Narrowed object reference for this servant (default POA/ORB).
    public IIOPAddress
    _this()
    {
        return IIOPAddressHelper.narrow(super._this_object());
    }

    // Narrowed object reference for this servant within the given ORB.
    public IIOPAddress
    _this(org.omg.CORBA.ORB orb)
    {
        return IIOPAddressHelper.narrow(super._this_object(orb));
    }

    public String[]
    _all_interfaces(org.omg.PortableServer.POA poa, byte[] objectId)
    {
        return _ob_ids_;
    }

    /**
     * ORB entry point: locate the operation named {@code opName} and invoke
     * the matching unmarshalling helper.
     * @throws org.omg.CORBA.BAD_OPERATION if the operation is unknown
     */
    public org.omg.CORBA.portable.OutputStream
    _invoke(String opName,
            org.omg.CORBA.portable.InputStream in,
            org.omg.CORBA.portable.ResponseHandler handler)
    {
        // Operation names, kept sorted so they can be binary-searched.
        final String[] _ob_names =
        {
            "deactivate",
            "destroy",
            "getCorbalocURL",
            "getHost",
            "getIIOPAddress",
            "getKey",
            "getPort",
            "getString",
            "setString"
        };

        int _ob_left = 0;
        int _ob_right = _ob_names.length;
        int _ob_index = -1;

        // Binary search for opName in _ob_names.
        while(_ob_left < _ob_right)
        {
            int _ob_m = (_ob_left + _ob_right) / 2;
            int _ob_res = _ob_names[_ob_m].compareTo(opName);
            if(_ob_res == 0)
            {
                _ob_index = _ob_m;
                break;
            }
            else if(_ob_res > 0)
                _ob_right = _ob_m;
            else
                _ob_left = _ob_m + 1;
        }

        // Not found: retry with a leading '_' stripped (names of the form
        // "_op" map onto the same helpers).
        if(_ob_index == -1 && opName.charAt(0) == '_')
        {
            _ob_left = 0;
            _ob_right = _ob_names.length;
            String _ob_ami_op = opName.substring(1);

            while(_ob_left < _ob_right)
            {
                int _ob_m = (_ob_left + _ob_right) / 2;
                int _ob_res = _ob_names[_ob_m].compareTo(_ob_ami_op);
                if(_ob_res == 0)
                {
                    _ob_index = _ob_m;
                    break;
                }
                else if(_ob_res > 0)
                    _ob_right = _ob_m;
                else
                    _ob_left = _ob_m + 1;
            }
        }

        // Dispatch on the index found above; case order matches _ob_names.
        switch(_ob_index)
        {
        case 0: // deactivate
            return _OB_op_deactivate(in, handler);

        case 1: // destroy
            return _OB_op_destroy(in, handler);

        case 2: // getCorbalocURL
            return _OB_op_getCorbalocURL(in, handler);

        case 3: // getHost
            return _OB_op_getHost(in, handler);

        case 4: // getIIOPAddress
            return _OB_op_getIIOPAddress(in, handler);

        case 5: // getKey
            return _OB_op_getKey(in, handler);

        case 6: // getPort
            return _OB_op_getPort(in, handler);

        case 7: // getString
            return _OB_op_getString(in, handler);

        case 8: // setString
            return _OB_op_setString(in, handler);
        }

        // Unknown operation name.
        throw new org.omg.CORBA.BAD_OPERATION();
    }

    // void deactivate(): no arguments, no result.
    private org.omg.CORBA.portable.OutputStream
    _OB_op_deactivate(org.omg.CORBA.portable.InputStream in,
                      org.omg.CORBA.portable.ResponseHandler handler)
    {
        org.omg.CORBA.portable.OutputStream out = null;
        deactivate();
        out = handler.createReply();
        return out;
    }

    // void destroy(): no arguments, no result.
    private org.omg.CORBA.portable.OutputStream
    _OB_op_destroy(org.omg.CORBA.portable.InputStream in,
                   org.omg.CORBA.portable.ResponseHandler handler)
    {
        org.omg.CORBA.portable.OutputStream out = null;
        destroy();
        out = handler.createReply();
        return out;
    }

    // string getCorbalocURL(): marshals the string result.
    private org.omg.CORBA.portable.OutputStream
    _OB_op_getCorbalocURL(org.omg.CORBA.portable.InputStream in,
                          org.omg.CORBA.portable.ResponseHandler handler)
    {
        org.omg.CORBA.portable.OutputStream out = null;
        String _ob_r = getCorbalocURL();
        out = handler.createReply();
        out.write_string(_ob_r);
        return out;
    }

    // string getHost(): marshals the string result.
    private org.omg.CORBA.portable.OutputStream
    _OB_op_getHost(org.omg.CORBA.portable.InputStream in,
                   org.omg.CORBA.portable.ResponseHandler handler)
    {
        org.omg.CORBA.portable.OutputStream out = null;
        String _ob_r = getHost();
        out = handler.createReply();
        out.write_string(_ob_r);
        return out;
    }

    // string getIIOPAddress(): marshals the string result.
    private org.omg.CORBA.portable.OutputStream
    _OB_op_getIIOPAddress(org.omg.CORBA.portable.InputStream in,
                          org.omg.CORBA.portable.ResponseHandler handler)
    {
        org.omg.CORBA.portable.OutputStream out = null;
        String _ob_r = getIIOPAddress();
        out = handler.createReply();
        out.write_string(_ob_r);
        return out;
    }

    // string getKey(): marshals the string result.
    private org.omg.CORBA.portable.OutputStream
    _OB_op_getKey(org.omg.CORBA.portable.InputStream in,
                  org.omg.CORBA.portable.ResponseHandler handler)
    {
        org.omg.CORBA.portable.OutputStream out = null;
        String _ob_r = getKey();
        out = handler.createReply();
        out.write_string(_ob_r);
        return out;
    }

    // unsigned short getPort(): marshalled as a CORBA ushort.
    private org.omg.CORBA.portable.OutputStream
    _OB_op_getPort(org.omg.CORBA.portable.InputStream in,
                   org.omg.CORBA.portable.ResponseHandler handler)
    {
        org.omg.CORBA.portable.OutputStream out = null;
        short _ob_r = getPort();
        out = handler.createReply();
        out.write_ushort(_ob_r);
        return out;
    }

    // string getString(): marshals the string result.
    private org.omg.CORBA.portable.OutputStream
    _OB_op_getString(org.omg.CORBA.portable.InputStream in,
                     org.omg.CORBA.portable.ResponseHandler handler)
    {
        org.omg.CORBA.portable.OutputStream out = null;
        String _ob_r = getString();
        out = handler.createReply();
        out.write_string(_ob_r);
        return out;
    }

    // void setString(in string s): unmarshals one string argument.
    private org.omg.CORBA.portable.OutputStream
    _OB_op_setString(org.omg.CORBA.portable.InputStream in,
                     org.omg.CORBA.portable.ResponseHandler handler)
    {
        org.omg.CORBA.portable.OutputStream out = null;
        String _ob_a0 = in.read_string();
        setString(_ob_a0);
        out = handler.createReply();
        return out;
    }
}
/*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.facebook.buck.cxx;

import com.facebook.buck.core.cell.CellPathResolver;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.model.Flavor;
import com.facebook.buck.core.model.FlavorConvertible;
import com.facebook.buck.core.model.FlavorDomain;
import com.facebook.buck.core.model.InternalFlavor;
import com.facebook.buck.core.rules.ActionGraphBuilder;
import com.facebook.buck.core.rules.BuildRule;
import com.facebook.buck.core.sourcepath.SourcePath;
import com.facebook.buck.core.util.graph.AbstractBreadthFirstTraversal;
import com.facebook.buck.cxx.config.CxxBuckConfig;
import com.facebook.buck.cxx.toolchain.CxxPlatform;
import com.facebook.buck.cxx.toolchain.HeaderSymlinkTree;
import com.facebook.buck.cxx.toolchain.HeaderVisibility;
import com.facebook.buck.cxx.toolchain.InferBuckConfig;
import com.facebook.buck.cxx.toolchain.PicType;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.util.stream.RichStream;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Multimaps;
import java.nio.file.Path;
import java.util.Optional;

/** Handles infer flavors for {@link CxxLibraryGroup} and {@link CxxBinary}. */
public final class CxxInferEnhancer {

  /** Flavor adorning the individual infer capture rules. */
  static final InternalFlavor INFER_CAPTURE_FLAVOR = InternalFlavor.of("infer-capture");

  /** Flavors affixed to a library or binary rule to run infer. */
  public enum InferFlavors implements FlavorConvertible {
    INFER(InternalFlavor.of("infer")),
    INFER_ANALYZE(InternalFlavor.of("infer-analyze")),
    INFER_CAPTURE_ALL(InternalFlavor.of("infer-capture-all")),
    INFER_CAPTURE_ONLY(InternalFlavor.of("infer-capture-only"));

    private final InternalFlavor flavor;

    InferFlavors(InternalFlavor flavor) {
      this.flavor = flavor;
    }

    @Override
    public InternalFlavor getFlavor() {
      return flavor;
    }

    /** Strips every infer-related flavor from {@code target}. */
    private static BuildTarget targetWithoutAnyInferFlavor(BuildTarget target) {
      BuildTarget result = target;
      for (InferFlavors f : values()) {
        result = result.withoutFlavors(f.getFlavor());
      }
      return result;
    }

    /** Asserts that {@code flavors} contains no infer-related flavor. */
    private static void checkNoInferFlavors(ImmutableSet<Flavor> flavors) {
      for (InferFlavors f : InferFlavors.values()) {
        Preconditions.checkArgument(
            !flavors.contains(f.getFlavor()),
            "Unexpected infer-related flavor found: %s",
            f.toString());
      }
    }
  }

  public static final FlavorDomain<InferFlavors> INFER_FLAVOR_DOMAIN =
      FlavorDomain.from("Infer flavors", InferFlavors.class);

  /**
   * Entry point: builds (or fetches from the graph) the infer rule requested by
   * the infer flavor carried on {@code target}.
   */
  public static BuildRule requireInferRule(
      BuildTarget target,
      ProjectFilesystem filesystem,
      ActionGraphBuilder graphBuilder,
      CellPathResolver cellRoots,
      CxxBuckConfig cxxBuckConfig,
      CxxPlatform cxxPlatform,
      CxxConstructorArg args,
      InferBuckConfig inferBuckConfig) {
    return new CxxInferEnhancer(graphBuilder, cxxBuckConfig, inferBuckConfig, cxxPlatform)
        .requireInferRule(target, cellRoots, filesystem, args);
  }

  private final ActionGraphBuilder graphBuilder;
  private final CxxBuckConfig cxxBuckConfig;
  private final InferBuckConfig inferBuckConfig;
  private final CxxPlatform cxxPlatform;

  private CxxInferEnhancer(
      ActionGraphBuilder graphBuilder,
      CxxBuckConfig cxxBuckConfig,
      InferBuckConfig inferBuckConfig,
      CxxPlatform cxxPlatform) {
    this.graphBuilder = graphBuilder;
    this.cxxBuckConfig = cxxBuckConfig;
    this.inferBuckConfig = inferBuckConfig;
    this.cxxPlatform = cxxPlatform;
  }

  /** Dispatches on the infer flavor present on {@code buildTarget}. */
  private BuildRule requireInferRule(
      BuildTarget buildTarget,
      CellPathResolver cellRoots,
      ProjectFilesystem filesystem,
      CxxConstructorArg args) {
    Optional<InferFlavors> inferFlavor = INFER_FLAVOR_DOMAIN.getValue(buildTarget);
    Preconditions.checkArgument(
        inferFlavor.isPresent(), "Expected BuildRuleParams to contain infer flavor.");
    switch (inferFlavor.get()) {
      case INFER:
        return requireInferAnalyzeAndReportBuildRuleForCxxDescriptionArg(
            buildTarget, cellRoots, filesystem, args);
      case INFER_ANALYZE:
        return requireInferAnalyzeBuildRuleForCxxDescriptionArg(
            buildTarget, cellRoots, filesystem, args);
      case INFER_CAPTURE_ALL:
        return requireAllTransitiveCaptureBuildRules(buildTarget, cellRoots, filesystem, args);
      case INFER_CAPTURE_ONLY:
        return requireInferCaptureAggregatorBuildRuleForCxxDescriptionArg(
            buildTarget, cellRoots, filesystem, args);
    }
    // Unreachable as long as the switch above covers every enum constant.
    throw new IllegalStateException(
        "All InferFlavor cases should be handled, got: " + inferFlavor.get());
  }

  /** Bundles every transitive capture rule into one aggregate rule. */
  private BuildRule requireAllTransitiveCaptureBuildRules(
      BuildTarget target,
      CellPathResolver cellRoots,
      ProjectFilesystem filesystem,
      CxxConstructorArg args) {

    CxxInferCaptureRulesAggregator aggregator =
        requireInferCaptureAggregatorBuildRuleForCxxDescriptionArg(
            target, cellRoots, filesystem, args);

    ImmutableSet<CxxInferCapture> captureRules = aggregator.getAllTransitiveCaptures();

    return graphBuilder.addToIndex(new CxxInferCaptureTransitive(target, filesystem, captureRules));
  }

  /** Builds the analyze rule plus the report rule that consumes it. */
  private CxxInferComputeReport requireInferAnalyzeAndReportBuildRuleForCxxDescriptionArg(
      BuildTarget target,
      CellPathResolver cellRoots,
      ProjectFilesystem filesystem,
      CxxConstructorArg args) {
    // Key the memoized rule on the target with ONLY the INFER flavor attached.
    BuildTarget cleanTarget = InferFlavors.targetWithoutAnyInferFlavor(target);

    return (CxxInferComputeReport)
        graphBuilder.computeIfAbsent(
            cleanTarget.withAppendedFlavors(InferFlavors.INFER.getFlavor()),
            targetWithInferFlavor ->
                new CxxInferComputeReport(
                    targetWithInferFlavor,
                    filesystem,
                    requireInferAnalyzeBuildRuleForCxxDescriptionArg(
                        cleanTarget, cellRoots, filesystem, args)));
  }

  /** Builds the analyze rule for this target plus its transitive dep analyses. */
  private CxxInferAnalyze requireInferAnalyzeBuildRuleForCxxDescriptionArg(
      BuildTarget target,
      CellPathResolver cellRoots,
      ProjectFilesystem filesystem,
      CxxConstructorArg args) {

    Flavor inferAnalyze = InferFlavors.INFER_ANALYZE.getFlavor();

    BuildTarget cleanTarget = InferFlavors.targetWithoutAnyInferFlavor(target);

    return (CxxInferAnalyze)
        graphBuilder.computeIfAbsent(
            cleanTarget.withAppendedFlavors(inferAnalyze),
            targetWithInferAnalyzeFlavor -> {
              ImmutableSet<BuildRule> deps = args.getCxxDeps().get(graphBuilder, cxxPlatform);
              ImmutableSet<CxxInferAnalyze> transitiveDepsLibraryRules =
                  requireTransitiveDependentLibraries(
                      cxxPlatform, deps, inferAnalyze, CxxInferAnalyze.class);
              return new CxxInferAnalyze(
                  targetWithInferAnalyzeFlavor,
                  filesystem,
                  inferBuckConfig,
                  requireInferCaptureBuildRules(
                      cleanTarget, cellRoots, filesystem, collectSources(cleanTarget, args), args),
                  transitiveDepsLibraryRules);
            });
  }

  /** Builds the capture-only aggregator for this target and its dep aggregators. */
  private CxxInferCaptureRulesAggregator requireInferCaptureAggregatorBuildRuleForCxxDescriptionArg(
      BuildTarget target,
      CellPathResolver cellRoots,
      ProjectFilesystem filesystem,
      CxxConstructorArg args) {

    Flavor inferCaptureOnly = InferFlavors.INFER_CAPTURE_ONLY.getFlavor();

    return (CxxInferCaptureRulesAggregator)
        graphBuilder.computeIfAbsent(
            InferFlavors.targetWithoutAnyInferFlavor(target).withAppendedFlavors(inferCaptureOnly),
            targetWithInferCaptureOnlyFlavor -> {
              BuildTarget cleanTarget = InferFlavors.targetWithoutAnyInferFlavor(target);

              ImmutableMap<String, CxxSource> sources = collectSources(cleanTarget, args);

              ImmutableSet<CxxInferCapture> captureRules =
                  requireInferCaptureBuildRules(cleanTarget, cellRoots, filesystem, sources, args);

              ImmutableSet<CxxInferCaptureRulesAggregator> transitiveAggregatorRules =
                  requireTransitiveCaptureAndAggregatingRules(args, inferCaptureOnly);

              return new CxxInferCaptureRulesAggregator(
                  targetWithInferCaptureOnlyFlavor,
                  filesystem,
                  captureRules,
                  transitiveAggregatorRules);
            });
  }

  /** Collects the aggregator rules of all transitive C++ library deps. */
  private ImmutableSet<CxxInferCaptureRulesAggregator> requireTransitiveCaptureAndAggregatingRules(
      CxxConstructorArg args, Flavor requiredFlavor) {
    ImmutableSet<BuildRule> deps = args.getCxxDeps().get(graphBuilder, cxxPlatform);

    return requireTransitiveDependentLibraries(
        cxxPlatform, deps, requiredFlavor, CxxInferCaptureRulesAggregator.class);
  }

  /** Parses the C++ sources of {@code args}; the target must carry no infer flavor. */
  private ImmutableMap<String, CxxSource> collectSources(
      BuildTarget buildTarget, CxxConstructorArg args) {
    InferFlavors.checkNoInferFlavors(buildTarget.getFlavors().getSet());
    return CxxDescriptionEnhancer.parseCxxSources(buildTarget, graphBuilder, cxxPlatform, args);
  }

  /**
   * BFS over the dep graph collecting, for every {@link CxxLibraryGroup}, the rule of
   * type {@code ruleClass} flavored with {@code requiredFlavor}.  Traversal does not
   * descend past non-library rules.
   */
  private <T extends BuildRule> ImmutableSet<T> requireTransitiveDependentLibraries(
      CxxPlatform cxxPlatform,
      Iterable<? extends BuildRule> deps,
      Flavor requiredFlavor,
      Class<T> ruleClass) {
    ImmutableSet.Builder<T> depsBuilder = ImmutableSet.builder();
    new AbstractBreadthFirstTraversal<BuildRule>(deps) {
      @Override
      public Iterable<BuildRule> visit(BuildRule buildRule) {
        if (buildRule instanceof CxxLibraryGroup) {
          CxxLibraryGroup library = (CxxLibraryGroup) buildRule;
          depsBuilder.add(
              (ruleClass.cast(
                  library.requireBuildRule(
                      graphBuilder, requiredFlavor, cxxPlatform.getFlavor()))));
          return buildRule.getBuildDeps();
        }
        return ImmutableSet.of();
      }
    }.start();
    return depsBuilder.build();
  }

  /** Assembles preprocessor input for a binary: own flags plus transitive dep input. */
  private ImmutableList<CxxPreprocessorInput> computePreprocessorInputForCxxBinaryDescriptionArg(
      BuildTarget target,
      CellPathResolver cellRoots,
      CxxPlatform cxxPlatform,
      CxxBinaryDescription.CommonArg args,
      HeaderSymlinkTree headerSymlinkTree,
      ProjectFilesystem projectFilesystem) {
    ImmutableSet<BuildRule> deps = args.getCxxDeps().get(graphBuilder, cxxPlatform);
    return CxxDescriptionEnhancer.collectCxxPreprocessorInput(
        target,
        cxxPlatform,
        graphBuilder,
        deps,
        // Expand macros in the per-language preprocessor flags.
        ImmutableListMultimap.copyOf(
            Multimaps.transformValues(
                CxxFlags.getLanguageFlagsWithMacros(
                    args.getPreprocessorFlags(),
                    args.getPlatformPreprocessorFlags(),
                    args.getLangPreprocessorFlags(),
                    args.getLangPlatformPreprocessorFlags(),
                    cxxPlatform),
                CxxDescriptionEnhancer.getStringWithMacrosArgsConverter(
                        target, cellRoots, graphBuilder, cxxPlatform)
                    ::convert)),
        ImmutableList.of(headerSymlinkTree),
        args.getFrameworks(),
        CxxPreprocessables.getTransitiveCxxPreprocessorInputFromDeps(
            cxxPlatform,
            graphBuilder,
            RichStream.from(deps).filter(CxxPreprocessorDep.class::isInstance).toImmutableList()),
        args.getRawHeaders(),
        args.getIncludeDirectories(),
        projectFilesystem);
  }

  /**
   * Builds one infer capture rule per source of {@code target}.  Supports both
   * binary and library args; anything else is a programming error.
   */
  private ImmutableSet<CxxInferCapture> requireInferCaptureBuildRules(
      BuildTarget target,
      CellPathResolver cellRoots,
      ProjectFilesystem filesystem,
      ImmutableMap<String, CxxSource> sources,
      CxxConstructorArg args) {

    InferFlavors.checkNoInferFlavors(target.getFlavors().getSet());

    ImmutableMap<Path, SourcePath> headers =
        CxxDescriptionEnhancer.parseHeaders(
            target, graphBuilder, filesystem, Optional.of(cxxPlatform), args);

    // Setup the header symlink tree and combine all the preprocessor input from this rule
    // and all dependencies.
    boolean shouldCreateHeadersSymlinks = true;
    if (args instanceof CxxLibraryDescription.CommonArg) {
      shouldCreateHeadersSymlinks =
          ((CxxLibraryDescription.CommonArg) args)
              .getXcodePrivateHeadersSymlinks()
              .orElse(cxxPlatform.getPrivateHeadersSymlinksEnabled());
    }
    HeaderSymlinkTree headerSymlinkTree =
        CxxDescriptionEnhancer.requireHeaderSymlinkTree(
            target,
            filesystem,
            graphBuilder,
            cxxPlatform,
            headers,
            HeaderVisibility.PRIVATE,
            shouldCreateHeadersSymlinks);

    ImmutableList<CxxPreprocessorInput> preprocessorInputs;

    if (args instanceof CxxBinaryDescription.CommonArg) {
      preprocessorInputs =
          computePreprocessorInputForCxxBinaryDescriptionArg(
              target,
              cellRoots,
              cxxPlatform,
              (CxxBinaryDescription.CommonArg) args,
              headerSymlinkTree,
              filesystem);
    } else if (args instanceof CxxLibraryDescription.CommonArg) {
      preprocessorInputs =
          CxxLibraryDescription.getPreprocessorInputsForBuildingLibrarySources(
              cxxBuckConfig,
              graphBuilder,
              cellRoots,
              target,
              (CxxLibraryDescription.CommonArg) args,
              cxxPlatform,
              args.getCxxDeps().get(graphBuilder, cxxPlatform),
              CxxLibraryDescription.TransitiveCxxPreprocessorInputFunction.fromLibraryRule(),
              ImmutableList.of(headerSymlinkTree),
              filesystem);
    } else {
      throw new IllegalStateException("Only Binary and Library args supported.");
    }

    CxxSourceRuleFactory factory =
        CxxSourceRuleFactory.of(
            filesystem,
            target,
            graphBuilder,
            graphBuilder.getSourcePathResolver(),
            cxxBuckConfig,
            cxxPlatform,
            preprocessorInputs,
            // Expand macros in the per-language compiler flags.
            ImmutableMultimap.copyOf(
                Multimaps.transformValues(
                    CxxFlags.getLanguageFlagsWithMacros(
                        args.getCompilerFlags(),
                        args.getPlatformCompilerFlags(),
                        args.getLangCompilerFlags(),
                        args.getLangPlatformCompilerFlags(),
                        cxxPlatform),
                    CxxDescriptionEnhancer.getStringWithMacrosArgsConverter(
                            target, cellRoots, graphBuilder, cxxPlatform)
                        ::convert)),
            args.getPrefixHeader(),
            args.getPrecompiledHeader(),
            PicType.PDC);
    return factory.requireInferCaptureBuildRules(sources, inferBuckConfig);
  }
}
/*
 * Copyright 2015-2016, Institute of Cybernetics at Tallinn University of Technology
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package ee.ioc.phon.android.speechutils;

import android.annotation.TargetApi;
import android.media.AudioRecord;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.os.Build;

import androidx.annotation.RequiresPermission;

import java.nio.ByteBuffer;
import java.util.List;

import ee.ioc.phon.android.speechutils.utils.AudioUtils;

import static android.Manifest.permission.RECORD_AUDIO;

/**
 * Based on https://android.googlesource.com/platform/cts/+/jb-mr2-release/tests/tests/media/src/android/media/cts/EncoderTest.java
 * Requires Android v4.1 / API 16 / JELLY_BEAN
 *
 * Records raw audio via the superclass and pushes it through a MediaCodec
 * FLAC encoder, accumulating the encoded bytes in mRecordingEnc so consumers
 * can drain them incrementally.
 */
public class EncodedAudioRecorder extends AbstractAudioRecorder {

    // TODO: support other formats than FLAC
    private static final String MIME = "audio/flac";
    private static final String CONTENT_TYPE = "audio/x-flac";
    //private static final String MIME = "audio/opus";
    //private static final String CONTENT_TYPE = "audio/x-opus";

    // Stop encoding if output buffer has not been available that many times.
    private static final int MAX_NUM_RETRIES_DEQUEUE_OUTPUT_BUFFER = 10;

    // Timeout in microseconds to dequeue a buffer
    // TODO: not sure what values are best here (behaves weird with negative values and very large values)
    private static final long DEQUEUE_INPUT_BUFFER_TIMEOUT = 10000;
    private static final long DEQUEUE_OUTPUT_BUFFER_TIMEOUT = 10000;

    // TODO: Use queue of byte[]
    // Accumulated encoder output; append-only, guarded by the synchronized
    // consume* accessors below.
    private final byte[] mRecordingEnc;
    // Number of valid bytes in mRecordingEnc.
    private int mRecordedEncLength = 0;
    // Offset up to which the caller has already consumed mRecordingEnc.
    private int mConsumedEncLength = 0;

    // Raw bytes fed into / encoded bytes drained out of the codec (stats only).
    private int mNumBytesSubmitted = 0;
    private int mNumBytesDequeued = 0;

    @RequiresPermission(RECORD_AUDIO)
    public EncodedAudioRecorder(int audioSource, int sampleRate) {
        super(audioSource, sampleRate);
        try {
            int bufferSize = getBufferSize();
            createRecorder(audioSource, sampleRate, bufferSize);
            int framePeriod = bufferSize / (2 * RESOLUTION_IN_BYTES * CHANNELS);
            createBuffer(framePeriod);
            setState(State.READY);
        } catch (Exception e) {
            if (e.getMessage() == null) {
                handleError("Unknown error occurred while initializing recording");
            } else {
                handleError(e.getMessage());
            }
        }
        // TODO: replace 35 with the max length of the recording
        // NOTE(review): buffer is sized for 35 s of RAW audio; encoded FLAC is
        // smaller, so this should suffice — confirm against max recording length.
        mRecordingEnc = new byte[RESOLUTION_IN_BYTES * CHANNELS * sampleRate * 35]; // 35 sec raw
    }

    @RequiresPermission(RECORD_AUDIO)
    public EncodedAudioRecorder(int sampleRate) {
        this(DEFAULT_AUDIO_SOURCE, sampleRate);
    }

    @RequiresPermission(RECORD_AUDIO)
    public EncodedAudioRecorder() {
        this(DEFAULT_AUDIO_SOURCE, DEFAULT_SAMPLE_RATE);
    }

    /**
     * TODO: the MIME should be configurable as the server might not support all formats
     * (returning "Your GStreamer installation is missing a plug-in.")
     * TODO: according to the server docs, for encoded data we do not need to specify the content type
     * such as "audio/x-flac", but it did not work without (nor with "audio/flac").
     */
    public String getContentType() {
        return CONTENT_TYPE;
    }

    /**
     * Returns the not-yet-consumed encoded bytes and resets both the recorded
     * and the consumed positions to zero (i.e. truncates the buffer).
     */
    public synchronized byte[] consumeRecordingEncAndTruncate() {
        int len = getConsumedEncLength();
        byte[] bytes = getCurrentRecordingEnc(len);
        setRecordedEncLength(0);
        setConsumedEncLength(0);
        return bytes;
    }

    /**
     * @return bytes that have been recorded and encoded since this method was last called
     */
    public synchronized byte[] consumeRecordingEnc() {
        byte[] bytes = getCurrentRecordingEnc(getConsumedEncLength());
        setConsumedEncLength(getRecordedEncLength());
        return bytes;
    }

    /**
     * Main recording loop: sets up a codec for MIME at the recorder's sample
     * rate and runs the synchronous encode loop until EOS or error.
     */
    @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
    @RequiresPermission(RECORD_AUDIO)
    @Override
    protected void recorderLoop(AudioRecord speechRecord) {
        mNumBytesSubmitted = 0;
        mNumBytesDequeued = 0;
        MediaFormat format = MediaFormatFactory.createMediaFormat(MIME, getSampleRate());
        MediaCodec codec = getCodec(format);
        if (codec == null) {
            handleError("no codec found");
        } else {
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
                Log.i("Using codec: " + codec.getCanonicalName());
            }
            int status = recorderEncoderLoop(codec, speechRecord);
            if (Log.DEBUG) {
                AudioUtils.showMetrics(format, mNumBytesSubmitted, mNumBytesDequeued);
            }
            if (status < 0) {
                handleError("encoder error");
            }
        }
    }

    // TODO: we currently return the first suitable codec
    // Returns a configured encoder for the given format, or null if none found.
    private MediaCodec getCodec(MediaFormat format) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
            List<String> componentNames = AudioUtils.getEncoderNamesForType(format.getString(MediaFormat.KEY_MIME));
            for (String componentName : componentNames) {
                Log.i("component/format: " + componentName + "/" + format);
                MediaCodec codec = AudioUtils.createCodec(componentName, format);
                if (codec != null) {
                    return codec;
                }
            }
        }
        return null;
    }

    private int getConsumedEncLength() {
        return mConsumedEncLength;
    }

    private void setConsumedEncLength(int len) {
        mConsumedEncLength = len;
    }

    private void setRecordedEncLength(int len) {
        mRecordedEncLength = len;
    }

    private int getRecordedEncLength() {
        return mRecordedEncLength;
    }

    // Appends an encoded chunk to mRecordingEnc; reports overflow via handleError.
    private void addEncoded(byte[] buffer) {
        int len = buffer.length;
        if (mRecordingEnc.length >= mRecordedEncLength + len) {
            System.arraycopy(buffer, 0, mRecordingEnc, mRecordedEncLength, len);
            mRecordedEncLength += len;
        } else {
            handleError("RecorderEnc buffer overflow: " + mRecordedEncLength);
        }
    }

    // Copies mRecordingEnc[startPos .. recorded) into a fresh array.
    private byte[] getCurrentRecordingEnc(int startPos) {
        int len = getRecordedEncLength() - startPos;
        byte[] bytes = new byte[len];
        System.arraycopy(mRecordingEnc, startPos, bytes, 0, len);
        Log.i("Copied (enc) from pos: " + startPos + ", bytes: " + bytes.length);
        return bytes;
    }

    /**
     * Copy audio from the recorder into the encoder.
     * Returns the number of bytes queued, or -1 on error / recorder stopped.
     * NOTE(review): the full buffer capacity is queued even if read() may have
     * filled fewer bytes, and the presentation timestamp is always 0 — confirm
     * both are acceptable for this encoder.
     */
    @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
    @RequiresPermission(RECORD_AUDIO)
    private int queueInputBuffer(MediaCodec codec, ByteBuffer[] inputBuffers, int index, AudioRecord speechRecord) {
        if (speechRecord == null || speechRecord.getRecordingState() != SpeechRecord.RECORDSTATE_RECORDING) {
            return -1;
        }
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
            ByteBuffer inputBuffer = inputBuffers[index];
            inputBuffer.clear();
            int size = inputBuffer.limit();
            byte[] buffer = new byte[size];
            int status = read(speechRecord, buffer);
            if (status < 0) {
                handleError("status = " + status);
                return -1;
            }
            inputBuffer.put(buffer);
            codec.queueInputBuffer(index, 0, size, 0, 0);
            return size;
        }
        return -1;
    }

    /**
     * Save the encoded (output) buffer into the complete encoded recording.
     * TODO: copy directly (without the intermediate byte array)
     */
    @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
    private void dequeueOutputBuffer(MediaCodec codec, ByteBuffer[] outputBuffers, int index, MediaCodec.BufferInfo info) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
            ByteBuffer buffer = outputBuffers[index];
            Log.i("size/remaining: " + info.size + "/" + buffer.remaining());
            if (info.size <= buffer.remaining()) {
                final byte[] bufferCopied = new byte[info.size];
                buffer.get(bufferCopied); // TODO: catch BufferUnderflow
                // TODO: do we need to clear?
                // on N5: always size == remaining(), clearing is not needed
                // on SGS2: remaining decreases until it becomes less than size, which results in BufferUnderflow
                // (but SGS2 records only zeros anyway)
                //buffer.clear();
                codec.releaseOutputBuffer(index, false);
                addEncoded(bufferCopied);
                if (Log.DEBUG) {
                    AudioUtils.showSomeBytes("out", bufferCopied);
                }
            } else {
                Log.e("size > remaining");
                codec.releaseOutputBuffer(index, false);
            }
        }
    }

    /**
     * Reads bytes from the given recorder and encodes them with the given encoder.
     * Uses the (deprecated) Synchronous Processing using Buffer Arrays.
     * <p/>
     * Encoders (or codecs that generate compressed data) will create and return the codec specific
     * data before any valid output buffer in output buffers marked with the codec-config flag.
     * Buffers containing codec-specific-data have no meaningful timestamps.
     *
     * @return 0 on clean EOS, -1 if the loop gave up (repeated output timeouts
     * or API level too low)
     */
    @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
    @RequiresPermission(RECORD_AUDIO)
    private int recorderEncoderLoop(MediaCodec codec, AudioRecord speechRecord) {
        int status = -1;
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
            codec.start();
            // Getting some buffers (e.g. 4 of each) to communicate with the codec
            ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
            ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
            Log.i("input buffers " + codecInputBuffers.length + "; output buffers: " + codecOutputBuffers.length);
            boolean doneSubmittingInput = false;
            int numDequeueOutputBufferTimeout = 0;
            int index;
            while (true) {
                // Feed raw audio in until the recorder stops, then signal EOS.
                if (!doneSubmittingInput) {
                    index = codec.dequeueInputBuffer(DEQUEUE_INPUT_BUFFER_TIMEOUT);
                    if (index >= 0) {
                        int size = queueInputBuffer(codec, codecInputBuffers, index, speechRecord);
                        if (size == -1) {
                            codec.queueInputBuffer(index, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                            Log.i("enc: in: EOS");
                            doneSubmittingInput = true;
                        } else {
                            Log.i("enc: in: " + size);
                            mNumBytesSubmitted += size;
                        }
                    } else {
                        Log.i("enc: in: timeout, will try again");
                    }
                }
                // Drain whatever encoded output is ready.
                MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
                index = codec.dequeueOutputBuffer(info, DEQUEUE_OUTPUT_BUFFER_TIMEOUT);
                Log.i("enc: out: flags/index: " + info.flags + "/" + index);
                if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    numDequeueOutputBufferTimeout++;
                    Log.i("enc: out: INFO_TRY_AGAIN_LATER: " + numDequeueOutputBufferTimeout);
                    if (numDequeueOutputBufferTimeout > MAX_NUM_RETRIES_DEQUEUE_OUTPUT_BUFFER) {
                        break;
                    }
                } else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    MediaFormat format = codec.getOutputFormat();
                    Log.i("enc: out: INFO_OUTPUT_FORMAT_CHANGED: " + format.toString());
                } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    // Output buffer array was invalidated; refresh our reference.
                    codecOutputBuffers = codec.getOutputBuffers();
                    Log.i("enc: out: INFO_OUTPUT_BUFFERS_CHANGED");
                } else {
                    dequeueOutputBuffer(codec, codecOutputBuffers, index, info);
                    mNumBytesDequeued += info.size;
                    numDequeueOutputBufferTimeout = 0;
                    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        Log.i("enc: out: EOS");
                        status = 0;
                        break;
                    }
                }
            }
            codec.stop();
            codec.release();
            Log.i("stopped and released codec");
        }
        return status;
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
/*
 * This code was generated by https://github.com/googleapis/google-api-java-client-services/
 * Modify at your own risk.
 */

package com.google.api.services.servicenetworking.v1beta.model;

/**
 * A backend rule provides configuration for an individual API element.
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the Service Networking API. For a detailed explanation
 * see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * <p> NOTE: field names below are mapped to JSON keys reflectively via
 * {@code @com.google.api.client.util.Key}; do not rename them. </p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class BackendRule extends com.google.api.client.json.GenericJson {

  /**
   * The address of the API backend.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String address;

  /**
   * The number of seconds to wait for a response from a request. The default deadline for gRPC is
   * infinite (no deadline) and HTTP requests is 5 seconds.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Double deadline;

  /**
   * When disable_auth is false, a JWT ID token will be generated with the value from
   * BackendRule.address as jwt_audience, overrode to the HTTP "Authorization" request header and
   * sent to the backend.
   *
   * When disable_auth is true, a JWT ID token won't be generated and the original "Authorization"
   * HTTP header will be preserved. If the header is used to carry the original token and is
   * expected by the backend, this field must be set to true to preserve the header.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean disableAuth;

  /**
   * The JWT audience is used when generating a JWT ID token for the backend. This ID token will be
   * added in the HTTP "authorization" header, and sent to the backend.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String jwtAudience;

  /**
   * Minimum deadline in seconds needed for this method. Calls having deadline value lower than this
   * will be rejected.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Double minDeadline;

  /**
   * The number of seconds to wait for the completion of a long running operation. The default is no
   * deadline.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Double operationDeadline;

  /**
   * The value may be {@code null}.
   * NOTE(review): the generator emitted no description for this field; presumably it holds a
   * PathTranslation enum name (e.g. CONSTANT_ADDRESS / APPEND_PATH_TO_ADDRESS) — verify against
   * the Service Networking API reference.
   */
  @com.google.api.client.util.Key
  private java.lang.String pathTranslation;

  /**
   * Selects the methods to which this rule applies.
   *
   * Refer to selector for syntax details.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String selector;

  /**
   * The address of the API backend.
   * @return value or {@code null} for none
   */
  public java.lang.String getAddress() {
    return address;
  }

  /**
   * The address of the API backend.
   * @param address address or {@code null} for none
   */
  public BackendRule setAddress(java.lang.String address) {
    this.address = address;
    return this;
  }

  /**
   * The number of seconds to wait for a response from a request. The default deadline for gRPC is
   * infinite (no deadline) and HTTP requests is 5 seconds.
   * @return value or {@code null} for none
   */
  public java.lang.Double getDeadline() {
    return deadline;
  }

  /**
   * The number of seconds to wait for a response from a request. The default deadline for gRPC is
   * infinite (no deadline) and HTTP requests is 5 seconds.
   * @param deadline deadline or {@code null} for none
   */
  public BackendRule setDeadline(java.lang.Double deadline) {
    this.deadline = deadline;
    return this;
  }

  /**
   * When disable_auth is false, a JWT ID token will be generated with the value from
   * BackendRule.address as jwt_audience, overrode to the HTTP "Authorization" request header and
   * sent to the backend.
   *
   * When disable_auth is true, a JWT ID token won't be generated and the original "Authorization"
   * HTTP header will be preserved. If the header is used to carry the original token and is
   * expected by the backend, this field must be set to true to preserve the header.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getDisableAuth() {
    return disableAuth;
  }

  /**
   * When disable_auth is false, a JWT ID token will be generated with the value from
   * BackendRule.address as jwt_audience, overrode to the HTTP "Authorization" request header and
   * sent to the backend.
   *
   * When disable_auth is true, a JWT ID token won't be generated and the original "Authorization"
   * HTTP header will be preserved. If the header is used to carry the original token and is
   * expected by the backend, this field must be set to true to preserve the header.
   * @param disableAuth disableAuth or {@code null} for none
   */
  public BackendRule setDisableAuth(java.lang.Boolean disableAuth) {
    this.disableAuth = disableAuth;
    return this;
  }

  /**
   * The JWT audience is used when generating a JWT ID token for the backend. This ID token will be
   * added in the HTTP "authorization" header, and sent to the backend.
   * @return value or {@code null} for none
   */
  public java.lang.String getJwtAudience() {
    return jwtAudience;
  }

  /**
   * The JWT audience is used when generating a JWT ID token for the backend. This ID token will be
   * added in the HTTP "authorization" header, and sent to the backend.
   * @param jwtAudience jwtAudience or {@code null} for none
   */
  public BackendRule setJwtAudience(java.lang.String jwtAudience) {
    this.jwtAudience = jwtAudience;
    return this;
  }

  /**
   * Minimum deadline in seconds needed for this method. Calls having deadline value lower than this
   * will be rejected.
   * @return value or {@code null} for none
   */
  public java.lang.Double getMinDeadline() {
    return minDeadline;
  }

  /**
   * Minimum deadline in seconds needed for this method. Calls having deadline value lower than this
   * will be rejected.
   * @param minDeadline minDeadline or {@code null} for none
   */
  public BackendRule setMinDeadline(java.lang.Double minDeadline) {
    this.minDeadline = minDeadline;
    return this;
  }

  /**
   * The number of seconds to wait for the completion of a long running operation. The default is no
   * deadline.
   * @return value or {@code null} for none
   */
  public java.lang.Double getOperationDeadline() {
    return operationDeadline;
  }

  /**
   * The number of seconds to wait for the completion of a long running operation. The default is no
   * deadline.
   * @param operationDeadline operationDeadline or {@code null} for none
   */
  public BackendRule setOperationDeadline(java.lang.Double operationDeadline) {
    this.operationDeadline = operationDeadline;
    return this;
  }

  /**
   * @return value or {@code null} for none
   */
  public java.lang.String getPathTranslation() {
    return pathTranslation;
  }

  /**
   * @param pathTranslation pathTranslation or {@code null} for none
   */
  public BackendRule setPathTranslation(java.lang.String pathTranslation) {
    this.pathTranslation = pathTranslation;
    return this;
  }

  /**
   * Selects the methods to which this rule applies.
   *
   * Refer to selector for syntax details.
   * @return value or {@code null} for none
   */
  public java.lang.String getSelector() {
    return selector;
  }

  /**
   * Selects the methods to which this rule applies.
   *
   * Refer to selector for syntax details.
   * @param selector selector or {@code null} for none
   */
  public BackendRule setSelector(java.lang.String selector) {
    this.selector = selector;
    return this;
  }

  @Override
  public BackendRule set(String fieldName, Object value) {
    return (BackendRule) super.set(fieldName, value);
  }

  @Override
  public BackendRule clone() {
    return (BackendRule) super.clone();
  }

}
// Copyright 2008 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.common.labs.matcher;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;

import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Pattern;

/**
 * An implementation of {@link Mapping} for which the pattern language is Google
 * URL patterns; see
 * documentation in <a
 * href="http://code.google.com/apis/searchappliance/documentation/50/admin/URL_patterns.html">
 * this document</a>. Note these limitations:
 * <ul>
 * <li> {@code www?:} patterns are not supported </li>
 * <li> {@code regexp:}, {@code regexpCase:} and {@code regexpIgnoreCase:}
 * patterns are treated as Java regexes, not as GNU regexes (as documented on the
 * <a
 * href="http://code.google.com/apis/searchappliance/documentation/50/admin/URL_patterns.html">
 * reference site</a>). </li>
 * <li> Exception patterns (patterns with leading {@code -} or {@code +-}) are
 * not supported.</li>
 * </ul>
 * <p>
 * The best match order is defined as follows:
 * <ul>
 * <li> The best domain match is determined by the order in which the patterns
 * are added. The first pattern whose domain matches a subject path determines
 * the best domain match. </li>
 * <li> Within the set of all patterns that start with the best domain match,
 * the path exact match is preferred, if there is one, followed by prefix
 * matches from longest to shortest, followed by other matches in the order in
 * which they were added</li>
 * <li> Finally, all free matches ({@code contains}, {@code regexp:},
 * {@code regexpCase:} and {@code regexpIgnoreCase:}, in the order in which
 * they were added.</li>
 * </ul>
 *
 * @param <V> the type of mapped values
 */
public class UrlMapping<V> implements Mapping<V> {

  /**
   * Factory for the five internal collections; supplying a custom
   * implementation lets callers control caching and matcher strategies.
   */
  public interface CollectionFactory<V> {
    /*
     * The domain map is a singleton (per mapping). It maps each attested domain
     * to a PathMapper that will do the mapping of the paths for that domain.
     */
    Mapping<PathMapper<V>> makeDomainMap(String name);

    /*
     * The full-url mapper is a singleton (per mapping). It takes care of all
     * the non-host-path type patterns - the ones that have to be mapped against
     * a single url.
     */
    Mapping<Entry<String, V>> makeFullUrlMapper(String name);

    /*
     * Each PathMapper creates an exact-matches map to hold the exact match patterns
     * for that domain.
     */
    Map<String, Entry<String, V>> makeExactMatchesMap(String name);

    /*
     * Each PathMapper creates a prefix mapper to hold the prefix-type patterns
     * for that domain.
     */
    Mapping<Entry<String, V>> makePrefixMapper(String name);

    /*
     * Each PathMapper creates a regex mapper to hold the regex patterns
     * for that domain.
     */
    Mapping<Entry<String, V>> makeRegexMapper(String name);
  }

  private final CollectionFactory<V> collectionFactory;

  /**
   * For host-path patterns (patterns that can be broken into two parts, one for
   * the host part and one for the path part -- see
   * {@link ParsedUrlPattern#isHostPathType()}) we use a hierarchical
   * structure: the {@code domainMap} handles the host portion, and returns a
   * {@link PathMapper} that handles the path portion.
   */
  private final Mapping<PathMapper<V>> domainMap;

  /**
   * For other patterns (typically regexes that could match either host or path)
   * we use a single structure that tries them all sequentially. Obviously,
   * performance is best if these are not used at all.
   */
  private final Mapping<Entry<String, V>> fullUrlMapper;

  // Shared operation counter for performance measurement (injectable).
  private final AtomicInteger comparisonCounter;
  // Number of patterns currently stored; backs size().
  private final AtomicInteger mappingCounter;
  private static final String DEFAULT_NAME = "um";
  private final String name;

  public UrlMapping() {
    this(new AtomicInteger(0));
  }

  public UrlMapping(CollectionFactory<V> collectionFactory) {
    this(new AtomicInteger(0), DEFAULT_NAME, collectionFactory, true);
  }

  public UrlMapping(AtomicInteger i) {
    this(i, DEFAULT_NAME);
  }

  /**
   * This constructor accepts the boolean flag to turn on/off the
   * caching mechanism of the pattern matcher.
   * @param isCachingEnabled set to true if caching is desired; otherwise false
   */
  public UrlMapping(boolean isCachingEnabled) {
    this(new AtomicInteger(0), DEFAULT_NAME, null, isCachingEnabled);
  }

  public UrlMapping(AtomicInteger i, String name) {
    this(i, name, null, true);
  }

  public UrlMapping(AtomicInteger i, String name, CollectionFactory<V> collectionFactory) {
    this(i, name, collectionFactory, true);
  }

  /**
   * Designated constructor; all other constructors delegate here.
   * A null {@code collectionFactory} selects {@link DefaultCollectionFactory}.
   */
  public UrlMapping(AtomicInteger i, String name, CollectionFactory<V> collectionFactory,
      boolean isCachingEnabled) {
    comparisonCounter = i;
    this.collectionFactory = (collectionFactory == null)
        ? new DefaultCollectionFactory<V>(isCachingEnabled)
        : collectionFactory;
    // make the domainMap - maps a url's domain to a special matcher for that domain
    domainMap = this.collectionFactory.makeDomainMap(name);
    // make the fullUrlMapper - which is used when the pattern can't be separated into
    // domain and path patterns
    fullUrlMapper = this.collectionFactory.makeFullUrlMapper(name);
    mappingCounter = new AtomicInteger(0);
    this.name = name;
  }

  @Override
  public Entry<String, V> getBestEntry(String subject) {
    AnalyzedUrl url = new AnalyzedUrl(subject);
    Entry<String, V> result = null;
    // Host-path patterns take priority: first matching domain's path mapper
    // that yields a hit wins.
    for (PathMapper<V> m : domainMap.getMappings(url.getHostPart()).values()) {
      result = m.getBestEntry(url.getPathPart());
      if (result != null) {
        return result;
      }
    }
    // Fall back to the free (full-url) patterns.
    Entry<String, Entry<String, V>> fullUrlMatch = fullUrlMapper.getBestEntry(subject);
    if (fullUrlMatch != null) {
      result = fullUrlMatch.getValue();
    }
    return result;
  }

  @Override
  public V getBestValue(String subject) {
    Entry<String, V> e = getBestEntry(subject);
    if (e != null) {
      return e.getValue();
    }
    return null;
  }

  @Override
  public V getByPattern(String pattern) {
    ParsedUrlPattern p = new ParsedUrlPattern(pattern);
    if (p.isHostPathType()) {
      String hostRegex = p.getHostRegex();
      PathMapper<V> m = domainMap.getByPattern(hostRegex);
      if (m == null) {
        return null;
      }
      return m.getByPattern(p);
    } else {
      Entry<String, V> v = fullUrlMapper.getByPattern(p.getUrlRegex());
      if (v == null) {
        return null;
      }
      return v.getValue();
    }
  }

  @Override
  public ImmutableMap<String, V> getMappings(String subject) {
    AnalyzedUrl url = new AnalyzedUrl(subject);
    // LinkedHashMap preserves the documented best-match ordering.
    Map<String, V> result = new LinkedHashMap<String, V>();
    for (PathMapper<V> m : domainMap.getMappings(url.getHostPart()).values()) {
      result.putAll(m.getMappings(url.getPathPart()));
    }
    for (Entry<String, V> v : fullUrlMapper.getMappings(subject).values()) {
      result.put(v.getKey(), v.getValue());
    }
    return ImmutableMap.copyOf(result);
  }

  /** @throws IllegalArgumentException if the pattern is unsupported */
  @Override
  public V put(String pattern, V value) {
    ParsedUrlPattern p = new ParsedUrlPattern(pattern);
    if (p.isHostPathType()) {
      String hostRegex = p.getHostRegex();
      PathMapper<V> m = getPathMapper(hostRegex);
      m.addParsedUrl(p, value);
    } else {
      String urlRegex = p.getUrlRegex();
      // The stored entry keeps the ORIGINAL pattern string as key, so lookups
      // can report which pattern matched.
      Entry<String, V> v = Maps.immutableEntry(pattern, value);
      fullUrlMapper.put(urlRegex, v);
    }
    mappingCounter.incrementAndGet();
    return value;
  }

  // Returns the PathMapper for a host regex, creating and registering one on
  // first use.
  private PathMapper<V> getPathMapper(String hostRegex) {
    PathMapper<V> m = domainMap.getByPattern(hostRegex);
    if (m == null) {
      m = new PathMapper<V>(name + hostRegex, collectionFactory);
      domainMap.put(hostRegex, m);
    }
    return m;
  }

  /** @throws IllegalArgumentException if any pattern is unsupported */
  @Override
  public void putAll(Map<String, ? extends V> m) {
    for (Map.Entry<String, ? extends V> e : m.entrySet()) {
      put(e.getKey(), e.getValue());
    }
  }

  @Override
  public V remove(String pattern) {
    ParsedUrlPattern p = new ParsedUrlPattern(pattern);
    V value;
    if (p.isHostPathType()) {
      String hostRegex = p.getHostRegex();
      PathMapper<V> m = getPathMapper(hostRegex);
      value = m.removeParsedUrl(p);
    } else {
      String urlRegex = p.getUrlRegex();
      // NOTE(review): fullUrlMapper.remove(...) can return null for an absent
      // pattern, making .getValue() throw NPE — confirm whether callers only
      // ever remove known patterns.
      value = fullUrlMapper.remove(urlRegex).getValue();
    }
    // Decremented even when nothing was actually removed — size() can drift.
    mappingCounter.decrementAndGet();
    return value;
  }

  /**
   * Set a counter to count basic operations. Optionally used for performance
   * testing.
   *
   * @param count the value to set the comparison counter.
   */
  public void setComparisonCounter(int count) {
    // This is a shared counter, so a j.u.c.AtomicInteger is used.
    comparisonCounter.set(count);
  }

  /**
   * Returns the value of the comparison counter. Optionally used for
   * performance testing.
   *
   * @return the value of the comparison counter.
   */
  public int getComparisonCount() {
    return comparisonCounter.intValue();
  }

  /**
   * Increment the value of the comparison counter. Optionally used for
   * performance testing.
   *
   * @param i the amount to add to the comparison counter.
   */
  protected void incrementComparisonCounter(int i) {
    comparisonCounter.addAndGet(i);
  }

  /**
   * Returns the number of mappings.
   *
   * @return the number of mappings.
   */
  @Override
  public int size() {
    return mappingCounter.intValue();
  }

  /**
   * Per-domain matcher: dispatches a path to exact-match, prefix, or regex
   * sub-matchers, in that priority order.
   */
  public static class PathMapper<V> {
    private final Map<String, Entry<String, V>> exactMatches;
    private final Mapping<Entry<String, V>> prefixMapper;
    private final Mapping<Entry<String, V>> regexMapper;
    private final String name;

    public PathMapper(String name, CollectionFactory<V> collectionFactory) {
      this.name = name;
      exactMatches = collectionFactory.makeExactMatchesMap(name);
      prefixMapper = collectionFactory.makePrefixMapper(name);
      regexMapper = collectionFactory.makeRegexMapper(name);
    }

    public V removeParsedUrl(ParsedUrlPattern p) {
      V value = null;
      // NOTE(review): each branch calls .getValue() on the result of remove()/
      // Map.remove(), which is null when the pattern was never added → NPE.
      if (p.isPathExactMatch()) {
        value = exactMatches.remove(p.getPathExactString()).getValue();
      } else if (p.isPathPrefixMatch()) {
        value = prefixMapper.remove(p.getPathPrefixString()).getValue();
      } else if (p.isHostPathType()) {
        value = regexMapper.remove(p.getPathRegex()).getValue();
      }
      return value;
    }

    public V getByPattern(ParsedUrlPattern p) {
      Entry<String, V> pmv;
      if (p.isPathExactMatch()) {
        pmv = exactMatches.get(p.getPathExactString());
      } else if (p.isPathPrefixMatch()) {
        pmv = prefixMapper.getByPattern(p.getPathPrefixString());
      } else /* we know p.isHostPathType() */ {
        pmv = regexMapper.getByPattern(p.getPathRegex());
      }
      V result = null;
      if (pmv != null) {
        result = pmv.getValue();
      }
      return result;
    }

    public void addParsedUrl(ParsedUrlPattern p, V value) {
      String urlPattern = p.getUrlPattern();
      Entry<String, V> pmv = Maps.immutableEntry(urlPattern, value);
      if (p.isPathExactMatch()) {
        exactMatches.put(p.getPathExactString(), pmv);
      } else if (p.isPathPrefixMatch()) {
        prefixMapper.put(p.getPathPrefixString(), pmv);
      } else if (p.isHostPathType()) {
        regexMapper.put(p.getPathRegex(), pmv);
      } else {
        // Caller contract violation: PathMapper only handles host-path patterns.
        throw new IllegalArgumentException();
      }
    }

    public Entry<String, V> getBestEntry(String subject) {
      Entry<String, V> pmv = null;
      // Priority: exact match, then best prefix, then first regex.
      pmv = exactMatches.get(subject);
      if (pmv == null) {
        pmv = prefixMapper.getBestValue(subject);
      }
      if (pmv == null) {
        pmv = regexMapper.getBestValue(subject);
      }
      return pmv;
    }

    public Map<String, V> getMappings(String subject) {
      Map<String, V> result = new LinkedHashMap<String, V>();
      Entry<String, V> pmv = null;
      pmv = exactMatches.get(subject);
      if (pmv != null) {
        result.put(pmv.getKey(), pmv.getValue());
      }
      for (Entry<String, V> p : prefixMapper.getMappings(subject).values()) {
        result.put(p.getKey(), p.getValue());
      }
      for (Entry<String, V> p : regexMapper.getMappings(subject).values()) {
        result.put(p.getKey(), p.getValue());
      }
      return result;
    }

    public String getName() {
      return name;
    }
  }

  /**
   * Default factory: sequential-regex matchers (optionally wrapped in a
   * cache) for domains and full URLs, a HashMap for exact matches, and a
   * trie for prefixes.
   * NOTE(review): the type parameter V here shadows the outer class's V; the
   * factory is only ever instantiated as DefaultCollectionFactory&lt;V&gt; so
   * behavior is unaffected, but the shadowing is easy to misread.
   */
  private class DefaultCollectionFactory<V> implements CollectionFactory<V> {

    private final boolean useCache;

    private DefaultCollectionFactory(boolean useCache) {
      this.useCache = useCache;
    }

    @Override
    public Mapping<PathMapper<V>> makeDomainMap(String name) {
      Map<String, Pattern> dpm = new HashMap<String, Pattern>();
      PatternMatcher patternMatcher =
          new SequentialRegexPatternMatcher(UrlMapping.this.comparisonCounter, dpm);
      if (useCache) {
        patternMatcher = new CachedPatternMatcher(patternMatcher, UrlMapping.this.comparisonCounter);
      }
      Mapping<PathMapper<V>> domainMap =
          new MappingFromPatternMatcher<PathMapper<V>>(patternMatcher);
      return domainMap;
    }

    @Override
    public Mapping<Entry<String, V>> makeFullUrlMapper(String name) {
      Map<String, Pattern> fpm = new HashMap<String, Pattern>();
      SequentialRegexPatternMatcher fm =
          new SequentialRegexPatternMatcher(UrlMapping.this.comparisonCounter, fpm);
      Mapping<Entry<String, V>> fullUrlMapper = new MappingFromPatternMatcher<Entry<String, V>>(fm);
      return fullUrlMapper;
    }

    @Override
    public Map<String, Entry<String, V>> makeExactMatchesMap(String name) {
      return new HashMap<String, Entry<String, V>>();
    }

    @Override
    public Mapping<Entry<String, V>> makePrefixMapper(String name) {
      return new MappingFromPatternMatcher<Entry<String, V>>(new TriePrefixPatternMatcher());
    }

    @Override
    public Mapping<Entry<String, V>> makeRegexMapper(String name) {
      return new MappingFromPatternMatcher<Entry<String, V>>(new SequentialRegexPatternMatcher());
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package javax.servlet.http;

import java.io.IOException;
import java.util.Arrays;
import java.util.Hashtable;
import java.util.ResourceBundle;
import java.util.StringTokenizer;

import javax.servlet.ServletInputStream;

/**
 * @deprecated As of Java(tm) Servlet API 2.3. These methods were only useful
 *             with the default encoding and have been moved to the request
 *             interfaces.
 */
@Deprecated
public class HttpUtils {

    private static final String LSTRING_FILE = "javax.servlet.http.LocalStrings";

    // Localized error messages (e.g. "err.io.short_read").
    private static final ResourceBundle lStrings = ResourceBundle.getBundle(LSTRING_FILE);

    /**
     * Constructs an empty <code>HttpUtils</code> object.
     *
     */
    public HttpUtils() {
        // NOOP
    }

    /**
     *
     * Parses a query string passed from the client to the server and builds a
     * <code>HashTable</code> object with key-value pairs. The query string
     * should be in the form of a string packaged by the GET or POST method,
     * that is, it should have key-value pairs in the form <i>key=value</i>,
     * with each pair separated from the next by a &amp; character.
     *
     * <p>
     * A key can appear more than once in the query string with different
     * values. However, the key appears only once in the hashtable, with its
     * value being an array of strings containing the multiple values sent by
     * the query string.
     *
     * <p>
     * The keys and values in the hashtable are stored in their decoded form, so
     * any + characters are converted to spaces, and characters sent in
     * hexadecimal notation (like <i>%xx</i>) are converted to ASCII characters.
     *
     * @param s
     *            a string containing the query to be parsed
     *
     * @return a <code>HashTable</code> object built from the parsed key-value
     *         pairs
     *
     * @exception IllegalArgumentException
     *                if the query string is invalid
     *
     */
    public static Hashtable<String, String[]> parseQueryString(String s) {

        String valArray[] = null;

        if (s == null) {
            throw new IllegalArgumentException();
        }

        Hashtable<String, String[]> ht = new Hashtable<>();
        // Single scratch buffer shared across parseName calls; parseName
        // resets it, so this is safe within a single invocation.
        StringBuilder sb = new StringBuilder();
        StringTokenizer st = new StringTokenizer(s, "&");
        while (st.hasMoreTokens()) {
            String pair = st.nextToken();
            int pos = pair.indexOf('=');
            if (pos == -1) {
                // XXX
                // should give more detail about the illegal argument
                throw new IllegalArgumentException();
            }
            String key = parseName(pair.substring(0, pos), sb);
            String val = parseName(pair.substring(pos + 1, pair.length()), sb);
            if (ht.containsKey(key)) {
                // Repeated key: grow the existing value array by one.
                String oldVals[] = ht.get(key);
                valArray = Arrays.copyOf(oldVals, oldVals.length + 1);
                valArray[oldVals.length] = val;
            } else {
                valArray = new String[1];
                valArray[0] = val;
            }
            ht.put(key, valArray);
        }
        return ht;
    }

    /**
     *
     * Parses data from an HTML form that the client sends to the server using
     * the HTTP POST method and the <i>application/x-www-form-urlencoded</i>
     * MIME type.
     *
     * <p>
     * The data sent by the POST method contains key-value pairs. A key can
     * appear more than once in the POST data with different values. However,
     * the key appears only once in the hashtable, with its value being an array
     * of strings containing the multiple values sent by the POST method.
     *
     * <p>
     * The keys and values in the hashtable are stored in their decoded form, so
     * any + characters are converted to spaces, and characters sent in
     * hexadecimal notation (like <i>%xx</i>) are converted to ASCII characters.
     *
     *
     *
     * @param len
     *            an integer specifying the length, in characters, of the
     *            <code>ServletInputStream</code> object that is also passed to
     *            this method
     *
     * @param in
     *            the <code>ServletInputStream</code> object that contains the
     *            data sent from the client
     *
     * @return a <code>HashTable</code> object built from the parsed key-value
     *         pairs
     *
     *
     * @exception IllegalArgumentException
     *                if the data sent by the POST method is invalid
     *
     */
    public static Hashtable<String, String[]> parsePostData(int len, ServletInputStream in) {

        // XXX
        // should a length of 0 be an IllegalArgumentException

        // cheap hack to return an empty hash
        if (len <= 0) return new Hashtable<>();

        if (in == null) {
            throw new IllegalArgumentException();
        }

        // Make sure we read the entire POSTed body.
        byte[] postedBytes = new byte[len];
        try {
            int offset = 0;

            // Loop because a single read() may return fewer than len bytes.
            do {
                int inputLen = in.read(postedBytes, offset, len - offset);
                if (inputLen <= 0) {
                    String msg = lStrings.getString("err.io.short_read");
                    throw new IllegalArgumentException(msg);
                }
                offset += inputLen;
            } while ((len - offset) > 0);

        } catch (IOException e) {
            throw new IllegalArgumentException(e.getMessage(), e);
        }

        // XXX we shouldn't assume that the only kind of POST body
        // is FORM data encoded using ASCII or ISO Latin/1 ... or
        // that the body should always be treated as FORM data.

        try {
            // ISO-8859-1 decode is the historical (pre-2.3) default; this is
            // exactly why the class is deprecated.
            String postedBody = new String(postedBytes, 0, len, "8859_1");
            return parseQueryString(postedBody);
        } catch (java.io.UnsupportedEncodingException e) {
            // XXX function should accept an encoding parameter & throw this
            // exception. Otherwise throw something expected.
            throw new IllegalArgumentException(e.getMessage(), e);
        }
    }

    /*
     * Parse a name in the query string.
     * Decodes '+' to space and %xx hex escapes; a truncated trailing escape
     * (e.g. "%A" at end of input) is appended literally rather than rejected.
     */
    private static String parseName(String s, StringBuilder sb) {
        sb.setLength(0);
        for (int i = 0; i < s.length(); i++) {
            char c = s.charAt(i);
            switch (c) {
                case '+':
                    sb.append(' ');
                    break;
                case '%':
                    try {
                        sb.append((char) Integer.parseInt(s.substring(i + 1, i + 3), 16));
                        i += 2;
                    } catch (NumberFormatException e) {
                        // XXX
                        // need to be more specific about illegal arg
                        throw new IllegalArgumentException();
                    } catch (StringIndexOutOfBoundsException e) {
                        // Escape sequence runs off the end of the string:
                        // keep the remainder verbatim (legacy behavior).
                        String rest = s.substring(i);
                        sb.append(rest);
                        if (rest.length() == 2) i++;
                    }
                    break;
                default:
                    sb.append(c);
                    break;
            }
        }
        return sb.toString();
    }

    /**
     *
     * Reconstructs the URL the client used to make the request, using
     * information in the <code>HttpServletRequest</code> object. The returned
     * URL contains a protocol, server name, port number, and server path, but
     * it does not include query string parameters.
     *
     * <p>
     * Because this method returns a <code>StringBuffer</code>, not a string,
     * you can modify the URL easily, for example, to append query parameters.
     *
     * <p>
     * This method is useful for creating redirect messages and for reporting
     * errors.
     *
     * @param req
     *            a <code>HttpServletRequest</code> object containing the
     *            client's request
     *
     * @return a <code>StringBuffer</code> object containing the reconstructed
     *         URL
     *
     */
    public static StringBuffer getRequestURL(HttpServletRequest req) {
        StringBuffer url = new StringBuffer();
        String scheme = req.getScheme();
        int port = req.getServerPort();
        String urlPath = req.getRequestURI();

        url.append(scheme); // http, https
        url.append("://");
        url.append(req.getServerName());
        // Omit the port only when it is the scheme's default (80/443).
        if ((scheme.equals("http") && port != 80) || (scheme.equals("https") && port != 443)) {
            url.append(':');
            url.append(req.getServerPort());
        }

        url.append(urlPath);
        return url;
    }
}
package core.common.index;

import core.adapt.Predicate;
import core.common.globals.TableInfo;
import core.common.key.ParsedTupleList;
import core.common.key.RawIndexKey;
import core.utils.Pair;
import core.utils.TypeUtils.TYPE;

import java.util.*;

/**
 * A multi-dimensional binary partitioning tree (an {@link MDIndex}) built from
 * a sample of tuples. Each internal node splits the sample on one attribute;
 * leaves hold {@link Bucket}s. NOTE(review): the static fields below make this
 * class non-thread-safe and make tree builds order-dependent — confirm builds
 * are always single-threaded.
 */
public class RobustTree implements MDIndex {
    // Shared RNG used to break ties among equally-allocated attributes.
    public transient static Random randGenerator = new Random();

    // TODO: Add capacity
    // Scratch list reused by getLeastAllocated; static, so not thread-safe.
    transient static List<Integer> leastAllocated = new ArrayList<>(50);

    /**
     * Counter used by getLeastAllocated to force a specific first split
     * attribute ("sf_uploadtime") on the very first call only.
     */
    transient static int countCalled = -1;

    transient public ParsedTupleList sample;
    transient public TableInfo tableInfo;

    public int maxBuckets;
    public int numAttributes;
    public TYPE[] dimensionTypes;
    public RNode root;

    /** Creates an empty tree bound to the given table metadata. */
    public RobustTree(TableInfo tableInfo) {
        this.root = new RNode();
        this.tableInfo = tableInfo;
    }

    /** No-arg constructor for deserialization via unmarshall(). */
    public RobustTree() {
    }

    /** n-th root of A with the default tolerance of 0.001. */
    public static double nthroot(int n, double A) {
        return nthroot(n, A, .001);
    }

    /**
     * Computes the n-th root of A by Newton's method, iterating until
     * successive estimates differ by at most p. Returns -1 for negative A
     * (only real, non-negative inputs are supported) and 0 for A == 0.
     */
    public static double nthroot(int n, double A, double p) {
        if (A < 0) {
            System.err.println("A < 0"); // we handle only real positive numbers
            return -1;
        } else if (A == 0) {
            return 0;
        }
        double x_prev = A;
        double x = A / n; // starting "guessed" value...
        while (Math.abs(x - x_prev) > p) {
            x_prev = x;
            x = ((n - 1.0) * x + A / Math.pow(x, n - 1.0)) / n;
        }
        return x;
    }

    /** Recursively prints the tree rooted at node to stdout (debug aid). */
    public static void printNode(RNode node) {
        if (node.bucket != null) {
            System.out.format("B " + node.bucket.bucketId);
        } else {
            System.out.format("Node: %d %s { ", node.attribute, node.value.toString());
            printNode(node.leftChild);
            System.out.print(" }{ ");
            printNode(node.rightChild);
            System.out.print(" }");
        }
    }

    public int getMaxBuckets() {
        return maxBuckets;
    }

    public void setMaxBuckets(int maxBuckets) {
        this.maxBuckets = maxBuckets;
    }

    // Cloning is deliberately unsupported.
    @Override
    public MDIndex clone() throws CloneNotSupportedException {
        throw new CloneNotSupportedException();
    }

    public RNode getRoot() {
        return root;
    }

    // Only used for testing
    public void setRoot(RNode root) {
        this.root = root;
    }

    /**
     * Structural equality on sizes and dimension types; NOTE(review): roots
     * are compared by reference (==), not by structure — so two separately
     * built but identical trees compare unequal. Confirm this is intended.
     * Also note hashCode() is not overridden alongside equals().
     */
    @Override
    public boolean equals(Object obj) {
        if (obj instanceof RobustTree) {
            RobustTree rhs = (RobustTree) obj;
            boolean allGood = true;
            allGood &= rhs.numAttributes == this.numAttributes;
            allGood &= rhs.maxBuckets == this.maxBuckets;
            allGood &= rhs.dimensionTypes.length == this.dimensionTypes.length;
            if (!allGood)
                return false;

            for (int i = 0; i < this.dimensionTypes.length; i++) {
                allGood &= this.dimensionTypes[i] == rhs.dimensionTypes[i];
            }
            if (!allGood)
                return false;

            allGood = this.root == rhs.root;
            return allGood;
        }
        return false;
    }

    /**
     * Builds the tree level-by-level from the loaded sample. Each attribute
     * starts with an equal allocation budget (numAttributes-th root of
     * maxBuckets); splitting on an attribute at depth d consumes
     * 2 / 2^d of its budget. Nodes at maxDepth (floor(log2(maxBuckets)))
     * become buckets.
     */
    @Override
    public void initProbe() {
        System.out.println(this.sample.size() + " keys inserted");

        // Computes log(this.maxBuckets)
        int maxDepth = 31 - Integer.numberOfLeadingZeros(this.maxBuckets);

        double allocationPerAttribute = RobustTree.nthroot(
                this.numAttributes, this.maxBuckets);
        System.out.println("Max allocation: " + allocationPerAttribute);

        double[] allocations = new double[this.numAttributes];
        for (int i = 0; i < this.numAttributes; i++) {
            allocations[i] = allocationPerAttribute;
        }

        // Custom
        // double[] allocations = new double[this.numAttributes];
        // for (int i = 0; i < this.numAttributes; i++)
        //     allocations[i] = -1;
        //
        // allocations[tableInfo.schema.getAttributeId("o_orderdate")] = 12;
        // allocations[tableInfo.schema.getAttributeId("l_quantity")] = 6;
        // allocations[tableInfo.schema.getAttributeId("c_mktsegment")] = 4;
        // allocations[tableInfo.schema.getAttributeId("c_region")] = 4;

        /**
         * Do a level-order traversal
         */
        LinkedList<Task> nodeQueue = new LinkedList<Task>();
        // Initialize root with attribute 0
        Task initialTask = new Task();
        initialTask.node = root;
        initialTask.sample = this.sample;
        initialTask.depth = 0;
        nodeQueue.add(initialTask);

        while (nodeQueue.size() > 0) {
            Task t = nodeQueue.pollFirst();
            if (t.depth < maxDepth) {
                int dim = -1;
                int round = 0;
                Pair<ParsedTupleList, ParsedTupleList> halves = null;

                // Try attributes in least-allocated order until one yields a
                // non-degenerate split (both halves non-empty). Note the
                // budget is charged even for attributes that get skipped.
                while (dim == -1 && round < allocations.length) {
                    int testDim = getLeastAllocated(allocations);
                    allocations[testDim] -= 2.0 / Math.pow(2, t.depth);

                    // TODO: For low cardinality values, it might be better to
                    // choose some set of values on each side.
                    // TPCH attribute 9 for example has only two distinct values
                    // TODO: This might repeatedly use the same attribute
                    halves = t.sample.sortAndSplit(testDim);
                    if (halves.first.size() > 0 && halves.second.size() > 0) {
                        dim = testDim;
                    } else {
                        System.err.println("WARN: Skipping attribute " + testDim);
                    }

                    round++;
                }

                if (dim == -1) {
                    // No attribute could split this sample: make it a leaf.
                    System.err.println("ERR: No attribute to partition on");
                    Bucket b = new Bucket();
                    b.setSample(sample);
                    t.node.bucket = b;
                } else {
                    t.node.attribute = dim;
                    t.node.type = this.dimensionTypes[dim];
                    t.node.value = halves.first.getLast(dim); // Need to traverse up for range.

                    t.node.leftChild = new RNode();
                    t.node.leftChild.parent = t.node;
                    Task tl = new Task();
                    tl.node = t.node.leftChild;
                    tl.depth = t.depth + 1;
                    tl.sample = halves.first;
                    nodeQueue.add(tl);

                    t.node.rightChild = new RNode();
                    t.node.rightChild.parent = t.node;
                    Task tr = new Task();
                    tr.node = t.node.rightChild;
                    tr.depth = t.depth + 1;
                    tr.sample = halves.second;
                    nodeQueue.add(tr);
                }
            } else {
                // Maximum depth reached: this node becomes a bucket.
                Bucket b = new Bucket();
                b.setSample(sample);
                t.node.bucket = b;
            }
        }

        System.out
                .println("Final Allocations: " + Arrays.toString(allocations));
    }

    // Join-aware construction is not supported by this index.
    @Override
    public void initProbe(int joinAttribute) {
        System.out.println("method not implemented!");
    }

    /**
     * Returns the index of the attribute with the largest remaining
     * allocation budget, breaking ties uniformly at random. The very first
     * call instead returns the "sf_uploadtime" attribute (hard-coded seed
     * split controlled by the static countCalled counter).
     */
    public int getLeastAllocated(double[] allocations) {
        int numAttributes = allocations.length;

        // if (countCalled < 2) {
        // countCalled++;
        // if (countCalled == 0) {
        // return tableInfo.schema.getAttributeId("o_orderdate"); // o_orderdate
        // } else if (countCalled == 1) {
        // return tableInfo.schema.getAttributeId("l_shipmode"); // l_shipmode
        // } else if (countCalled == 2) {
        // return tableInfo.schema.getAttributeId("l_shipmode"); // l_shipmode
        // }
        // // Could be useful to add quantity
        // }

        if (countCalled < 0) {
            countCalled++;
            if (countCalled == 0) {
                return tableInfo.schema.getAttributeId("sf_uploadtime"); // o_orderdate
            }
            // Could be useful to add quantity
        }

        leastAllocated.clear();
        leastAllocated.add(0);

        // Despite the method/variable names, this selects the MAXIMUM
        // remaining allocation (allocations[i] > alloc), i.e. the attribute
        // whose budget is least fulfilled so far.
        double alloc = allocations[0];
        for (int i = 1; i < numAttributes; i++) {
            if (allocations[i] > alloc) {
                alloc = allocations[i];
                leastAllocated.clear();
                leastAllocated.add(i);
            } else if (allocations[i] == alloc) {
                leastAllocated.add(i);
            }
        }

        if (leastAllocated.size() == 1) {
            return leastAllocated.get(0);
        } else {
            int r = randGenerator.nextInt(leastAllocated.size());
            return leastAllocated.get(r);
        }
    }

    /**
     * Used in the 2nd phase of upfront to assign each tuple to the right
     * bucket by routing the key down the tree.
     */
    @Override
    public Integer getBucketId(RawIndexKey key) {
        return root.getBucketId(key);
    }

    public int[] getAllBucketIds() {
        return root.getAllBucketIds();
    }

    /***************************************************
     * **************** RUNTIME METHODS *****************
     ***************************************************/

    /** Returns the leaf nodes whose ranges satisfy all predicates. */
    public List<RNode> getMatchingBuckets(Predicate[] predicates) {
        List<RNode> results = root.search(predicates);
        return results;
    }

    /**
     * Serializes the index to string Very brittle - Consider rewriting
     * Format: "maxBuckets numAttributes\n", then the space-separated type
     * names, then the nodes in pre-order (delegated to RNode.marshall()).
     * NOTE(review): getBytes() uses the platform default charset — confirm
     * marshall/unmarshall always run on machines with the same default.
     */
    @Override
    public byte[] marshall() {
        // String concatenation is acceptable here; the payload is small.
        // Format:
        // maxBuckets, numAttributes
        // types
        // nodes in pre-order
        String robustTree = "";
        robustTree += String.format("%d %d\n", this.maxBuckets,
                this.numAttributes);

        String types = "";
        for (int i = 0; i < this.numAttributes; i++) {
            types += this.dimensionTypes[i].toString() + " ";
        }
        types += "\n";
        robustTree += types;

        robustTree += this.root.marshall();

        return robustTree.getBytes();
    }

    /** Inverse of marshall(); rebuilds the tree from its serialized form. */
    @Override
    public void unmarshall(byte[] bytes) {
        String tree = new String(bytes);
        Scanner sc = new Scanner(tree);
        this.maxBuckets = sc.nextInt();
        this.numAttributes = sc.nextInt();

        this.dimensionTypes = new TYPE[this.numAttributes];
        for (int i = 0; i < this.numAttributes; i++) {
            this.dimensionTypes[i] = TYPE.valueOf(sc.next());
        }

        // Reset the maxBucketId.
        Bucket.maxBucketId = 0;
        this.root = new RNode();
        this.root.parseNode(sc);
    }

    /**
     * Loads a serialized sample and distributes it over the existing tree,
     * estimating per-bucket tuple counts by scaling sample sizes.
     */
    public void loadSample(TableInfo tableInfo, byte[] bytes) {
        this.sample = new ParsedTupleList(tableInfo.getTypeArray());
        this.sample.unmarshall(bytes, tableInfo.delimiter);
        this.initializeBucketSamplesAndCounts(this.root, this.sample,
                this.sample.size(), tableInfo.numTuples);
    }

    /** Adopts an in-memory sample and derives attribute types from it. */
    public void loadSample(ParsedTupleList sample) {
        this.sample = sample;
        this.dimensionTypes = this.sample.getTypes();
        this.numAttributes = this.dimensionTypes.length;
    }

    /**
     * Recursively partitions the sample down the tree; at each leaf, stores
     * the sub-sample and the estimated tuple count
     * (sampleSize * totalTuples / totalSamples).
     */
    public void initializeBucketSamplesAndCounts(RNode n,
            ParsedTupleList sample, final double totalSamples,
            final double totalTuples) {
        if (n.bucket != null) {
            long sampleSize = sample.size();
            double numTuples = (sampleSize * totalTuples) / totalSamples;
            n.bucket.setSample(sample);
            n.bucket.setEstimatedNumTuples(numTuples);
        } else {
            // By sorting we avoid memory allocation
            // Will most probably be faster
            sample.sort(n.attribute);
            Pair<ParsedTupleList, ParsedTupleList> halves = sample
                    .splitAt(n.attribute, n.value);
            initializeBucketSamplesAndCounts(n.leftChild, halves.first,
                    totalSamples, totalTuples);
            initializeBucketSamplesAndCounts(n.rightChild, halves.second,
                    totalSamples, totalTuples);
        }
    }

    /**
     * Prints the tree created. Call only after initProbe is done.
     */
    public void printTree() {
        printNode(root);
    }

    /**
     * Reconstructs the per-attribute allocation spent by the existing tree
     * via a breadth-first walk. A node at BFS position k contributes
     * 2^(-floor(log2(k)) + 1) to its attribute. Buckets/missing children are
     * padded with a sentinel "filler" node (attribute == -1) so that BFS
     * positions keep matching a complete binary tree; the walk stops once a
     * whole level contains only fillers.
     */
    public double[] getAllocations() {
        List<RNode> queue = new LinkedList<RNode>();
        Map<Integer, Double> allocs = new HashMap<Integer, Double>();
        queue.add(this.getRoot());
        int nodeNum = 0;
        RNode filler = new RNode();
        filler.attribute = -1;
        int lastNode = 0;
        while (queue.size() > 0) {
            nodeNum++;
            RNode node = queue.remove(0);
            if (node.bucket != null || node.attribute == -1) {
                // Leaf or filler: stop when we have gone a full level past
                // the last real internal node.
                if (nodeNum > lastNode * 2) {
                    break;
                }
                queue.add(filler);
                queue.add(filler);
                continue;
            }
            lastNode = nodeNum;
            if (!(allocs.containsKey(node.attribute))) {
                allocs.put(node.attribute, 0.0);
            }
            double addedAlloc = Math.pow(2,
                    -1 * Math.floor(Math.log(nodeNum) / Math.log(2)) + 1);
            allocs.put(node.attribute, allocs.get(node.attribute) + addedAlloc);
            queue.add(node.leftChild);
            queue.add(node.rightChild);
        }

        double[] allocArray = new double[numAttributes];
        for (int i = 0; i < numAttributes; i++) {
            if (!(allocs.containsKey(i))) {
                allocArray[i] = 0;
            } else {
                allocArray[i] = allocs.get(i);
            }
        }
        System.out.println(nodeNum);
        return allocArray;
    }

    /** Work item for the level-order build in initProbe(). */
    public class Task {
        public RNode node;        // tree node to split or turn into a bucket
        public int depth;         // depth of the node in the tree
        public ParsedTupleList sample; // sample tuples routed to this node
    }
}
/*
 * Copyright 2015 The UIMaster Project
 *
 * The UIMaster Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package org.shaolin.uimaster.page.ajax;

import org.shaolin.uimaster.page.AjaxContext;
import org.shaolin.bmdp.json.JSONObject;
import org.shaolin.uimaster.page.AjaxContextHelper;
import org.shaolin.uimaster.page.IJSHandlerCollections;
import org.shaolin.uimaster.page.ajax.json.IDataItem;

/**
 * Server-side model of a client dialog, in the style of Swing's JOptionPane.
 * The static showXxxDialog helpers serialize a Dialog to JSON and push it to
 * the client through the current AjaxContext. The blocking suspend/response
 * path is commented out throughout, so every helper currently returns
 * immediately with a placeholder value (-1 or "").
 */
public class Dialog extends Container {

    private static final long serialVersionUID = 121214544768634345L;

    //OptionType
    public static final int YES_NO_OPTION = 0;
    public static final int YES_NO_CANCEL_OPTION = 1;
    public static final int OK_CANCEL_OPTION = 2;
    public static final int OK_ONLY_OPTION = 3;//for showMessageDialog

    //MessageType which decides the icon.
    public static final int ERROR_MESSAGE = 0;
    public static final int INFORMATION_MESSAGE = 1;
    public static final int WARNING_MESSAGE = 2;
    public static final int QUESTION_MESSAGE = 3;

    //ReturnType
    public static final int YES_OPTION = 0;//for showConfirmDialog
    public static final int NO_OPTION = 1;//for showConfirmDialog
    public static final int OK_OPTION = 2;//for showInputDialog, showMessageDialog, showOptionDialog
    public static final int CANCEL_OPTION = 3;//for all
    public static final int CLOSED_OPTION = 4;//for all

    //DialogType
    public static final int MESSAGE_TYPE = 0;
    public static final int INPUT_TYPE = 1;
    public static final int OPTION_TYPE = 2;
    public static final int CONFIRM_TYPE = 3;

    //Default parameters
    public static final String DEF_TITLE = "Dialog";
    public static final String DEF_MESSAGE = "Message";
    public static final int DEF_OPTION_TYPE = OK_CANCEL_OPTION;//OK and Cancel
    public static final String[] DEF_OPTIONS = null;
    public static final int DEF_INITIAL_VALUE = 0;
    public static final int DEF_DIALOG_TYPE = MESSAGE_TYPE;
    public static final int DEF_MESSAGE_TYPE = INFORMATION_MESSAGE;
    public static final int DEF_X = -1;    // -1 means "let the client decide"
    public static final int DEF_Y = -1;

    private String title;
    private String message;
    private int optionType;
    private String[] options;
    private int initialValue;//0,1,2...
    private int messageType;
    private int dialogType;
    private int x = DEF_X;
    private int y = DEF_Y;
    private String frameInfo = "";

    /** Creates a dialog with all default parameters under the given UI id. */
    public Dialog(String uiid) {
        super(AjaxContextHelper.getAjaxContext().getEntityPrefix() + uiid, new CellLayout());
        this.title = DEF_TITLE;
        this.message = DEF_MESSAGE;
        this.optionType = DEF_OPTION_TYPE;
        this.options = DEF_OPTIONS;
        this.initialValue = DEF_INITIAL_VALUE;
        this.messageType = DEF_MESSAGE_TYPE;
        this.dialogType = DEF_DIALOG_TYPE;
    }

    /** Creates a fully-specified dialog under the given UI id. */
    public Dialog(String uiid, String title, String message, int optionType,
            String[] options, int initialValue, int messageType, int dialogType) {
        super(AjaxContextHelper.getAjaxContext().getEntityPrefix() + uiid, new CellLayout());
        this.title = title;
        this.message = message;
        this.optionType = optionType;
        this.options = options;
        this.initialValue = initialValue;
        this.messageType = messageType;
        this.dialogType = dialogType;
    }

    public int getInitialValue() {
        return initialValue;
    }

    public void setInitialValue(int initialValue) {
        this.initialValue = initialValue;
    }

    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }

    public String[] getOptions() {
        return options;
    }

    public void setOptions(String[] options) {
        this.options = options;
    }

    public int getOptionType() {
        return optionType;
    }

    public void setOptionType(int optionType) {
        this.optionType = optionType;
    }

    public String getTitle() {
        return title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public int getMessageType() {
        return messageType;
    }

    public void setMessageType(int messageType) {
        this.messageType = messageType;
    }

    public int getDialogType() {
        return dialogType;
    }

    public void setDialogType(int dialogType) {
        this.dialogType = dialogType;
    }

    public int getX() {
        return x;
    }

    public void setX(int x) {
        this.x = x;
    }

    public int getY() {
        return y;
    }

    public void setY(int y) {
        this.y = y;
    }

    public String getFrameInfo() {
        return frameInfo;
    }

    public void setFrameInfo(String frameInfo) {
        this.frameInfo = frameInfo;
    }

    /**
     * Brings up a dialog with the options Yes and No with the title
     * @param message
     * @param title
     * @param frameInfo
     * @return ReturnType
     */
    public static int showConfirmDialog(String message, String title, String frameInfo) {
        return showConfirmDialog(message, title, Dialog.DEF_X, Dialog.DEF_Y, frameInfo);
    }

    /**
     * Brings up a dialog with the options Yes and No with the title
     * @param message
     * @param title
     * @param x
     * @param y
     * @param frameInfo
     * @return
     */
    public static int showConfirmDialog(String message, String title, int x, int y, String frameInfo) {
        return showConfirmDialog(message, title, Dialog.DEF_OPTION_TYPE, Dialog.DEF_MESSAGE_TYPE, x, y, frameInfo);
    }

    /**
     * Brings up a dialog where the number of choices is determined by the optionType parameter.
     * @param message
     * @param title
     * @param optionType
     * @param messageType
     * @param frameInfo
     * @return ReturnType (currently always -1 — the blocking suspend path is disabled)
     */
    public static int showConfirmDialog(String message, String title,
            int optionType, int messageType, int x, int y, String frameInfo) {
        Dialog dialog = new Dialog("dialog");
        dialog.setDialogType(CONFIRM_TYPE);
        dialog.setMessage(message);
        dialog.setTitle(title);
        dialog.setOptionType(optionType);
        dialog.setMessageType(messageType);
        dialog.setX(x);
        dialog.setY(y);

        JSONObject jsono = new JSONObject(dialog);
        String jsCode = jsono.toString();
        AjaxContext ajaxContext = AjaxContextHelper.getAjaxContext();
        IDataItem item = createDataItem(dialog.getId(), jsCode);
        item.setFrameInfo(frameInfo);
        ajaxContext.addDataItem(item);
        //ajaxContext.suspend(dialog);
        //String returnTypeStr = ajaxContext.getRequest().getParameter("returnType");
        return -1;
    }

    /**
     * Prompts the user for input in a blocking dialog.
     * @param message
     * @param title
     * @param messageType
     * @param frameInfo
     * @return client's input String if client clicks "Ok" option, else return null
     */
    public static String showInputDialog(String message, String title, int messageType, String frameInfo) {
        return showInputDialog(message,title,messageType,Dialog.DEF_X,Dialog.DEF_Y, frameInfo);
    }

    /**
     * Prompts the user for input in a blocking dialog.
     *
     * @param message
     * @param title
     * @param messageType
     * @param x
     * @param y
     * @param frameInfo
     * @return currently always "" — the blocking suspend path is disabled
     */
    public static String showInputDialog(String message, String title, int messageType, int x, int y, String frameInfo) {
        Dialog dialog = new Dialog("dialog");
        dialog.setDialogType(INPUT_TYPE);
        dialog.setMessage(message);
        dialog.setTitle(title);
        dialog.setMessageType(messageType);
        dialog.setX(x);
        dialog.setY(y);

        JSONObject jsono = new JSONObject(dialog);
        String jsCode = jsono.toString();
        AjaxContext ajaxContext = AjaxContextHelper.getAjaxContext();
        IDataItem item = createDataItem(dialog.getId(), jsCode);
        item.setFrameInfo(frameInfo);
        ajaxContext.addDataItem(item);
        return "";
    }

    /**
     * Brings up a dialog displaying a message.
     * @param message
     * @param title
     * @param messageType
     * @param frameInfo
     */
    public static void showMessageDialog(String message, String title, int messageType, String frameInfo) {
        showMessageDialog(message,title,messageType,Dialog.DEF_X,Dialog.DEF_Y, frameInfo);
    }

    /**
     * Brings up a dialog displaying a message.
     *
     * @param message
     * @param title
     * @param messageType
     * @param x
     * @param y
     * @param frameInfo
     */
    public static void showMessageDialog(String message, String title,
            int messageType,int x, int y, String frameInfo) {
        Dialog dialog = new Dialog("dialog");
        dialog.setDialogType(MESSAGE_TYPE);
        dialog.setOptionType(OK_ONLY_OPTION);
        dialog.setMessage(message);
        dialog.setTitle(title);
        dialog.setMessageType(messageType);
        dialog.setX(x);
        dialog.setY(y);

        JSONObject jsono = new JSONObject(dialog);
        String jsCode = jsono.toString();
        AjaxContext ajaxContext = AjaxContextHelper.getAjaxContext();
        IDataItem item = createDataItem(dialog.getId(), jsCode);
        item.setFrameInfo(frameInfo);
        ajaxContext.addDataItem(item);
        //ajaxContext.suspend(dialog);
        return;
    }

    /**
     * Brings up a dialog where the initial choice is determined by the initialValue parameter
     * and the number of choices is determined by the optionType parameter.
     * @param message
     * @param title
     * @param optionType
     * @param messageType
     * @param options
     * @param initialValue determine the initial choice of options
     * @param frameInfo
     * @return return the selection from client if clicks "Ok" option,
     *         else return -1
     */
    public static int showOptionDialog(String message, String title, int optionType,
            int messageType, String[] options, int initialValue, String frameInfo) {
        return showOptionDialog(message,title,optionType,messageType,options,initialValue,Dialog.DEF_X,Dialog.DEF_Y,frameInfo);
    }

    /**
     * Brings up a dialog where the initial choice is determined by the initialValue parameter
     * and the number of choices is determined by the optionType parameter.
     *
     * @param message
     * @param title
     * @param optionType
     * @param messageType
     * @param options
     * @param initialValue
     * @param x
     * @param y
     * @return currently always -1 — the blocking suspend path is disabled
     */
    public static int showOptionDialog(String message, String title, int optionType,
            int messageType, String[] options, int initialValue,int x, int y, String frameInfo) {
        Dialog dialog = new Dialog("dialog");
        dialog.setDialogType(OPTION_TYPE);
        dialog.setMessage(message);
        dialog.setTitle(title);
        dialog.setOptionType(optionType);
        dialog.setMessageType(messageType);
        dialog.setOptions(options);
        dialog.setInitialValue(initialValue);
        dialog.setX(x);
        dialog.setY(y);

        JSONObject jsono = new JSONObject(dialog);
        String jsCode = jsono.toString();
        AjaxContext ajaxContext = AjaxContextHelper.getAjaxContext();
        IDataItem item = createDataItem(dialog.getId(), jsCode);
        item.setFrameInfo(frameInfo);
        ajaxContext.addDataItem(item);
        //ajaxContext.suspend(dialog);
        /*
        String returnTypeStr = ajaxContext.getRequest().getParameter("returnType");
        int returnType = Integer.valueOf(returnTypeStr).intValue();
        if(returnType == OK_OPTION)
        {
            String returnResultStr = ajaxContext.getRequest().getParameter("returnResult");
            return Integer.valueOf(returnResultStr).intValue();
        }
        else
        {
            return -1;
        }
        */
        return -1;
    }

    /**
     * Brings up a dialog with the options Yes and No with the title
     * @param message
     * @param title
     * @return ReturnType
     */
    public static int showConfirmDialog(String message, String title) {
        return showConfirmDialog(message, title, Dialog.DEF_X, Dialog.DEF_Y);
    }

    /**
     * Brings up a dialog with the options Yes and No with the title
     * @param message
     * @param title
     * @param x
     * @param y
     * @return
     */
    public static int showConfirmDialog(String message, String title, int x, int y) {
        return showConfirmDialog(message, title, Dialog.DEF_OPTION_TYPE, Dialog.DEF_MESSAGE_TYPE, x, y);
    }

    /**
     * Brings up a dialog where the number of choices is determined by the optionType parameter.
     * Unlike the frameInfo overload, no frame info is attached to the data item.
     * @param message
     * @param title
     * @param optionType
     * @param messageType
     * @return ReturnType (currently always -1 — the blocking suspend path is disabled)
     */
    public static int showConfirmDialog(String message, String title,
            int optionType, int messageType, int x, int y) {
        Dialog dialog = new Dialog("dialog");
        dialog.setDialogType(CONFIRM_TYPE);
        dialog.setMessage(message);
        dialog.setTitle(title);
        dialog.setOptionType(optionType);
        dialog.setMessageType(messageType);
        dialog.setX(x);
        dialog.setY(y);

        JSONObject jsono = new JSONObject(dialog);
        String jsCode = jsono.toString();
        AjaxContext ajaxContext = AjaxContextHelper.getAjaxContext();
        ajaxContext.addDataItem(createDataItem(dialog.getId(), jsCode));
        //ajaxContext.suspend(dialog);
        /*
        String returnTypeStr = ajaxContext.getRequest().getParameter("returnType");
        return Integer.valueOf(returnTypeStr).intValue();
        */
        return -1;
    }

    /**
     * Prompts the user for input in a blocking dialog.
     * @param message
     * @param title
     * @param messageType
     * @return client's input String if client clicks "Ok" option, else return null
     */
    public static String showInputDialog(String message, String title, int messageType) {
        return showInputDialog(message,title,messageType,Dialog.DEF_X,Dialog.DEF_Y);
    }

    /**
     * Prompts the user for input in a blocking dialog.
     *
     * @param message
     * @param title
     * @param messageType
     * @param x
     * @param y
     * @return currently always "" — the blocking suspend path is disabled
     */
    public static String showInputDialog(String message, String title, int messageType, int x, int y) {
        Dialog dialog = new Dialog("dialog");
        dialog.setDialogType(INPUT_TYPE);
        dialog.setMessage(message);
        dialog.setTitle(title);
        dialog.setMessageType(messageType);
        dialog.setX(x);
        dialog.setY(y);

        JSONObject jsono = new JSONObject(dialog);
        String jsCode = jsono.toString();
        AjaxContext ajaxContext = AjaxContextHelper.getAjaxContext();
        ajaxContext.addDataItem(createDataItem(dialog.getId(), jsCode));
        //ajaxContext.suspend(dialog);
        /*
        String returnTypeStr = ajaxContext.getRequest().getParameter("returnType");
        int returnType = Integer.valueOf(returnTypeStr).intValue();
        if(returnType == OK_OPTION)
        {
            String returnResultStr = ajaxContext.getRequest().getParameter("returnResult");
            return returnResultStr;
        }
        else
        {
            return null;
        }
        */
        return "";
    }

    /**
     * Brings up a dialog displaying a message.
     * @param message
     * @param title
     * @param messageType
     */
    public static void showMessageDialog(String message, String title, int messageType) {
        showMessageDialog(message,title,messageType,Dialog.DEF_X,Dialog.DEF_Y);
    }

    /**
     * Brings up a dialog displaying a message.
     *
     * @param message
     * @param title
     * @param messageType
     * @param x
     * @param y
     */
    public static void showMessageDialog(String message, String title,
            int messageType,int x, int y) {
        Dialog dialog = new Dialog("dialog");
        dialog.setDialogType(MESSAGE_TYPE);
        dialog.setOptionType(OK_ONLY_OPTION);
        dialog.setMessage(message);
        dialog.setTitle(title);
        dialog.setMessageType(messageType);
        dialog.setX(x);
        dialog.setY(y);

        JSONObject jsono = new JSONObject(dialog);
        String jsCode = jsono.toString();
        AjaxContext ajaxContext = AjaxContextHelper.getAjaxContext();
        ajaxContext.addDataItem(createDataItem(dialog.getId(), jsCode));
        //ajaxContext.suspend(dialog);
        return;
    }

    /**
     * Brings up a dialog where the initial choice is determined by the initialValue parameter
     * and the number of choices is determined by the optionType parameter.
     * @param message
     * @param title
     * @param optionType
     * @param messageType
     * @param options
     * @param initialValue determine the initial choice of options
     * @return return the selection from client if clicks "Ok" option,
     *         else return -1
     */
    public static int showOptionDialog(String message, String title, int optionType,
            int messageType, String[] options, int initialValue) {
        return showOptionDialog(message,title,optionType,messageType,options,initialValue,Dialog.DEF_X,Dialog.DEF_Y);
    }

    /**
     * Brings up a dialog where the initial choice is determined by the initialValue parameter
     * and the number of choices is determined by the optionType parameter.
     *
     * @param message
     * @param title
     * @param optionType
     * @param messageType
     * @param options
     * @param initialValue
     * @param x
     * @param y
     * @return currently always -1 — the blocking suspend path is disabled
     */
    public static int showOptionDialog(String message, String title, int optionType,
            int messageType, String[] options, int initialValue,int x, int y) {
        Dialog dialog = new Dialog("dialog");
        dialog.setDialogType(OPTION_TYPE);
        dialog.setMessage(message);
        dialog.setTitle(title);
        dialog.setOptionType(optionType);
        dialog.setMessageType(messageType);
        dialog.setOptions(options);
        dialog.setInitialValue(initialValue);
        dialog.setX(x);
        dialog.setY(y);

        JSONObject jsono = new JSONObject(dialog);
        String jsCode = jsono.toString();
        AjaxContext ajaxContext = AjaxContextHelper.getAjaxContext();
        ajaxContext.addDataItem(createDataItem(dialog.getId(), jsCode));
        //ajaxContext.suspend(dialog);
        /*
        String returnTypeStr = ajaxContext.getRequest().getParameter("returnType");
        int returnType = Integer.valueOf(returnTypeStr).intValue();
        if(returnType == OK_OPTION)
        {
            String returnResultStr = ajaxContext.getRequest().getParameter("returnResult");
            return Integer.valueOf(returnResultStr).intValue();
        }
        else
        {
            return -1;
        }
        */
        return -1;
    }

    /**
     * Wraps the serialized dialog JSON in a data item tagged with the
     * OPEN_DIALOG handler. NOTE(review): the 'uiid' parameter is never used —
     * presumably it was meant to be set on the data item; confirm with the
     * IDataItem API before removing or wiring it up.
     */
    private static IDataItem createDataItem(String uiid, String data) {
        IDataItem dataItem = AjaxContextHelper.createDataItem();
        dataItem.setJsHandler(IJSHandlerCollections.OPEN_DIALOG);
        dataItem.setData(data);
        return dataItem;
    }

}
/* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.camunda.bpm.engine.rest.dto.task;

import org.camunda.bpm.engine.BadUserRequestException;
import org.camunda.bpm.engine.rest.dto.converter.DelegationStateConverter;
import org.camunda.bpm.engine.task.DelegationState;
import org.camunda.bpm.engine.task.Task;

import java.util.Date;

/**
 * REST transfer object for an engine {@link Task}. Use {@link #fromEntity(Task)}
 * to build a DTO from an engine task and {@link #updateTask(Task)} to copy the
 * writable fields of this DTO back onto an engine task.
 *
 * <p>Fix over the previous revision: the bean was asymmetric — several fields
 * (created, executionId, processDefinitionId, processInstanceId,
 * taskDefinitionKey, caseExecutionId, caseDefinitionId, suspended, formKey)
 * had getters but no setters, which prevents full JSON round-tripping by
 * bean-convention (de)serializers. Matching setters are now provided; all
 * existing accessors are unchanged, so the change is backward compatible.</p>
 */
public class TaskDto {

  private String id;
  private String name;
  private String assignee;
  private Date created;
  private Date due;
  private Date followUp;
  private String delegationState;
  private String description;
  private String executionId;
  private String owner;
  private String parentTaskId;
  private int priority;
  private String processDefinitionId;
  private String processInstanceId;
  private String taskDefinitionKey;
  private String caseExecutionId;
  private String caseInstanceId;
  private String caseDefinitionId;
  private boolean suspended;
  private String formKey;

  public String getId() {
    return id;
  }

  public void setId(String id) {
    this.id = id;
  }

  public String getName() {
    return name;
  }

  public void setName(String name) {
    this.name = name;
  }

  public String getAssignee() {
    return assignee;
  }

  public void setAssignee(String assignee) {
    this.assignee = assignee;
  }

  public Date getCreated() {
    return created;
  }

  public void setCreated(Date created) {
    this.created = created;
  }

  public Date getDue() {
    return due;
  }

  public void setDue(Date due) {
    this.due = due;
  }

  public String getDelegationState() {
    return delegationState;
  }

  public void setDelegationState(String delegationState) {
    this.delegationState = delegationState;
  }

  public String getDescription() {
    return description;
  }

  public void setDescription(String description) {
    this.description = description;
  }

  public String getExecutionId() {
    return executionId;
  }

  public void setExecutionId(String executionId) {
    this.executionId = executionId;
  }

  public String getOwner() {
    return owner;
  }

  public void setOwner(String owner) {
    this.owner = owner;
  }

  public String getParentTaskId() {
    return parentTaskId;
  }

  public void setParentTaskId(String parentTaskId) {
    this.parentTaskId = parentTaskId;
  }

  public int getPriority() {
    return priority;
  }

  public void setPriority(int priority) {
    this.priority = priority;
  }

  public String getProcessDefinitionId() {
    return processDefinitionId;
  }

  public void setProcessDefinitionId(String processDefinitionId) {
    this.processDefinitionId = processDefinitionId;
  }

  public String getProcessInstanceId() {
    return processInstanceId;
  }

  public void setProcessInstanceId(String processInstanceId) {
    this.processInstanceId = processInstanceId;
  }

  public String getTaskDefinitionKey() {
    return taskDefinitionKey;
  }

  public void setTaskDefinitionKey(String taskDefinitionKey) {
    this.taskDefinitionKey = taskDefinitionKey;
  }

  public Date getFollowUp() {
    return followUp;
  }

  public void setFollowUp(Date followUp) {
    this.followUp = followUp;
  }

  public String getCaseDefinitionId() {
    return caseDefinitionId;
  }

  public void setCaseDefinitionId(String caseDefinitionId) {
    this.caseDefinitionId = caseDefinitionId;
  }

  public String getCaseExecutionId() {
    return caseExecutionId;
  }

  public void setCaseExecutionId(String caseExecutionId) {
    this.caseExecutionId = caseExecutionId;
  }

  public String getCaseInstanceId() {
    return caseInstanceId;
  }

  public void setCaseInstanceId(String caseInstanceId) {
    this.caseInstanceId = caseInstanceId;
  }

  public boolean isSuspended() {
    return suspended;
  }

  public void setSuspended(boolean suspended) {
    this.suspended = suspended;
  }

  public String getFormKey() {
    return formKey;
  }

  public void setFormKey(String formKey) {
    this.formKey = formKey;
  }

  /**
   * Builds a DTO snapshot of the given engine task.
   *
   * @param task the engine task to copy from
   * @return a new TaskDto mirroring the task's state
   */
  public static TaskDto fromEntity(Task task) {
    TaskDto dto = new TaskDto();
    dto.id = task.getId();
    dto.name = task.getName();
    dto.assignee = task.getAssignee();
    dto.created = task.getCreateTime();
    dto.due = task.getDueDate();
    dto.followUp = task.getFollowUpDate();
    // DelegationState is an enum; only render it when present.
    if (task.getDelegationState() != null) {
      dto.delegationState = task.getDelegationState().toString();
    }
    dto.description = task.getDescription();
    dto.executionId = task.getExecutionId();
    dto.owner = task.getOwner();
    dto.parentTaskId = task.getParentTaskId();
    dto.priority = task.getPriority();
    dto.processDefinitionId = task.getProcessDefinitionId();
    dto.processInstanceId = task.getProcessInstanceId();
    dto.taskDefinitionKey = task.getTaskDefinitionKey();
    dto.caseDefinitionId = task.getCaseDefinitionId();
    dto.caseExecutionId = task.getCaseExecutionId();
    dto.caseInstanceId = task.getCaseInstanceId();
    dto.suspended = task.isSuspended();

    try {
      dto.formKey = task.getFormKey();
    } catch (BadUserRequestException e) {
      // ignore (initializeFormKeys was not called): the form key is simply
      // left unset rather than failing the whole DTO conversion.
    }

    return dto;
  }

  /**
   * Copies this DTO's writable fields onto the given engine task. A null
   * delegationState clears the task's delegation state.
   *
   * @param task the engine task to update in place
   */
  public void updateTask(Task task) {
    task.setName(getName());
    task.setDescription(getDescription());
    task.setPriority(getPriority());
    task.setAssignee(getAssignee());
    task.setOwner(getOwner());

    DelegationState state = null;
    if (getDelegationState() != null) {
      DelegationStateConverter converter = new DelegationStateConverter();
      state = converter.convertQueryParameterToType(getDelegationState());
    }
    task.setDelegationState(state);

    task.setDueDate(getDue());
    task.setFollowUpDate(getFollowUp());
    task.setParentTaskId(getParentTaskId());
    task.setCaseInstanceId(getCaseInstanceId());
  }
}
/* * Copyright 2016 Red Hat, Inc. and/or its affiliates * and other contributors as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.keycloak.testsuite.forms; import org.keycloak.authentication.actiontoken.resetcred.ResetCredentialsActionToken; import org.jboss.arquillian.graphene.page.Page; import org.keycloak.events.Details; import org.keycloak.events.Errors; import org.keycloak.events.EventType; import org.keycloak.representations.idm.EventRepresentation; import org.keycloak.representations.idm.RealmRepresentation; import org.keycloak.representations.idm.UserRepresentation; import org.keycloak.testsuite.AssertEvents; import org.keycloak.testsuite.AbstractTestRealmKeycloakTest; import org.keycloak.testsuite.admin.ApiUtil; import org.keycloak.testsuite.pages.AppPage; import org.keycloak.testsuite.pages.AppPage.RequestType; import org.keycloak.testsuite.pages.ErrorPage; import org.keycloak.testsuite.pages.InfoPage; import org.keycloak.testsuite.pages.LoginPage; import org.keycloak.testsuite.pages.LoginPasswordResetPage; import org.keycloak.testsuite.pages.LoginPasswordUpdatePage; import org.keycloak.testsuite.pages.VerifyEmailPage; import org.keycloak.testsuite.util.GreenMailRule; import org.keycloak.testsuite.util.MailUtils; import org.keycloak.testsuite.util.OAuthClient; import org.keycloak.testsuite.util.UserBuilder; import javax.mail.MessagingException; import javax.mail.Multipart; import javax.mail.internet.MimeMessage; import 
java.io.IOException; import java.util.HashMap; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; import org.junit.*; import static org.junit.Assert.*; /** * @author <a href="mailto:sthorger@redhat.com">Stian Thorgersen</a> * @author Stan Silvert ssilvert@redhat.com (C) 2016 Red Hat Inc. */ public class ResetPasswordTest extends AbstractTestRealmKeycloakTest { private String userId; @Override public void configureTestRealm(RealmRepresentation testRealm) { } @Before public void setup() { UserRepresentation user = UserBuilder.create() .username("login-test") .email("login@test.com") .enabled(true) .build(); userId = ApiUtil.createUserAndResetPasswordWithAdminClient(testRealm(), user, "password"); expectedMessagesCount = 0; getCleanup().addUserId(userId); } @Rule public GreenMailRule greenMail = new GreenMailRule(); @Page protected AppPage appPage; @Page protected LoginPage loginPage; @Page protected ErrorPage errorPage; @Page protected InfoPage infoPage; @Page protected VerifyEmailPage verifyEmailPage; @Page protected LoginPasswordResetPage resetPasswordPage; @Page protected LoginPasswordUpdatePage updatePasswordPage; @Rule public AssertEvents events = new AssertEvents(this); private int expectedMessagesCount; @Test public void resetPasswordLink() throws IOException, MessagingException { String username = "login-test"; String resetUri = oauth.AUTH_SERVER_ROOT + "/realms/test/login-actions/reset-credentials"; driver.navigate().to(resetUri); resetPasswordPage.assertCurrent(); resetPasswordPage.changePassword(username); loginPage.assertCurrent(); assertEquals("You should receive an email shortly with further instructions.", loginPage.getSuccessMessage()); events.expectRequiredAction(EventType.SEND_RESET_PASSWORD) .user(userId) .detail(Details.REDIRECT_URI, oauth.AUTH_SERVER_ROOT + "/realms/test/account/") .client("account") .detail(Details.USERNAME, username) .detail(Details.EMAIL, "login@test.com") .session((String)null) .assertEvent(); 
assertEquals(1, greenMail.getReceivedMessages().length); MimeMessage message = greenMail.getReceivedMessages()[0]; String changePasswordUrl = getPasswordResetEmailLink(message); driver.navigate().to(changePasswordUrl.trim()); updatePasswordPage.assertCurrent(); updatePasswordPage.changePassword("resetPassword", "resetPassword"); events.expectRequiredAction(EventType.UPDATE_PASSWORD) .detail(Details.REDIRECT_URI, oauth.AUTH_SERVER_ROOT + "/realms/test/account/") .client("account") .user(userId).detail(Details.USERNAME, username).assertEvent(); String sessionId = events.expectLogin().user(userId).detail(Details.USERNAME, username) .detail(Details.REDIRECT_URI, oauth.AUTH_SERVER_ROOT + "/realms/test/account/") .client("account") .assertEvent().getSessionId(); oauth.openLogout(); events.expectLogout(sessionId).user(userId).session(sessionId).assertEvent(); loginPage.open(); loginPage.login("login-test", "resetPassword"); events.expectLogin().user(userId).detail(Details.USERNAME, "login-test").assertEvent(); assertEquals(RequestType.AUTH_RESPONSE, appPage.getRequestType()); } @Test public void resetPassword() throws IOException, MessagingException { resetPassword("login-test"); } @Test public void resetPasswordTwice() throws IOException, MessagingException { String changePasswordUrl = resetPassword("login-test"); events.clear(); assertSecondPasswordResetFails(changePasswordUrl, null); // KC_RESTART doesn't exists, it was deleted after first successful reset-password flow was finished } @Test public void resetPasswordTwiceInNewBrowser() throws IOException, MessagingException { String changePasswordUrl = resetPassword("login-test"); events.clear(); String resetUri = oauth.AUTH_SERVER_ROOT + "/realms/test/login-actions/reset-credentials"; driver.navigate().to(resetUri); // This is necessary to delete KC_RESTART cookie that is restricted to /auth/realms/test path driver.manage().deleteAllCookies(); assertSecondPasswordResetFails(changePasswordUrl, null); } public void 
assertSecondPasswordResetFails(String changePasswordUrl, String clientId) { driver.navigate().to(changePasswordUrl.trim()); errorPage.assertCurrent(); assertEquals("Action expired. Please continue with login now.", errorPage.getError()); events.expect(EventType.RESET_PASSWORD) .client("account") .session((String) null) .user(userId) .error(Errors.EXPIRED_CODE) .assertEvent(); } @Test public void resetPasswordWithSpacesInUsername() throws IOException, MessagingException { resetPassword(" login-test "); } @Test public void resetPasswordCancelChangeUser() throws IOException, MessagingException { initiateResetPasswordFromResetPasswordPage("test-user@localhost"); events.expectRequiredAction(EventType.SEND_RESET_PASSWORD).detail(Details.USERNAME, "test-user@localhost") .session((String) null) .detail(Details.EMAIL, "test-user@localhost").assertEvent(); loginPage.login("login@test.com", "password"); EventRepresentation loginEvent = events.expectLogin().user(userId).detail(Details.USERNAME, "login@test.com").assertEvent(); String code = oauth.getCurrentQuery().get("code"); OAuthClient.AccessTokenResponse tokenResponse = oauth.doAccessTokenRequest(code, "password"); assertEquals(200, tokenResponse.getStatusCode()); assertEquals(userId, oauth.verifyToken(tokenResponse.getAccessToken()).getSubject()); events.expectCodeToToken(loginEvent.getDetails().get(Details.CODE_ID), loginEvent.getSessionId()).user(userId).assertEvent(); } @Test public void resetPasswordByEmail() throws IOException, MessagingException { resetPassword("login@test.com"); } private String resetPassword(String username) throws IOException, MessagingException { return resetPassword(username, "resetPassword"); } private String resetPassword(String username, String password) throws IOException, MessagingException { initiateResetPasswordFromResetPasswordPage(username); events.expectRequiredAction(EventType.SEND_RESET_PASSWORD) .user(userId) .detail(Details.USERNAME, username.trim()) .detail(Details.EMAIL, 
"login@test.com") .session((String)null) .assertEvent(); assertEquals(expectedMessagesCount, greenMail.getReceivedMessages().length); MimeMessage message = greenMail.getReceivedMessages()[greenMail.getReceivedMessages().length - 1]; String changePasswordUrl = getPasswordResetEmailLink(message); driver.navigate().to(changePasswordUrl.trim()); updatePasswordPage.assertCurrent(); updatePasswordPage.changePassword(password, password); events.expectRequiredAction(EventType.UPDATE_PASSWORD).user(userId).detail(Details.USERNAME, username.trim()).assertEvent(); assertEquals(RequestType.AUTH_RESPONSE, appPage.getRequestType()); String sessionId = events.expectLogin().user(userId).detail(Details.USERNAME, username.trim()).assertEvent().getSessionId(); oauth.openLogout(); events.expectLogout(sessionId).user(userId).session(sessionId).assertEvent(); loginPage.open(); loginPage.login("login-test", password); sessionId = events.expectLogin().user(userId).detail(Details.USERNAME, "login-test").assertEvent().getSessionId(); assertEquals(RequestType.AUTH_RESPONSE, appPage.getRequestType()); oauth.openLogout(); events.expectLogout(sessionId).user(userId).session(sessionId).assertEvent(); return changePasswordUrl; } private void resetPasswordInvalidPassword(String username, String password, String error) throws IOException, MessagingException { initiateResetPasswordFromResetPasswordPage(username); events.expectRequiredAction(EventType.SEND_RESET_PASSWORD).user(userId).session((String) null) .detail(Details.USERNAME, username).detail(Details.EMAIL, "login@test.com").assertEvent(); assertEquals(expectedMessagesCount, greenMail.getReceivedMessages().length); MimeMessage message = greenMail.getReceivedMessages()[greenMail.getReceivedMessages().length - 1]; String changePasswordUrl = getPasswordResetEmailLink(message); driver.navigate().to(changePasswordUrl.trim()); updatePasswordPage.assertCurrent(); updatePasswordPage.changePassword(password, password); 
updatePasswordPage.assertCurrent(); assertEquals(error, updatePasswordPage.getError()); events.expectRequiredAction(EventType.UPDATE_PASSWORD_ERROR).error(Errors.PASSWORD_REJECTED).user(userId).detail(Details.USERNAME, "login-test").assertEvent().getSessionId(); } private void initiateResetPasswordFromResetPasswordPage(String username) { loginPage.open(); loginPage.resetPassword(); resetPasswordPage.assertCurrent(); resetPasswordPage.changePassword(username); loginPage.assertCurrent(); assertEquals("You should receive an email shortly with further instructions.", loginPage.getSuccessMessage()); expectedMessagesCount++; } @Test public void resetPasswordWrongEmail() throws IOException, MessagingException, InterruptedException { initiateResetPasswordFromResetPasswordPage("invalid"); assertEquals(0, greenMail.getReceivedMessages().length); events.expectRequiredAction(EventType.RESET_PASSWORD).user((String) null).session((String) null).detail(Details.USERNAME, "invalid").removeDetail(Details.EMAIL).removeDetail(Details.CODE_ID).error("user_not_found").assertEvent(); } @Test public void resetPasswordMissingUsername() throws IOException, MessagingException, InterruptedException { loginPage.open(); loginPage.resetPassword(); resetPasswordPage.assertCurrent(); resetPasswordPage.changePassword(""); resetPasswordPage.assertCurrent(); assertEquals("Please specify username.", resetPasswordPage.getErrorMessage()); assertEquals(0, greenMail.getReceivedMessages().length); events.expectRequiredAction(EventType.RESET_PASSWORD).user((String) null).session((String) null).clearDetails().error("username_missing").assertEvent(); } @Test public void resetPasswordExpiredCode() throws IOException, MessagingException, InterruptedException { initiateResetPasswordFromResetPasswordPage("login-test"); events.expectRequiredAction(EventType.SEND_RESET_PASSWORD) .session((String)null) .user(userId).detail(Details.USERNAME, "login-test").detail(Details.EMAIL, "login@test.com").assertEvent(); 
assertEquals(1, greenMail.getReceivedMessages().length); MimeMessage message = greenMail.getReceivedMessages()[0]; String changePasswordUrl = getPasswordResetEmailLink(message); try { setTimeOffset(1800 + 23); driver.navigate().to(changePasswordUrl.trim()); loginPage.assertCurrent(); assertEquals("You took too long to login. Login process starting from beginning.", loginPage.getError()); events.expectRequiredAction(EventType.EXECUTE_ACTION_TOKEN_ERROR).error("expired_code").client((String) null).user(userId).session((String) null).clearDetails().detail(Details.ACTION, ResetCredentialsActionToken.TOKEN_TYPE).assertEvent(); } finally { setTimeOffset(0); } } @Test public void resetPasswordExpiredCodeShort() throws IOException, MessagingException, InterruptedException { final AtomicInteger originalValue = new AtomicInteger(); RealmRepresentation realmRep = testRealm().toRepresentation(); originalValue.set(realmRep.getActionTokenGeneratedByUserLifespan()); realmRep.setActionTokenGeneratedByUserLifespan(60); testRealm().update(realmRep); try { initiateResetPasswordFromResetPasswordPage("login-test"); events.expectRequiredAction(EventType.SEND_RESET_PASSWORD) .session((String)null) .user(userId).detail(Details.USERNAME, "login-test").detail(Details.EMAIL, "login@test.com").assertEvent(); assertEquals(1, greenMail.getReceivedMessages().length); MimeMessage message = greenMail.getReceivedMessages()[0]; String changePasswordUrl = getPasswordResetEmailLink(message); setTimeOffset(70); driver.navigate().to(changePasswordUrl.trim()); loginPage.assertCurrent(); assertEquals("You took too long to login. 
Login process starting from beginning.", loginPage.getError()); events.expectRequiredAction(EventType.EXECUTE_ACTION_TOKEN_ERROR).error("expired_code").client((String) null).user(userId).session((String) null).clearDetails().detail(Details.ACTION, ResetCredentialsActionToken.TOKEN_TYPE).assertEvent(); } finally { setTimeOffset(0); realmRep.setActionTokenGeneratedByUserLifespan(originalValue.get()); testRealm().update(realmRep); } } // KEYCLOAK-4016 @Test public void resetPasswordExpiredCodeAndAuthSession() throws IOException, MessagingException, InterruptedException { final AtomicInteger originalValue = new AtomicInteger(); RealmRepresentation realmRep = testRealm().toRepresentation(); originalValue.set(realmRep.getActionTokenGeneratedByUserLifespan()); realmRep.setActionTokenGeneratedByUserLifespan(60); testRealm().update(realmRep); try { initiateResetPasswordFromResetPasswordPage("login-test"); events.expectRequiredAction(EventType.SEND_RESET_PASSWORD) .session((String)null) .user(userId).detail(Details.USERNAME, "login-test").detail(Details.EMAIL, "login@test.com").assertEvent(); assertEquals(1, greenMail.getReceivedMessages().length); MimeMessage message = greenMail.getReceivedMessages()[0]; String changePasswordUrl = getPasswordResetEmailLink(message); setTimeOffset(70); log.debug("Going to reset password URI."); driver.navigate().to(oauth.AUTH_SERVER_ROOT + "/realms/test/login-actions/reset-credentials"); // This is necessary to delete KC_RESTART cookie that is restricted to /auth/realms/test path log.debug("Removing cookies."); driver.manage().deleteAllCookies(); driver.navigate().to(changePasswordUrl.trim()); errorPage.assertCurrent(); Assert.assertEquals("Reset Credential not allowed", errorPage.getError()); String backToAppLink = errorPage.getBackToApplicationLink(); Assert.assertTrue(backToAppLink.endsWith("/app/auth")); events.expectRequiredAction(EventType.EXECUTE_ACTION_TOKEN_ERROR).error("expired_code").client((String) 
null).user(userId).session((String) null).clearDetails().detail(Details.ACTION, ResetCredentialsActionToken.TOKEN_TYPE).assertEvent(); } finally { setTimeOffset(0); realmRep.setActionTokenGeneratedByUserLifespan(originalValue.get()); testRealm().update(realmRep); } } @Test public void resetPasswordDisabledUser() throws IOException, MessagingException, InterruptedException { UserRepresentation user = findUser("login-test"); try { user.setEnabled(false); updateUser(user); initiateResetPasswordFromResetPasswordPage("login-test"); assertEquals(0, greenMail.getReceivedMessages().length); events.expectRequiredAction(EventType.RESET_PASSWORD).session((String) null).user(userId).detail(Details.USERNAME, "login-test").removeDetail(Details.CODE_ID).error("user_disabled").assertEvent(); } finally { user.setEnabled(true); updateUser(user); } } @Test public void resetPasswordNoEmail() throws IOException, MessagingException, InterruptedException { final String email; UserRepresentation user = findUser("login-test"); email = user.getEmail(); try { user.setEmail(""); updateUser(user); initiateResetPasswordFromResetPasswordPage("login-test"); assertEquals(0, greenMail.getReceivedMessages().length); events.expectRequiredAction(EventType.RESET_PASSWORD_ERROR).session((String) null).user(userId).detail(Details.USERNAME, "login-test").removeDetail(Details.CODE_ID).error("invalid_email").assertEvent(); } finally { user.setEmail(email); updateUser(user); } } @Test public void resetPasswordWrongSmtp() throws IOException, MessagingException, InterruptedException { final String[] host = new String[1]; Map<String, String> smtpConfig = new HashMap<>(); smtpConfig.putAll(testRealm().toRepresentation().getSmtpServer()); host[0] = smtpConfig.get("host"); smtpConfig.put("host", "invalid_host"); RealmRepresentation realmRep = testRealm().toRepresentation(); Map<String, String> oldSmtp = realmRep.getSmtpServer(); try { realmRep.setSmtpServer(smtpConfig); testRealm().update(realmRep); 
loginPage.open(); loginPage.resetPassword(); resetPasswordPage.assertCurrent(); resetPasswordPage.changePassword("login-test"); errorPage.assertCurrent(); assertEquals("Failed to send email, please try again later.", errorPage.getError()); assertEquals(0, greenMail.getReceivedMessages().length); events.expectRequiredAction(EventType.SEND_RESET_PASSWORD_ERROR).user(userId) .session((String)null) .detail(Details.USERNAME, "login-test").removeDetail(Details.CODE_ID).error(Errors.EMAIL_SEND_FAILED).assertEvent(); } finally { // Revert SMTP back realmRep.setSmtpServer(oldSmtp); testRealm().update(realmRep); } } private void setPasswordPolicy(String policy) { RealmRepresentation realmRep = testRealm().toRepresentation(); realmRep.setPasswordPolicy(policy); testRealm().update(realmRep); } @Test public void resetPasswordWithLengthPasswordPolicy() throws IOException, MessagingException { setPasswordPolicy("length"); initiateResetPasswordFromResetPasswordPage("login-test"); assertEquals(1, greenMail.getReceivedMessages().length); MimeMessage message = greenMail.getReceivedMessages()[0]; String changePasswordUrl = getPasswordResetEmailLink(message); events.expectRequiredAction(EventType.SEND_RESET_PASSWORD).session((String)null).user(userId).detail(Details.USERNAME, "login-test").detail(Details.EMAIL, "login@test.com").assertEvent(); driver.navigate().to(changePasswordUrl.trim()); updatePasswordPage.assertCurrent(); updatePasswordPage.changePassword("invalid", "invalid"); assertEquals("Invalid password: minimum length 8.", resetPasswordPage.getErrorMessage()); events.expectRequiredAction(EventType.UPDATE_PASSWORD_ERROR).error(Errors.PASSWORD_REJECTED).user(userId).detail(Details.USERNAME, "login-test").assertEvent().getSessionId(); updatePasswordPage.changePassword("resetPasswordWithPasswordPolicy", "resetPasswordWithPasswordPolicy"); events.expectRequiredAction(EventType.UPDATE_PASSWORD).user(userId).detail(Details.USERNAME, "login-test").assertEvent().getSessionId(); 
assertEquals(RequestType.AUTH_RESPONSE, appPage.getRequestType()); String sessionId = events.expectLogin().user(userId).detail(Details.USERNAME, "login-test").assertEvent().getSessionId(); oauth.openLogout(); events.expectLogout(sessionId).user(userId).session(sessionId).assertEvent(); loginPage.open(); loginPage.login("login-test", "resetPasswordWithPasswordPolicy"); assertEquals(RequestType.AUTH_RESPONSE, appPage.getRequestType()); events.expectLogin().user(userId).detail(Details.USERNAME, "login-test").assertEvent(); } @Test public void resetPasswordWithPasswordHistoryPolicy() throws IOException, MessagingException { //Block passwords that are equal to previous passwords. Default value is 3. setPasswordPolicy("passwordHistory"); try { setTimeOffset(2000000); resetPassword("login-test", "password1"); resetPasswordInvalidPassword("login-test", "password1", "Invalid password: must not be equal to any of last 3 passwords."); setTimeOffset(4000000); resetPassword("login-test", "password2"); resetPasswordInvalidPassword("login-test", "password1", "Invalid password: must not be equal to any of last 3 passwords."); resetPasswordInvalidPassword("login-test", "password2", "Invalid password: must not be equal to any of last 3 passwords."); setTimeOffset(6000000); resetPassword("login-test", "password3"); resetPasswordInvalidPassword("login-test", "password1", "Invalid password: must not be equal to any of last 3 passwords."); resetPasswordInvalidPassword("login-test", "password2", "Invalid password: must not be equal to any of last 3 passwords."); resetPasswordInvalidPassword("login-test", "password3", "Invalid password: must not be equal to any of last 3 passwords."); setTimeOffset(8000000); resetPassword("login-test", "password"); } finally { setTimeOffset(0); } } @Test public void resetPasswordLinkOpenedInNewBrowser() throws IOException, MessagingException { String username = "login-test"; String resetUri = oauth.AUTH_SERVER_ROOT + 
"/realms/test/login-actions/reset-credentials"; driver.navigate().to(resetUri); resetPasswordPage.assertCurrent(); resetPasswordPage.changePassword(username); log.info("Should be at login page again."); loginPage.assertCurrent(); assertEquals("You should receive an email shortly with further instructions.", loginPage.getSuccessMessage()); events.expectRequiredAction(EventType.SEND_RESET_PASSWORD) .user(userId) .detail(Details.REDIRECT_URI, oauth.AUTH_SERVER_ROOT + "/realms/test/account/") .client("account") .detail(Details.USERNAME, username) .detail(Details.EMAIL, "login@test.com") .session((String)null) .assertEvent(); assertEquals(1, greenMail.getReceivedMessages().length); MimeMessage message = greenMail.getReceivedMessages()[0]; String changePasswordUrl = getPasswordResetEmailLink(message); log.debug("Going to reset password URI."); driver.navigate().to(resetUri); // This is necessary to delete KC_RESTART cookie that is restricted to /auth/realms/test path log.debug("Removing cookies."); driver.manage().deleteAllCookies(); log.debug("Going to URI from e-mail."); driver.navigate().to(changePasswordUrl.trim()); // System.out.println(driver.getPageSource()); updatePasswordPage.assertCurrent(); updatePasswordPage.changePassword("resetPassword", "resetPassword"); infoPage.assertCurrent(); assertEquals("Your account has been updated.", infoPage.getInfo()); } public static String getPasswordResetEmailLink(MimeMessage message) throws IOException, MessagingException { Multipart multipart = (Multipart) message.getContent(); final String textContentType = multipart.getBodyPart(0).getContentType(); assertEquals("text/plain; charset=UTF-8", textContentType); final String textBody = (String) multipart.getBodyPart(0).getContent(); final String textChangePwdUrl = MailUtils.getLink(textBody); final String htmlContentType = multipart.getBodyPart(1).getContentType(); assertEquals("text/html; charset=UTF-8", htmlContentType); final String htmlBody = (String) 
multipart.getBodyPart(1).getContent(); final String htmlChangePwdUrl = MailUtils.getLink(htmlBody); assertEquals(htmlChangePwdUrl, textChangePwdUrl); return htmlChangePwdUrl; } }
package livefyre.activities; import android.content.DialogInterface; import android.content.Intent; import android.os.Bundle; import android.support.v7.widget.Toolbar; import android.text.Html; import android.util.Log; import android.view.View; import android.widget.EditText; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.ProgressBar; import android.widget.RatingBar; import android.widget.RelativeLayout; import android.widget.TextView; import com.livefyre.streamhub_android_sdk.network.WriteClient; import com.livefyre.streamhub_android_sdk.util.LFSConstants; import com.loopj.android.http.JsonHttpResponseHandler; import com.squareup.picasso.Callback; import com.squareup.picasso.Picasso; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import java.net.MalformedURLException; import java.util.ArrayList; import java.util.HashMap; import cz.msebera.android.httpclient.Header; import io.filepicker.Filepicker; import io.filepicker.models.FPFile; import livefyre.BaseActivity; import livefyre.LFSConfig; import livefyre.R; /** * Created by kvanadev5 on 16/06/15. 
*/ public class NewReviewActivity extends BaseActivity { private EditText newReviewTitleEt, newReviewProsEt, newReviewConsEt, newReviewBodyEt; private TextView activityTitle, actionTv; private ImageView capturedImage; private RelativeLayout deleteCapturedImage; private RatingBar newReviewRatingBar; private ProgressBar progressBar; private LinearLayout addPhotoLL; volatile Toolbar toolbar; private JSONObject imgObj; volatile String imgUrl; private void buildToolBar() { toolbar = (Toolbar) findViewById(R.id.app_bar); //disable title on toolbar // getSupportActionBar().setDisplayShowTitleEnabled(false); ImageView homeIcon = (ImageView) findViewById(R.id.activityIcon); activityTitle = (TextView) findViewById(R.id.activityTitle); actionTv = (TextView) findViewById(R.id.actionTv); homeIcon.setBackgroundResource(R.mipmap.livefyreflame); activityTitle.setText("New Review"); actionTv.setText("Post"); } private void pullViews() { newReviewTitleEt = (EditText) findViewById(R.id.newReviewTitleEt); newReviewProsEt = (EditText) findViewById(R.id.newReviewProsEt); newReviewConsEt = (EditText) findViewById(R.id.newReviewConsEt); newReviewBodyEt = (EditText) findViewById(R.id.newReviewBodyEt); newReviewRatingBar = (RatingBar) findViewById(R.id.newReviewRatingBar); capturedImage = (ImageView) findViewById(R.id.capturedImage); deleteCapturedImage = (RelativeLayout) findViewById(R.id.deleteCapturedImage); progressBar = (ProgressBar) findViewById(R.id.progressBar); addPhotoLL = (LinearLayout) findViewById(R.id.addPhotoLL); } private void setListenersToViews() { addPhotoLL.setOnClickListener(captureImageListener); deleteCapturedImage.setOnClickListener(deleteCapturedImageListener); actionTv.setOnClickListener(postReviewListener); } private View.OnClickListener postReviewListener = new View.OnClickListener() { public void onClick(View v) { String title = newReviewTitleEt.getText().toString(); String description = newReviewBodyEt.getText().toString(); String pros = 
newReviewProsEt.getText().toString(); String cons = newReviewConsEt.getText().toString(); int reviewRating = (int) (newReviewRatingBar.getRating() * 20); if (title.length() == 0) { ((EditText) findViewById(R.id.newReviewTitleEt)).setError("Enter Title"); return; } if (reviewRating == 0) { showAlert("Please give Rating.", "ok", tryAgain); return; } if (pros.length() == 0) { ((EditText) findViewById(R.id.newReviewProsEt)).setError("Enter Pros"); return; } if (cons.length() == 0) { ((EditText) findViewById(R.id.newReviewConsEt)).setError("Enter Cons"); return; } if (description.length() == 0) { ((EditText) findViewById(R.id.newReviewProsEt)).setError("Enter Description"); return; } String descriptionHTML = Html.toHtml(newReviewBodyEt.getText()); if (pros.length() > 0 || cons.length() > 0) { descriptionHTML = "<p><b>Pro</b><p>" + Html.toHtml(newReviewProsEt.getText()) + "</p></p>" + "<p><b>Cons</b><p>" + Html.toHtml(newReviewConsEt.getText()) + "</p></p>" + " <p><b>Description</b><p>" + descriptionHTML + "</p></p>"; } postNewReview(newReviewTitleEt.getText().toString(), descriptionHTML, reviewRating); } }; private DialogInterface.OnClickListener tryAgain = new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface arg0, int arg1) { } }; private void postNewReview(String title, String body, int reviewRating) { if (!isNetworkAvailable()) { showToast("Network Not Available"); return; } showProgressDialog(); HashMap<String, Object> parameters = new HashMap(); parameters.put(LFSConstants.LFSPostBodyKey, body); parameters.put(LFSConstants.LFSPostTitleKey, title); parameters.put(LFSConstants.LFSPostTypeReview, reviewRating); parameters .put(LFSConstants.LFSPostType, LFSConstants.LFSPostTypeReview); parameters.put(LFSConstants.LFSPostUserTokenKey, LFSConfig.USER_TOKEN); if (imgObj != null) parameters.put(LFSConstants.LFSPostAttachment, (new JSONArray().put(imgObj)).toString()); try { WriteClient.postContent( LFSConfig.COLLECTION_ID, null, 
LFSConfig.USER_TOKEN, parameters, new writeclientCallback()); } catch (MalformedURLException e) { e.printStackTrace(); } } private class writeclientCallback extends JsonHttpResponseHandler { @Override public void onSuccess(int statusCode, Header[] headers, JSONObject response) { super.onSuccess(statusCode, headers, response); dismissProgressDialog(); showAlert("Review Posted Successfully.", "OK", null); } @Override public void onSuccess(int statusCode, Header[] headers, JSONArray response) { super.onSuccess(statusCode, headers, response); dismissProgressDialog(); showAlert("Review Posted Successfully.", "OK", null); } @Override public void onSuccess(int statusCode, Header[] headers, String responseString) { super.onSuccess(statusCode, headers, responseString); dismissProgressDialog(); showAlert("Review Posted Successfully.", "OK", null); } @Override public void onFailure(int statusCode, Header[] headers, Throwable throwable, JSONObject errorResponse) { super.onFailure(statusCode, headers, throwable, errorResponse); dismissProgressDialog(); Log.d("data error", "" + errorResponse); try { if (!errorResponse.isNull("msg")) { showAlert(errorResponse.getString("msg"), "OK", null); } else { showAlert("Something went wrong.", "OK", null); } } catch (JSONException e) { e.printStackTrace(); showAlert("Something went wrong.", "OK", null); } } } private View.OnClickListener captureImageListener = new View.OnClickListener() { public void onClick(View v) { Intent intent = new Intent(NewReviewActivity.this, Filepicker.class); Filepicker.setKey(LFSConfig.FILEPICKER_API_KEY); startActivityForResult(intent, Filepicker.REQUEST_CODE_GETFILE); } }; private View.OnClickListener deleteCapturedImageListener = new View.OnClickListener() { public void onClick(View v) { addPhotoLL.setVisibility(View.VISIBLE); capturedImage.setVisibility(View.GONE); deleteCapturedImage.setVisibility(View.GONE); imgUrl = ""; imgObj = null; } }; // Dialog Listeners private DialogInterface.OnClickListener 
selectImageDialogAction = new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface arg0, int arg1) { Intent intent = new Intent(NewReviewActivity.this, Filepicker.class); Filepicker.setKey(LFSConfig.FILEPICKER_API_KEY); startActivityForResult(intent, Filepicker.REQUEST_CODE_GETFILE); } }; @Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { if (requestCode == Filepicker.REQUEST_CODE_GETFILE) { if (resultCode != RESULT_OK) { showAlert("No Image Selected.", "SELECT IMAGE", selectImageDialogAction); addPhotoLL.setVisibility(View.VISIBLE); capturedImage.setVisibility(View.GONE); deleteCapturedImage.setVisibility(View.GONE); return; } addPhotoLL.setVisibility(View.GONE); capturedImage.setVisibility(View.VISIBLE); deleteCapturedImage.setVisibility(View.VISIBLE); ArrayList<FPFile> fpFiles = data.getParcelableArrayListExtra(Filepicker.FPFILES_EXTRA); String imgUrl = fpFiles.get(0).getUrl(); Log.d("url", imgUrl + ""); try { imgObj = new JSONObject(); imgObj.put("link", imgUrl); imgObj.put("provider_name", "LivefyreFilePicker"); imgObj.put("thumbnail_url", imgUrl); imgObj.put("type", "photo"); imgObj.put("url", imgUrl); try { progressBar.setVisibility(View.VISIBLE); Picasso.with(getBaseContext()).load(imgUrl).fit().into(capturedImage, new ImageLoadCallBack()); } catch (Exception e) { e.printStackTrace(); } } catch (JSONException e) { e.printStackTrace(); } } } private class ImageLoadCallBack implements Callback { @Override public void onSuccess() { progressBar.setVisibility(View.GONE); } @Override public void onError() { //Hide } } @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_new_review); pullViews(); buildToolBar(); setListenersToViews(); } }
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.customize;

import com.intellij.ide.startup.StartupActionScriptManager;
import com.intellij.idea.SplashManager;
import com.intellij.idea.StartupUtil;
import com.intellij.openapi.application.ApplicationNamesInfo;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.ui.JBCardLayout;
import com.intellij.ui.components.JBLabel;
import com.intellij.util.ui.JBUI;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.ArrayList;
import java.util.List;

/**
 * First-start "Customize IDE" wizard: shows a sequence of
 * {@link AbstractCustomizeWizardStep}s in a card layout with Skip/Back/Next
 * navigation. The dialog closes itself immediately (CANCEL_EXIT_CODE) when the
 * provider yields no steps; callers should prefer {@link #showIfNeeded()}.
 */
public class CustomizeIDEWizardDialog extends DialogWrapper implements ActionListener {
  // Card ids for the south-panel wrapper (buttons shown / hidden).
  private static final String BUTTONS = "BUTTONS";
  private static final String NO_BUTTONS = "NO_BUTTONS";

  private final JButton mySkipButton = new JButton("Skip Remaining and Set Defaults");
  private final JButton myBackButton = new JButton("Back");
  private final JButton myNextButton = new JButton("Next");

  private final JBCardLayout myCardLayout = new JBCardLayout();
  private final List<AbstractCustomizeWizardStep> mySteps = new ArrayList<>();
  // Index of the currently shown step within mySteps.
  private int myIndex = 0;
  private final JBLabel myNavigationLabel = new JBLabel();
  private final JBLabel myHeaderLabel = new JBLabel();
  private final JBLabel myFooterLabel = new JBLabel();
  private final CardLayout myButtonWrapperLayout = new CardLayout();
  private final JPanel myButtonWrapper = new JPanel(myButtonWrapperLayout);
  private JPanel myContentPanel;
  private final boolean myHideSkipButton;

  public CustomizeIDEWizardDialog(@NotNull CustomizeIDEWizardStepsProvider stepsProvider) {
    this(stepsProvider, null, true, true);
  }

  /**
   * @param stepsProvider supplies the wizard steps
   * @param appStarter    optional hook that may pre-select the starting step index
   * @param beforeSplash  collect steps registered for the pre-splash phase
   * @param afterSplash   collect steps registered for the post-splash phase
   */
  public CustomizeIDEWizardDialog(@NotNull CustomizeIDEWizardStepsProvider stepsProvider,
                                  @Nullable StartupUtil.AppStarter appStarter,
                                  boolean beforeSplash, boolean afterSplash) {
    super(null, true, true);
    setTitle("Customize " + ApplicationNamesInfo.getInstance().getFullProductName());
    getPeer().setAppIcons();
    if (beforeSplash) stepsProvider.initSteps(this, mySteps);
    if (afterSplash) stepsProvider.initStepsAfterSplash(this, mySteps);
    if (appStarter != null) {
      // -1 means "no preference"; keep the default first step in that case.
      int newIndex = appStarter.customizeIdeWizardDialog(mySteps);
      if (newIndex != -1) {
        myIndex = newIndex;
      }
    }
    // Skip makes no sense for a single-step wizard, or when the provider opts out.
    myHideSkipButton = (mySteps.size() <= 1) || stepsProvider.hideSkipButton();
    if (mySteps.isEmpty()) {
      // Nothing to show: close before init() so the dialog never appears.
      close(CANCEL_EXIT_CODE);
      return;
    }
    mySkipButton.addActionListener(this);
    myBackButton.addActionListener(this);
    myNextButton.addActionListener(this);
    AbstractCustomizeWizardStep.applyHeaderFooterStyle(myNavigationLabel);
    AbstractCustomizeWizardStep.applyHeaderFooterStyle(myHeaderLabel);
    AbstractCustomizeWizardStep.applyHeaderFooterStyle(myFooterLabel);
    init();
    initCurrentStep(true);
    setSize(400, 300);
    // Flag read elsewhere at startup; cleared again in dispose().
    System.setProperty(StartupActionScriptManager.STARTUP_WIZARD_MODE, "true");
  }

  @Override
  public final void show() {
    if (mySteps.isEmpty()) {
      throw new IllegalStateException("no steps provided"); // use showIfNeeded() instead
    }
    CustomizeIDEWizardInteractions.INSTANCE.record(CustomizeIDEWizardInteractionType.WizardDisplayed);
    // Hide the splash screen while the wizard is on screen.
    SplashManager.executeWithHiddenSplash(getWindow(), () -> super.show());
  }

  /** Shows the dialog only when there are steps and it was not already closed; returns whether it was shown. */
  public final boolean showIfNeeded() {
    boolean willBeShown = !mySteps.isEmpty() && !isDisposed();
    if (willBeShown) {
      show();
    }
    return willBeShown;
  }

  @Override
  protected void dispose() {
    System.clearProperty(StartupActionScriptManager.STARTUP_WIZARD_MODE);
    super.dispose();
  }

  @Override
  protected JComponent createCenterPanel() {
    JPanel result = new JPanel(new BorderLayout(5, 5));
    myContentPanel = new JPanel(myCardLayout);
    // One card per step, keyed by the step title (also used by swipe()).
    for (AbstractCustomizeWizardStep step : mySteps) {
      myContentPanel.add(step, step.getTitle());
    }
    JPanel topPanel = new JPanel(new BorderLayout(5, 5));
    if (mySteps.size() > 1) {
      topPanel.add(myNavigationLabel, BorderLayout.NORTH);
    }
    topPanel.add(myHeaderLabel, BorderLayout.CENTER);
    result.add(topPanel, BorderLayout.NORTH);
    result.add(myContentPanel, BorderLayout.CENTER);
    result.add(myFooterLabel, BorderLayout.SOUTH);
    result.setPreferredSize(JBUI.size(700, 600));
    result.setBorder(AbstractCustomizeWizardStep.createSmallEmptyBorder());
    return result;
  }

  @Override
  protected JComponent createSouthPanel() {
    // Layout: [Skip?] [Back] <glue> [Next], wrapped in a card layout so the
    // whole row can be hidden via setButtonsVisible().
    final JPanel buttonPanel = new JPanel(new GridBagLayout());
    GridBagConstraints gbc = new GridBagConstraints();
    gbc.insets.right = 5;
    gbc.fill = GridBagConstraints.BOTH;
    gbc.gridx = 0;
    gbc.gridy = 0;
    if (!myHideSkipButton) buttonPanel.add(mySkipButton, gbc);
    gbc.gridx++;
    buttonPanel.add(myBackButton, gbc);
    gbc.gridx++;
    gbc.weightx = 1;
    buttonPanel.add(Box.createHorizontalGlue(), gbc);
    gbc.gridx++;
    gbc.weightx = 0;
    buttonPanel.add(myNextButton, gbc);
    buttonPanel.setBorder(BorderFactory.createEmptyBorder(8, 0, 0, 0));
    myButtonWrapper.add(buttonPanel, BUTTONS);
    myButtonWrapper.add(new JLabel(), NO_BUTTONS);
    myButtonWrapperLayout.show(myButtonWrapper, BUTTONS);
    return myButtonWrapper;
  }

  void setButtonsVisible(boolean visible) {
    myButtonWrapperLayout.show(myButtonWrapper, visible ? BUTTONS : NO_BUTTONS);
  }

  @Override
  public void actionPerformed(@NotNull ActionEvent e) {
    if (e.getSource() == mySkipButton) {
      CustomizeIDEWizardInteractions.INSTANCE.setSkippedOnPage(myIndex);
      doOKAction();
      return;
    }
    if (e.getSource() == myBackButton) {
      myIndex--;
      initCurrentStep(false);
      return;
    }
    if (e.getSource() == myNextButton) {
      // Next on the last step finishes the wizard.
      if (myIndex >= mySteps.size() - 1) {
        doOKAction();
        return;
      }
      myIndex++;
      initCurrentStep(true);
    }
  }

  @Nullable
  @Override
  protected ActionListener createCancelAction() {
    return null; // prevent closing by <Esc>
  }

  @Override
  public void doCancelAction() {
    // Closing the window is treated as accepting the wizard.
    doOKAction();
  }

  @Override
  protected void doOKAction() {
    // Give every step a veto; on veto, jump to the offending step instead of closing.
    for (AbstractCustomizeWizardStep step : mySteps) {
      if (!step.beforeOkAction()) {
        int index = mySteps.indexOf(step);
        if (myIndex != index) {
          myIndex = index;
          initCurrentStep(true);
        }
        return;
      }
    }
    super.doOKAction();
  }

  @Override
  protected boolean canRecordDialogId() {
    return false;
  }

  /** Swipes to the step at {@link #myIndex} and refreshes labels/buttons. */
  private void initCurrentStep(boolean forward) {
    final AbstractCustomizeWizardStep myCurrentStep = mySteps.get(myIndex);
    myCurrentStep.beforeShown(forward);
    myCardLayout.swipe(myContentPanel, myCurrentStep.getTitle(), JBCardLayout.SwipeDirection.AUTO, () -> {
      Component component = myCurrentStep.getDefaultFocusedComponent();
      if (component != null) {
        IdeFocusManager.getGlobalInstance().doWhenFocusSettlesDown(() -> IdeFocusManager.getGlobalInstance().requestFocus(component, true));
      }
    });
    myBackButton.setVisible(myIndex > 0);
    if (myIndex > 0) {
      myBackButton.setText("Back to " + mySteps.get(myIndex - 1).getTitle());
    }
    myNextButton.setText(myIndex < mySteps.size() - 1
                         ? "Next: " + mySteps.get(myIndex + 1).getTitle()
                         : "Start using " + ApplicationNamesInfo.getInstance().getFullProductName());
    myHeaderLabel.setText(ensureHTML(myCurrentStep.getHTMLHeader()));
    myFooterLabel.setText(ensureHTML(myCurrentStep.getHTMLFooter()));
    if (mySteps.size() > 1) {
      // Breadcrumb line: "Step1 -> Step2 -> <b>Current</b> -> ...".
      StringBuilder navHTML = new StringBuilder("<html><body>");
      // Use a real arrow glyph only if the label font can render U+2192.
      String arrow = myNavigationLabel.getFont().canDisplay(0x2192) ? "&#8594;" : "&gt;";
      for (int i = 0; i < mySteps.size(); i++) {
        if (i > 0) navHTML.append("&nbsp;").append(arrow).append("&nbsp;");
        if (i == myIndex) navHTML.append("<b>");
        navHTML.append(mySteps.get(i).getTitle());
        if (i == myIndex) navHTML.append("</b>");
      }
      myNavigationLabel.setText(navHTML.toString());
    }
  }

  /** Wraps plain text in &lt;html&gt; tags (escaping entities) unless it is already HTML. */
  @Contract("!null->!null")
  private static String ensureHTML(@Nullable String s) {
    return s == null ? null : s.startsWith("<html>") ? s : "<html>" + StringUtil.escapeXmlEntities(s) + "</html>";
  }
}
/*----------------------------------------------------------------------------*/ /* Copyright (c) FIRST 2008-2017. All Rights Reserved. */ /* Open Source Software - may be modified and shared by FRC teams. The code */ /* must be accompanied by the FIRST BSD license file in the root directory of */ /* the project. */ /*----------------------------------------------------------------------------*/ package edu.wpi.first.wpilibj; import java.nio.ByteBuffer; import java.nio.ByteOrder; import edu.wpi.first.wpilibj.hal.FRCNetComm.tInstances; import edu.wpi.first.wpilibj.hal.FRCNetComm.tResourceType; import edu.wpi.first.wpilibj.hal.HAL; import edu.wpi.first.wpilibj.interfaces.Accelerometer; import edu.wpi.first.wpilibj.livewindow.LiveWindow; import edu.wpi.first.wpilibj.livewindow.LiveWindowSendable; import edu.wpi.first.wpilibj.tables.ITable; /** * ADXL345 SPI Accelerometer. */ @SuppressWarnings({"TypeName", "PMD.UnusedPrivateField"}) public class ADXL345_SPI extends SensorBase implements Accelerometer, LiveWindowSendable { private static final int kPowerCtlRegister = 0x2D; private static final int kDataFormatRegister = 0x31; private static final int kDataRegister = 0x32; private static final double kGsPerLSB = 0.00390625; private static final int kAddress_Read = 0x80; private static final int kAddress_MultiByte = 0x40; private static final int kPowerCtl_Link = 0x20; private static final int kPowerCtl_AutoSleep = 0x10; private static final int kPowerCtl_Measure = 0x08; private static final int kPowerCtl_Sleep = 0x04; private static final int kDataFormat_SelfTest = 0x80; private static final int kDataFormat_SPI = 0x40; private static final int kDataFormat_IntInvert = 0x20; private static final int kDataFormat_FullRes = 0x08; private static final int kDataFormat_Justify = 0x04; public enum Axes { kX((byte) 0x00), kY((byte) 0x02), kZ((byte) 0x04); /** * The integer value representing this enumeration. 
*/ @SuppressWarnings("MemberName") public final byte value; private Axes(byte value) { this.value = value; } } @SuppressWarnings("MemberName") public static class AllAxes { public double XAxis; public double YAxis; public double ZAxis; } protected SPI m_spi; /** * Constructor. * * @param port The SPI port that the accelerometer is connected to * @param range The range (+ or -) that the accelerometer will measure. */ public ADXL345_SPI(SPI.Port port, Range range) { m_spi = new SPI(port); init(range); LiveWindow.addSensor("ADXL345_SPI", port.value, this); } public void free() { m_spi.free(); } /** * Set SPI bus parameters, bring device out of sleep and set format. * * @param range The range (+ or -) that the accelerometer will measure. */ private void init(Range range) { m_spi.setClockRate(500000); m_spi.setMSBFirst(); m_spi.setSampleDataOnFalling(); m_spi.setClockActiveLow(); m_spi.setChipSelectActiveHigh(); // Turn on the measurements byte[] commands = new byte[2]; commands[0] = kPowerCtlRegister; commands[1] = kPowerCtl_Measure; m_spi.write(commands, 2); setRange(range); HAL.report(tResourceType.kResourceType_ADXL345, tInstances.kADXL345_SPI); } @Override public void setRange(Range range) { final byte value; switch (range) { case k2G: value = 0; break; case k4G: value = 1; break; case k8G: value = 2; break; case k16G: value = 3; break; default: throw new IllegalArgumentException(range + " unsupported"); } // Specify the data format to read byte[] commands = new byte[]{kDataFormatRegister, (byte) (kDataFormat_FullRes | value)}; m_spi.write(commands, commands.length); } @Override public double getX() { return getAcceleration(Axes.kX); } @Override public double getY() { return getAcceleration(Axes.kY); } @Override public double getZ() { return getAcceleration(Axes.kZ); } /** * Get the acceleration of one axis in Gs. * * @param axis The axis to read from. * @return Acceleration of the ADXL345 in Gs. 
*/ public double getAcceleration(ADXL345_SPI.Axes axis) { ByteBuffer transferBuffer = ByteBuffer.allocateDirect(3); transferBuffer.put(0, (byte) ((kAddress_Read | kAddress_MultiByte | kDataRegister) + axis.value)); m_spi.transaction(transferBuffer, transferBuffer, 3); // Sensor is little endian transferBuffer.order(ByteOrder.LITTLE_ENDIAN); return transferBuffer.getShort(1) * kGsPerLSB; } /** * Get the acceleration of all axes in Gs. * * @return An object containing the acceleration measured on each axis of the ADXL345 in Gs. */ public ADXL345_SPI.AllAxes getAccelerations() { ADXL345_SPI.AllAxes data = new ADXL345_SPI.AllAxes(); if (m_spi != null) { ByteBuffer dataBuffer = ByteBuffer.allocateDirect(7); // Select the data address. dataBuffer.put(0, (byte) (kAddress_Read | kAddress_MultiByte | kDataRegister)); m_spi.transaction(dataBuffer, dataBuffer, 7); // Sensor is little endian... swap bytes dataBuffer.order(ByteOrder.LITTLE_ENDIAN); data.XAxis = dataBuffer.getShort(1) * kGsPerLSB; data.YAxis = dataBuffer.getShort(3) * kGsPerLSB; data.ZAxis = dataBuffer.getShort(5) * kGsPerLSB; } return data; } @Override public String getSmartDashboardType() { return "3AxisAccelerometer"; } private ITable m_table; @Override public void initTable(ITable subtable) { m_table = subtable; updateTable(); } @Override public void updateTable() { if (m_table != null) { m_table.putNumber("X", getX()); m_table.putNumber("Y", getY()); m_table.putNumber("Z", getZ()); } } @Override public ITable getTable() { return m_table; } @Override public void startLiveWindowMode() { } @Override public void stopLiveWindowMode() { } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.olingo.client.core.serialization; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import java.io.ByteArrayInputStream; import java.io.InputStream; import org.apache.olingo.client.api.data.ResWrap; import org.apache.olingo.commons.api.data.Entity; import org.apache.olingo.commons.api.data.EntityCollection; import org.junit.Test; public class AtomDeserializerTest { @Test public void emptyInlineEntityOlingo540() throws Exception { final String content = "" + "<entry xmlns=\"http://www.w3.org/2005/Atom\" " + "xmlns:data=\"http://docs.oasis-open.org/odata/ns/data\" " + "xmlns:metadata=\"http://docs.oasis-open.org/odata/ns/metadata\" " + "xmlns:georss=\"http://www.georss.org/georss\" xmlns:gml=\"http://www.opengis.net/gml\" " + "xml:base=\"http://services.odata.org/V3/OData/OData.svc/\">\r\n" + " <id>http://services.odata.org/V3/OData/OData.svc/Products(3)</id>\r\n" + " <category term=\"ODataDemo.Product\" " + "scheme=\"http://docs.oasis-open.org/odata/ns/scheme\" />\r\n" + " \r\n" + " <link rel=\"edit\" title=\"Product\" href=\"Products\" />\r\n" + " <link 
rel=\"http://docs.oasis-open.org/odata/ns/related/Categories\" " + "type=\"application/atom+xml;type=feed\" title=\"Categories\" href=\"Products(3)/Categories\" />\r\n" + " <link rel=\"http://docs.oasis-open.org/odata/ns/related/Supplier\" " + "type=\"application/atom+xml;type=entry\" title=\"Supplier\" href=\"Products(3)/Supplier\">\r\n" + " <metadata:inline>\r\n" + " </metadata:inline>\r\n" + " </link>\r\n" + " <link rel=\"http://docs.oasis-open.org/odata/ns/related/ProductDetail\"" + " type=\"application/atom+xml;type=entry\" title=\"ProductDetail\" " + "href=\"Products(3)/ProductDetail\" />\r\n" + " <title type=\"text\">Havina Cola</title>\r\n" + " <summary type=\"text\">The Original Key Lime Cola</summary>\r\n" + " <updated>2015-01-26T08:57:02Z</updated>\r\n" + " <author>\r\n" + " <name />\r\n" + " </author>\r\n" + " <link rel=\"http://docs.oasis-open.org/odata/ns/relatedlinks/Categories\" " + "type=\"application/xml\" title=\"Categories\" href=\"Products(3)/$links/Categories\" />\r\n" + " <link rel=\"http://docs.oasis-open.org/odata/ns/relatedlinks/Supplier\" " + "type=\"application/xml\" title=\"Supplier\" href=\"Products(3)/$links/Supplier\" />\r\n" + " <link rel=\"http://docs.oasis-open.org/odata/ns/relatedlinks/ProductDetail\"" + " type=\"application/xml\" title=\"ProductDetail\" href=\"Products(3)/$links/ProductDetail\" />\r\n" + " <content type=\"application/xml\">\r\n" + " <metadata:properties>\r\n" + " <data:ID metadata:type=\"Edm.Int32\">3</data:ID>\r\n" + " <data:ReleaseDate metadata:type=\"Edm.DateTime\">2005-10-01T00:00:00</data:ReleaseDate>\r\n" + " <data:DiscontinuedDate metadata:type=\"Edm.DateTime\">2006-10-01T00:00:00</data:DiscontinuedDate>\r\n" + " <data:Rating metadata:type=\"Edm.Int16\">3</data:Rating>\r\n" + " <data:Price metadata:type=\"Edm.Double\">19.9</data:Price>\r\n" + " </metadata:properties>\r\n" + " </content>\r\n" + " </entry>"; final AtomDeserializer deserializer = new AtomDeserializer(); final InputStream in = new 
ByteArrayInputStream(content.getBytes("UTF-8")); final ResWrap<Entity> entity = deserializer.toEntity(in); assertNotNull(entity); assertNull(entity.getPayload().getNavigationLink("Supplier").getInlineEntitySet()); } @Test public void filledInlineEntity() throws Exception { final String content = "" + "<entry xmlns=\"http://www.w3.org/2005/Atom\" " + "xmlns:data=\"http://docs.oasis-open.org/odata/ns/data\" " + "xmlns:metadata=\"http://docs.oasis-open.org/odata/ns/metadata\" " + "xmlns:georss=\"http://www.georss.org/georss\" " + "xmlns:gml=\"http://www.opengis.net/gml\" " + "xml:base=\"http://services.odata.org/V4/OData/OData.svc/\">\r\n" + " <id>http://services.odata.org/V4/OData/OData.svc/Products(3)</id>\r\n" + " <category term=\"#ODataDemo.Product\" " + "scheme=\"http://docs.oasis-open.org/odata/ns/scheme\" />\r\n" + " \r\n" + " <link rel=\"edit\" title=\"Product\" href=\"Products\" />\r\n" + " <link rel=\"http://docs.oasis-open.org/odata/ns/related/Categories\" " + "type=\"application/atom+xml;type=feed\" title=\"Categories\" href=\"Products(3)/Categories\" />\r\n" + " <link rel=\"http://docs.oasis-open.org/odata/ns/related/Supplier\" " + "type=\"application/atom+xml;type=entry\" title=\"Supplier\" href=\"Products(3)/Supplier\">\r\n" + " <metadata:inline>\r\n" + " <entry>\r\n" + " <id>http://services.odata.org/V4/OData/OData.svc/Suppliers(0)</id>\r\n" + " <category term=\"ODataDemo.Supplier\" " + "scheme=\"http://docs.oasis-open.org/odata/ns/scheme\" />\r\n" + " <link rel=\"edit\" title=\"Supplier\" href=\"Suppliers(0)\" />\r\n" + " <link rel=\"http://docs.oasis-open.org/odata/ns/related/Products\" " + "type=\"application/atom+xml;type=feed\" title=\"Products\" href=\"Suppliers(0)/Products\" />\r\n" + " <title type=\"text\">Exotic Liquids</title>\r\n" + " <updated>2015-01-26T08:57:02Z</updated>\r\n" + " <author>\r\n" + " <name />\r\n" + " </author>\r\n" + " <link rel=\"http://docs.oasis-open.org/odata/ns/relatedlinks/Products\" " + "type=\"application/xml\" 
title=\"Products\" href=\"Suppliers(0)/$links/Products\" />\r\n" + " <content type=\"application/xml\">\r\n" + " <metadata:properties>\r\n" + " <data:ID metadata:type=\"Edm.Int32\">0</data:ID>\r\n" + " <data:Name>Exotic Liquids</data:Name>\r\n" + " <data:Address metadata:type=\"ODataDemo.Address\">\r\n" + " <data:Street>NE 228th</data:Street>\r\n" + " <data:City>Sammamish</data:City>\r\n" + " <data:State>WA</data:State>\r\n" + " <data:ZipCode>98074</data:ZipCode>\r\n" + " <data:Country>USA</data:Country>\r\n" + " </data:Address>\r\n" + " <data:Location metadata:type=\"Edm.GeographyPoint\">\r\n" + " <gml:Point gml:srsName=\"http://www.opengis.net/def/crs/EPSG/0/4326\">\r\n" + " <gml:pos>47.6316604614258 -122.03547668457</gml:pos>\r\n" + " </gml:Point>\r\n" + " </data:Location>\r\n" + " <data:Concurrency metadata:type=\"Edm.Int32\">0</data:Concurrency>\r\n" + " </metadata:properties>\r\n" + " </content>\r\n" + " </entry>" + " </metadata:inline>\r\n" + " </link>\r\n" + " <link rel=\"http://docs.oasis-open.org/odata/ns/related/ProductDetail\" " + "type=\"application/atom+xml;type=entry\" " + "title=\"ProductDetail\" href=\"Products(3)/ProductDetail\" />\r\n" + " <title type=\"text\">Havina Cola</title>\r\n" + " <summary type=\"text\">The Original Key Lime Cola</summary>\r\n" + " <updated>2015-01-26T08:57:02Z</updated>\r\n" + " <author>\r\n" + " <name />\r\n" + " </author>\r\n" + " <link rel=\"http://docs.oasis-open.org/odata/ns/relatedlinks/Categories\" " + "type=\"application/xml\" title=\"Categories\" href=\"Products(3)/$links/Categories\" />\r\n" + " <link rel=\"http://docs.oasis-open.org/odata/ns/relatedlinks/Supplier\" " + "type=\"application/xml\" title=\"Supplier\" href=\"Products(3)/$links/Supplier\" />\r\n" + " <link rel=\"http://docs.oasis-open.org/odata/ns/relatedlinks/ProductDetail\" " + "type=\"application/xml\" title=\"ProductDetail\" href=\"Products(3)/$links/ProductDetail\" />\r\n" + " <content type=\"application/xml\">\r\n" + " 
<metadata:properties>\r\n" + " <data:ID metadata:type=\"Edm.Int32\">3</data:ID>\r\n" + " <data:ReleaseDate metadata:type=\"Edm.DateTime\">2005-10-01T00:00:00</data:ReleaseDate>\r\n" + " <data:DiscontinuedDate metadata:type=\"Edm.DateTime\">2006-10-01T00:00:00</data:DiscontinuedDate>\r\n" + " <data:Rating metadata:type=\"Edm.Int16\">3</data:Rating>\r\n" + " <data:Price metadata:type=\"Edm.Double\">19.9</data:Price>\r\n" + " </metadata:properties>\r\n" + " </content>\r\n" + " </entry>"; final AtomDeserializer deserializer = new AtomDeserializer(); final InputStream in = new ByteArrayInputStream(content.getBytes("UTF-8")); final ResWrap<Entity> entity = deserializer.toEntity(in); assertNotNull(entity); final Entity inlineEntity = entity.getPayload().getNavigationLink("Supplier").getInlineEntity(); assertNotNull(inlineEntity); assertEquals(new Integer(0), inlineEntity.getProperty("ID").getValue()); assertEquals("Exotic Liquids", inlineEntity.getProperty("Name").getValue()); } @Test public void emptyInlineEntityCollection() throws Exception { final String content = "" + "<entry xmlns=\"http://www.w3.org/2005/Atom\" " + "xmlns:data=\"http://docs.oasis-open.org/odata/ns/data\" " + "xmlns:metadata=\"http://docs.oasis-open.org/odata/ns/metadata\" " + "xmlns:georss=\"http://www.georss.org/georss\" xmlns:gml=\"http://www.opengis.net/gml\" " + "xml:base=\"http://services.odata.org/V3/OData/OData.svc/\">\r\n" + " <id>http://services.odata.org/V3/OData/OData.svc/Products(3)</id>\r\n" + " <category term=\"ODataDemo.Product\" " + "scheme=\"http://docs.oasis-open.org/odata/ns/scheme\" />\r\n" + " \r\n" + " <link rel=\"edit\" title=\"Product\" href=\"Products(3)\" />\r\n" + " <link rel=\"http://docs.oasis-open.org/odata/ns/related/Categories\" " + "type=\"application/atom+xml;type=feed\" title=\"Categories\" href=\"Products(3)/Categories\" />\r\n" + " <link rel=\"http://docs.oasis-open.org/odata/ns/related/Supplier\" " + "type=\"application/atom+xml;type=feed\" title=\"Supplier\" 
href=\"Products(3)/Supplier\">\r\n" + " <metadata:inline>\r\n" + " <feed>\r\n" + " </feed>\r\n" + " </metadata:inline>\r\n" + " </link>\r\n" + " <link rel=\"http://docs.oasis-open.org/odata/ns/related/ProductDetail\" " + "type=\"application/atom+xml;type=entry\" " + "title=\"ProductDetail\" href=\"Products(3)/ProductDetail\" />\r\n" + " <title type=\"text\">Havina Cola</title>\r\n" + " <summary type=\"text\">The Original Key Lime Cola</summary>\r\n" + " <updated>2015-01-26T08:57:02Z</updated>\r\n" + " <author>\r\n" + " <name />\r\n" + " </author>\r\n" + " <link rel=\"http://docs.oasis-open.org/odata/ns/relatedlinks/Categories\" " + "type=\"application/xml\" title=\"Categories\" href=\"Products(3)/$links/Categories\" />\r\n" + " <link rel=\"http://docs.oasis-open.org/odata/ns/relatedlinks/Supplier\" " + "type=\"application/xml\" title=\"Supplier\" href=\"Products(3)/$links/Supplier\" />\r\n" + " <link rel=\"http://docs.oasis-open.org/odata/ns/relatedlinks/ProductDetail\" " + "type=\"application/xml\" title=\"ProductDetail\" href=\"Products(3)/$links/ProductDetail\" />\r\n" + " <content type=\"application/xml\">\r\n" + " <metadata:properties>\r\n" + " <data:ID metadata:type=\"Edm.Int32\">3</data:ID>\r\n" + " <data:ReleaseDate metadata:type=\"Edm.DateTime\">2005-10-01T00:00:00</data:ReleaseDate>\r\n" + " <data:DiscontinuedDate metadata:type=\"Edm.DateTime\">2006-10-01T00:00:00</data:DiscontinuedDate>\r\n" + " <data:Rating metadata:type=\"Edm.Int16\">3</data:Rating>\r\n" + " <data:Price metadata:type=\"Edm.Double\">19.9</data:Price>\r\n" + " </metadata:properties>\r\n" + " </content>\r\n" + " </entry>"; final AtomDeserializer deserializer = new AtomDeserializer(); final InputStream in = new ByteArrayInputStream(content.getBytes("UTF-8")); final ResWrap<Entity> entity = deserializer.toEntity(in); assertNotNull(entity); final EntityCollection inlineEntitySet = entity.getPayload().getNavigationLink("Supplier").getInlineEntitySet(); assertNotNull(inlineEntitySet); 
assertEquals(0, inlineEntitySet.getEntities().size()); } @Test public void filledInlineEntityCollection() throws Exception { final String content = "" + "<entry xmlns=\"http://www.w3.org/2005/Atom\" " + "xmlns:data=\"http://docs.oasis-open.org/odata/ns/data\" " + "xmlns:metadata=\"http://docs.oasis-open.org/odata/ns/metadata\" " + "xmlns:georss=\"http://www.georss.org/georss\" xmlns:gml=\"http://www.opengis.net/gml\" " + "xml:base=\"http://services.odata.org/V3/OData/OData.svc/\">\r\n" + " <id>http://services.odata.org/V3/OData/OData.svc/Products(3)</id>\r\n" + " <category term=\"ODataDemo.Product\" " + "scheme=\"http://docs.oasis-open.org/odata/ns/scheme\" />\r\n" + " \r\n" + " <link rel=\"edit\" title=\"Product\" href=\"Products(3)\" />\r\n" + " <link rel=\"http://docs.oasis-open.org/odata/ns/related/Categories\" " + "type=\"application/atom+xml;type=feed\" title=\"Categories\" href=\"Products(3)/Categories\" />\r\n" + " <link rel=\"http://docs.oasis-open.org/odata/ns/related/Supplier\" " + "type=\"application/atom+xml;type=feed\" title=\"Supplier\" href=\"Products(3)/Supplier\">\r\n" + " <metadata:inline>\r\n" + " <feed>\r\n" + " <entry>\r\n" + " <id>http://services.odata.org/V3/OData/OData.svc/Suppliers(0)</id>\r\n" + " <category term=\"ODataDemo.Supplier\" " + "scheme=\"http://docs.oasis-open.org/odata/ns/scheme\" />\r\n" + " <link rel=\"edit\" title=\"Supplier\" href=\"Suppliers(0)\" />\r\n" + " <link rel=\"http://docs.oasis-open.org/odata/ns/related/Products\" " + "type=\"application/atom+xml;type=feed\" title=\"Products\" href=\"Suppliers(0)/Products\" />\r\n" + " <title type=\"text\">Exotic Liquids</title>\r\n" + " <updated>2015-01-26T08:57:02Z</updated>\r\n" + " <author>\r\n" + " <name />\r\n" + " </author>\r\n" + " <link rel=\"http://docs.oasis-open.org/odata/ns/relatedlinks/Products\" " + "type=\"application/xml\" title=\"Products\" href=\"Suppliers(0)/$links/Products\" />\r\n" + " <content type=\"application/xml\">\r\n" + " <metadata:properties>\r\n" + 
" <data:ID metadata:type=\"Edm.Int32\">0</data:ID>\r\n" + " <data:Name>Exotic Liquids</data:Name>\r\n" + " <data:Address metadata:type=\"ODataDemo.Address\">\r\n" + " <data:Street>NE 228th</data:Street>\r\n" + " <data:City>Sammamish</data:City>\r\n" + " <data:State>WA</data:State>\r\n" + " <data:ZipCode>98074</data:ZipCode>\r\n" + " <data:Country>USA</data:Country>\r\n" + " </data:Address>\r\n" + " <data:Location metadata:type=\"Edm.GeographyPoint\">\r\n" + " <gml:Point gml:srsName=\"http://www.opengis.net/def/crs/EPSG/0/4326\">\r\n" + " <gml:pos>47.6316604614258 -122.03547668457</gml:pos>\r\n" + " </gml:Point>\r\n" + " </data:Location>\r\n" + " <data:Concurrency metadata:type=\"Edm.Int32\">0</data:Concurrency>\r\n" + " </metadata:properties>\r\n" + " </content>\r\n" + " </entry>\r\n" + " </feed>\r\n" + " </metadata:inline>\r\n" + " </link>\r\n" + " <link rel=\"http://docs.oasis-open.org/odata/ns/related/ProductDetail\" " + "type=\"application/atom+xml;type=entry\" " + "title=\"ProductDetail\" href=\"Products(3)/ProductDetail\" />\r\n" + " <title type=\"text\">Havina Cola</title>\r\n" + " <summary type=\"text\">The Original Key Lime Cola</summary>\r\n" + " <updated>2015-01-26T08:57:02Z</updated>\r\n" + " <author>\r\n" + " <name />\r\n" + " </author>\r\n" + " <link rel=\"http://docs.oasis-open.org/odata/ns/relatedlinks/Categories\" " + "type=\"application/xml\" title=\"Categories\" href=\"Products(3)/$links/Categories\" />\r\n" + " <link rel=\"http://docs.oasis-open.org/odata/ns/relatedlinks/Supplier\" " + "type=\"application/xml\" title=\"Supplier\" href=\"Products(3)/$links/Supplier\" />\r\n" + " <link rel=\"http://docs.oasis-open.org/odata/ns/relatedlinks/ProductDetail\" " + "type=\"application/xml\" title=\"ProductDetail\" href=\"Products(3)/$links/ProductDetail\" />\r\n" + " <content type=\"application/xml\">\r\n" + " <metadata:properties>\r\n" + " <data:ID metadata:type=\"Edm.Int32\">3</data:ID>\r\n" + " <data:ReleaseDate 
metadata:type=\"Edm.DateTime\">2005-10-01T00:00:00</data:ReleaseDate>\r\n" + " <data:DiscontinuedDate metadata:type=\"Edm.DateTime\">2006-10-01T00:00:00</data:DiscontinuedDate>\r\n" + " <data:Rating metadata:type=\"Edm.Int16\">3</data:Rating>\r\n" + " <data:Price metadata:type=\"Edm.Double\">19.9</data:Price>\r\n" + " </metadata:properties>\r\n" + " </content>\r\n" + " </entry>"; final AtomDeserializer deserializer = new AtomDeserializer(); final InputStream in = new ByteArrayInputStream(content.getBytes("UTF-8")); final ResWrap<Entity> entity = deserializer.toEntity(in); assertNotNull(entity); final EntityCollection inlineEntitySet = entity.getPayload().getNavigationLink("Supplier").getInlineEntitySet(); assertNotNull(inlineEntitySet); assertEquals(1, inlineEntitySet.getEntities().size()); } }
/*******************************************************************************
 * <copyright>
 *
 * Copyright (c) 2014 Eteration A.S.
 * All rights reserved.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 *
 * Derivative Works
 * Parts of this program are derived from content from Eclipse Foundation
 * that are made available under the terms of the Eclipse Public License v1.0.
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 * Naci Dai, Eteration A.S. - initial API, implementation and documentation
 *
 * </copyright>
 *
 *******************************************************************************/
package org.glassmaker.ui;

import java.io.File;
import java.io.IOException;
import java.net.URL;

import org.eclipse.core.runtime.FileLocator;
import org.eclipse.core.runtime.Path;
import org.eclipse.core.runtime.Status;
import org.eclipse.jface.resource.ImageDescriptor;
import org.eclipse.jface.resource.ImageRegistry;
import org.eclipse.jface.text.templates.ContextTypeRegistry;
import org.eclipse.jface.text.templates.persistence.TemplateStore;
import org.eclipse.ui.editors.text.templates.ContributionContextTypeRegistry;
import org.eclipse.ui.editors.text.templates.ContributionTemplateStore;
import org.eclipse.ui.plugin.AbstractUIPlugin;
import org.glassmaker.ui.editor.CardContextType;
import org.osgi.framework.BundleContext;

/**
 * The activator class controls the plug-in life cycle.
 *
 * <p>Holds the shared plug-in instance, the image registry for the card
 * template icons, and the template store / context-type registry used by the
 * card editor.</p>
 */
public class GlassmakerUIPlugin extends AbstractUIPlugin {

	// The plug-in ID
	public static final String PLUGIN_ID = "org.glassmaker.ui.editor"; //$NON-NLS-1$

	// The shared instance, set in start() and cleared in stop()
	private static GlassmakerUIPlugin plugin;

	/**
	 * Card template id suffixes (appended to "org.glassmaker.ui.templates.")
	 * paired with their icon file names under /icons. Kept as data so the
	 * registry initialization is a single loop instead of 16 copy-pasted calls.
	 */
	private static final String[][] TEMPLATE_ICONS = {
			{ "text", "card_text.gif" },
			// NOTE: icon file name ("autosize") intentionally differs from the
			// template id suffix ("autoresize") — preserved from the original code.
			{ "autoresize", "card_autosize.gif" },
			{ "hybrid", "card_hybrid.gif" },
			{ "hybridmosaic", "card_hybridmosaic.gif" },
			{ "multipage", "card_multipage.gif" },
			{ "simpleevent", "card_simpleevent.gif" },
			{ "list", "card_list.gif" },
			{ "knowledge", "card_knowledge.gif" },
			{ "knowledgemosaic", "card_knowledgemosaic.gif" },
			{ "stock", "card_stock.gif" },
			{ "sports", "card_sports.gif" },
			{ "flight", "card_flight.gif" },
			{ "movie", "card_movie.gif" },
			{ "transit", "card_transit.gif" },
			{ "author", "card_author.gif" },
			{ "simplemessage", "card_simplemessage.gif" },
	};

	/**
	 * The template store for the card editor. Lazily created in
	 * {@link #getTemplateStore()}.
	 */
	private TemplateStore fTemplateStore;

	/**
	 * The template context type registry for the card editor. Lazily created in
	 * {@link #getTemplateContextRegistry()}.
	 */
	private ContextTypeRegistry fContextTypeRegistry;

	/**
	 * The constructor
	 */
	public GlassmakerUIPlugin() {
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see
	 * org.eclipse.ui.plugin.AbstractUIPlugin#start(org.osgi.framework.BundleContext
	 * )
	 */
	public void start(BundleContext context) throws Exception {
		super.start(context);
		plugin = this;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see
	 * org.eclipse.ui.plugin.AbstractUIPlugin#stop(org.osgi.framework.BundleContext
	 * )
	 */
	public void stop(BundleContext context) throws Exception {
		plugin = null;
		super.stop(context);
	}

	/**
	 * Returns the shared instance
	 *
	 * @return the shared instance, or {@code null} if the plug-in is not started
	 */
	public static GlassmakerUIPlugin getDefault() {
		return plugin;
	}

	/**
	 * Returns an image descriptor for the image file at the given plug-in
	 * relative path
	 *
	 * @param path
	 *            the path
	 * @return the image descriptor
	 */
	public static ImageDescriptor getImageDescriptor(String path) {
		return imageDescriptorFromPlugin(PLUGIN_ID, path);
	}

	/**
	 * Registers one icon per card template, keyed by
	 * "org.glassmaker.ui.templates.&lt;id&gt;" and loaded from the bundle's
	 * /icons directory.
	 */
	protected void initializeImageRegistry(ImageRegistry reg) {
		super.initializeImageRegistry(reg);
		for (String[] entry : TEMPLATE_ICONS) {
			reg.put("org.glassmaker.ui.templates." + entry[0],
					ImageDescriptor.createFromURL(getBundle().getEntry("/icons/" + entry[1])));
		}
	}

	/**
	 * Logs a message to the plug-in log.
	 *
	 * @param severity one of the {@link Status} severity constants
	 * @param msg the message
	 * @param t an optional cause, may be {@code null}
	 */
	public static void log(int severity, String msg, Throwable t) {
		plugin.getLog().log(new Status(severity, PLUGIN_ID, msg, t));
	}

	public static void logError(String msg, Exception t) {
		log(Status.ERROR, msg, t);
	}

	public static void logWarning(String msg) {
		log(Status.WARNING, msg, null);
	}

	public static void logInfo(String msg) {
		log(Status.INFO, msg, null);
	}

	public static void logWarning(String msg, Throwable t) {
		log(Status.WARNING, msg, t);
	}

	/**
	 * Resolves a file inside the bundle's /preview directory.
	 *
	 * @param name file name relative to /preview
	 * @return the resolved file, or {@code null} if the /preview entry does not
	 *         exist in the bundle or resolution fails (failures are logged)
	 */
	public static File getPreviewFile(String name) {
		URL previewPath = FileLocator.find(plugin.getBundle(), new Path("/preview"), null);
		// FileLocator.find returns null when the entry is missing; the original
		// code let this surface as a NullPointerException inside the catch below.
		if (previewPath == null) {
			logWarning("Preview directory '/preview' not found in bundle " + PLUGIN_ID);
			return null;
		}
		File previewFile = null;
		try {
			previewFile = new File(FileLocator.resolve(previewPath).getPath() + "/" + name);
		} catch (Exception e) {
			logError(e.getMessage(), e);
		}
		return previewFile;
	}

	/**
	 * Returns the template store for the html editor templates.
	 *
	 * @return the template store for the html editor templates
	 */
	public TemplateStore getTemplateStore() {
		if (fTemplateStore == null) {
			fTemplateStore = new ContributionTemplateStore(getTemplateContextRegistry(), getPreferenceStore(),
					"org.eclipse.wst.sse.ui.custom_templates");
			try {
				fTemplateStore.load();
			} catch (IOException e) {
				// Was logError("", e) — an empty message makes the log entry useless.
				logError("Could not load card editor templates", e);
			}
		}
		return fTemplateStore;
	}

	/**
	 * Returns the template context type registry for the html plugin.
	 *
	 * @return the template context type registry for the html plugin
	 */
	public ContextTypeRegistry getTemplateContextRegistry() {
		if (fContextTypeRegistry == null) {
			ContributionContextTypeRegistry registry = new ContributionContextTypeRegistry();
			registry.addContextType(CardContextType.CONTEXT_TYPE);
			fContextTypeRegistry = registry;
		}
		return fContextTypeRegistry;
	}
}
package net.ontopia.presto.spi.impl.mongodb;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import net.ontopia.presto.spi.PrestoField;
import net.ontopia.presto.spi.PrestoSchemaProvider;
import net.ontopia.presto.spi.PrestoTopic;
import net.ontopia.presto.spi.PrestoType;
import net.ontopia.presto.spi.jackson.JacksonDataProvider;
import net.ontopia.presto.spi.jackson.JacksonTopic;
import net.ontopia.presto.spi.utils.Utils;

import org.mongojack.DBCursor;
import org.mongojack.JacksonDBCollection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.node.ObjectNode;
import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.Mongo;
import com.mongodb.MongoURI;

/**
 * A Presto data provider backed by MongoDB. Topics are stored as JSON
 * documents; subclasses decide how topic/type ids map to databases and
 * collections via the abstract collection-key methods.
 *
 * <p>Mongo connections and wrapped collections are cached per database id /
 * collection key and released in {@link #close()}.</p>
 */
public abstract class MongoDataProvider extends JacksonDataProvider {

    private static Logger log = LoggerFactory.getLogger(MongoDataProvider.class);

    // one Mongo connection per database id
    private Map<String,Mongo> mongos = new HashMap<String,Mongo>();

    // one wrapped collection per collection key
    private Map<String,JacksonDBCollection<ObjectNode, Object>> collections =
            new HashMap<String,JacksonDBCollection<ObjectNode, Object>>();

    public MongoDataProvider(PrestoSchemaProvider schemaProvider) {
        super(schemaProvider);
    }

    private static final String DEFAULT_MONGO_URI = "mongodb://localhost";

    /**
     * Creates the Mongo connection for the given database id. The default
     * implementation ignores {@code databaseId} and connects to
     * {@link #getMongoURI()}; subclasses may override to route databases to
     * different servers.
     */
    protected Mongo createMongo(String databaseId) {
        try {
            return new Mongo(new MongoURI(getMongoURI()));
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    protected String getMongoURI() {
        return DEFAULT_MONGO_URI;
    }

    // -- PrestoDataProvider

    @Override
    public String getProviderId() {
        return "mongodb";
    }

    /**
     * Looks a topic up by its external id, falling back to lazy loading when
     * no stored document exists. Logs a warning when neither yields a topic.
     */
    @Override
    public PrestoTopic getTopicById(String topicId) {
        // look up by document id
        ObjectNode doc = null;
        if (topicId != null) {
            doc = findTopicById(topicId);
        }
        if (doc == null) {
            PrestoTopic topic = lazyLoad(topicId);
            if (topic != null) {
                return topic;
            }
            log.warn("Topic with id '" + topicId + "' not found.");
        }
        return existing(doc);
    }

    @Override
    public Collection<PrestoTopic> getTopicsByIds(Collection<String> topicIds) {
        Collection<PrestoTopic> result = new ArrayList<PrestoTopic>();
        aggregateTopicsById(topicIds, result);
        return includeLazyTopics(result, topicIds);
    }

    /**
     * Returns all topics of the field's available value types, sorted by
     * their name for that field.
     * NOTE(review): the {@code query} parameter is currently ignored — no
     * filtering is applied; confirm whether that is intentional.
     */
    @Override
    public Collection<? extends Object> getAvailableFieldValues(PrestoTopic topic, final PrestoField field, String query) {
        Collection<PrestoType> types = field.getAvailableFieldValueTypes();
        if (types.isEmpty()) {
            return Collections.emptyList();
        }
        List<PrestoTopic> result = new ArrayList<PrestoTopic>();
        aggregateTopicsByType(types, result);
        Collections.sort(result, new Comparator<PrestoTopic>() {
            @Override
            public int compare(PrestoTopic o1, PrestoTopic o2) {
                return Utils.compareComparables(o1.getName(field), o2.getName(field));
            }
        });
        return result;
    }

    /**
     * Closes all cached Mongo connections. Failures are logged and do not
     * prevent the remaining connections from being closed. The caches are
     * cleared afterwards so a stale connection is never handed out.
     */
    @Override
    public void close() {
        for (Mongo mongo : mongos.values()) {
            try {
                mongo.close();
            } catch (Exception e) {
                log.warn("Could not close mongo connection: " + mongo, e);
            }
        }
        mongos.clear();
        collections.clear();
    }

    // -- DefaultDataProvider

    /**
     * Inserts the topic's backing document, generating and assigning an id
     * via the identity strategy when one is produced.
     */
    @Override
    public void create(PrestoTopic topic) {
        ObjectNode data = ((JacksonTopic)topic).getData();
        String typeId = topic.getTypeId();
        JacksonDBCollection<ObjectNode, Object> collection = getCollectionByTypeId(typeId);
        String topicId = getIdentityStrategy().generateId(typeId, data);
        if (topicId != null) {
            data.put("_id", topicId);
        }
        collection.insert(data);
    }

    @Override
    public void update(PrestoTopic topic) {
        ObjectNode data = ((JacksonTopic)topic).getData();
        JacksonDBCollection<ObjectNode, Object> collection = getCollectionByTypeId(topic.getTypeId());
        collection.updateById(getIdentityStrategy().externalToInternalTopicId(topic.getId()), data);
    }

    @Override
    public boolean delete(PrestoTopic topic) {
        JacksonDBCollection<ObjectNode, Object> collection = getCollectionByTypeId(topic.getTypeId());
        collection.remove(new BasicDBObject("_id", getIdentityStrategy().externalToInternalTopicId(topic.getId())));
        return true;
    }

    // -- data collections strategy

    /**
     * Returns the wrapped collection for the given collection key, creating
     * and caching it on first use.
     */
    protected JacksonDBCollection<ObjectNode, Object> getCollectionByKey(String collectionKey) {
        JacksonDBCollection<ObjectNode, Object> coll = collections.get(collectionKey);
        if (coll == null) {
            String databaseId = getDatabaseIdByCollectionKey(collectionKey);
            Mongo mongo = getMongo(databaseId);
            DB db = mongo.getDB(databaseId);
            String collectionId = getCollectionIdByCollectionKey(collectionKey);
            DBCollection dbCollection = db.getCollection(collectionId);
            coll = JacksonDBCollection.wrap(dbCollection, ObjectNode.class);
            // BUG FIX: was collections.put(collectionId, coll) — caching under
            // the collection id while looking up by collection key meant the
            // cache never hit (and could collide when two keys share an id).
            collections.put(collectionKey, coll);
        }
        return coll;
    }

    protected Mongo getMongo(String databaseId) {
        Mongo mongo = mongos.get(databaseId);
        if (mongo == null) {
            mongo = createMongo(databaseId);
            mongos.put(databaseId, mongo);
        }
        return mongo;
    }

    protected JacksonDBCollection<ObjectNode, Object> getCollectionByTopicId(String topicId) {
        String collectionKey = getCollectionKeyByTopicId(topicId);
        return getCollectionByKey(collectionKey);
    }

    protected JacksonDBCollection<ObjectNode, Object> getCollectionByTypeId(String typeId) {
        String collectionKey = getCollectionKeyByTypeId(typeId);
        return getCollectionByKey(collectionKey);
    }

    /** Maps a topic id to the key identifying its collection. */
    protected abstract String getCollectionKeyByTopicId(String topicId);

    /** Maps a type id to the key identifying its collection. */
    protected abstract String getCollectionKeyByTypeId(String typeId);

    /** Maps a collection key to the Mongo database id it lives in. */
    protected abstract String getDatabaseIdByCollectionKey(String collectionKey);

    /** Maps a collection key to the Mongo collection name. */
    protected abstract String getCollectionIdByCollectionKey(String collectionKey);

    protected ObjectNode findTopicById(String topicId) {
        JacksonDBCollection<ObjectNode, Object> coll = getCollectionByTopicId(topicId);
        return coll.findOne(new BasicDBObject("_id", getIdentityStrategy().externalToInternalTopicId(topicId)));
    }

    /**
     * Fetches topics by id, partitioning the ids per collection so each
     * collection is queried once with an $in filter.
     */
    protected void aggregateTopicsById(Collection<String> topicIds, Collection<PrestoTopic> result) {
        Map<String,List<String>> collectionKeys = new HashMap<String,List<String>>();
        for (String topicId : topicIds) {
            String collectionKey = getCollectionKeyByTopicId(topicId);
            List<String> partitionedTopicIds = collectionKeys.get(collectionKey);
            if (partitionedTopicIds == null) {
                partitionedTopicIds = new ArrayList<String>();
                collectionKeys.put(collectionKey, partitionedTopicIds);
            }
            partitionedTopicIds.add(topicId);
        }
        for (String collectionKey : collectionKeys.keySet()) {
            JacksonDBCollection<ObjectNode, Object> coll = getCollectionByKey(collectionKey);
            List<String> partitionedTopicIds = collectionKeys.get(collectionKey);
            aggregateResult(coll.find(new BasicDBObject("_id",
                    new BasicDBObject("$in", getIdentityStrategy().externalToInternalTopicIds(partitionedTopicIds)))), result);
        }
    }

    /**
     * Fetches all topics of the given types, partitioning the type ids per
     * collection and querying each collection's ":type" field with $in.
     */
    protected void aggregateTopicsByType(Collection<PrestoType> types, Collection<PrestoTopic> result) {
        Map<String,BasicDBList> collectionKeys = new HashMap<String,BasicDBList>();
        for (PrestoType type : types) {
            String typeId = type.getId();
            String collectionKey = getCollectionKeyByTypeId(typeId);
            BasicDBList partitionedTypeIds = collectionKeys.get(collectionKey);
            if (partitionedTypeIds == null) {
                partitionedTypeIds = new BasicDBList();
                collectionKeys.put(collectionKey, partitionedTypeIds);
            }
            partitionedTypeIds.add(typeId);
        }
        for (String collectionKey : collectionKeys.keySet()) {
            JacksonDBCollection<ObjectNode, Object> coll = getCollectionByKey(collectionKey);
            BasicDBList partitionedTypeIds = collectionKeys.get(collectionKey);
            aggregateResult(coll.find(new BasicDBObject(":type",
                    new BasicDBObject("$in", partitionedTypeIds))), result);
        }
    }

    /**
     * Drains a cursor into the result collection, wrapping each document via
     * {@code existing(...)}. The cursor is always closed.
     */
    protected void aggregateResult(DBCursor<ObjectNode> cursor, Collection<PrestoTopic> result) {
        try {
            for (ObjectNode docNode : cursor) {
                if (docNode.isObject()) {
                    result.add(existing(docNode));
                }
            }
        } finally {
            cursor.close();
        }
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.bucket.range;

import com.google.common.collect.Lists;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.util.InPlaceMergeSorter;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.index.fielddata.DoubleValues;
import org.elasticsearch.search.aggregations.*;
import org.elasticsearch.search.aggregations.bucket.BucketsAggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.format.ValueFormat;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueParser;
import org.elasticsearch.search.internal.SearchContext;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/**
 * Aggregator that buckets numeric values into a fixed set of (possibly
 * overlapping) half-open ranges [from, to). Ranges are sorted by {@code from}
 * at construction time and a prefix-max of {@code to} ({@link #maxTo}) is
 * precomputed so that per-value bucket lookup can binary-search instead of
 * scanning all ranges.
 */
public class RangeAggregator extends BucketsAggregator {

    /**
     * A single range bucket definition: half-open interval [from, to).
     * Bounds may be given as strings ({@code fromAsStr}/{@code toAsStr}) and
     * are resolved to doubles in {@link #process}.
     */
    public static class Range {

        public String key;
        public double from = Double.NEGATIVE_INFINITY;
        String fromAsStr;
        public double to = Double.POSITIVE_INFINITY;
        String toAsStr;

        public Range(String key, double from, String fromAsStr, double to, String toAsStr) {
            this.key = key;
            this.from = from;
            this.fromAsStr = fromAsStr;
            this.to = to;
            this.toAsStr = toAsStr;
        }

        // inclusive lower bound, exclusive upper bound
        boolean matches(double value) {
            return value >= from && value < to;
        }

        @Override
        public String toString() {
            return "[" + from + " to " + to + ")";
        }

        /**
         * Resolves string-form bounds to doubles using the given parser;
         * numeric bounds passed directly are left untouched.
         */
        public void process(ValueParser parser, SearchContext context) {
            assert parser != null;
            if (fromAsStr != null) {
                from = parser.parseDouble(fromAsStr, context);
            }
            if (toAsStr != null) {
                to = parser.parseDouble(toAsStr, context);
            }
        }
    }

    private final ValuesSource.Numeric valuesSource;
    private final @Nullable ValueFormatter formatter;
    private final Range[] ranges;
    private final boolean keyed;
    private final InternalRange.Factory rangeFactory;

    private DoubleValues values;

    // maxTo[i] = max(ranges[0..i].to) with ranges sorted by 'from'; lets the
    // binary search in collect() prune ranges that cannot contain a value.
    final double[] maxTo;

    public RangeAggregator(String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource, @Nullable ValueFormat format,
                           InternalRange.Factory rangeFactory, List<Range> ranges, boolean keyed, AggregationContext aggregationContext, Aggregator parent) {

        // one flat bucket ordinal space: ranges.length slots per parent bucket
        super(name, BucketAggregationMode.MULTI_BUCKETS, factories, ranges.size() * (parent == null ? 1 : parent.estimatedBucketCount()), aggregationContext, parent);
        assert valuesSource != null;
        this.valuesSource = valuesSource;
        this.formatter = format != null ? format.formatter() : null;
        this.keyed = keyed;
        this.rangeFactory = rangeFactory;
        this.ranges = ranges.toArray(new Range[ranges.size()]);
        ValueParser parser = format != null ? format.parser() : ValueParser.RAW;
        for (int i = 0; i < this.ranges.length; i++) {
            this.ranges[i].process(parser, context.searchContext());
        }
        sortRanges(this.ranges);

        // build the running maximum of 'to' over the from-sorted ranges
        maxTo = new double[this.ranges.length];
        maxTo[0] = this.ranges[0].to;
        for (int i = 1; i < this.ranges.length; ++i) {
            maxTo[i] = Math.max(this.ranges[i].to,maxTo[i-1]);
        }

    }

    @Override
    public boolean shouldCollect() {
        return true;
    }

    @Override
    public void setNextReader(AtomicReaderContext reader) {
        values = valuesSource.doubleValues();
    }

    // flattens (parent bucket, range index) into the global bucket ordinal
    private final long subBucketOrdinal(long owningBucketOrdinal, int rangeOrd) {
        return owningBucketOrdinal * ranges.length + rangeOrd;
    }

    @Override
    public void collect(int doc, long owningBucketOrdinal) throws IOException {
        final int valuesCount = values.setDocument(doc);
        // 'lo' carries the lower search bound across the (sorted) values of
        // this document so each value restarts where the previous one ended
        for (int i = 0, lo = 0; i < valuesCount; ++i) {
            final double value = values.nextValue();
            lo = collect(doc, value, owningBucketOrdinal, lo);
        }
    }

    /**
     * Collects one value into every matching range, returning the new lower
     * bound for subsequent (larger) values of the same document.
     *
     * Strategy: a first binary search narrows [lo, hi] to indexes that might
     * contain the value (using 'from' on the left and the prefix-max 'maxTo'
     * on the right), then two more binary searches pin down the exact
     * candidate window [startLo, endHi]; each candidate is still checked with
     * matches() because ranges may overlap arbitrarily.
     */
    private int collect(int doc, double value, long owningBucketOrdinal, int lowBound) throws IOException {
        int lo = lowBound, hi = ranges.length - 1; // all candidates are between these indexes
        int mid = (lo + hi) >>> 1;
        while (lo <= hi) {
            if (value < ranges[mid].from) {
                hi = mid - 1;
            } else if (value >= maxTo[mid]) {
                lo = mid + 1;
            } else {
                break;
            }
            mid = (lo + hi) >>> 1;
        }
        if (lo > hi) return lo; // no potential candidate

        // binary search the lower bound
        int startLo = lo, startHi = mid;
        while (startLo <= startHi) {
            final int startMid = (startLo + startHi) >>> 1;
            if (value >= maxTo[startMid]) {
                startLo = startMid + 1;
            } else {
                startHi = startMid - 1;
            }
        }

        // binary search the upper bound
        int endLo = mid, endHi = hi;
        while (endLo <= endHi) {
            final int endMid = (endLo + endHi) >>> 1;
            if (value < ranges[endMid].from) {
                endHi = endMid - 1;
            } else {
                endLo = endMid + 1;
            }
        }

        assert startLo == lowBound || value >= maxTo[startLo - 1];
        assert endHi == ranges.length - 1 || value < ranges[endHi + 1].from;

        for (int i = startLo; i <= endHi; ++i) {
            if (ranges[i].matches(value)) {
                collectBucket(doc, subBucketOrdinal(owningBucketOrdinal, i));
            }
        }

        return endHi + 1;
    }

    @Override
    public InternalAggregation buildAggregation(long owningBucketOrdinal) {
        List<org.elasticsearch.search.aggregations.bucket.range.Range.Bucket> buckets = Lists.newArrayListWithCapacity(ranges.length);
        for (int i = 0; i < ranges.length; i++) {
            Range range = ranges[i];
            final long bucketOrd = subBucketOrdinal(owningBucketOrdinal, i);
            org.elasticsearch.search.aggregations.bucket.range.Range.Bucket bucket =
                    rangeFactory.createBucket(range.key, range.from, range.to, bucketDocCount(bucketOrd),bucketAggregations(bucketOrd), formatter);
            buckets.add(bucket);
        }
        // value source can be null in the case of unmapped fields
        return rangeFactory.create(name, buckets, formatter, keyed);
    }

    @Override
    public InternalAggregation buildEmptyAggregation() {
        InternalAggregations subAggs = buildEmptySubAggregations();
        List<org.elasticsearch.search.aggregations.bucket.range.Range.Bucket> buckets = Lists.newArrayListWithCapacity(ranges.length);
        for (int i = 0; i < ranges.length; i++) {
            Range range = ranges[i];
            org.elasticsearch.search.aggregations.bucket.range.Range.Bucket bucket =
                    rangeFactory.createBucket(range.key, range.from, range.to, 0, subAggs, formatter);
            buckets.add(bucket);
        }
        // value source can be null in the case of unmapped fields
        return rangeFactory.create(name, buckets, formatter, keyed);
    }

    // stable in-place sort by 'from', tie-broken by 'to'
    private static final void sortRanges(final Range[] ranges) {
        new InPlaceMergeSorter() {

            @Override
            protected void swap(int i, int j) {
                final Range tmp = ranges[i];
                ranges[i] = ranges[j];
                ranges[j] = tmp;
            }

            @Override
            protected int compare(int i, int j) {
                int cmp = Double.compare(ranges[i].from, ranges[j].from);
                if (cmp == 0) {
                    cmp = Double.compare(ranges[i].to, ranges[j].to);
                }
                return cmp;
            }
        }.sort(0, ranges.length);
    }

    /**
     * Variant used when the field is unmapped: collects nothing but still
     * produces the full set of (empty) range buckets.
     */
    public static class Unmapped extends NonCollectingAggregator {

        private final List<RangeAggregator.Range> ranges;
        private final boolean keyed;
        private final InternalRange.Factory factory;
        private final ValueFormatter formatter;

        public Unmapped(String name, List<RangeAggregator.Range> ranges, boolean keyed, ValueFormat format, AggregationContext context,
                        Aggregator parent, InternalRange.Factory factory) {

            super(name, context, parent);
            this.ranges = ranges;
            // bounds given as strings must still be parsed for the empty buckets
            ValueParser parser = format != null ? format.parser() : ValueParser.RAW;
            for (Range range : this.ranges) {
                range.process(parser, context.searchContext());
            }
            this.keyed = keyed;
            this.formatter = format != null ? format.formatter() : null;
            this.factory = factory;
        }

        @Override
        public InternalAggregation buildEmptyAggregation() {
            InternalAggregations subAggs = buildEmptySubAggregations();
            List<org.elasticsearch.search.aggregations.bucket.range.Range.Bucket> buckets = new ArrayList<>(ranges.size());
            for (RangeAggregator.Range range : ranges) {
                buckets.add(factory.createBucket(range.key, range.from, range.to, 0, subAggs, formatter));
            }
            return factory.create(name, buckets, formatter, keyed);
        }
    }

    /**
     * Factory that creates either a real {@link RangeAggregator} or, for
     * unmapped fields, an {@link Unmapped} placeholder.
     */
    public static class Factory extends ValuesSourceAggregatorFactory<ValuesSource.Numeric> {

        private final InternalRange.Factory rangeFactory;
        private final List<Range> ranges;
        private final boolean keyed;

        public Factory(String name, ValuesSourceConfig<ValuesSource.Numeric> valueSourceConfig, InternalRange.Factory rangeFactory, List<Range> ranges, boolean keyed) {
            super(name, rangeFactory.type(), valueSourceConfig);
            this.rangeFactory = rangeFactory;
            this.ranges = ranges;
            this.keyed = keyed;
        }

        @Override
        protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent) {
            return new Unmapped(name, ranges, keyed, config.format(), aggregationContext, parent, rangeFactory);
        }

        @Override
        protected Aggregator create(ValuesSource.Numeric valuesSource, long expectedBucketsCount, AggregationContext aggregationContext, Aggregator parent) {
            return new RangeAggregator(name, factories, valuesSource, config.format(), rangeFactory, ranges, keyed, aggregationContext, parent);
        }
    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.mapper; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.test.ESSingleNodeTestCase; import java.io.IOException; import java.util.Arrays; import static org.elasticsearch.test.VersionUtils.randomVersionBetween; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; public class RootObjectMapperTests extends ESSingleNodeTestCase { public void testNumericDetection() throws Exception { MergeReason reason = randomFrom(MergeReason.MAPPING_UPDATE, MergeReason.INDEX_TEMPLATE); String mapping = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .field("numeric_detection", false) .endObject() .endObject()); MapperService mapperService = createIndex("test").mapperService(); 
DocumentMapper mapper = mapperService.merge("type", new CompressedXContent(mapping), reason); assertEquals(mapping, mapper.mappingSource().toString()); // update with a different explicit value String mapping2 = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .field("numeric_detection", true) .endObject() .endObject()); mapper = mapperService.merge("type", new CompressedXContent(mapping2), reason); assertEquals(mapping2, mapper.mappingSource().toString()); // update with an implicit value: no change String mapping3 = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .endObject() .endObject()); mapper = mapperService.merge("type", new CompressedXContent(mapping3), reason); assertEquals(mapping2, mapper.mappingSource().toString()); } public void testDateDetection() throws Exception { MergeReason reason = randomFrom(MergeReason.MAPPING_UPDATE, MergeReason.INDEX_TEMPLATE); String mapping = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .field("date_detection", true) .endObject() .endObject()); MapperService mapperService = createIndex("test").mapperService(); DocumentMapper mapper = mapperService.merge("type", new CompressedXContent(mapping), reason); assertEquals(mapping, mapper.mappingSource().toString()); // update with a different explicit value String mapping2 = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .field("date_detection", false) .endObject() .endObject()); mapper = mapperService.merge("type", new CompressedXContent(mapping2), reason); assertEquals(mapping2, mapper.mappingSource().toString()); // update with an implicit value: no change String mapping3 = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .endObject() .endObject()); mapper = mapperService.merge("type", new CompressedXContent(mapping3), reason); assertEquals(mapping2, mapper.mappingSource().toString()); } public void 
testDateFormatters() throws Exception { MergeReason reason = randomFrom(MergeReason.MAPPING_UPDATE, MergeReason.INDEX_TEMPLATE); String mapping = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .field("dynamic_date_formats", Arrays.asList("yyyy-MM-dd")) .endObject() .endObject()); MapperService mapperService = createIndex("test").mapperService(); DocumentMapper mapper = mapperService.merge("type", new CompressedXContent(mapping), reason); assertEquals(mapping, mapper.mappingSource().toString()); // no update if formatters are not set explicitly String mapping2 = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .endObject() .endObject()); mapper = mapperService.merge("type", new CompressedXContent(mapping2), reason); assertEquals(mapping, mapper.mappingSource().toString()); String mapping3 = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .field("dynamic_date_formats", Arrays.asList()) .endObject() .endObject()); mapper = mapperService.merge("type", new CompressedXContent(mapping3), reason); assertEquals(mapping3, mapper.mappingSource().toString()); } public void testDynamicTemplates() throws Exception { String mapping = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .startArray("dynamic_templates") .startObject() .startObject("my_template") .field("match_mapping_type", "string") .startObject("mapping") .field("type", "keyword") .endObject() .endObject() .endObject() .endArray() .endObject() .endObject()); MapperService mapperService = createIndex("test").mapperService(); DocumentMapper mapper = mapperService.merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); assertEquals(mapping, mapper.mappingSource().toString()); // no update if templates are not set explicitly String mapping2 = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .endObject() .endObject()); mapper = 
mapperService.merge("type", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE); assertEquals(mapping, mapper.mappingSource().toString()); String mapping3 = Strings.toString(XContentFactory.jsonBuilder() .startObject() .startObject("type") .field("dynamic_templates", Arrays.asList()) .endObject() .endObject()); mapper = mapperService.merge("type", new CompressedXContent(mapping3), MergeReason.MAPPING_UPDATE); assertEquals(mapping3, mapper.mappingSource().toString()); } public void testDynamicTemplatesForIndexTemplate() throws IOException { String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startArray("dynamic_templates") .startObject() .startObject("first_template") .field("path_match", "first") .startObject("mapping") .field("type", "keyword") .endObject() .endObject() .endObject() .startObject() .startObject("second_template") .field("path_match", "second") .startObject("mapping") .field("type", "keyword") .endObject() .endObject() .endObject() .endArray() .endObject()); MapperService mapperService = createIndex("test").mapperService(); mapperService.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping), MergeReason.INDEX_TEMPLATE); // There should be no update if templates are not set. mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() .startObject("properties") .startObject("field") .field("type", "integer") .endObject() .endObject() .endObject()); DocumentMapper mapper = mapperService.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping), MergeReason.INDEX_TEMPLATE); DynamicTemplate[] templates = mapper.root().dynamicTemplates(); assertEquals(2, templates.length); assertEquals("first_template", templates[0].name()); assertEquals("first", templates[0].pathMatch()); assertEquals("second_template", templates[1].name()); assertEquals("second", templates[1].pathMatch()); // Dynamic templates should be appended and deduplicated. 
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
            .startArray("dynamic_templates")
                .startObject()
                    .startObject("third_template")
                        .field("path_match", "third")
                        .startObject("mapping")
                            .field("type", "integer")
                        .endObject()
                    .endObject()
                .endObject()
                .startObject()
                    .startObject("second_template")
                        .field("path_match", "second_updated")
                        .startObject("mapping")
                            .field("type", "double")
                        .endObject()
                    .endObject()
                .endObject()
            .endArray()
        .endObject());
        // Re-merge under the INDEX_TEMPLATE reason. The assertions below show the merge
        // semantics: "second_template" keeps its position but picks up the new
        // "second_updated" path_match, and the new "third_template" is appended last.
        mapper = mapperService.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping),
            MergeReason.INDEX_TEMPLATE);
        templates = mapper.root().dynamicTemplates();
        assertEquals(3, templates.length);
        assertEquals("first_template", templates[0].name());
        assertEquals("first", templates[0].pathMatch());
        assertEquals("second_template", templates[1].name());
        assertEquals("second_updated", templates[1].pathMatch());
        assertEquals("third_template", templates[2].name());
        assertEquals("third", templates[2].pathMatch());
    }

    /**
     * Date-format lists ({@code dynamic_date_formats} / {@code date_formats}) must contain
     * plain string values; a nested array entry is rejected with an
     * {@link IllegalArgumentException} naming the offending value.
     */
    public void testIllegalFormatField() throws Exception {
        String dynamicMapping = Strings.toString(XContentFactory.jsonBuilder()
            .startObject()
                .startObject("type")
                    .startArray("dynamic_date_formats")
                        .startArray().value("test_format").endArray()
                    .endArray()
                .endObject()
            .endObject());
        String mapping = Strings.toString(XContentFactory.jsonBuilder()
            .startObject()
                .startObject("type")
                    .startArray("date_formats")
                        .startArray().value("test_format").endArray()
                    .endArray()
                .endObject()
            .endObject());

        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
        // Both the dynamic and the non-dynamic spelling must fail with the same message.
        for (String m : Arrays.asList(mapping, dynamicMapping)) {
            IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                () -> parser.parse("type", new CompressedXContent(m)));
            assertEquals("Invalid format: [[test_format]]: expected string value", e.getMessage());
        }
    }

    /**
     * {@code dynamic_templates} must be an array of single-entry named objects; an object
     * value is a syntax error at parse time.
     */
    public void testIllegalDynamicTemplates() throws Exception {
        String mapping = Strings.toString(XContentFactory.jsonBuilder()
            .startObject()
                .startObject("type")
                    .startObject("dynamic_templates")
                    .endObject()
                .endObject()
            .endObject());

        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
        MapperParsingException e = expectThrows(MapperParsingException.class,
            () -> parser.parse("type", new CompressedXContent(mapping)));
        assertEquals("Dynamic template syntax error. An array of named objects is expected.", e.getMessage());
    }

    /**
     * A dynamic template whose "mapping" names a field type that has no registered mapper
     * ("string" was removed in favour of text/keyword) is rejected on merge, with the
     * unknown-type failure surfaced as the root cause.
     */
    public void testIllegalDynamicTemplateUnknownFieldType() throws Exception {
        XContentBuilder mapping = XContentFactory.jsonBuilder();
        mapping.startObject();
        {
            mapping.startObject("type");
            mapping.startArray("dynamic_templates");
            {
                mapping.startObject();
                mapping.startObject("my_template");
                mapping.field("match_mapping_type", "string");
                mapping.startObject("mapping");
                mapping.field("type", "string");
                mapping.endObject();
                mapping.endObject();
                mapping.endObject();
            }
            mapping.endArray();
            mapping.endObject();
        }
        mapping.endObject();
        MapperService mapperService = createIndex("test").mapperService();
        MapperParsingException e = expectThrows(MapperParsingException.class,
            () -> mapperService.merge("type", new CompressedXContent(Strings.toString(mapping)), MergeReason.MAPPING_UPDATE));
        assertThat(e.getRootCause(), instanceOf(IllegalArgumentException.class));
        assertThat(e.getRootCause().getMessage(), equalTo("No mapper found for type [string]"));
    }

    /**
     * A dynamic template whose "mapping" carries a parameter the target field type does not
     * understand ("foo" on keyword) is rejected on merge. The synthetic mapper used for
     * validation is named {@code __dynamic__<template-name>} in the error.
     */
    public void testIllegalDynamicTemplateUnknownAttribute() throws Exception {
        XContentBuilder mapping = XContentFactory.jsonBuilder();
        mapping.startObject();
        {
            mapping.startObject("type");
            mapping.startArray("dynamic_templates");
            {
                mapping.startObject();
                mapping.startObject("my_template");
                mapping.field("match_mapping_type", "string");
                mapping.startObject("mapping");
                mapping.field("type", "keyword");
                mapping.field("foo", "bar");
                mapping.endObject();
                mapping.endObject();
                mapping.endObject();
            }
            mapping.endArray();
            mapping.endObject();
        }
        mapping.endObject();
        MapperService mapperService = createIndex("test").mapperService();
        MapperParsingException e = expectThrows(MapperParsingException.class,
            () -> mapperService.merge("type", new CompressedXContent(Strings.toString(mapping)), MergeReason.MAPPING_UPDATE));
        assertThat(e.getRootCause(), instanceOf(MapperParsingException.class));
        assertThat(e.getRootCause().getMessage(),
            equalTo("unknown parameter [foo] on mapper [__dynamic__my_template] of type [keyword]"));
    }

    /**
     * A dynamic template with a known parameter but an invalid value (an analyzer that does
     * not exist) is also rejected on merge.
     */
    public void testIllegalDynamicTemplateInvalidAttribute() throws Exception {
        XContentBuilder mapping = XContentFactory.jsonBuilder();
        mapping.startObject();
        {
            mapping.startObject("type");
            mapping.startArray("dynamic_templates");
            {
                mapping.startObject();
                mapping.startObject("my_template");
                mapping.field("match_mapping_type", "string");
                mapping.startObject("mapping");
                mapping.field("type", "text");
                mapping.field("analyzer", "foobar");
                mapping.endObject();
                mapping.endObject();
                mapping.endObject();
            }
            mapping.endArray();
            mapping.endObject();
        }
        mapping.endObject();
        MapperService mapperService = createIndex("test").mapperService();
        MapperParsingException e = expectThrows(MapperParsingException.class,
            () -> mapperService.merge("type", new CompressedXContent(Strings.toString(mapping)), MergeReason.MAPPING_UPDATE));
        assertThat(e.getRootCause(), instanceOf(MapperParsingException.class));
        assertThat(e.getRootCause().getMessage(), equalTo("analyzer [foobar] not found for field [__dynamic__my_template]"));
    }

    /**
     * Templates using the {@code {dynamic_type}} placeholder (so no concrete mapping type is
     * known up front): a valid extra parameter ({@code index_phrases}) merges fine, while an
     * unknown parameter still fails — the error reports type [binary], which appears to be
     * the placeholder's stand-in type during validation (TODO confirm against DynamicTemplate).
     */
    public void testIllegalDynamicTemplateNoMappingType() throws Exception {
        MapperService mapperService;
        {
            XContentBuilder mapping = XContentFactory.jsonBuilder();
            mapping.startObject();
            {
                mapping.startObject("type");
                mapping.startArray("dynamic_templates");
                {
                    mapping.startObject();
                    mapping.startObject("my_template");
                    // Either match flavour exercises the same code path.
                    if (randomBoolean()) {
                        mapping.field("match_mapping_type", "*");
                    } else {
                        mapping.field("match", "string_*");
                    }
                    mapping.startObject("mapping");
                    mapping.field("type", "{dynamic_type}");
                    mapping.field("index_phrases", true);
                    mapping.endObject();
                    mapping.endObject();
                    mapping.endObject();
                }
                mapping.endArray();
                mapping.endObject();
            }
            mapping.endObject();
            mapperService = createIndex("test").mapperService();
            DocumentMapper mapper = mapperService.merge("type",
                new CompressedXContent(Strings.toString(mapping)), MergeReason.MAPPING_UPDATE);
            assertThat(mapper.mappingSource().toString(), containsString("\"index_phrases\":true"));
        }
        {
            XContentBuilder mapping = XContentFactory.jsonBuilder();
            mapping.startObject();
            {
                mapping.startObject("type");
                mapping.startArray("dynamic_templates");
                {
                    mapping.startObject();
                    mapping.startObject("my_template");
                    if (randomBoolean()) {
                        mapping.field("match_mapping_type", "*");
                    } else {
                        mapping.field("match", "string_*");
                    }
                    mapping.startObject("mapping");
                    mapping.field("type", "{dynamic_type}");
                    mapping.field("foo", "bar");
                    mapping.endObject();
                    mapping.endObject();
                    mapping.endObject();
                }
                mapping.endArray();
                mapping.endObject();
            }
            mapping.endObject();
            MapperParsingException e = expectThrows(MapperParsingException.class,
                () -> mapperService.merge("type", new CompressedXContent(Strings.toString(mapping)), MergeReason.MAPPING_UPDATE));
            assertThat(e.getRootCause(), instanceOf(MapperParsingException.class));
            assertThat(e.getRootCause().getMessage(),
                equalTo("unknown parameter [foo] on mapper [__dynamic__my_template] of type [binary]"));
        }
    }

    // Required so testIllegalDynamicTemplate7DotXIndex may force a 7.x index-created version.
    @Override
    protected boolean forbidPrivateIndexSettings() {
        return false;
    }

    /**
     * Backwards compatibility: on an index created between 7.0.0 and 7.7.0 an invalid dynamic
     * template is accepted as-is (kept verbatim in the mapping source) and only a deprecation
     * warning is emitted, instead of the hard failure newer indices get.
     */
    public void testIllegalDynamicTemplate7DotXIndex() throws Exception {
        XContentBuilder mapping = XContentFactory.jsonBuilder();
        mapping.startObject();
        {
            mapping.startObject("type");
            mapping.startArray("dynamic_templates");
            {
                mapping.startObject();
                mapping.startObject("my_template");
                mapping.field("match_mapping_type", "string");
                mapping.startObject("mapping");
                mapping.field("type", "string");
                mapping.endObject();
                mapping.endObject();
                mapping.endObject();
            }
            mapping.endArray();
            mapping.endObject();
        }
        mapping.endObject();
        Version createdVersion = randomVersionBetween(random(), Version.V_7_0_0, Version.V_7_7_0);
        Settings indexSettings = Settings.builder()
            .put(IndexMetadata.SETTING_INDEX_VERSION_CREATED.getKey(), createdVersion)
            .build();
        MapperService mapperService = createIndex("test", indexSettings).mapperService();
        DocumentMapper mapper = mapperService.merge("type",
            new CompressedXContent(Strings.toString(mapping)), MergeReason.MAPPING_UPDATE);
        // The bogus template survives the merge verbatim...
        assertThat(mapper.mappingSource().toString(), containsString("\"type\":\"string\""));
        // ...and the only signal is a deprecation warning describing why it is invalid.
        assertWarnings("dynamic template [my_template] has invalid content [{\"match_mapping_type\":\"string\",\"mapping\":{\"type\":"
            + "\"string\"}}], caused by [No mapper found for type [string]]");
    }
}
/**
 * Copyright (C) 2012 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.engine.function.config;

import java.util.Map;

import org.joda.beans.Bean;
import org.joda.beans.BeanBuilder;
import org.joda.beans.BeanDefinition;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.direct.DirectBean;
import org.joda.beans.impl.direct.DirectBeanBuilder;
import org.joda.beans.impl.direct.DirectMetaBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import org.joda.beans.impl.direct.DirectMetaPropertyMap;

import com.opengamma.util.ArgumentChecker;

/**
 * Static function configuration representation.
 * <p>
 * Identifies a function purely by the class name of its definition — no parameters are
 * attached (contrast with {@code ParameterizedFunctionConfiguration}). The section between
 * the AUTOGENERATED markers is produced by the Joda-Beans code generator and should be
 * regenerated rather than edited by hand.
 */
@BeanDefinition
public class StaticFunctionConfiguration extends DirectBean implements FunctionConfiguration {

  private static final long serialVersionUID = 1L;

  // Fully-qualified class name of the function definition; validated non-null by the bean.
  @PropertyDefinition(validate = "notNull")
  private String _definitionClassName;

  /**
   * Creates an instance.
   *
   * @param definitionClassName
   *          the definition class name, not-null.
   */
  public StaticFunctionConfiguration(final String definitionClassName) {
    ArgumentChecker.notNull(definitionClassName, "definitionClassName");
    _definitionClassName = definitionClassName;
  }

  /**
   * Constructor for builder.
   */
  StaticFunctionConfiguration() {
  }

  /**
   * Orders static configurations before parameterized ones, and static configurations
   * among themselves by definition class name. Comparison against any other
   * {@link FunctionConfiguration} implementation is unsupported.
   *
   * @param other the configuration to compare against, not null
   * @return a negative integer, zero, or a positive integer per {@link Comparable}
   * @throws UnsupportedOperationException if {@code other} is neither a
   *           {@code ParameterizedFunctionConfiguration} nor a {@code StaticFunctionConfiguration}
   */
  @Override
  public int compareTo(final FunctionConfiguration other) {
    if (other instanceof ParameterizedFunctionConfiguration) {
      // Static goes first
      return -1;
    } else if (other instanceof StaticFunctionConfiguration) {
      // Sort by class name
      return _definitionClassName.compareTo(((StaticFunctionConfiguration) other)._definitionClassName);
    }
    throw new UnsupportedOperationException("Can't compare " + StaticFunctionConfiguration.class + " and " + other.getClass());
  }

  //------------------------- AUTOGENERATED START -------------------------
  ///CLOVER:OFF
  /**
   * The meta-bean for {@code StaticFunctionConfiguration}.
   * @return the meta-bean, not null
   */
  public static StaticFunctionConfiguration.Meta meta() {
    return StaticFunctionConfiguration.Meta.INSTANCE;
  }

  static {
    JodaBeanUtils.registerMetaBean(StaticFunctionConfiguration.Meta.INSTANCE);
  }

  @Override
  public StaticFunctionConfiguration.Meta metaBean() {
    return StaticFunctionConfiguration.Meta.INSTANCE;
  }

  //-----------------------------------------------------------------------
  /**
   * Gets the definitionClassName.
   * @return the value of the property, not null
   */
  public String getDefinitionClassName() {
    return _definitionClassName;
  }

  /**
   * Sets the definitionClassName.
   * @param definitionClassName  the new value of the property, not null
   */
  public void setDefinitionClassName(String definitionClassName) {
    JodaBeanUtils.notNull(definitionClassName, "definitionClassName");
    this._definitionClassName = definitionClassName;
  }

  /**
   * Gets the the {@code definitionClassName} property.
   * @return the property, not null
   */
  public final Property<String> definitionClassName() {
    return metaBean().definitionClassName().createProperty(this);
  }

  //-----------------------------------------------------------------------
  @Override
  public StaticFunctionConfiguration clone() {
    return JodaBeanUtils.cloneAlways(this);
  }

  @Override
  public boolean equals(Object obj) {
    if (obj == this) {
      return true;
    }
    if (obj != null && obj.getClass() == this.getClass()) {
      StaticFunctionConfiguration other = (StaticFunctionConfiguration) obj;
      return JodaBeanUtils.equal(getDefinitionClassName(), other.getDefinitionClassName());
    }
    return false;
  }

  @Override
  public int hashCode() {
    int hash = getClass().hashCode();
    hash = hash * 31 + JodaBeanUtils.hashCode(getDefinitionClassName());
    return hash;
  }

  @Override
  public String toString() {
    StringBuilder buf = new StringBuilder(64);
    buf.append("StaticFunctionConfiguration{");
    int len = buf.length();
    toString(buf);
    if (buf.length() > len) {
      buf.setLength(buf.length() - 2);
    }
    buf.append('}');
    return buf.toString();
  }

  protected void toString(StringBuilder buf) {
    buf.append("definitionClassName").append('=').append(JodaBeanUtils.toString(getDefinitionClassName())).append(',').append(' ');
  }

  //-----------------------------------------------------------------------
  /**
   * The meta-bean for {@code StaticFunctionConfiguration}.
   */
  public static class Meta extends DirectMetaBean {
    /**
     * The singleton instance of the meta-bean.
     */
    static final Meta INSTANCE = new Meta();

    /**
     * The meta-property for the {@code definitionClassName} property.
     */
    private final MetaProperty<String> _definitionClassName = DirectMetaProperty.ofReadWrite(
        this, "definitionClassName", StaticFunctionConfiguration.class, String.class);
    /**
     * The meta-properties.
     */
    private final Map<String, MetaProperty<?>> _metaPropertyMap$ = new DirectMetaPropertyMap(
        this, null,
        "definitionClassName");

    /**
     * Restricted constructor.
     */
    protected Meta() {
    }

    @Override
    protected MetaProperty<?> metaPropertyGet(String propertyName) {
      // Generator-emitted dispatch on the property name's hash code.
      switch (propertyName.hashCode()) {
        case 95245328:  // definitionClassName
          return _definitionClassName;
      }
      return super.metaPropertyGet(propertyName);
    }

    @Override
    public BeanBuilder<? extends StaticFunctionConfiguration> builder() {
      return new DirectBeanBuilder<StaticFunctionConfiguration>(new StaticFunctionConfiguration());
    }

    @Override
    public Class<? extends StaticFunctionConfiguration> beanType() {
      return StaticFunctionConfiguration.class;
    }

    @Override
    public Map<String, MetaProperty<?>> metaPropertyMap() {
      return _metaPropertyMap$;
    }

    //-----------------------------------------------------------------------
    /**
     * The meta-property for the {@code definitionClassName} property.
     * @return the meta-property, not null
     */
    public final MetaProperty<String> definitionClassName() {
      return _definitionClassName;
    }

    //-----------------------------------------------------------------------
    @Override
    protected Object propertyGet(Bean bean, String propertyName, boolean quiet) {
      switch (propertyName.hashCode()) {
        case 95245328:  // definitionClassName
          return ((StaticFunctionConfiguration) bean).getDefinitionClassName();
      }
      return super.propertyGet(bean, propertyName, quiet);
    }

    @Override
    protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) {
      switch (propertyName.hashCode()) {
        case 95245328:  // definitionClassName
          ((StaticFunctionConfiguration) bean).setDefinitionClassName((String) newValue);
          return;
      }
      super.propertySet(bean, propertyName, newValue, quiet);
    }

    @Override
    protected void validate(Bean bean) {
      JodaBeanUtils.notNull(((StaticFunctionConfiguration) bean)._definitionClassName, "definitionClassName");
    }

  }

  ///CLOVER:ON
  //-------------------------- AUTOGENERATED END --------------------------
}
package nz.org.winters.legodisplaysequencer.sequence;

import android.content.Intent;
import android.support.test.InstrumentationRegistry;
import android.support.test.runner.AndroidJUnit4;
import android.support.v4.content.LocalBroadcastManager;

import com.google.firebase.database.DatabaseReference;

import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InOrder;
import org.mockito.Mockito;

import java.util.HashMap;
import java.util.Map;

import nz.org.winters.legodisplaysequencer.AppBroadcastHandler;
import nz.org.winters.legodisplaysequencer.DeviceActionSender;
import nz.org.winters.legodisplaysequencer.R;
import nz.org.winters.legodisplaysequencer.TypeDefines;
import nz.org.winters.legodisplaysequencer.Utils;
import nz.org.winters.legodisplaysequencer.storage.FirebaseProperties;
import nz.org.winters.legodisplaysequencer.storage.LegoColour;

import static nz.org.winters.legodisplaysequencer.CustomMatchers.intentEq;
import static nz.org.winters.legodisplaysequencer.CustomMatchers.legoColourEq;
import static nz.org.winters.legodisplaysequencer.TypeDefines.TRAIN_DIRECTION_BACKWARDS;
import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.BDDMockito.given;
import static org.mockito.BDDMockito.then;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;

/**
 * Instrumented tests for {@code SequenceItemDriveTrain}: display-string rendering, the
 * run/cancel broadcast lifecycle (with and without waiting for the train-stopped event),
 * Firebase (de)serialization, and the small accessor methods.
 *
 * @author MWinters
 */
@RunWith(AndroidJUnit4.class)
public class SequenceItemDriveTrainTest {

    @Before
    public void setUp() throws Exception {
        // Pretend a Pi device is attached so run() does not talk to real hardware.
        // Reset before every test (testRunErrors flips it off at the end).
        Utils.isTestingPi = true;
    }

    /**
     * getDisplayString reflects train number, direction, colour name, speed, and the
     * wait/continue flag, and tracks subsequent setter changes.
     */
    @Test
    public void testDisplayString(){
        SequenceItemDriveTrain item = new SequenceItemDriveTrain(1, TypeDefines.TRAIN_DIRECTION_FORWARDS, 150, LegoColour.colours.Lego_Bright_Red.getLegoColour(),false);
        assertEquals(1,item.getTrainNumber());
        assertEquals(TypeDefines.TRAIN_DIRECTION_FORWARDS, item.getDriveDirection());
        assertEquals(150,item.getSpeed());
        assertEquals(LegoColour.colours.Lego_Bright_Red.getLegoColour().getColour(),item.getDriveToColour().getColour());
        assertEquals("Train 1 Forwards to Red at 150 - Wait", item.getDisplayString(InstrumentationRegistry.getTargetContext()));

        item.setDriveDirection(TRAIN_DIRECTION_BACKWARDS);
        item.setDriveToColour(LegoColour.colours.Lego_Brick_Yellow.getLegoColour());
        item.setTrainNumber(2);
        item.setSpeed(200);
        item.setDontWait(true);
        // Note: Lego_Brick_Yellow renders as "Tan".
        assertEquals("Train 2 Backwards to Tan at 200 - Continue", item.getDisplayString(InstrumentationRegistry.getTargetContext()));
    }

    /**
     * With dontWait=false, run() drives the train and only broadcasts the "completed"
     * intent after the INTENT_RECEIVED_STOPPED broadcast for that train arrives.
     */
    @Test
    public void testRunWithWait(){
        SequenceItemDriveTrain item = new SequenceItemDriveTrain(1, TypeDefines.TRAIN_DIRECTION_FORWARDS, 150, LegoColour.colours.Lego_Bright_Red.getLegoColour(),false);

        DeviceActionSender deviceActionSenderMock = mock(DeviceActionSender.class);
        AppBroadcastHandler appBroadcastHandlerMock = mock(AppBroadcastHandler.class);
        AppBroadcastHandler.setInstance(appBroadcastHandlerMock);
        // Real LocalBroadcastManager so sendBroadcastSync below actually reaches the
        // receiver that run() registers.
        LocalBroadcastManager localBroadcastManager = LocalBroadcastManager.getInstance(InstrumentationRegistry.getTargetContext());
        InOrder inOrder = Mockito.inOrder(deviceActionSenderMock, appBroadcastHandlerMock);
        given(appBroadcastHandlerMock.getBroadcastManager()).willReturn(localBroadcastManager);

        Intent intentC = new Intent("completed");
        Intent intentE = new Intent("errored");

        item.run( InstrumentationRegistry.getTargetContext(), deviceActionSenderMock, appBroadcastHandlerMock, intentC, intentE);

        then(appBroadcastHandlerMock).should(inOrder, times(1)).getBroadcastManager();
        //then(localBroadcastManagerMock).should(inOrder,times(1)).registerReceiver(eq(any(BroadcastReceiver.class)),eq(any(IntentFilter.class)));
        //noinspection WrongConstant
        then(deviceActionSenderMock).should(inOrder, times(1))
            .driveTrain(eq(1L), eq(TypeDefines.TRAIN_DIRECTION_FORWARDS), eq(150L), legoColourEq(LegoColour.colours.Lego_Bright_Red.getLegoColour()));

        // Simulate the device reporting that train 1 has stopped.
        Intent intentTrainStop = new Intent(TypeDefines.INTENT_RECEIVED_STOPPED);
        intentTrainStop.putExtra(TypeDefines.EXTRA_TRAIN_NUMBER, 1L);
        localBroadcastManager.sendBroadcastSync(intentTrainStop);

        Intent intentMatch = new Intent("completed");
        intentMatch.putExtra(TypeDefines.EXTRA_SEQUENCE_NAME, "Drive Train");
        then(appBroadcastHandlerMock).should(inOrder, times(1)).sendLocalBroadcast(intentEq(intentMatch));
        // Second getBroadcastManager call — presumably to unregister the receiver.
        then(appBroadcastHandlerMock).should(inOrder, times(1)).getBroadcastManager();
        inOrder.verifyNoMoreInteractions();
    }

    /**
     * Error paths: a train number of -1 produces the "errored" broadcast with a
     * "No train" message; with isTestingPi switched off the same run reports completion
     * instead (presumably skipping the hardware checks — note the flag is restored by
     * setUp before the next test).
     */
    @Test
    public void testRunErrors(){
        SequenceItemDriveTrain item = new SequenceItemDriveTrain(-1, TypeDefines.TRAIN_DIRECTION_FORWARDS, 150, LegoColour.colours.Lego_Bright_Red.getLegoColour(),false);

        DeviceActionSender deviceActionSenderMock = mock(DeviceActionSender.class);
        AppBroadcastHandler appBroadcastHandlerMock = mock(AppBroadcastHandler.class);
        AppBroadcastHandler.setInstance(appBroadcastHandlerMock);
        LocalBroadcastManager localBroadcastManager = LocalBroadcastManager.getInstance(InstrumentationRegistry.getTargetContext());
        InOrder inOrder = Mockito.inOrder(deviceActionSenderMock, appBroadcastHandlerMock);
        given(appBroadcastHandlerMock.getBroadcastManager()).willReturn(localBroadcastManager);

        Intent intentC = new Intent("completed");
        Intent intentE = new Intent("errored");

        item.run( InstrumentationRegistry.getTargetContext(), deviceActionSenderMock, appBroadcastHandlerMock, intentC, intentE);

        Intent intentMatch = new Intent("errored");
        intentMatch.putExtra(TypeDefines.EXTRA_SEQUENCE_NAME, "Drive Train");
        intentMatch.putExtra(TypeDefines.EXTRA_MESSAGE, "No train");
        then(appBroadcastHandlerMock).should(inOrder, times(1)).sendLocalBroadcast(intentEq(intentMatch));
        inOrder.verifyNoMoreInteractions();

        Utils.isTestingPi = false;

        item.run( InstrumentationRegistry.getTargetContext(), deviceActionSenderMock, appBroadcastHandlerMock, intentC, intentE);

        intentMatch = new Intent("completed");
        intentMatch.putExtra(TypeDefines.EXTRA_SEQUENCE_NAME, "Drive Train");
        then(appBroadcastHandlerMock).should(inOrder, times(1)).sendLocalBroadcast(intentEq(intentMatch));
        inOrder.verifyNoMoreInteractions();
    }

    /**
     * Cancelling a waiting run stops the train (direction STOP, speed 0, colour None) and,
     * once the stop event arrives, broadcasts "errored" with a "Cancelled" message.
     */
    @Test
    public void testRunWithWaitThenCancel(){
        SequenceItemDriveTrain item = new SequenceItemDriveTrain(1, TypeDefines.TRAIN_DIRECTION_FORWARDS, 150, LegoColour.colours.Lego_Bright_Red.getLegoColour(),false);

        DeviceActionSender deviceActionSenderMock = mock(DeviceActionSender.class);
        AppBroadcastHandler appBroadcastHandlerMock = mock(AppBroadcastHandler.class);
        AppBroadcastHandler.setInstance(appBroadcastHandlerMock);
        LocalBroadcastManager localBroadcastManager = LocalBroadcastManager.getInstance(InstrumentationRegistry.getTargetContext());
        InOrder inOrder = Mockito.inOrder(deviceActionSenderMock, appBroadcastHandlerMock);
        given(appBroadcastHandlerMock.getBroadcastManager()).willReturn(localBroadcastManager);

        Intent intentC = new Intent("completed");
        Intent intentE = new Intent("errored");

        item.run( InstrumentationRegistry.getTargetContext(), deviceActionSenderMock, appBroadcastHandlerMock, intentC, intentE);

        then(appBroadcastHandlerMock).should(inOrder, times(1)).getBroadcastManager();
        //then(localBroadcastManagerMock).should(inOrder,times(1)).registerReceiver(eq(any(BroadcastReceiver.class)),eq(any(IntentFilter.class)));
        //noinspection WrongConstant
        then(deviceActionSenderMock).should(inOrder, times(1))
            .driveTrain(eq(1L), eq(TypeDefines.TRAIN_DIRECTION_FORWARDS), eq(150L), legoColourEq(LegoColour.colours.Lego_Bright_Red.getLegoColour()));

        item.cancel();

        then(deviceActionSenderMock).should(inOrder, times(1))
            .driveTrain(eq(1L), eq(TypeDefines.TRAIN_DIRECTION_STOP), eq(0L), legoColourEq(LegoColour.colours.None.getLegoColour()));

        Intent intentTrainStop = new Intent(TypeDefines.INTENT_RECEIVED_STOPPED);
        intentTrainStop.putExtra(TypeDefines.EXTRA_TRAIN_NUMBER, 1L);
        localBroadcastManager.sendBroadcastSync(intentTrainStop);

        Intent intentMatch = new Intent("errored");
        intentMatch.putExtra(TypeDefines.EXTRA_SEQUENCE_NAME, "Drive Train");
        intentMatch.putExtra(TypeDefines.EXTRA_MESSAGE, "Cancelled");
        then(appBroadcastHandlerMock).should(inOrder, times(1)).sendLocalBroadcast(intentEq(intentMatch));
        then(appBroadcastHandlerMock).should(inOrder, times(1)).getBroadcastManager();
        inOrder.verifyNoMoreInteractions();
    }

    /**
     * With dontWait=true, run() broadcasts "completed" immediately after issuing the drive
     * command — no broadcast receiver round-trip (hence no getBroadcastManager verification).
     */
    @Test
    public void testRunWithDontWait(){
        SequenceItemDriveTrain item = new SequenceItemDriveTrain(1, TypeDefines.TRAIN_DIRECTION_FORWARDS, 150, LegoColour.colours.Lego_Bright_Red.getLegoColour(),true);

        DeviceActionSender deviceActionSenderMock = mock(DeviceActionSender.class);
        AppBroadcastHandler appBroadcastHandlerMock = mock(AppBroadcastHandler.class);
        AppBroadcastHandler.setInstance(appBroadcastHandlerMock);
        LocalBroadcastManager localBroadcastManager = LocalBroadcastManager.getInstance(InstrumentationRegistry.getTargetContext());
        InOrder inOrder = Mockito.inOrder(deviceActionSenderMock, appBroadcastHandlerMock);
        given(appBroadcastHandlerMock.getBroadcastManager()).willReturn(localBroadcastManager);

        Intent intentC = new Intent("completed");
        Intent intentE = new Intent("errored");

        item.run( InstrumentationRegistry.getTargetContext(), deviceActionSenderMock, appBroadcastHandlerMock, intentC, intentE);

//        then(appBroadcastHandlerMock).should(inOrder, times(1)).getBroadcastManager();
        //then(localBroadcastManagerMock).should(inOrder,times(1)).registerReceiver(eq(any(BroadcastReceiver.class)),eq(any(IntentFilter.class)));
        //noinspection WrongConstant
        then(deviceActionSenderMock).should(inOrder, times(1))
            .driveTrain(eq(1L), eq(TypeDefines.TRAIN_DIRECTION_FORWARDS), eq(150L), legoColourEq(LegoColour.colours.Lego_Bright_Red.getLegoColour()));

//        Intent intentTrainStop = new Intent(TypeDefines.INTENT_RECEIVED_STOPPED);
//        intentTrainStop.putExtra(TypeDefines.EXTRA_TRAIN_NUMBER, 1L);
//        localBroadcastManager.sendBroadcastSync(intentTrainStop);

        Intent intentMatch = new Intent("completed");
        intentMatch.putExtra(TypeDefines.EXTRA_SEQUENCE_NAME, "Drive Train");
        then(appBroadcastHandlerMock).should(inOrder, times(1)).sendLocalBroadcast(intentEq(intentMatch));
//        then(appBroadcastHandlerMock).should(inOrder, times(1)).getBroadcastManager();
        inOrder.verifyNoMoreInteractions();
    }

    /**
     * Constructing from a Firebase property map restores key, train number, direction
     * (from its string form), colour string, speed, dontWait flag, and sequence order.
     */
    @Test
    public void testFromFirebaseProperties(){
        Map<String, Object> properties =new HashMap<>();
        properties.put(FirebaseProperties.TRAIN_NUMBER,2L);
        properties.put(FirebaseProperties.DRIVE_DIRECTION,"Backwards");
        properties.put(FirebaseProperties.COLOUR,"Lego_Bright_Blue");
        properties.put(FirebaseProperties.SPEED,150L);
        properties.put(FirebaseProperties.DONT_WAIT,false);
        properties.put(FirebaseProperties.SEQUENCE_ORDER,10L);

        SequenceItemDriveTrain item = new SequenceItemDriveTrain("a-key",properties);
        assertEquals("a-key",item.getKey());
        assertEquals(2L,item.getTrainNumber());
        assertEquals(TypeDefines.TRAIN_DIRECTION_BACKWARDS, item.getDriveDirection());
        assertEquals("Lego_Bright_Blue",item.getDriveToColourString());
        assertEquals(150L,item.getSpeed());
        assertEquals(false,item.isDontWait());
        assertEquals(10L,item.getSequenceOrder());
    }

    /**
     * write() navigates DATABASE_REFERENCE_ITEMS → item key and stores the item itself
     * via setValue, in exactly that order.
     */
    @Test
    public void testFirebaseWrite(){
        DatabaseReference mockDatabaseReference = mock(DatabaseReference.class);
        DatabaseReference mockChildDatabaseReference1 = mock(DatabaseReference.class);
        DatabaseReference mockChildDatabaseReference2 = mock(DatabaseReference.class);
        doReturn(mockChildDatabaseReference1).when(mockDatabaseReference).child(eq(Sequence.DATABASE_REFERENCE_ITEMS));
        doReturn(mockChildDatabaseReference2).when(mockChildDatabaseReference1).child(eq("a-key"));

        SequenceItemDriveTrain item = new SequenceItemDriveTrain(1, TypeDefines.TRAIN_DIRECTION_FORWARDS, 150, LegoColour.colours.Lego_Bright_Red.getLegoColour(),true);
        item.setKey("a-key");

        InOrder inOrder = Mockito.inOrder(mockDatabaseReference, mockChildDatabaseReference1,mockChildDatabaseReference2);

        item.write(mockDatabaseReference);

        then(mockDatabaseReference).should(inOrder,times(1)).child(eq(Sequence.DATABASE_REFERENCE_ITEMS));
        then(mockChildDatabaseReference1).should(inOrder,times(1)).child(eq("a-key"));
        then(mockChildDatabaseReference2).should(inOrder,times(1)).setValue(eq(item));
        inOrder.verifyNoMoreInteractions();
    }

    /**
     * Coverage for the simple accessors: view type, class name, icon, colour/direction
     * string round-trips, sequence order, and makeCopy().
     */
    @Test
    public void testSmallMethods(){
        SequenceItemDriveTrain item = new SequenceItemDriveTrain(1, TypeDefines.TRAIN_DIRECTION_FORWARDS, 150, LegoColour.colours.Lego_Bright_Red.getLegoColour(),true);
        assertEquals(SequenceItem.SEQUENCE_VIEW_TYPE_TRAIN_DRIVE,item.getViewType());
        assertEquals(SequenceItem.SEQUENCE_TYPE_DRIVE_TRAIN_CLASS, item.getClassName());
        assertEquals(R.drawable.ic_train_black, item.getIconResId());

        assertEquals("Lego_Bright_Red",item.getDriveToColourString());
        item.setDriveToColourString("Lego_Bright_Blue");
        assertEquals(LegoColour.colours.Lego_Bright_Blue.getLegoColour().getColour(), item.getDriveToColour().getColour());

        item.setDriveDirection(TypeDefines.TRAIN_DIRECTION_SPEED_CHANGE);
        assertEquals("SpeedChange",item.getDriveDirectionString());

        // Each direction string maps to its constant and back.
        item.setDriveDirectionString("Forwards");
        assertEquals(TypeDefines.TRAIN_DIRECTION_FORWARDS,item.getDriveDirection());
        assertEquals("Forwards",item.getDriveDirectionString());

        item.setDriveDirectionString("Backwards");
        assertEquals(TypeDefines.TRAIN_DIRECTION_BACKWARDS,item.getDriveDirection());
        assertEquals("Backwards",item.getDriveDirectionString());

        item.setDriveDirectionString("Stop");
        assertEquals(TypeDefines.TRAIN_DIRECTION_STOP,item.getDriveDirection());
        assertEquals("Stop",item.getDriveDirectionString());

        item.setDriveDirectionString("SpeedChange");
        assertEquals(TypeDefines.TRAIN_DIRECTION_SPEED_CHANGE,item.getDriveDirection());
        assertEquals("SpeedChange",item.getDriveDirectionString());

        assertEquals(0L, item.getSequenceOrder());
        item.setSequenceOrder(1L);
        assertEquals(1L, item.getSequenceOrder());

        SequenceItem copy = item.makeCopy();
        copy.setOnValueChanged(null); // for coverage.
        assertEquals(SequenceItem.SEQUENCE_TYPE_DRIVE_TRAIN_CLASS, copy.getClassName());
        assertEquals(1L, ((SequenceItemDriveTrain)copy).getTrainNumber());
    }
}
/* * Copyright (c) 2001-2007 Sun Microsystems, Inc. All rights reserved. * * The Sun Project JXTA(TM) Software License * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * 3. The end-user documentation included with the redistribution, if any, must * include the following acknowledgment: "This product includes software * developed by Sun Microsystems, Inc. for JXTA(TM) technology." * Alternately, this acknowledgment may appear in the software itself, if * and wherever such third-party acknowledgments normally appear. * * 4. The names "Sun", "Sun Microsystems, Inc.", "JXTA" and "Project JXTA" must * not be used to endorse or promote products derived from this software * without prior written permission. For written permission, please contact * Project JXTA at http://www.jxta.org. * * 5. Products derived from this software may not be called "JXTA", nor may * "JXTA" appear in their name, without prior written permission of Sun. * * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES, * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL SUN * MICROSYSTEMS OR ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, * OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * * JXTA is a registered trademark of Sun Microsystems, Inc. in the United * States and other countries. * * Please see the license information page at : * <http://www.jxta.org/project/www/license.html> for instructions on use of * the license in source files. * * ==================================================================== * * This software consists of voluntary contributions made by many individuals * on behalf of Project JXTA. For more information on Project JXTA, please see * http://www.jxta.org. * * This license is based on the BSD license adopted by the Apache Foundation. 
*/ package net.jxta.util; import net.jxta.document.AdvertisementFactory; import net.jxta.document.MimeMediaType; import net.jxta.document.StructuredDocument; import net.jxta.document.StructuredDocumentFactory; import net.jxta.document.XMLDocument; import net.jxta.endpoint.Message; import net.jxta.endpoint.MessageElement; import net.jxta.endpoint.Messenger; import net.jxta.endpoint.StringMessageElement; import net.jxta.endpoint.TextDocumentMessageElement; import net.jxta.id.IDFactory; import net.jxta.impl.endpoint.tcp.TcpMessenger; import net.jxta.logging.Logging; import net.jxta.peergroup.PeerGroup; import net.jxta.pipe.InputPipe; import net.jxta.pipe.PipeMsgEvent; import net.jxta.pipe.PipeMsgListener; import net.jxta.pipe.PipeService; import net.jxta.protocol.PeerAdvertisement; import net.jxta.protocol.PipeAdvertisement; import java.io.ByteArrayInputStream; import java.io.IOException; import java.net.SocketException; import java.util.logging.Logger; import java.util.Properties; /** * The server side of a JxtaBiDiPipe. The intent of this object is accept connection requests. * JxtaServerPipe follows the same pattern as java.net.ServerSocket, without it no connection can be * established. 
*/ public class JxtaServerPipe implements PipeMsgListener { private static final Logger LOG = Logger.getLogger(JxtaServerPipe.class.getName()); protected static final String nameSpace = "JXTABIP"; protected static final String credTag = "Cred"; protected static final String reqPipeTag = "reqPipe"; protected static final String remPeerTag = "remPeer"; protected static final String remPipeTag = "remPipe"; protected static final String closeTag = "close"; protected static final String reliableTag = "reliable"; protected static final String directSupportedTag = "direct"; protected static final String connectionPropertiesTag = "connectionproperties"; public static final int DEFAULT_TIMEOUT = 30 * 1000; // 30 seconds public static final int DEFAULT_BACKLOG = 50; private PeerGroup group; private InputPipe serverPipe; private PipeAdvertisement pipeadv; private final Object closeLock = new Object(); private boolean bound = false; private boolean closed = false; protected StructuredDocument myCredentialDoc = null; private volatile ServerPipeAcceptListener listener; private final QueuingServerPipeAcceptor defaultListener; /** * Default constructor for the JxtaServerPipe * <p/> * backlog is set to {@link #DEFAULT_BACKLOG}. * accept timeout is set to {@link #DEFAULT_TIMEOUT}. * <p> call to accept() for this ServerPipe will * block for only this amount of time. If the timeout expires, * a java.net.SocketTimeoutException is raised, though the ServerPipe is still valid. * <p/> * * @param group JXTA PeerGroup * @param pipeadv PipeAdvertisement on which pipe requests are accepted * @throws IOException if an I/O error occurs */ public JxtaServerPipe(PeerGroup group, PipeAdvertisement pipeadv) throws IOException { this(group, pipeadv, DEFAULT_BACKLOG, DEFAULT_TIMEOUT); } /** * Constructor for the JxtaServerPipe object * * @param group JXTA PeerGroup * @param pipeadv PipeAdvertisement on which pipe requests are accepted * @param backlog the maximum length of the queue. 
* @throws IOException if an I/O error occurs */ public JxtaServerPipe(PeerGroup group, PipeAdvertisement pipeadv, int backlog) throws IOException { this(group, pipeadv, backlog, DEFAULT_TIMEOUT); } /** * Constructor for the JxtaServerPipe * * @param group JXTA PeerGroup * @param pipeadv PipeAdvertisement on which pipe requests are accepted * @param backlog the maximum length of the queue. * @param timeout call to accept() for this ServerPipe will * block for only this amount of time. If the timeout expires, * a java.net.SocketTimeoutException is raised, though the ServerPipe is still valid. * @throws IOException if an I/O error occurs */ public JxtaServerPipe(PeerGroup group, PipeAdvertisement pipeadv, int backlog, int timeout) throws IOException { this.defaultListener = new QueuingServerPipeAcceptor(backlog, timeout); this.listener = defaultListener; bind(group, pipeadv); } /** * Creates a server pipe for the specified group, configured using the properties of the specified pipe * advertisement. Additionally, accepting incoming pipe connections will be sent asynchronously to the * provided {@link ServerPipeAcceptListener}. This form of the constructor is intended for those clients * who wish to immediately handle incoming connections rather than using the blocking {@link #accept()} method * synchronously. Please note that if the object is constructed this way, the accept method will no longer * function, and will instead immediately return null. * * @param group JXTA PeerGroup * @param pipeadv PipeAdvertisement on which pipe requests are accepted * @param listener the listener to which all incoming connections will be sent immediately and asynchronously. * @throws IOException if an I/O error occurs * @throws IllegalArgumentException if the listener is null */ public JxtaServerPipe(PeerGroup group, PipeAdvertisement pipeadv, ServerPipeAcceptListener listener) throws IOException { // we still set a valid default listener even though it is not used. 
This is used later as a quick // way of checking whether we are using the default or not. if(listener == null) { throw new IllegalArgumentException("listener must not be null"); } this.defaultListener = new QueuingServerPipeAcceptor(1, 0); this.listener = listener; bind(group, pipeadv); } /** * Binds the <code>JxtaServerPipe</code> to a specific pipe advertisement * * @param group JXTA PeerGroup * @param pipeadv PipeAdvertisement on which pipe requests are accepted * @throws IOException if an I/O error occurs */ public void bind(PeerGroup group, PipeAdvertisement pipeadv) throws IOException { this.group = group; this.pipeadv = pipeadv; PipeService pipeSvc = group.getPipeService(); serverPipe = pipeSvc.createInputPipe(pipeadv, this); setBound(); } /** * Binds the <code>JxtaServerPipe</code> to a specific pipe advertisement * * @param group JXTA PeerGroup * @param pipeadv PipeAdvertisement on which pipe requests are accepted * @param backlog the maximum length of the queue. * @throws IOException if an I/O error occurs * @deprecated as of version 2.7, backlog must be specified to the constructor of the server pipe * only. */ @Deprecated public void bind(PeerGroup group, PipeAdvertisement pipeadv, int backlog) throws IOException { bind(group, pipeadv); } /** * Listens for a connection to be made to this socket and accepts * it. The method blocks until a connection is made. 
* * @return the connection accepted, null otherwise * @throws IOException if an I/O error occurs */ public JxtaBiDiPipe accept() throws IOException { checkNotClosed(); checkBound(); if(usingBlockingAccept()) { return defaultListener.acceptBackwardsCompatible(); } else { throw new IllegalStateException("cannot call accept() if a custom ServerPipeAcceptListener is in use"); } } public boolean usingBlockingAccept() { return listener == defaultListener; } /** * Gets the group associated with this JxtaServerPipe * * @return The group value */ public PeerGroup getGroup() { return group; } /** * Gets the PipeAdvertisement associated with this JxtaServerPipe * * @return The pipeAdv value */ public PipeAdvertisement getPipeAdv() { return pipeadv; } /** * Closes this JxtaServerPipe (closes the underlying input pipe). * * @throws IOException if an I/O error occurs */ public void close() throws IOException { synchronized (closeLock) { if (isClosed()) { return; } if (bound) { // close all the pipe serverPipe.close(); listener.serverPipeClosed(); bound = false; } closed = true; } } /** * Sets the bound attribute of the JxtaServerPipe */ void setBound() { bound = true; } private void checkNotClosed() throws SocketException { if (isClosed()) { throw new SocketException("Server Pipe is closed"); } } private void checkBound() throws SocketException { if (!isBound()) { throw new SocketException("JxtaServerPipe is not bound yet"); } } /** * Gets the Timeout attribute of the JxtaServerPipe. * * @return The timeout value in use for the {@link #accept()} method. * @throws IOException if an I/O error occurs. * @throws IllegalStateException if a custom {@link ServerPipeAcceptListener} is in use. 
*/ public synchronized int getPipeTimeout() throws IOException { checkNotClosed(); if(usingBlockingAccept()) { return defaultListener.getTimeoutBackwardsCompatible(); } else { throw new IllegalStateException("Custom ServerPipeAcceptListener is in use, timeout does not apply"); } } /** * Sets the Timeout attribute of the JxtaServerPipe. A timeout of 0 blocks forever, and * a timeout value less than zero is illegal. * * @throws SocketException if an I/O error occurs * @throws IllegalStateException if a custom {@link ServerPipeAcceptListener} is in use. */ public synchronized void setPipeTimeout(int timeout) throws SocketException { checkNotClosed(); if(usingBlockingAccept()) { defaultListener.setTimeoutBackwardsCompatible(timeout); } else { throw new IllegalStateException("Custom ServerPipeAcceptListener is in use, timeout does not apply"); } } /** * Returns the closed state of the JxtaServerPipe. * * @return true if the socket has been closed */ public boolean isClosed() { synchronized (closeLock) { return closed; } } /** * Returns the binding state of the JxtaServerPipe. * * @return true if the ServerSocket successfully bound to an address */ public boolean isBound() { return bound; } /** * {@inheritDoc} */ public void pipeMsgEvent(PipeMsgEvent event) { Message message = event.getMessage(); if (message == null) { return; } JxtaBiDiPipe bidi = processMessage(message); // make sure we have a socket returning if (bidi == null) { return; } listener.pipeAccepted(bidi); } /** * Method processMessage is the heart of this class. * <p/> * This takes new incoming connect messages and constructs the JxtaBiDiPipe * to talk to the new client. * <p/> * The ResponseMessage is created and sent. * * @param msg The client connection request (assumed not null) * @return JxtaBiDiPipe Which may be null if an error occurs. 
*/ private JxtaBiDiPipe processMessage(Message msg) { PipeAdvertisement outputPipeAdv = null; PeerAdvertisement peerAdv = null; StructuredDocument credDoc = null; Properties connectionProperties = null; try { MessageElement el = msg.getMessageElement(nameSpace, credTag); if (el != null) { credDoc = StructuredDocumentFactory.newStructuredDocument(el); } el = msg.getMessageElement(nameSpace, reqPipeTag); if (el != null) { XMLDocument asDoc = (XMLDocument) StructuredDocumentFactory.newStructuredDocument(el); outputPipeAdv = (PipeAdvertisement) AdvertisementFactory.newAdvertisement(asDoc); } el = msg.getMessageElement(nameSpace, remPeerTag); if (el != null) { XMLDocument asDoc = (XMLDocument) StructuredDocumentFactory.newStructuredDocument(el); peerAdv = (PeerAdvertisement) AdvertisementFactory.newAdvertisement(asDoc); } el = msg.getMessageElement(nameSpace, reliableTag); boolean isReliable = false; if (el != null) { isReliable = Boolean.valueOf((el.toString())); Logging.logCheckedFine(LOG, "Connection request [isReliable] :", isReliable); } el = msg.getMessageElement(nameSpace, directSupportedTag); boolean directSupported = false; if (el != null) { directSupported = Boolean.valueOf((el.toString())); Logging.logCheckedFine(LOG, "Connection request [directSupported] :", directSupported); } el = msg.getMessageElement(nameSpace, connectionPropertiesTag); byte[] connectionPropertiesBytes = null; if (el != null) { connectionPropertiesBytes = el.getBytes(false); Logging.logCheckedFine(LOG, "Connection request [connectionPropertiesBytes] :", connectionPropertiesBytes); if (connectionPropertiesBytes != null) connectionProperties = bytesToProperties(connectionPropertiesBytes); } Messenger msgr; boolean direct = false; // if (directSupported) { // msgr = JxtaBiDiPipe.getDirectMessenger(group, outputPipeAdv, peerAdv); // if (msgr == null) { // msgr = JxtaBiDiPipe.lightweightOutputPipe(group, outputPipeAdv, peerAdv); // } else { // direct = true; // } // } else { msgr = 
JxtaBiDiPipe.lightweightOutputPipe(group, outputPipeAdv, peerAdv); // } if (msgr != null) { Logging.logCheckedFine(LOG, "Reliability set to :", isReliable); PipeAdvertisement newpipe = newInputPipe(group, outputPipeAdv); JxtaBiDiPipe pipe = null; if (connectionProperties != null) { pipe = new JxtaBiDiPipe(group, msgr, newpipe, credDoc, isReliable, direct, connectionProperties); } else { pipe = new JxtaBiDiPipe(group, msgr, newpipe, credDoc, isReliable, direct); } pipe.setRemotePeerAdvertisement(peerAdv); pipe.setRemotePipeAdvertisement(outputPipeAdv); sendResponseMessage(group, msgr, newpipe); return pipe; } } catch (IOException e) { // deal with the error Logging.logCheckedFine(LOG, "IOException occured\n", e); } return null; } private Properties bytesToProperties(byte[] propsBytes) { Properties properties = new Properties(); ByteArrayInputStream bis = new ByteArrayInputStream(propsBytes); try { properties.load(bis); } catch (IOException e) { } return properties; } /** * Method sendResponseMessage get the createResponseMessage and sends it. * * @param group the peer group * @param msgr the remote node messenger * @param pipeAd the pipe advertisement * @throws IOException for failures sending the response message. 
*/ protected void sendResponseMessage(PeerGroup group, Messenger msgr, PipeAdvertisement pipeAd) throws IOException { Message msg = new Message(); PeerAdvertisement peerAdv = group.getPeerAdvertisement(); if (myCredentialDoc == null) { myCredentialDoc = JxtaBiDiPipe.getCredDoc(group); } if (myCredentialDoc != null) { msg.addMessageElement(JxtaServerPipe.nameSpace, new TextDocumentMessageElement(credTag, (XMLDocument) myCredentialDoc, null)); } final String neverAllowDirectBreaksRelay = Boolean.toString(false); msg.addMessageElement(JxtaServerPipe.nameSpace, new StringMessageElement(JxtaServerPipe.directSupportedTag, neverAllowDirectBreaksRelay, null)); msg.addMessageElement(JxtaServerPipe.nameSpace, new TextDocumentMessageElement(remPipeTag, (XMLDocument) pipeAd.getDocument(MimeMediaType.XMLUTF8), null)); msg.addMessageElement(nameSpace, new TextDocumentMessageElement(remPeerTag, (XMLDocument) peerAdv.getSignedDocument(), null)); if (msgr instanceof TcpMessenger) { ((TcpMessenger) msgr).sendMessageDirect(msg, null, null, true); } else { msgr.sendMessage(msg); } } /** * Utility method newInputPipe is used to get new pipe advertisement (w/random pipe ID) from old one. 
* <p/> * Called by JxtaSocket to make pipe (name -> name.remote) for open message * <p/> * Called by JxtaServerSocket to make pipe (name.remote -> name.remote.remote) for response message * * @param group the peer group * @param pipeadv to get the basename and type from * @return PipeAdvertisement a new pipe advertisement */ protected static PipeAdvertisement newInputPipe(PeerGroup group, PipeAdvertisement pipeadv) { PipeAdvertisement adv = (PipeAdvertisement) AdvertisementFactory.newAdvertisement(PipeAdvertisement.getAdvertisementType()); adv.setPipeID(IDFactory.newPipeID(group.getPeerGroupID())); adv.setName(pipeadv.getName()); adv.setType(pipeadv.getType()); return adv; } /** * get the credential doc * * @return Credential StructuredDocument */ public StructuredDocument getCredentialDoc() { return myCredentialDoc; } /** * Sets the connection credential doc * If no credentials are set, the default group credential will be used * * @param doc Credential StructuredDocument */ public void setCredentialDoc(StructuredDocument doc) { this.myCredentialDoc = doc; } /** * {@inheritDoc} * <p/> * Closes the JxtaServerPipe. */ @Override protected void finalize() throws Throwable { try { if (!closed) { Logging.logCheckedWarning(LOG, "JxtaServerPipe is being finalized without being previously closed. This is likely a user's bug."); } close(); } finally { super.finalize(); } } }
/*
 * Copyright 2012 Shared Learning Collaborative, LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.slc.sli.bulk.extract.lea;

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.joda.time.DateTime;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

import org.slc.sli.bulk.extract.extractor.EntityExtractor;
import org.slc.sli.bulk.extract.files.ExtractFile;
import org.slc.sli.bulk.extract.util.EdOrgExtractHelper;
import org.slc.sli.common.constants.EntityNames;
import org.slc.sli.common.constants.ParameterConstants;
import org.slc.sli.common.util.datetime.DateHelper;
import org.slc.sli.domain.Entity;
import org.slc.sli.domain.NeutralQuery;
import org.slc.sli.domain.Repository;

/**
 * Unit tests for {@link StaffEdorgAssignmentExtractor}: verifies that staff/ed-org
 * association entities are extracted once per occurrence, and skipped when the
 * association has expired or when no extract file exists for the target LEA.
 */
public class StaffEdorgAssignmentExtractorTest {

    private StaffEdorgAssignmentExtractor extractor;

    @Mock
    private EntityExtractor mockExtractor;
    @Mock
    private ExtractFileMap mockMap;
    @Mock
    private Repository<Entity> mockRepo;
    @Mock
    private Entity mockEntity;
    @Mock
    private ExtractFile mockFile;
    @Mock
    private EdOrgExtractHelper mockEdOrgExtractHelper;
    @Mock
    private DateHelper mockDateHelper;

    private ExtractorHelper extractorHelper;
    private Map<String, Object> entityBody;
    private EntityToEdOrgCache edorgToLeaCache;

    @Before
    public void setUp() {
        MockitoAnnotations.initMocks(this);
        entityBody = new HashMap<String, Object>();
        extractorHelper = new ExtractorHelper(mockEdOrgExtractHelper);
        extractorHelper.setDateHelper(mockDateHelper);
        Mockito.when(mockEntity.getBody()).thenReturn(entityBody);
        Mockito.when(mockEntity.getType()).thenReturn(EntityNames.STAFF_ED_ORG_ASSOCIATION);
        edorgToLeaCache = new EntityToEdOrgCache();
        edorgToLeaCache.addEntry("LEA", "School1");
        edorgToLeaCache.addEntry("LEA", "School2");
    }

    @After
    public void tearDown() {
    }

    /** Stubs the ed-org lineage map: "School1" rolls up to "LEA", "LEA" is its own lineage. */
    private void stubEdOrgLineages() {
        Map<String, List<String>> edOrgLineages = new HashMap<String, List<String>>();
        edOrgLineages.put("School1", Arrays.asList("LEA", "School1"));
        edOrgLineages.put("LEA", Arrays.asList("LEA"));
        Mockito.when(mockEdOrgExtractHelper.getEdOrgLineages()).thenReturn(edOrgLineages);
    }

    /** Populates the shared entity body and stubs the date helper to return the given begin date. */
    private void stubDateAndBody(String staffReference, String beginDate) {
        Mockito.when(mockDateHelper.getDate(Mockito.eq(entityBody), Mockito.anyString()))
                .thenReturn(DateTime.parse(beginDate, DateHelper.getDateTimeFormat()));
        entityBody.put(ParameterConstants.STAFF_REFERENCE, staffReference);
        entityBody.put(ParameterConstants.EDUCATION_ORGANIZATION_REFERENCE, "School1");
        entityBody.put(ParameterConstants.BEGIN_DATE, beginDate);
    }

    /** Builds the extractor under test as a spy whose shouldExtract() is forced to the given value. */
    private void buildSpiedExtractor(boolean shouldExtract) {
        StaffEdorgAssignmentExtractor realExtractor = new StaffEdorgAssignmentExtractor(mockExtractor, mockMap,
                mockRepo, extractorHelper, mockEdOrgExtractHelper);
        extractor = Mockito.spy(realExtractor);
        Mockito.doReturn(shouldExtract).when(extractor).shouldExtract(Mockito.eq(mockEntity),
                Mockito.any(DateTime.class));
    }

    @Test
    public void testExtractOneEntity() {
        // Answer returns a fresh iterator on each call, so the stub is re-usable.
        Mockito.when(mockRepo.findEach(Mockito.eq(EntityNames.STAFF_ED_ORG_ASSOCIATION),
                Mockito.eq(new NeutralQuery()))).thenAnswer(new Answer<Iterator<Entity>>() {
            @Override
            public Iterator<Entity> answer(InvocationOnMock invocation) throws Throwable {
                return Arrays.asList(mockEntity).listIterator(0);
            }
        });
        Mockito.when(mockMap.getExtractFileForEdOrg("LEA")).thenReturn(mockFile);
        stubEdOrgLineages();
        stubDateAndBody("PHarbor", "1941-12-07");
        buildSpiedExtractor(true);

        extractor.extractEntities(edorgToLeaCache);

        Mockito.verify(mockExtractor).extractEntity(Mockito.eq(mockEntity), Mockito.eq(mockFile),
                Mockito.eq(EntityNames.STAFF_ED_ORG_ASSOCIATION));
    }

    @Test
    public void testExtractManyEntity() {
        Mockito.when(mockRepo.findEach(Mockito.eq(EntityNames.STAFF_ED_ORG_ASSOCIATION),
                Mockito.eq(new NeutralQuery())))
                .thenAnswer(new Answer<Iterator<Entity>>() {
                    @Override
                    public Iterator<Entity> answer(InvocationOnMock invocation) throws Throwable {
                        return Arrays.asList(mockEntity, mockEntity).listIterator(0);
                    }
                });
        Mockito.when(mockMap.getExtractFileForEdOrg("LEA")).thenReturn(mockFile);
        stubEdOrgLineages();
        stubDateAndBody("TJefferson", "1776-07-04");
        buildSpiedExtractor(true);

        extractor.extractEntities(edorgToLeaCache);

        // The same entity appears twice in the repo, so it must be extracted twice.
        Mockito.verify(mockExtractor, Mockito.times(2)).extractEntity(Mockito.eq(mockEntity), Mockito.eq(mockFile),
                Mockito.eq(EntityNames.STAFF_ED_ORG_ASSOCIATION));
    }

    @Test
    public void testExtractNoEntityBecauseOfExpiration() {
        Mockito.when(mockRepo.findEach(Mockito.eq(EntityNames.STAFF_ED_ORG_ASSOCIATION),
                Mockito.eq(new NeutralQuery())))
                .thenReturn(Arrays.asList(mockEntity, mockEntity).iterator());
        Mockito.when(mockMap.getExtractFileForEdOrg("LEA")).thenReturn(mockFile);
        stubEdOrgLineages();
        stubDateAndBody("ACapone", "1927-02-14");
        // shouldExtract == false simulates an expired association.
        buildSpiedExtractor(false);

        extractor.extractEntities(edorgToLeaCache);

        Mockito.verify(mockExtractor, Mockito.never()).extractEntity(Mockito.eq(mockEntity), Mockito.eq(mockFile),
                Mockito.eq(EntityNames.STAFF_ED_ORG_ASSOCIATION));
    }

    @Test
    public void testExtractNoEntityBecauseOfLEAMiss() {
        Mockito.when(mockRepo.findEach(Mockito.eq(EntityNames.STAFF_ED_ORG_ASSOCIATION),
                Mockito.eq(new NeutralQuery())))
                .thenReturn(Arrays.asList(mockEntity, mockEntity).iterator());
        // No extract file for the LEA: nothing should be extracted.
        Mockito.when(mockMap.getExtractFileForEdOrg("LEA")).thenReturn(null);
        stubEdOrgLineages();
        stubDateAndBody("TEdison", "1847-02-11");
        buildSpiedExtractor(true);

        extractor.extractEntities(edorgToLeaCache);

        Mockito.verify(mockExtractor, Mockito.never()).extractEntity(Mockito.eq(mockEntity), Mockito.eq(mockFile),
                Mockito.eq(EntityNames.STAFF_ED_ORG_ASSOCIATION));
    }
}
package org.apache.lucene.document; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.IOException; import java.io.Reader; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.NumericTokenStream; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; import org.apache.lucene.document.FieldType.NumericType; import org.apache.lucene.index.IndexWriter; // javadocs import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableFieldType; import org.apache.lucene.util.BytesRef; import org.apache.lucene.index.FieldInvertState; // javadocs /** * Expert: directly create a field for a document. Most * users should use one of the sugar subclasses: {@link * IntField}, {@link LongField}, {@link FloatField}, {@link * DoubleField}, {@link BinaryDocValuesField}, {@link * NumericDocValuesField}, {@link SortedDocValuesField}, {@link * StringField}, {@link TextField}, {@link StoredField}. * * <p/> A field is a section of a Document. Each field has three * parts: name, type and value. 
Values may be text * (String, Reader or pre-analyzed TokenStream), binary * (byte[]), or numeric (a Number). Fields are optionally stored in the * index, so that they may be returned with hits on the document. * * <p/> * NOTE: the field type is an {@link IndexableFieldType}. Making changes * to the state of the IndexableFieldType will impact any * Field it is used in. It is strongly recommended that no * changes be made after Field instantiation. */ public class Field implements IndexableField { /** * Field's type */ protected final FieldType type; /** * Field's name */ protected final String name; /** Field's value */ protected Object fieldsData; /** Pre-analyzed tokenStream for indexed fields; this is * separate from fieldsData because you are allowed to * have both; eg maybe field has a String value but you * customize how it's tokenized */ protected TokenStream tokenStream; /** * Field's boost * @see #boost() */ protected float boost = 1.0f; /** * Expert: creates a field with no initial value. * Intended only for custom Field subclasses. * @param name field name * @param type field type * @throws IllegalArgumentException if either the name or type * is null. */ protected Field(String name, FieldType type) { if (name == null) { throw new IllegalArgumentException("name cannot be null"); } this.name = name; if (type == null) { throw new IllegalArgumentException("type cannot be null"); } this.type = type; } /** * Create field with Reader value. * @param name field name * @param reader reader value * @param type field type * @throws IllegalArgumentException if either the name or type * is null, or if the field's type is stored(), or * if tokenized() is false. 
* @throws NullPointerException if the reader is null */ public Field(String name, Reader reader, FieldType type) { if (name == null) { throw new IllegalArgumentException("name cannot be null"); } if (type == null) { throw new IllegalArgumentException("type cannot be null"); } if (reader == null) { throw new NullPointerException("reader cannot be null"); } if (type.stored()) { throw new IllegalArgumentException("fields with a Reader value cannot be stored"); } if (type.indexed() && !type.tokenized()) { throw new IllegalArgumentException("non-tokenized fields must use String values"); } this.name = name; this.fieldsData = reader; this.type = type; } /** * Create field with TokenStream value. * @param name field name * @param tokenStream TokenStream value * @param type field type * @throws IllegalArgumentException if either the name or type * is null, or if the field's type is stored(), or * if tokenized() is false, or if indexed() is false. * @throws NullPointerException if the tokenStream is null */ public Field(String name, TokenStream tokenStream, FieldType type) { if (name == null) { throw new IllegalArgumentException("name cannot be null"); } if (tokenStream == null) { throw new NullPointerException("tokenStream cannot be null"); } if (!type.indexed() || !type.tokenized()) { throw new IllegalArgumentException("TokenStream fields must be indexed and tokenized"); } if (type.stored()) { throw new IllegalArgumentException("TokenStream fields cannot be stored"); } this.name = name; this.fieldsData = null; this.tokenStream = tokenStream; this.type = type; } /** * Create field with binary value. * * <p>NOTE: the provided byte[] is not copied so be sure * not to change it until you're done with this field. 
* @param name field name * @param value byte array pointing to binary content (not copied) * @param type field type * @throws IllegalArgumentException if the field name is null, * or the field's type is indexed() * @throws NullPointerException if the type is null */ public Field(String name, byte[] value, FieldType type) { this(name, value, 0, value.length, type); } /** * Create field with binary value. * * <p>NOTE: the provided byte[] is not copied so be sure * not to change it until you're done with this field. * @param name field name * @param value byte array pointing to binary content (not copied) * @param offset starting position of the byte array * @param length valid length of the byte array * @param type field type * @throws IllegalArgumentException if the field name is null, * or the field's type is indexed() * @throws NullPointerException if the type is null */ public Field(String name, byte[] value, int offset, int length, FieldType type) { this(name, new BytesRef(value, offset, length), type); } /** * Create field with binary value. * * <p>NOTE: the provided BytesRef is not copied so be sure * not to change it until you're done with this field. * @param name field name * @param bytes BytesRef pointing to binary content (not copied) * @param type field type * @throws IllegalArgumentException if the field name is null, * or the field's type is indexed() * @throws NullPointerException if the type is null */ public Field(String name, BytesRef bytes, FieldType type) { if (name == null) { throw new IllegalArgumentException("name cannot be null"); } if (bytes == null) { throw new IllegalArgumentException("bytes cannot be null"); } if (type.indexed()) { throw new IllegalArgumentException("Fields with BytesRef values cannot be indexed"); } this.fieldsData = bytes; this.type = type; this.name = name; } // TODO: allow direct construction of int, long, float, double value too..? /** * Create field with String value. 
* @param name field name * @param value string value * @param type field type * @throws IllegalArgumentException if either the name or value * is null, or if the field's type is neither indexed() nor stored(), * or if indexed() is false but storeTermVectors() is true. * @throws NullPointerException if the type is null */ public Field(String name, String value, FieldType type) { if (name == null) { throw new IllegalArgumentException("name cannot be null"); } if (value == null) { throw new IllegalArgumentException("value cannot be null"); } if (!type.stored() && !type.indexed()) { throw new IllegalArgumentException("it doesn't make sense to have a field that " + "is neither indexed nor stored"); } this.type = type; this.name = name; this.fieldsData = value; } /** * The value of the field as a String, or null. If null, the Reader value or * binary value is used. Exactly one of stringValue(), readerValue(), and * getBinaryValue() must be set. */ @Override public String stringValue() { if (fieldsData instanceof String || fieldsData instanceof Number) { return fieldsData.toString(); } else { return null; } } /** * The value of the field as a Reader, or null. If null, the String value or * binary value is used. Exactly one of stringValue(), readerValue(), and * getBinaryValue() must be set. */ @Override public Reader readerValue() { return fieldsData instanceof Reader ? (Reader) fieldsData : null; } /** * The TokenStream for this field to be used when indexing, or null. If null, * the Reader value or String value is analyzed to produce the indexed tokens. */ public TokenStream tokenStreamValue() { return tokenStream; } /** * <p> * Expert: change the value of this field. This can be used during indexing to * re-use a single Field instance to improve indexing speed by avoiding GC * cost of new'ing and reclaiming Field instances. Typically a single * {@link Document} instance is re-used as well. This helps most on small * documents. 
* </p> * * <p> * Each Field instance should only be used once within a single * {@link Document} instance. See <a * href="http://wiki.apache.org/lucene-java/ImproveIndexingSpeed" * >ImproveIndexingSpeed</a> for details. * </p> */ public void setStringValue(String value) { if (!(fieldsData instanceof String)) { throw new IllegalArgumentException("cannot change value type from " + fieldsData.getClass().getSimpleName() + " to String"); } if (value == null) { throw new IllegalArgumentException("value cannot be null"); } fieldsData = value; } /** * Expert: change the value of this field. See * {@link #setStringValue(String)}. */ public void setReaderValue(Reader value) { if (!(fieldsData instanceof Reader)) { throw new IllegalArgumentException("cannot change value type from " + fieldsData.getClass().getSimpleName() + " to Reader"); } fieldsData = value; } /** * Expert: change the value of this field. See * {@link #setStringValue(String)}. */ public void setBytesValue(byte[] value) { setBytesValue(new BytesRef(value)); } /** * Expert: change the value of this field. See * {@link #setStringValue(String)}. * * <p>NOTE: the provided BytesRef is not copied so be sure * not to change it until you're done with this field. */ public void setBytesValue(BytesRef value) { if (!(fieldsData instanceof BytesRef)) { throw new IllegalArgumentException("cannot change value type from " + fieldsData.getClass().getSimpleName() + " to BytesRef"); } if (type.indexed()) { throw new IllegalArgumentException("cannot set a BytesRef value on an indexed field"); } if (value == null) { throw new IllegalArgumentException("value cannot be null"); } fieldsData = value; } /** * Expert: change the value of this field. See * {@link #setStringValue(String)}. 
 */
public void setByteValue(byte value) {
  if (!(fieldsData instanceof Byte)) {
    throw new IllegalArgumentException("cannot change value type from " + fieldsData.getClass().getSimpleName() + " to Byte");
  }
  fieldsData = Byte.valueOf(value);
}

/**
 * Expert: change the value of this field. See
 * {@link #setStringValue(String)}.
 */
public void setShortValue(short value) {
  if (!(fieldsData instanceof Short)) {
    throw new IllegalArgumentException("cannot change value type from " + fieldsData.getClass().getSimpleName() + " to Short");
  }
  fieldsData = Short.valueOf(value);
}

/**
 * Expert: change the value of this field. See
 * {@link #setStringValue(String)}.
 */
public void setIntValue(int value) {
  if (!(fieldsData instanceof Integer)) {
    throw new IllegalArgumentException("cannot change value type from " + fieldsData.getClass().getSimpleName() + " to Integer");
  }
  fieldsData = Integer.valueOf(value);
}

/**
 * Expert: change the value of this field. See
 * {@link #setStringValue(String)}.
 */
public void setLongValue(long value) {
  if (!(fieldsData instanceof Long)) {
    throw new IllegalArgumentException("cannot change value type from " + fieldsData.getClass().getSimpleName() + " to Long");
  }
  fieldsData = Long.valueOf(value);
}

/**
 * Expert: change the value of this field. See
 * {@link #setStringValue(String)}.
 */
public void setFloatValue(float value) {
  if (!(fieldsData instanceof Float)) {
    throw new IllegalArgumentException("cannot change value type from " + fieldsData.getClass().getSimpleName() + " to Float");
  }
  fieldsData = Float.valueOf(value);
}

/**
 * Expert: change the value of this field. See
 * {@link #setStringValue(String)}.
 */
public void setDoubleValue(double value) {
  if (!(fieldsData instanceof Double)) {
    throw new IllegalArgumentException("cannot change value type from " + fieldsData.getClass().getSimpleName() + " to Double");
  }
  fieldsData = Double.valueOf(value);
}

/**
 * Expert: sets the token stream to be used for indexing and causes
 * isIndexed() and isTokenized() to return true. May be combined with stored
 * values from stringValue() or getBinaryValue()
 */
public void setTokenStream(TokenStream tokenStream) {
  if (!type.indexed() || !type.tokenized()) {
    throw new IllegalArgumentException("TokenStream fields must be indexed and tokenized");
  }
  if (type.numericType() != null) {
    // numeric fields synthesize their own NumericTokenStream; a custom one
    // would be silently ignored, so reject it up front
    throw new IllegalArgumentException("cannot set private TokenStream on numeric fields");
  }
  this.tokenStream = tokenStream;
}

@Override
public String name() {
  return name;
}

/**
 * {@inheritDoc}
 * <p>
 * The default value is <code>1.0f</code> (no boost).
 * @see #setBoost(float)
 */
@Override
public float boost() {
  return boost;
}

/**
 * Sets the boost factor on this field.
 * @throws IllegalArgumentException if this field is not indexed,
 *         or if it omits norms.
 * @see #boost()
 */
public void setBoost(float boost) {
  if (boost != 1.0f) {
    // index-time boost is folded into norms, so it is meaningless on an
    // unindexed field or one that omits norms
    if (type.indexed() == false || type.omitNorms()) {
      throw new IllegalArgumentException("You cannot set an index-time boost on an unindexed field, or one that omits norms");
    }
  }
  this.boost = boost;
}

@Override
public Number numericValue() {
  if (fieldsData instanceof Number) {
    return (Number) fieldsData;
  } else {
    return null;
  }
}

@Override
public BytesRef binaryValue() {
  if (fieldsData instanceof BytesRef) {
    return (BytesRef) fieldsData;
  } else {
    return null;
  }
}

/** Prints a Field for human consumption. */
@Override
public String toString() {
  StringBuilder result = new StringBuilder();
  result.append(type.toString());
  result.append('<');
  result.append(name);
  result.append(':');
  if (fieldsData != null) {
    result.append(fieldsData);
  }
  result.append('>');
  return result.toString();
}

/** Returns the {@link FieldType} for this field. */
@Override
public FieldType fieldType() {
  return type;
}

@Override
public TokenStream tokenStream(Analyzer analyzer, TokenStream reuse) throws IOException {
  if (!fieldType().indexed()) {
    // unindexed fields produce no tokens
    return null;
  }

  final NumericType numericType = fieldType().numericType();
  if (numericType != null) {
    if (!(reuse instanceof NumericTokenStream && ((NumericTokenStream)reuse).getPrecisionStep() == type.numericPrecisionStep())) {
      // lazy init the TokenStream as it is heavy to instantiate
      // (attributes,...) if not needed (stored field loading)
      reuse = new NumericTokenStream(type.numericPrecisionStep());
    }
    final NumericTokenStream nts = (NumericTokenStream) reuse;
    // initialize value in TokenStream
    final Number val = (Number) fieldsData;
    switch (numericType) {
    case INT:
      nts.setIntValue(val.intValue());
      break;
    case LONG:
      nts.setLongValue(val.longValue());
      break;
    case FLOAT:
      nts.setFloatValue(val.floatValue());
      break;
    case DOUBLE:
      nts.setDoubleValue(val.doubleValue());
      break;
    default:
      throw new AssertionError("Should never get here");
    }
    return reuse;
  }

  if (!fieldType().tokenized()) {
    if (stringValue() == null) {
      throw new IllegalArgumentException("Non-Tokenized Fields must have a String value");
    }
    if (!(reuse instanceof StringTokenStream)) {
      // lazy init the TokenStream as it is heavy to instantiate
      // (attributes,...) if not needed (stored field loading)
      reuse = new StringTokenStream();
    }
    ((StringTokenStream) reuse).setValue(stringValue());
    return reuse;
  }

  if (tokenStream != null) {
    return tokenStream;
  } else if (readerValue() != null) {
    return analyzer.tokenStream(name(), readerValue());
  } else if (stringValue() != null) {
    return analyzer.tokenStream(name(), stringValue());
  }

  throw new IllegalArgumentException("Field must have either TokenStream, String, Reader or Number value; got " + this);
}

/** Single-token TokenStream used for non-tokenized (keyword-style) fields. */
static final class StringTokenStream extends TokenStream {
  private final CharTermAttribute termAttribute = addAttribute(CharTermAttribute.class);
  private final OffsetAttribute offsetAttribute = addAttribute(OffsetAttribute.class);
  // true once the single token has been emitted; cleared by reset()
  private boolean used = false;
  private String value = null;

  /** Creates a new TokenStream that returns a String as single token.
   * <p>Warning: Does not initialize the value, you must call
   * {@link #setValue(String)} afterwards! */
  StringTokenStream() {
  }

  /** Sets the string value. */
  void setValue(String value) {
    this.value = value;
  }

  @Override
  public boolean incrementToken() {
    if (used) {
      return false;
    }
    clearAttributes();
    termAttribute.append(value);
    offsetAttribute.setOffset(0, value.length());
    used = true;
    return true;
  }

  @Override
  public void end() throws IOException {
    super.end();
    // final offset is the end of the (single) token
    final int finalOffset = value.length();
    offsetAttribute.setOffset(finalOffset, finalOffset);
  }

  @Override
  public void reset() {
    used = false;
  }

  @Override
  public void close() {
    value = null;
  }
}

/** Specifies whether and how a field should be stored. */
public static enum Store {

  /** Store the original field value in the index. This is useful for short texts
   * like a document's title which should be displayed with the results. The
   * value is stored in its original form, i.e. no analyzer is used before it is
   * stored. */
  YES,

  /** Do not store the field's value in the index. */
  NO
}

//
// Deprecated transition API below:
//

/** Specifies whether and how a field should be indexed.
 *
 * @deprecated This is here only to ease transition from
 * the pre-4.0 APIs. */
@Deprecated
public static enum Index {

  /** Do not index the field value. This field can thus not be searched,
   * but one can still access its contents provided it is
   * {@link Field.Store stored}. */
  NO {
    @Override
    public boolean isIndexed()  { return false; }
    @Override
    public boolean isAnalyzed() { return false; }
    @Override
    public boolean omitNorms()  { return true;  }
  },

  /** Index the tokens produced by running the field's
   * value through an Analyzer. This is useful for
   * common text. */
  ANALYZED {
    @Override
    public boolean isIndexed()  { return true;  }
    @Override
    public boolean isAnalyzed() { return true;  }
    @Override
    public boolean omitNorms()  { return false; }
  },

  /** Index the field's value without using an Analyzer, so it can be searched.
   * As no analyzer is used the value will be stored as a single term. This is
   * useful for unique Ids like product numbers. */
  NOT_ANALYZED {
    @Override
    public boolean isIndexed()  { return true;  }
    @Override
    public boolean isAnalyzed() { return false; }
    @Override
    public boolean omitNorms()  { return false; }
  },

  /** Expert: Index the field's value without an Analyzer,
   * and also disable the indexing of norms. Note that you
   * can also separately enable/disable norms by calling
   * {@link FieldType#setOmitNorms}. No norms means that
   * index-time field and document boosting and field
   * length normalization are disabled. The benefit is
   * less memory usage as norms take up one byte of RAM
   * per indexed field for every document in the index,
   * during searching. Note that once you index a given
   * field <i>with</i> norms enabled, disabling norms will
   * have no effect. In other words, for this to have the
   * above described effect on a field, all instances of
   * that field must be indexed with NOT_ANALYZED_NO_NORMS
   * from the beginning. */
  NOT_ANALYZED_NO_NORMS {
    @Override
    public boolean isIndexed()  { return true;  }
    @Override
    public boolean isAnalyzed() { return false; }
    @Override
    public boolean omitNorms()  { return true;  }
  },

  /** Expert: Index the tokens produced by running the
   * field's value through an Analyzer, and also
   * separately disable the storing of norms. See
   * {@link #NOT_ANALYZED_NO_NORMS} for what norms are
   * and why you may want to disable them. */
  ANALYZED_NO_NORMS {
    @Override
    public boolean isIndexed()  { return true; }
    @Override
    public boolean isAnalyzed() { return true; }
    @Override
    public boolean omitNorms()  { return true; }
  };

  /** Get the best representation of the index given the flags. */
  public static Index toIndex(boolean indexed, boolean analyzed) {
    return toIndex(indexed, analyzed, false);
  }

  /** Expert: Get the best representation of the index given the flags. */
  public static Index toIndex(boolean indexed, boolean analyzed, boolean omitNorms) {
    // If it is not indexed nothing else matters
    if (!indexed) {
      return Index.NO;
    }

    // typical, non-expert
    if (!omitNorms) {
      if (analyzed) {
        return Index.ANALYZED;
      }
      return Index.NOT_ANALYZED;
    }

    // Expert: Norms omitted
    if (analyzed) {
      return Index.ANALYZED_NO_NORMS;
    }
    return Index.NOT_ANALYZED_NO_NORMS;
  }

  public abstract boolean isIndexed();
  public abstract boolean isAnalyzed();
  public abstract boolean omitNorms();
}

/** Specifies whether and how a field should have term vectors.
 *
 * @deprecated This is here only to ease transition from
 * the pre-4.0 APIs. */
@Deprecated
public static enum TermVector {

  /** Do not store term vectors. */
  NO {
    @Override
    public boolean isStored()      { return false; }
    @Override
    public boolean withPositions() { return false; }
    @Override
    public boolean withOffsets()   { return false; }
  },

  /** Store the term vectors of each document. A term vector is a list
   * of the document's terms and their number of occurrences in that document. */
  YES {
    @Override
    public boolean isStored()      { return true;  }
    @Override
    public boolean withPositions() { return false; }
    @Override
    public boolean withOffsets()   { return false; }
  },

  /**
   * Store the term vector + token position information
   *
   * @see #YES
   */
  WITH_POSITIONS {
    @Override
    public boolean isStored()      { return true;  }
    @Override
    public boolean withPositions() { return true;  }
    @Override
    public boolean withOffsets()   { return false; }
  },

  /**
   * Store the term vector + Token offset information
   *
   * @see #YES
   */
  WITH_OFFSETS {
    @Override
    public boolean isStored()      { return true;  }
    @Override
    public boolean withPositions() { return false; }
    @Override
    public boolean withOffsets()   { return true;  }
  },

  /**
   * Store the term vector + Token position and offset information
   *
   * @see #YES
   * @see #WITH_POSITIONS
   * @see #WITH_OFFSETS
   */
  WITH_POSITIONS_OFFSETS {
    @Override
    public boolean isStored()      { return true; }
    @Override
    public boolean withPositions() { return true; }
    @Override
    public boolean withOffsets()   { return true; }
  };

  /** Get the best representation of a TermVector given the flags. */
  public static TermVector toTermVector(boolean stored, boolean withOffsets, boolean withPositions) {
    // If it is not stored, nothing else matters.
    if (!stored) {
      return TermVector.NO;
    }

    if (withOffsets) {
      if (withPositions) {
        return Field.TermVector.WITH_POSITIONS_OFFSETS;
      }
      return Field.TermVector.WITH_OFFSETS;
    }

    if (withPositions) {
      return Field.TermVector.WITH_POSITIONS;
    }
    return Field.TermVector.YES;
  }

  public abstract boolean isStored();
  public abstract boolean withPositions();
  public abstract boolean withOffsets();
}

/** Translates the pre-4.0 enums for specifying how a
 * field should be indexed into the 4.0 {@link FieldType}
 * approach.
 *
 * @deprecated This is here only to ease transition from
 * the pre-4.0 APIs. */
@Deprecated
public static final FieldType translateFieldType(Store store, Index index, TermVector termVector) {
  final FieldType ft = new FieldType();

  ft.setStored(store == Store.YES);

  switch(index) {
  case ANALYZED:
    ft.setIndexed(true);
    ft.setTokenized(true);
    break;
  case ANALYZED_NO_NORMS:
    ft.setIndexed(true);
    ft.setTokenized(true);
    ft.setOmitNorms(true);
    break;
  case NOT_ANALYZED:
    ft.setIndexed(true);
    ft.setTokenized(false);
    break;
  case NOT_ANALYZED_NO_NORMS:
    ft.setIndexed(true);
    ft.setTokenized(false);
    ft.setOmitNorms(true);
    break;
  case NO:
    break;
  }

  switch(termVector) {
  case NO:
    break;
  case YES:
    ft.setStoreTermVectors(true);
    break;
  case WITH_POSITIONS:
    ft.setStoreTermVectors(true);
    ft.setStoreTermVectorPositions(true);
    break;
  case WITH_OFFSETS:
    ft.setStoreTermVectors(true);
    ft.setStoreTermVectorOffsets(true);
    break;
  case WITH_POSITIONS_OFFSETS:
    ft.setStoreTermVectors(true);
    ft.setStoreTermVectorPositions(true);
    ft.setStoreTermVectorOffsets(true);
    break;
  }
  // freeze the returned FieldType so callers cannot mutate it later
  ft.freeze();
  return ft;
}

/**
 * Create a field by specifying its name, value and how it will
 * be saved in the index. Term vectors will not be stored in the index.
 *
 * @param name The name of the field
 * @param value The string to process
 * @param store Whether <code>value</code> should be stored in the index
 * @param index Whether the field should be indexed, and if so, if it should
 * be tokenized before indexing
 * @throws NullPointerException if name or value is <code>null</code>
 * @throws IllegalArgumentException if the field is neither stored nor indexed
 *
 * @deprecated Use {@link StringField}, {@link TextField} instead. */
@Deprecated
public Field(String name, String value, Store store, Index index) {
  this(name, value, translateFieldType(store, index, TermVector.NO));
}

/**
 * Create a field by specifying its name, value and how it will
 * be saved in the index.
 *
 * @param name The name of the field
 * @param value The string to process
 * @param store Whether <code>value</code> should be stored in the index
 * @param index Whether the field should be indexed, and if so, if it should
 * be tokenized before indexing
 * @param termVector Whether term vector should be stored
 * @throws NullPointerException if name or value is <code>null</code>
 * @throws IllegalArgumentException in any of the following situations:
 * <ul>
 *  <li>the field is neither stored nor indexed</li>
 *  <li>the field is not indexed but termVector is <code>TermVector.YES</code></li>
 * </ul>
 *
 * @deprecated Use {@link StringField}, {@link TextField} instead. */
@Deprecated
public Field(String name, String value, Store store, Index index, TermVector termVector) {
  this(name, value, translateFieldType(store, index, termVector));
}

/**
 * Create a tokenized and indexed field that is not stored. Term vectors will
 * not be stored.  The Reader is read only when the Document is added to the index,
 * i.e. you may not close the Reader until {@link IndexWriter#addDocument}
 * has been called.
 *
 * @param name The name of the field
 * @param reader The reader with the content
 * @throws NullPointerException if name or reader is <code>null</code>
 *
 * @deprecated Use {@link TextField} instead. */
@Deprecated
public Field(String name, Reader reader) {
  this(name, reader, TermVector.NO);
}

/**
 * Create a tokenized and indexed field that is not stored, optionally with
 * storing term vectors.  The Reader is read only when the Document is added to the index,
 * i.e. you may not close the Reader until {@link IndexWriter#addDocument}
 * has been called.
 *
 * @param name The name of the field
 * @param reader The reader with the content
 * @param termVector Whether term vector should be stored
 * @throws NullPointerException if name or reader is <code>null</code>
 *
 * @deprecated Use {@link TextField} instead. */
@Deprecated
public Field(String name, Reader reader, TermVector termVector) {
  this(name, reader, translateFieldType(Store.NO, Index.ANALYZED, termVector));
}

/**
 * Create a tokenized and indexed field that is not stored. Term vectors will
 * not be stored. This is useful for pre-analyzed fields.
 * The TokenStream is read only when the Document is added to the index,
 * i.e. you may not close the TokenStream until {@link IndexWriter#addDocument}
 * has been called.
 *
 * @param name The name of the field
 * @param tokenStream The TokenStream with the content
 * @throws NullPointerException if name or tokenStream is <code>null</code>
 *
 * @deprecated Use {@link TextField} instead */
@Deprecated
public Field(String name, TokenStream tokenStream) {
  this(name, tokenStream, TermVector.NO);
}

/**
 * Create a tokenized and indexed field that is not stored, optionally with
 * storing term vectors.  This is useful for pre-analyzed fields.
 * The TokenStream is read only when the Document is added to the index,
 * i.e. you may not close the TokenStream until {@link IndexWriter#addDocument}
 * has been called.
 *
 * @param name The name of the field
 * @param tokenStream The TokenStream with the content
 * @param termVector Whether term vector should be stored
 * @throws NullPointerException if name or tokenStream is <code>null</code>
 *
 * @deprecated Use {@link TextField} instead */
@Deprecated
public Field(String name, TokenStream tokenStream, TermVector termVector) {
  this(name, tokenStream, translateFieldType(Store.NO, Index.ANALYZED, termVector));
}

/**
 * Create a stored field with binary value. Optionally the value may be compressed.
 *
 * @param name The name of the field
 * @param value The binary value
 *
 * @deprecated Use {@link StoredField} instead. */
@Deprecated
public Field(String name, byte[] value) {
  this(name, value, translateFieldType(Store.YES, Index.NO, TermVector.NO));
}

/**
 * Create a stored field with binary value. Optionally the value may be compressed.
 *
 * @param name The name of the field
 * @param value The binary value
 * @param offset Starting offset in value where this Field's bytes are
 * @param length Number of bytes to use for this Field, starting at offset
 *
 * @deprecated Use {@link StoredField} instead. */
@Deprecated
public Field(String name, byte[] value, int offset, int length) {
  this(name, value, offset, length, translateFieldType(Store.YES, Index.NO, TermVector.NO));
}
}
/* Copyright (c) 2012-2013 Boundless and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Distribution License v1.0 * which accompanies this distribution, and is available at * https://www.eclipse.org/org/documents/edl-v10.html * * Contributors: * Gabriel Roldan (Boundless) - initial implementation */ package org.locationtech.geogig.api; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import java.util.List; import javax.annotation.Nullable; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.vividsolutions.jts.geom.Envelope; /** * The basic leaf element of a revision tree. */ public class NodeRef implements Bounded, Comparable<NodeRef> { public static final String ROOT = ""; /** * The character '/' used to separate paths (e.g. {@code path/to/node}) */ public static final char PATH_SEPARATOR = '/'; /** * Full path from the root tree to the object this ref points to */ private String parentPath; /** * The {@code Node} this object points to */ private Node node; /** * possibly {@link ObjectId#NULL NULL} id for the object describing the object this ref points * to */ private ObjectId metadataId; /** * Constructs a new {@code Node} objects from a {@code Node} object without metadataId. It * assumes that the passed {@code Node} does not have a metadataId value, and will not use it, * even it it is present. 
* * @param node a Node representing the element this Node points to * @param parentPath the path of the parent tree, may be an empty string * @param metadataId the metadataId of the element */ public NodeRef(Node node, String parentPath, ObjectId metadataId) { Preconditions.checkNotNull(node, "node is null"); Preconditions.checkNotNull(parentPath, "parentPath is null, did you mean an empty string?"); Preconditions.checkNotNull(metadataId, "metadataId is null, did you mean ObjectId.NULL?"); this.node = node; this.parentPath = parentPath; this.metadataId = metadataId; } /** * Returns the parent path of the object this ref points to * * @return */ public String getParentPath() { return parentPath; } /** * Returns the {@code Node} this object points to * * @return the {@code Node} this object points to */ public Node getNode() { return node; } /** * Returns the full path from the root tree to the object this ref points to * <p> * This is a derived property, shortcut for * <code>{@link #getParentPath()} + "/" + getNode().getName() </code> */ public String path() { return NodeRef.appendChild(parentPath, node.getName()); } /** * @return the simple name of the {@link Node} this noderef points to */ public String name() { return node.getName(); } /** * The id of the object this edge points to */ public ObjectId objectId() { return node.getObjectId(); } /** * The node's metadata id, which can be given by the {@link Node#getMetadataId() node itself} or * the metadata id given to this {@link NodeRef} constructor if the {@code Node} does not have a * metadata id set, so that Nodes can inherit the metadata id from its parent tree. * * @return the node's metadata id if provided by {@link Node#getMetadataId()} or this node ref * metadata id otherwise. 
*/ public ObjectId getMetadataId() { if (node.getMetadataId().isPresent() && !node.getMetadataId().get().isNull()) { return node.getMetadataId().get(); } else { return this.metadataId; } } /** * type of object this ref points to */ public RevObject.TYPE getType() { return node.getType(); } /** * Tests equality over another {@code NodeRef} based on {@link #getParentPath() parent path}, * {@link #getNode() node} name and id, and {@link #getMetadataId()} */ @Override public boolean equals(Object o) { if (!(o instanceof NodeRef)) { return false; } NodeRef r = (NodeRef) o; return parentPath.equals(r.parentPath) && node.equals(r.node) && getMetadataId().equals(r.getMetadataId()); } /** * Hash code is based on {@link #getParentPath() parent path}, {@link #getNode() node} name and * id, and {@link #getMetadataId()} */ @Override public int hashCode() { return 17 ^ parentPath.hashCode() * node.getObjectId().hashCode() * getMetadataId().hashCode(); } /** * Provides for natural ordering of {@code NodeRef}, based on {@link #path()} */ @Override public int compareTo(NodeRef o) { int c = parentPath.compareTo(o.getParentPath()); if (c == 0) { return node.compareTo(o.getNode()); } return c; } /** * @return the Node represented as a readable string. */ @Override public String toString() { return new StringBuilder("NodeRef").append('[').append(path()).append(" -> ") .append(node.getObjectId()).append(']').toString(); } /** * Returns the parent path of {@code fullPath}. * <p> * Given {@code fullPath == "path/to/node"} returns {@code "path/to"}, given {@code "node"} * returns {@code ""}, given {@code null} returns {@code null} * * @param fullPath the full path to extract the parent path from * @return non null parent path, empty string if {@code fullPath} has no children (i.e. no * {@link #PATH_SEPARATOR}). 
*/ public static @Nullable String parentPath(@Nullable String fullPath) { if (fullPath == null || fullPath.isEmpty()) { return null; } int idx = fullPath.lastIndexOf(PATH_SEPARATOR); if (idx == -1) { return ROOT; } return fullPath.substring(0, idx); } /** * Determines if the input path is valid. * * @param path * @throws IllegalArgumentException */ public static void checkValidPath(final String path) { if (path == null) { throw new IllegalArgumentException("null path"); } if (path.isEmpty()) { throw new IllegalArgumentException("empty path"); } if (path.charAt(path.length() - 1) == PATH_SEPARATOR) { throw new IllegalArgumentException("path cannot end with path separator: " + path); } } /** * Returns the node of {@code fullPath}. * <p> * Given {@code fullPath == "path/to/node"} returns {@code "node" }, given {@code "node"} * returns {@code "node"}, given {@code null} returns {@code null} * * @param fullPath the full path to extract the node from * @return non null node, original string if {@code fullPath} has no path (i.e. no * {@link #PATH_SEPARATOR}). */ public static @Nullable String nodeFromPath(@Nullable String fullPath) { if (fullPath == null || fullPath.isEmpty()) { return null; } int idx = fullPath.lastIndexOf(PATH_SEPARATOR); if (idx == -1) { return fullPath; } return fullPath.substring(idx + 1, fullPath.length()); } /** * Determines if the given node path is a direct child of the parent path. 
* * @param parentPath * @param nodePath * @return true of {@code nodePath} is a direct child of {@code parentPath}, {@code false} if * unrelated, sibling, same path, or nested child */ public static boolean isDirectChild(String parentPath, String nodePath) { checkNotNull(parentPath, "parentPath"); checkNotNull(nodePath, "nodePath"); int idx = nodePath.lastIndexOf(PATH_SEPARATOR); if (parentPath.isEmpty()) { return !nodePath.isEmpty() && idx == -1; } return idx == parentPath.length() && nodePath.substring(0, idx).equals(parentPath); } /** * Determines if the given node path is a child of the given parent path. * * @param parentPath * @param nodePath * @return true of {@code nodePath} is a child of {@code parentPath} at any depth level, * {@code false} if unrelated, sibling, or same path */ public static boolean isChild(String parentPath, String nodePath) { checkNotNull(parentPath, "parentPath"); checkNotNull(nodePath, "nodePath"); return nodePath.length() > parentPath.length() && (parentPath.isEmpty() || nodePath.charAt(parentPath.length()) == PATH_SEPARATOR) && nodePath.startsWith(parentPath); } /** * Given {@code path == "path/to/node"} returns {@code ["path", "path/to", "path/to/node"]} * * @param path the path to analyze * @return a sorted list of all paths that lead to the given path */ public static List<String> allPathsTo(final String path) { checkNotNull(path); checkArgument(!path.isEmpty()); StringBuilder sb = new StringBuilder(); List<String> paths = Lists.newArrayList(); final String[] steps = path.split("" + PATH_SEPARATOR); int i = 0; do { sb.append(steps[i]); paths.add(sb.toString()); sb.append(PATH_SEPARATOR); i++; } while (i < steps.length); return paths; } /** * Splits the given tree {@code path} into its node name components * * @param path non null, possibly empty path * @return a list of path steps, or an empty list if the path is empty */ public static ImmutableList<String> split(final String path) { checkNotNull(path); if (path.isEmpty()) { 
return ImmutableList.of(); } final String[] steps = path.split("" + PATH_SEPARATOR); return ImmutableList.copyOf(steps); } /** * Constructs a new path by appending a child name to an existing parent path. * * @param parentTreePath full parent path * @param childName name to append * * @return a new full path made by appending {@code childName} to {@code parentTreePath} */ public static String appendChild(String parentTreePath, String childName) { checkNotNull(parentTreePath); checkNotNull(childName); return ROOT.equals(parentTreePath) ? childName : new StringBuilder(parentTreePath) .append(PATH_SEPARATOR).append(childName).toString(); } @Override public boolean intersects(Envelope env) { return node.intersects(env); } @Override public void expand(Envelope env) { node.expand(env); } /** * @return the depth of the given path, being zero if the path is the root path (i.e. the empty * string) or > 0 depending on how many steps compose the path */ public static int depth(String path) { return split(path).size(); } public static String removeParent(final String parentPath, final String childPath) { checkArgument(isChild(parentPath, childPath)); ImmutableList<String> parent = split(parentPath); ImmutableList<String> child = split(childPath); child = child.subList(parent.size(), child.size()); String strippedChildPath = child.get(0); for (int i = 1; i < child.size(); i++) { appendChild(strippedChildPath, child.get(i)); } return strippedChildPath; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.drill.exec.planner.physical.visitor;

import org.apache.drill.exec.planner.physical.ExchangePrel;
import org.apache.drill.exec.planner.physical.JoinPrel;
import org.apache.drill.exec.planner.physical.Prel;
import org.apache.drill.exec.planner.physical.ProjectPrel;
import org.apache.drill.exec.planner.physical.ScanPrel;
import org.apache.drill.exec.planner.physical.ScreenPrel;
import org.apache.drill.exec.planner.physical.WriterPrel;
import org.apache.drill.exec.planner.physical.UnnestPrel;
import org.apache.drill.exec.planner.physical.CorrelatePrel;

/**
 * Debug-time class that prints a PRel tree to the console for
 * inspection. Insert this into code during development to see
 * the state of the tree at various points of interest during
 * the planning process.
 * <p>
 * Use this by inserting lines into our prel transforms to see
 * what is happening. This is useful if you must understand the transforms,
 * or change them. For example:
 * <p>
 * In file: {@link DefaultSqlHandler#convertToPrel()}:
 * <pre><code>
 * PrelVisualizerVisitor.print("Before EER", phyRelNode); // Debug only
 * phyRelNode = ExcessiveExchangeIdentifier.removeExcessiveEchanges(phyRelNode, targetSliceSize);
 * PrelVisualizerVisitor.print("After EER", phyRelNode); // Debug only
 * </code></pre>
 */
public class PrelVisualizerVisitor
    implements PrelVisitor<Void, PrelVisualizerVisitor.VisualizationState, Exception> {

  /**
   * Accumulates the textual rendering of the tree and tracks the current
   * indentation level while the visitor walks the Prels.
   */
  public static class VisualizationState {

    public static String INDENT = "  ";

    // rendered output, built up as nodes are visited
    StringBuilder out = new StringBuilder();
    // current nesting depth; one INDENT per level
    int level;

    /** Opens a "{ ClassName" section for the given node and indents one level. */
    public void startNode(Prel prel) {
      indent();
      out.append("{ ");
      out.append(prel.getClass().getSimpleName());
      out.append("\n");
      push();
    }

    /** Outdents one level and closes the current node's "}" section. */
    public void endNode() {
      pop();
      indent();
      out.append("}");
      out.append("\n");
    }

    private void indent() {
      for (int i = 0; i < level; i++) {
        out.append(INDENT);
      }
    }

    public void push() {
      level++;
    }

    public void pop() {
      level--;
    }

    /** Marks the end of a node's field list. Currently a no-op placeholder. */
    public void endFields() {
      // no state to flush; kept for symmetry with startNode()/endNode()
    }

    public void visitField(String label, boolean value) {
      visitField(label, Boolean.toString(value));
    }

    /** Emits one "label = value" line at the current indentation. */
    private void visitField(String label, String value) {
      indent();
      out.append(label)
         .append(" = ")
         .append(value)
         .append("\n");
    }

    /** Emits an array-valued field as "label = [a, b, ...]", rendering nulls literally. */
    public void visitField(String label, Object[] values) {
      if (values == null) {
        visitField(label, "null");
        return;
      }
      StringBuilder buf = new StringBuilder();
      buf.append("[");
      boolean first = true;
      for (Object obj : values) {
        if (!first) {
          buf.append(", ");
        }
        first = false;
        if (obj == null) {
          buf.append("null");
        } else {
          buf.append(obj.toString());
        }
      }
      buf.append("]");
      visitField(label, buf.toString());
    }

    @Override
    public String toString() {
      return out.toString();
    }
  }

  /** Prints {@code label} followed by the visualization of {@code prel} to stdout. */
  public static void print(String label, Prel prel) {
    System.out.println(label);
    System.out.println(visualize(prel));
  }

  /** Renders the given tree to a string; returns an error marker instead of throwing. */
  public static String visualize(Prel prel) {
    try {
      VisualizationState state = new VisualizationState();
      prel.accept(new PrelVisualizerVisitor(), state);
      return state.toString();
    } catch (Exception e) {
      e.printStackTrace();
      return "** ERROR **";
    }
  }

  @Override
  public Void visitExchange(ExchangePrel prel, VisualizationState value) throws Exception {
    visitBasePrel(prel, value);
    endNode(prel, value);
    return null;
  }

  /** Opens the node section and emits the fields common to every Prel. */
  private void visitBasePrel(Prel prel, VisualizationState value) {
    value.startNode(prel);
    value.visitField("encodings", prel.getSupportedEncodings());
    value.visitField("needsReorder", prel.needsFinalColumnReordering());
  }

  /** Closes the field list, renders children, then closes the node section. */
  private void endNode(Prel prel, VisualizationState value) throws Exception {
    value.endFields();
    visitChildren(prel, value);
    value.endNode();
  }

  /** Renders all children of {@code prel} inside a "children = [...]" section. */
  private void visitChildren(Prel prel, VisualizationState value) throws Exception {
    value.indent();
    value.out.append("children = [\n");
    value.push();
    for (Prel child : prel) {
      child.accept(this, value);
    }
    value.pop();
    value.indent();
    value.out.append("]\n");
  }

  @Override
  public Void visitScreen(ScreenPrel prel, VisualizationState value) throws Exception {
    visitBasePrel(prel, value);
    endNode(prel, value);
    return null;
  }

  @Override
  public Void visitWriter(WriterPrel prel, VisualizationState value) throws Exception {
    visitBasePrel(prel, value);
    endNode(prel, value);
    return null;
  }

  @Override
  public Void visitScan(ScanPrel prel, VisualizationState value) throws Exception {
    visitBasePrel(prel, value);
    endNode(prel, value);
    return null;
  }

  @Override
  public Void visitJoin(JoinPrel prel, VisualizationState value) throws Exception {
    visitBasePrel(prel, value);
    endNode(prel, value);
    return null;
  }

  @Override
  public Void visitProject(ProjectPrel prel, VisualizationState value) throws Exception {
    visitBasePrel(prel, value);
    endNode(prel, value);
    return null;
  }

  @Override
  public Void visitPrel(Prel prel, VisualizationState value) throws Exception {
    visitBasePrel(prel, value);
    endNode(prel, value);
    return null;
  }

  @Override
  public Void visitUnnest(UnnestPrel prel, VisualizationState value) throws Exception {
    // delegates to the generic Prel rendering
    visitPrel(prel, value);
    return null;
  }

  @Override
  public Void visitCorrelate(CorrelatePrel prel, VisualizationState value) throws Exception {
    // delegates to the generic Prel rendering
    visitPrel(prel, value);
    return null;
  }
}
/*
 * Copyright 2015-2018 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.rsocket.internal;

import io.netty.util.ReferenceCounted;
import io.rsocket.internal.jctools.queues.MpscUnboundedArrayQueue;
import java.util.Objects;
import java.util.Queue;
import java.util.concurrent.CancellationException;
import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
import java.util.concurrent.atomic.AtomicLongFieldUpdater;
import org.reactivestreams.Subscriber;
import org.reactivestreams.Subscription;
import reactor.core.CoreSubscriber;
import reactor.core.Exceptions;
import reactor.core.Fuseable;
import reactor.core.publisher.FluxProcessor;
import reactor.core.publisher.Operators;
import reactor.util.annotation.Nullable;
import reactor.util.concurrent.Queues;
import reactor.util.context.Context;

/**
 * A Processor implementation that takes a custom queue and allows only a single subscriber.
 *
 * <p>The implementation keeps the order of signals. Values submitted through
 * {@link #onNextPrioritized(Object)} are delivered ahead of values submitted through
 * {@link #onNext(Object)}. Values that implement Netty's {@link ReferenceCounted} are
 * released when they are dropped or discarded (see {@link #release(Object)}).
 *
 * <p>Concurrency is coordinated with the classic work-in-progress ("wip") drain-loop
 * pattern: whoever increments {@code wip} from 0 owns the drain; everyone else just
 * records a missed pass.
 *
 * @param <T> the input and output type
 */
public final class UnboundedProcessor<T> extends FluxProcessor<T, T>
    implements Fuseable.QueueSubscription<T>, Fuseable {

  // Regular FIFO queue for values submitted via onNext(...).
  final Queue<T> queue;
  // Values submitted via onNextPrioritized(...); drained before `queue`.
  final Queue<T> priorityQueue;

  // Set once by onError/onComplete/dispose; `error` is written before `done` is raised.
  volatile boolean done;
  Throwable error;

  // important to not lose the downstream too early and miss discard hook, while
  // having relevant hasDownstreams()
  // NOTE(review): in this chunk `hasDownstream` is only ever set to false; no visible
  // code path sets it to true — confirm against the full file / upstream history.
  boolean hasDownstream;
  volatile CoreSubscriber<? super T> actual;

  volatile boolean cancelled;

  // Raised by clear(); signals that queued values must be discarded, not delivered.
  volatile boolean terminated;

  // Single-subscriber guard: flipped 0 -> 1 by the first (and only) subscribe.
  volatile int once;

  @SuppressWarnings("rawtypes")
  static final AtomicIntegerFieldUpdater<UnboundedProcessor> ONCE =
      AtomicIntegerFieldUpdater.newUpdater(UnboundedProcessor.class, "once");

  // Work-in-progress counter serializing drain passes (0 -> 1 transition owns the drain).
  volatile int wip;

  @SuppressWarnings("rawtypes")
  static final AtomicIntegerFieldUpdater<UnboundedProcessor> WIP =
      AtomicIntegerFieldUpdater.newUpdater(UnboundedProcessor.class, "wip");

  // Serializes clearSafely() so concurrent discards don't double-release values.
  volatile int discardGuard;

  @SuppressWarnings("rawtypes")
  static final AtomicIntegerFieldUpdater<UnboundedProcessor> DISCARD_GUARD =
      AtomicIntegerFieldUpdater.newUpdater(UnboundedProcessor.class, "discardGuard");

  // Outstanding downstream demand (Reactive Streams `request(n)` accounting).
  volatile long requested;

  @SuppressWarnings("rawtypes")
  static final AtomicLongFieldUpdater<UnboundedProcessor> REQUESTED =
      AtomicLongFieldUpdater.newUpdater(UnboundedProcessor.class, "requested");

  // True once the subscriber negotiated ASYNC fusion via requestFusion(...).
  boolean outputFused;

  public UnboundedProcessor() {
    this.queue = new MpscUnboundedArrayQueue<>(Queues.SMALL_BUFFER_SIZE);
    this.priorityQueue = new MpscUnboundedArrayQueue<>(Queues.SMALL_BUFFER_SIZE);
  }

  @Override
  public int getBufferSize() {
    // Both backing queues are unbounded.
    return Integer.MAX_VALUE;
  }

  @Override
  public Object scanUnsafe(Attr key) {
    // NOTE(review): BUFFERED reports only `queue.size()`, ignoring the priority queue —
    // confirm whether that is intentional.
    if (Attr.BUFFERED == key) return queue.size();
    if (Attr.PREFETCH == key) return Integer.MAX_VALUE;
    return super.scanUnsafe(key);
  }

  /**
   * Drain pass for a non-fused subscriber: emits up to `requested` values,
   * always preferring the priority queue, then settles terminal state.
   */
  void drainRegular(Subscriber<? super T> a) {
    int missed = 1;
    final Queue<T> q = queue;
    final Queue<T> pq = priorityQueue;
    for (; ; ) {
      long r = requested;
      long e = 0L;
      while (r != e) {
        boolean d = done;
        // Priority values first; fall back to the regular queue when empty.
        T t = pq.poll();
        boolean empty = t == null;
        if (empty) {
          t = q.poll();
          empty = t == null;
        }
        if (checkTerminated(d, empty, a)) {
          // Terminated while holding an un-emitted value: release it to avoid a leak.
          if (!empty) {
            release(t);
          }
          return;
        }
        if (empty) {
          break;
        }
        a.onNext(t);
        e++;
      }
      if (r == e) {
        if (checkTerminated(done, q.isEmpty() && pq.isEmpty(), a)) {
          return;
        }
      }
      // Deduct what was emitted, unless demand is effectively unbounded.
      if (e != 0 && r != Long.MAX_VALUE) {
        REQUESTED.addAndGet(this, -e);
      }
      missed = WIP.addAndGet(this, -missed);
      if (missed == 0) {
        break;
      }
    }
  }

  /**
   * Drain pass in ASYNC fusion mode: `onNext(null)` merely signals the fused
   * subscriber that values are available to poll().
   */
  void drainFused(Subscriber<? super T> a) {
    int missed = 1;
    for (; ; ) {
      if (cancelled) {
        if (terminated) {
          this.clear();
        }
        hasDownstream = false;
        return;
      }
      boolean d = done;
      a.onNext(null);
      if (d) {
        hasDownstream = false;
        Throwable ex = error;
        if (ex != null) {
          a.onError(ex);
        } else {
          a.onComplete();
        }
        return;
      }
      missed = WIP.addAndGet(this, -missed);
      if (missed == 0) {
        break;
      }
    }
  }

  /** Entry point of the drain loop; only the 0 -> 1 WIP transition performs work. */
  public void drain() {
    final int previousWip = WIP.getAndIncrement(this);
    if (previousWip != 0) {
      // Someone else is draining; a negative WIP means clear() already took over.
      if (previousWip < 0 || terminated) {
        this.clear();
      }
      return;
    }
    int missed = 1;
    for (; ; ) {
      Subscriber<? super T> a = actual;
      if (a != null) {
        if (outputFused) {
          drainFused(a);
        } else {
          drainRegular(a);
        }
        return;
      }
      missed = WIP.addAndGet(this, -missed);
      if (missed == 0) {
        break;
      }
    }
  }

  /**
   * Settles cancellation / completion during a drain pass.
   *
   * @return true when the downstream has been terminated and draining must stop
   */
  boolean checkTerminated(boolean d, boolean empty, Subscriber<? super T> a) {
    if (cancelled) {
      this.clear();
      hasDownstream = false;
      return true;
    }
    if (d && empty) {
      this.clear();
      Throwable e = error;
      hasDownstream = false;
      if (e != null) {
        a.onError(e);
      } else {
        a.onComplete();
      }
      return true;
    }
    return false;
  }

  @Override
  public void onSubscribe(Subscription s) {
    if (done || cancelled) {
      s.cancel();
    } else {
      // Processor buffers internally, so request everything from upstream.
      s.request(Long.MAX_VALUE);
    }
  }

  @Override
  public int getPrefetch() {
    return Integer.MAX_VALUE;
  }

  @Override
  public Context currentContext() {
    CoreSubscriber<? super T> actual = this.actual;
    return actual != null ? actual.currentContext() : Context.empty();
  }

  /**
   * Like {@link #onNext(Object)} but enqueues into the priority queue, so the value
   * is delivered ahead of any regularly queued values.
   */
  public void onNextPrioritized(T t) {
    if (done || cancelled) {
      Operators.onNextDropped(t, currentContext());
      release(t);
      return;
    }
    if (!priorityQueue.offer(t)) {
      Throwable ex = Operators.onOperatorError(null, Exceptions.failWithOverflow(), t, currentContext());
      // NOTE(review): `ex` is wrapped through onOperatorError twice here (same in
      // onNext below) — looks redundant; confirm against upstream intent.
      onError(Operators.onOperatorError(null, ex, t, currentContext()));
      release(t);
      return;
    }
    drain();
  }

  @Override
  public void onNext(T t) {
    if (done || cancelled) {
      Operators.onNextDropped(t, currentContext());
      release(t);
      return;
    }
    if (!queue.offer(t)) {
      Throwable ex = Operators.onOperatorError(null, Exceptions.failWithOverflow(), t, currentContext());
      onError(Operators.onOperatorError(null, ex, t, currentContext()));
      release(t);
      return;
    }
    drain();
  }

  @Override
  public void onError(Throwable t) {
    if (done || cancelled) {
      Operators.onErrorDropped(t, currentContext());
      return;
    }
    // Publish the error before raising `done` so drain passes observe it.
    error = t;
    done = true;
    drain();
  }

  @Override
  public void onComplete() {
    if (done || cancelled) {
      return;
    }
    done = true;
    drain();
  }

  @Override
  public void subscribe(CoreSubscriber<? super T> actual) {
    Objects.requireNonNull(actual, "subscribe");
    if (once == 0 && ONCE.compareAndSet(this, 0, 1)) {
      actual.onSubscribe(this);
      this.actual = actual;
      drain();
    } else {
      // Only a single subscriber is ever accepted.
      Operators.error(
          actual,
          new IllegalStateException("UnboundedProcessor " + "allows only a single Subscriber"));
    }
  }

  @Override
  public void request(long n) {
    if (Operators.validate(n)) {
      // Cap-aware add: saturates at Long.MAX_VALUE (unbounded demand).
      Operators.addCap(REQUESTED, this, n);
      drain();
    }
  }

  @Override
  public void cancel() {
    if (cancelled) {
      return;
    }
    cancelled = true;
    // Winning the WIP race means no drain is active; safe to discard here.
    if (WIP.getAndIncrement(this) == 0) {
      if (!outputFused || terminated) {
        this.clear();
      }
      hasDownstream = false;
    }
  }

  @Override
  @Nullable
  public T poll() {
    // Fusion-mode poll: priority values first.
    Queue<T> pq = this.priorityQueue;
    if (!pq.isEmpty()) {
      return pq.poll();
    }
    return queue.poll();
  }

  @Override
  public int size() {
    return priorityQueue.size() + queue.size();
  }

  @Override
  public boolean isEmpty() {
    return priorityQueue.isEmpty() && queue.isEmpty();
  }

  @Override
  public void clear() {
    terminated = true;
    for (; ; ) {
      int wip = this.wip;
      clearSafely();
      // Park WIP at MIN_VALUE so any concurrent drain attempt re-routes to clear().
      if (WIP.compareAndSet(this, wip, Integer.MIN_VALUE)) {
        return;
      }
    }
  }

  /** Discards (and releases) all queued values; serialized via DISCARD_GUARD. */
  void clearSafely() {
    if (DISCARD_GUARD.getAndIncrement(this) != 0) {
      return;
    }
    int missed = 1;
    for (; ; ) {
      T t;
      while ((t = queue.poll()) != null) {
        release(t);
      }
      while ((t = priorityQueue.poll()) != null) {
        release(t);
      }
      missed = DISCARD_GUARD.addAndGet(this, -missed);
      if (missed == 0) {
        break;
      }
    }
  }

  @Override
  public int requestFusion(int requestedMode) {
    // Only ASYNC fusion is supported; SYNC would violate the processor contract.
    if ((requestedMode & Fuseable.ASYNC) != 0) {
      outputFused = true;
      return Fuseable.ASYNC;
    }
    return Fuseable.NONE;
  }

  @Override
  public void dispose() {
    if (cancelled) {
      return;
    }
    error = new CancellationException("Disposed");
    done = true;
    if (WIP.getAndIncrement(this) == 0) {
      cancelled = true;
      final CoreSubscriber<? super T> a = this.actual;
      if (!outputFused || terminated) {
        clear();
      }
      if (a != null) {
        try {
          a.onError(error);
        } catch (Throwable ignored) {
          // Best-effort terminal signal; a misbehaving subscriber must not break dispose.
        }
      }
      hasDownstream = false;
    }
  }

  @Override
  public boolean isDisposed() {
    return cancelled || done;
  }

  @Override
  public boolean isTerminated() {
    return done;
  }

  @Override
  @Nullable
  public Throwable getError() {
    return error;
  }

  @Override
  public long downstreamCount() {
    return hasDownstreams() ? 1L : 0L;
  }

  @Override
  public boolean hasDownstreams() {
    return hasDownstream;
  }

  /** Releases a Netty reference-counted value if it is still live; never throws. */
  void release(T t) {
    if (t instanceof ReferenceCounted) {
      ReferenceCounted refCounted = (ReferenceCounted) t;
      if (refCounted.refCnt() > 0) {
        try {
          refCounted.release();
        } catch (Throwable ex) {
          // no ops
        }
      }
    }
  }
}
package org.motechproject.metrics.service.impl;

import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.RatioGauge;
import org.motechproject.metrics.api.Counter;
import org.motechproject.metrics.api.Gauge;
import org.motechproject.metrics.api.Histogram;
import org.motechproject.metrics.api.Meter;
import org.motechproject.metrics.api.Metric;
import org.motechproject.metrics.api.Timer;
import org.motechproject.metrics.config.MetricsConfigFacade;
import org.motechproject.metrics.exception.MetricAlreadyExistsException;
import org.motechproject.metrics.model.CounterAdapter;
import org.motechproject.metrics.model.Enablable;
import org.motechproject.metrics.model.GaugeAdapter;
import org.motechproject.metrics.model.HistogramAdapter;
import org.motechproject.metrics.model.MeterAdapter;
import org.motechproject.metrics.model.TimerAdapter;
import org.motechproject.metrics.service.MetricRegistryService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Supplier;

/**
 * Represents a registry that creates and/or retrieves a variety of metric types and allows registered metrics to be
 * enabled or disabled.
 */
@Service("metricRegistryService")
public class MetricRegistryServiceImpl implements MetricRegistryService {
    private final MetricRegistry metricRegistry;
    private final MetricsConfigFacade metricsConfigFacade;

    private static final String EXCEPTION_TEMPLATE = "A metric with name: %s already exists.";

    // Cache of adapters keyed by metric name; ConcurrentHashMap so getOrAdd can be atomic.
    private final Map<String, Metric> metrics;

    @Autowired
    public MetricRegistryServiceImpl(MetricRegistry metricRegistry, MetricsConfigFacade metricsConfigFacade) {
        this.metricRegistry = metricRegistry;
        this.metricsConfigFacade = metricsConfigFacade;
        metrics = new ConcurrentHashMap<>();
    }

    /**
     * Sets all registered metrics to enabled or disabled depending on the value of the enabled parameter.
     *
     * @param enabled If true, sets all metrics to enabled; sets all metrics to disabled otherwise.
     */
    @Override
    public void setEnabled(boolean enabled) {
        for (Metric metric : metrics.values()) {
            // Only flip metrics that actually support toggling and are not already in the target state.
            if (metric instanceof Enablable && ((Enablable) metric).isEnabled() != enabled) {
                ((Enablable) metric).setEnabled(enabled);
            }
        }
    }

    /**
     * Get the counter associated with the given name.
     *
     * @param name the name of the counter
     * @return the counter associated with the given name
     */
    @Override
    public Counter counter(final String name) {
        return getOrAdd(name, MetricBuilder.COUNTERS);
    }

    /**
     * Get the histogram associated with the given name.
     *
     * @param name the name of the histogram
     * @return the histogram associated with the given name
     */
    @Override
    public Histogram histogram(final String name) {
        return getOrAdd(name, MetricBuilder.HISTOGRAMS);
    }

    /**
     * Get the meter associated with the given name.
     *
     * @param name the name of the meter
     * @return the meter associated with the given name
     */
    @Override
    public Meter meter(final String name) {
        return getOrAdd(name, MetricBuilder.METERS);
    }

    /**
     * Get the timer associated with the given name.
     *
     * @param name the name of the timer
     * @return the timer associated with the given name
     */
    @Override
    public Timer timer(final String name) {
        return getOrAdd(name, MetricBuilder.TIMERS);
    }

    /**
     * Register an implementation of the gauge interface.
     *
     * @param name the name of the gauge
     * @param gauge the implementation of the gauge interface
     * @param <T> the type of the gauge's return value
     * @return the registered gauge
     * @throws MetricAlreadyExistsException if a metric is already registered under the given name
     */
    @Override
    public <T> Gauge<T> registerGauge(final String name, final Gauge<T> gauge) {
        com.codahale.metrics.Gauge<T> theGauge;

        try {
            theGauge = metricRegistry.register(name, new com.codahale.metrics.Gauge<T>() {
                @Override
                public T getValue() {
                    return gauge.getValue();
                }
            });
        } catch (IllegalArgumentException ex) {
            throw new MetricAlreadyExistsException(String.format(EXCEPTION_TEMPLATE, name), ex);
        }

        return new GaugeAdapter<T>(theGauge);
    }

    /**
     * Register a ratio gauge.
     *
     * @param name the name of the gauge
     * @param numerator a function returning a number that represents the value of the numerator
     * @param denominator a function returning a number that represents the value of the denominator
     * @param <T> a type of number
     * @return the registered gauge
     * @throws MetricAlreadyExistsException if a metric is already registered under the given name
     */
    @Override
    public <T extends Number> Gauge<Double> registerRatioGauge(final String name,
                                                              Supplier<T> numerator,
                                                              Supplier<T> denominator) {
        com.codahale.metrics.RatioGauge theGauge;

        try {
            theGauge = metricRegistry.register(name, new RatioGauge() {
                @Override
                protected Ratio getRatio() {
                    return Ratio.of(numerator.get().doubleValue(), denominator.get().doubleValue());
                }
            });
        } catch (IllegalArgumentException ex) {
            throw new MetricAlreadyExistsException(String.format(EXCEPTION_TEMPLATE, name), ex);
        }

        return new GaugeAdapter<>(theGauge);
    }

    /**
     * Returns whether a metric is registered or not by the given metric name.
     *
     * @param name the name of the metric
     * @return true if registered, false otherwise
     */
    @Override
    public boolean isRegistered(String name) {
        return metricRegistry.getNames().contains(name);
    }

    /**
     * Atomically returns the cached adapter for {@code name}, creating and caching it on first use.
     *
     * <p>Fix over the previous version: the original get-then-put sequence was not atomic, so two
     * threads racing on the same name could each build an adapter and one would silently overwrite
     * the other. {@link ConcurrentHashMap#computeIfAbsent} performs the create-and-cache step
     * atomically per key.
     *
     * @param name the metric name
     * @param builder builds (and type-checks) the concrete metric kind
     * @return the adapter of type {@code T} registered under {@code name}
     * @throws MetricAlreadyExistsException if {@code name} is already bound to a different metric type
     */
    @SuppressWarnings("unchecked")
    private <T extends Metric> T getOrAdd(String name, MetricBuilder<T> builder) {
        Metric metric = metrics.computeIfAbsent(name, key -> {
            try {
                return builder.createMetric(key, metricRegistry, metricsConfigFacade);
            } catch (IllegalArgumentException ex) {
                // The underlying registry already holds `key` as a different metric type.
                throw new MetricAlreadyExistsException(String.format(EXCEPTION_TEMPLATE, key), ex);
            }
        });

        if (builder.isInstance(metric)) {
            return (T) metric;
        }

        // Cached under this name, but as a different metric type.
        throw new MetricAlreadyExistsException(String.format(EXCEPTION_TEMPLATE, name));
    }

    /**
     * Encapsulates the default method by which counters, histograms, meters, and timers are created and wrapped in the
     * appropriate adapter.
     *
     * @param <T> the type of metric
     */
    private interface MetricBuilder<T extends Metric> {
        MetricBuilder<Counter> COUNTERS = new MetricBuilder<Counter>() {
            @Override
            public Counter createMetric(String name, MetricRegistry registry, MetricsConfigFacade config)
                    throws IllegalArgumentException {
                com.codahale.metrics.Counter counter = registry.counter(name);
                return new CounterAdapter(counter, config.isMetricsEnabled());
            }

            @Override
            public boolean isInstance(Metric metric) {
                return Counter.class.isInstance(metric);
            }
        };

        MetricBuilder<Histogram> HISTOGRAMS = new MetricBuilder<Histogram>() {
            @Override
            public Histogram createMetric(String name, MetricRegistry registry, MetricsConfigFacade config)
                    throws IllegalArgumentException {
                com.codahale.metrics.Histogram histogram = registry.histogram(name);
                return new HistogramAdapter(histogram, config.isMetricsEnabled());
            }

            @Override
            public boolean isInstance(Metric metric) {
                return Histogram.class.isInstance(metric);
            }
        };

        MetricBuilder<Meter> METERS = new MetricBuilder<Meter>() {
            @Override
            public Meter createMetric(String name, MetricRegistry registry, MetricsConfigFacade config)
                    throws IllegalArgumentException {
                com.codahale.metrics.Meter meter = registry.meter(name);
                return new MeterAdapter(meter, config.isMetricsEnabled());
            }

            @Override
            public boolean isInstance(Metric metric) {
                return Meter.class.isInstance(metric);
            }
        };

        MetricBuilder<Timer> TIMERS = new MetricBuilder<Timer>() {
            @Override
            public Timer createMetric(String name, MetricRegistry registry, MetricsConfigFacade config)
                    throws IllegalArgumentException {
                com.codahale.metrics.Timer timer = registry.timer(name);
                return new TimerAdapter(timer, config.isMetricsEnabled());
            }

            @Override
            public boolean isInstance(Metric metric) {
                return Timer.class.isInstance(metric);
            }
        };

        /**
         * Create and return a new metric.
         *
         * @param name the name to associate with the metric
         * @param registry the metric registry
         * @param config the module configuration
         * @return an implementation of a metric of the appropriate type
         * @throws IllegalArgumentException if the provided name is associated with a different type of metric
         */
        T createMetric(String name, MetricRegistry registry, MetricsConfigFacade config) throws IllegalArgumentException;

        /**
         * Test whether the provided metric is the same type the instance of the builder is responsible for making.
         *
         * @param metric the metric to test
         * @return true if the type of metric is the same type that the builder makes, false otherwise.
         */
        boolean isInstance(Metric metric);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.runtime.taskexecutor;

import org.apache.flink.api.common.JobID;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.java.tuple.Tuple5;
import org.apache.flink.runtime.blob.TransientBlobKey;
import org.apache.flink.runtime.checkpoint.CheckpointOptions;
import org.apache.flink.runtime.clusterframework.types.AllocationID;
import org.apache.flink.runtime.clusterframework.types.ResourceID;
import org.apache.flink.runtime.clusterframework.types.SlotID;
import org.apache.flink.runtime.concurrent.FutureUtils;
import org.apache.flink.runtime.deployment.TaskDeploymentDescriptor;
import org.apache.flink.runtime.executiongraph.ExecutionAttemptID;
import org.apache.flink.runtime.executiongraph.PartitionInfo;
import org.apache.flink.runtime.io.network.partition.ResultPartitionID;
import org.apache.flink.runtime.jobmaster.AllocatedSlotReport;
import org.apache.flink.runtime.jobmaster.JobMasterId;
import org.apache.flink.runtime.messages.Acknowledge;
import org.apache.flink.runtime.messages.StackTraceSampleResponse;
import org.apache.flink.runtime.resourcemanager.ResourceManagerId;
import org.apache.flink.types.SerializableOptional;
import org.apache.flink.util.Preconditions;

import java.util.Collection;
import java.util.concurrent.CompletableFuture;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;

/**
 * Simple {@link TaskExecutorGateway} implementation for testing purposes.
 *
 * <p>Each RPC method delegates to an injected callback so individual tests can observe or
 * stub the interaction; unimplemented calls either acknowledge immediately or throw
 * {@link UnsupportedOperationException}.
 */
public class TestingTaskExecutorGateway implements TaskExecutorGateway {

	private final String address;

	private final String hostname;

	private final BiConsumer<ResourceID, AllocatedSlotReport> heartbeatJobManagerConsumer;

	private final BiConsumer<JobID, Throwable> disconnectJobManagerConsumer;

	private final BiFunction<TaskDeploymentDescriptor, JobMasterId, CompletableFuture<Acknowledge>> submitTaskConsumer;

	private final Function<Tuple5<SlotID, JobID, AllocationID, String, ResourceManagerId>, CompletableFuture<Acknowledge>> requestSlotFunction;

	private final BiFunction<AllocationID, Throwable, CompletableFuture<Acknowledge>> freeSlotFunction;

	private final Consumer<ResourceID> heartbeatResourceManagerConsumer;

	private final Consumer<Exception> disconnectResourceManagerConsumer;

	private final Function<ExecutionAttemptID, CompletableFuture<Acknowledge>> cancelTaskFunction;

	private final Supplier<CompletableFuture<Boolean>> canBeReleasedSupplier;

	private final BiConsumer<JobID, Collection<ResultPartitionID>> releasePartitionsConsumer;

	/**
	 * Creates the testing gateway.
	 *
	 * <p>Fix over the previous version: all injected callbacks are now null-checked, not just the
	 * first five — previously a {@code null} callback surfaced only later as an NPE at the call
	 * site instead of failing fast at construction.
	 */
	TestingTaskExecutorGateway(
			String address,
			String hostname,
			BiConsumer<ResourceID, AllocatedSlotReport> heartbeatJobManagerConsumer,
			BiConsumer<JobID, Throwable> disconnectJobManagerConsumer,
			BiFunction<TaskDeploymentDescriptor, JobMasterId, CompletableFuture<Acknowledge>> submitTaskConsumer,
			Function<Tuple5<SlotID, JobID, AllocationID, String, ResourceManagerId>, CompletableFuture<Acknowledge>> requestSlotFunction,
			BiFunction<AllocationID, Throwable, CompletableFuture<Acknowledge>> freeSlotFunction,
			Consumer<ResourceID> heartbeatResourceManagerConsumer,
			Consumer<Exception> disconnectResourceManagerConsumer,
			Function<ExecutionAttemptID, CompletableFuture<Acknowledge>> cancelTaskFunction,
			Supplier<CompletableFuture<Boolean>> canBeReleasedSupplier,
			BiConsumer<JobID, Collection<ResultPartitionID>> releasePartitionsConsumer) {

		this.address = Preconditions.checkNotNull(address);
		this.hostname = Preconditions.checkNotNull(hostname);
		this.heartbeatJobManagerConsumer = Preconditions.checkNotNull(heartbeatJobManagerConsumer);
		this.disconnectJobManagerConsumer = Preconditions.checkNotNull(disconnectJobManagerConsumer);
		this.submitTaskConsumer = Preconditions.checkNotNull(submitTaskConsumer);
		this.requestSlotFunction = Preconditions.checkNotNull(requestSlotFunction);
		this.freeSlotFunction = Preconditions.checkNotNull(freeSlotFunction);
		this.heartbeatResourceManagerConsumer = Preconditions.checkNotNull(heartbeatResourceManagerConsumer);
		this.disconnectResourceManagerConsumer = Preconditions.checkNotNull(disconnectResourceManagerConsumer);
		this.cancelTaskFunction = Preconditions.checkNotNull(cancelTaskFunction);
		this.canBeReleasedSupplier = Preconditions.checkNotNull(canBeReleasedSupplier);
		this.releasePartitionsConsumer = Preconditions.checkNotNull(releasePartitionsConsumer);
	}

	@Override
	public CompletableFuture<Acknowledge> requestSlot(SlotID slotId, JobID jobId, AllocationID allocationId, String targetAddress, ResourceManagerId resourceManagerId, Time timeout) {
		return requestSlotFunction.apply(Tuple5.of(slotId, jobId, allocationId, targetAddress, resourceManagerId));
	}

	@Override
	public CompletableFuture<StackTraceSampleResponse> requestStackTraceSample(
			final ExecutionAttemptID executionAttemptId,
			final int sampleId,
			final int numSamples,
			final Time delayBetweenSamples,
			final int maxStackTraceDepth,
			final Time timeout) {
		// Not needed by any current test; fail loudly rather than return a fake sample.
		throw new UnsupportedOperationException();
	}

	@Override
	public CompletableFuture<Acknowledge> submitTask(TaskDeploymentDescriptor tdd, JobMasterId jobMasterId, Time timeout) {
		return submitTaskConsumer.apply(tdd, jobMasterId);
	}

	@Override
	public CompletableFuture<Acknowledge> updatePartitions(ExecutionAttemptID executionAttemptID, Iterable<PartitionInfo> partitionInfos, Time timeout) {
		// Always acknowledge; tests that care about partitions use releasePartitionsConsumer.
		return CompletableFuture.completedFuture(Acknowledge.get());
	}

	@Override
	public void releasePartitions(JobID jobId, Collection<ResultPartitionID> partitionIds) {
		releasePartitionsConsumer.accept(jobId, partitionIds);
	}

	@Override
	public CompletableFuture<Acknowledge> triggerCheckpoint(ExecutionAttemptID executionAttemptID, long checkpointID, long checkpointTimestamp, CheckpointOptions checkpointOptions, boolean advanceToEndOfEventTime) {
		return CompletableFuture.completedFuture(Acknowledge.get());
	}

	@Override
	public CompletableFuture<Acknowledge> confirmCheckpoint(ExecutionAttemptID executionAttemptID, long checkpointId, long checkpointTimestamp) {
		return CompletableFuture.completedFuture(Acknowledge.get());
	}

	@Override
	public CompletableFuture<Acknowledge> cancelTask(ExecutionAttemptID executionAttemptID, Time timeout) {
		return cancelTaskFunction.apply(executionAttemptID);
	}

	@Override
	public void heartbeatFromJobManager(ResourceID heartbeatOrigin, AllocatedSlotReport allocatedSlotReport) {
		heartbeatJobManagerConsumer.accept(heartbeatOrigin, allocatedSlotReport);
	}

	@Override
	public void heartbeatFromResourceManager(ResourceID heartbeatOrigin) {
		heartbeatResourceManagerConsumer.accept(heartbeatOrigin);
	}

	@Override
	public void disconnectJobManager(JobID jobId, Exception cause) {
		disconnectJobManagerConsumer.accept(jobId, cause);
	}

	@Override
	public void disconnectResourceManager(Exception cause) {
		disconnectResourceManagerConsumer.accept(cause);
	}

	@Override
	public CompletableFuture<Acknowledge> freeSlot(AllocationID allocationId, Throwable cause, Time timeout) {
		return freeSlotFunction.apply(allocationId, cause);
	}

	@Override
	public CompletableFuture<TransientBlobKey> requestFileUpload(FileType fileType, Time timeout) {
		// File upload is not supported by the testing gateway.
		return FutureUtils.completedExceptionally(new UnsupportedOperationException());
	}

	@Override
	public CompletableFuture<SerializableOptional<String>> requestMetricQueryServiceAddress(Time timeout) {
		return CompletableFuture.completedFuture(SerializableOptional.empty());
	}

	@Override
	public CompletableFuture<Boolean> canBeReleased() {
		return canBeReleasedSupplier.get();
	}

	@Override
	public String getAddress() {
		return address;
	}

	@Override
	public String getHostname() {
		return hostname;
	}
}
/** * Package: MAG - VistA Imaging WARNING: Per VHA Directive 2004-038, this routine should not be modified. Date Created: Dec 19, 2006 Site Name: Washington OI Field Office, Silver Spring, MD Developer: VHAISWWERFEJ Description: ;; +--------------------------------------------------------------------+ ;; Property of the US Government. ;; No permission to copy or redistribute this software is given. ;; Use of unreleased versions of this software requires the user ;; to execute a written test agreement with the VistA Imaging ;; Development Office of the Department of Veterans Affairs, ;; telephone (301) 734-0100. ;; ;; The Food and Drug Administration classifies this software as ;; a Class II medical device. As such, it may not be changed ;; in any way. Modifications to this software may result in an ;; adulterated medical device under 21CFR820, the use of which ;; is considered to be a violation of US Federal Statutes. ;; +--------------------------------------------------------------------+ */ package gov.va.med.imaging.exchange; /** * Log Event to be put into the database. 
Logs Image access, copy access and print access * * @author VHAISWWERFEJ * */ public class ImageAccessLogEvent { public enum ImageAccessLogEventType { IMAGE_ACCESS, IMAGE_COPY, IMAGE_PRINT, PATIENT_ID_MISMATCH, RESTRICTED_ACCESS; @Override public String toString() { if(this == IMAGE_ACCESS) { return "Image_Access"; } else if(this == IMAGE_COPY) { return "Image_Copy"; } else if(this == IMAGE_PRINT) { return "Image_Print"; } else if(this == PATIENT_ID_MISMATCH) { return "Patient_ID_Mismatch"; } else if(this == RESTRICTED_ACCESS) { return "Restricted Access"; } // TODO Auto-generated method stub return super.toString(); } } private final String imageIen; private String patientDfn = null; // the patient DFN is mutable private final String patientIcn; private final long imageAccessEventTime; private final String reasonCode; // if the event is a copy or print a reason must be given private final String reasonDescription; private final String siteNumber; private final ImageAccessLogEventType eventType; private final boolean dodImage; // determines if the image being looked at is a DOD image being accessed by a VA user private String decodedImageIen = null; private final String userSiteNumber; /** * Constructor to use when logging access to a VA image and the patient DFN is unknown. * * @param imageIen * @param patientIcn * @param siteNumber * @param eventTime * @param reason * @param eventType */ /* public ImageAccessLogEvent( String imageIen, String patientIcn, String siteNumber, long eventTime, String reason, ImageAccessLogEventType eventType) { this(imageIen, null, patientIcn, siteNumber, eventTime, reason, eventType); } */ /** * Constructor to use when logging access to a VA image. 
* * @param imageIen * @param patientDfn * @param patientIcn * @param siteNumber * @param eventTime * @param reason * @param eventType */ public ImageAccessLogEvent( String imageIen, String patientDfn, String patientIcn, String siteNumber, long eventTime, String reasonCode, String reasonDescription, ImageAccessLogEventType eventType, String userSiteNumber) { this( imageIen, patientDfn, patientIcn, siteNumber, eventTime, reasonCode, reasonDescription, eventType, false, userSiteNumber ); } /** * Constructor to use for logging VA or DOD images. * @param imageIen * @param patientDfn * @param patientIcn * @param siteNumber * @param eventTime * @param reason * @param eventType * @param dodImage */ public ImageAccessLogEvent( String imageIen, String patientDfn, String patientIcn, String siteNumber, long eventTime, String reasonCode, String reasonDescription, ImageAccessLogEventType eventType, boolean dodImage, String userSiteNumber) { this.dodImage = dodImage; this.imageIen = imageIen; this.patientIcn = patientIcn; this.patientDfn = patientDfn; this.imageAccessEventTime = eventTime; this.reasonCode = reasonCode; this.siteNumber = siteNumber; this.eventType = eventType; this.userSiteNumber = userSiteNumber; this.reasonDescription = reasonDescription; } /** * @return the imageAccessEventTime */ public long getImageAccessEventTime() { return imageAccessEventTime; } /** * @return the imageIen */ public String getImageIen() { return imageIen; } /** * @return the patientDfn */ public String getPatientDfn() { return patientDfn; } /** * @param patientDfn the patientDfn to set */ public void setPatientDfn(String patientDfn) { this.patientDfn = patientDfn; } /** * @return the siteNumber */ public String getSiteNumber() { return siteNumber; } public String getReasonCode() { return reasonCode; } public String getReasonDescription() { return reasonDescription; } /** * @return the eventType */ public ImageAccessLogEventType getEventType() { return eventType; } /** * @return the 
patientIcn */ public String getPatientIcn() { return patientIcn; } /** * determines if the image being looked at is a DOD image being accessed by a VA user * @return the dodImage */ public boolean isDodImage() { return dodImage; } public String getDecodedImageIen() { return decodedImageIen; } public void setDecodedImageIen(String decodedImageIen) { this.decodedImageIen = decodedImageIen; } /** * This describes the user's site number - the site where the user viewed the image from. * This value is expected to be 200 for DOD or one of the other known VA site numbers (756, 660, 688, etc) * @return the userSiteNumber */ public String getUserSiteNumber() { return userSiteNumber; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((this.decodedImageIen == null) ? 0 : this.decodedImageIen.hashCode()); result = prime * result + (this.dodImage ? 1231 : 1237); result = prime * result + ((this.eventType == null) ? 0 : this.eventType.hashCode()); result = prime * result + (int) (this.imageAccessEventTime ^ (this.imageAccessEventTime >>> 32)); result = prime * result + ((this.imageIen == null) ? 0 : this.imageIen.hashCode()); result = prime * result + ((this.patientDfn == null) ? 0 : this.patientDfn.hashCode()); result = prime * result + ((this.patientIcn == null) ? 0 : this.patientIcn.hashCode()); result = prime * result + ((this.reasonCode == null) ? 0 : this.reasonCode.hashCode()); result = prime * result + ((this.reasonDescription == null) ? 0 : this.reasonDescription.hashCode()); result = prime * result + ((this.siteNumber == null) ? 0 : this.siteNumber.hashCode()); result = prime * result + ((this.userSiteNumber == null) ? 
0 : this.userSiteNumber.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; ImageAccessLogEvent other = (ImageAccessLogEvent) obj; if (this.decodedImageIen == null) { if (other.decodedImageIen != null) return false; } else if (!this.decodedImageIen.equals(other.decodedImageIen)) return false; if (this.dodImage != other.dodImage) return false; if (this.eventType == null) { if (other.eventType != null) return false; } else if (!this.eventType.equals(other.eventType)) return false; //if (this.imageAccessEventTime != other.imageAccessEventTime) // return false; if (this.imageIen == null) { if (other.imageIen != null) return false; } else if (!this.imageIen.equals(other.imageIen)) return false; if (this.patientIcn == null) { if (other.patientIcn != null) return false; } else if (!this.patientIcn.equals(other.patientIcn)) return false; if (this.reasonCode == null) { if (other.reasonCode != null) return false; } else if (!this.reasonCode.equals(other.reasonCode)) return false; if (this.reasonDescription == null) { if (other.reasonDescription != null) return false; } else if (!this.reasonDescription.equals(other.reasonDescription)) return false; if (this.siteNumber == null) { if (other.siteNumber != null) return false; } else if (!this.siteNumber.equals(other.siteNumber)) return false; if (this.userSiteNumber == null) { if (other.userSiteNumber != null) return false; } else if (!this.userSiteNumber.equals(other.userSiteNumber)) return false; return true; } }
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.remote; import static com.google.common.truth.Truth.assertThat; import build.bazel.remote.execution.v2.Digest; import build.bazel.remote.execution.v2.Directory; import com.google.common.collect.ImmutableCollection; import com.google.devtools.build.lib.actions.ActionInput; import com.google.devtools.build.lib.actions.ActionInputHelper; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.ArtifactRoot; import com.google.devtools.build.lib.actions.MetadataProvider; import com.google.devtools.build.lib.clock.BlazeClock; import com.google.devtools.build.lib.exec.SingleBuildFileCache; import com.google.devtools.build.lib.remote.TreeNodeRepository.TreeNode; import com.google.devtools.build.lib.remote.util.DigestUtil; import com.google.devtools.build.lib.testutil.Scratch; import com.google.devtools.build.lib.vfs.DigestHashFunction; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.Root; import com.google.devtools.build.lib.vfs.inmemoryfs.InMemoryFileSystem; import java.io.IOException; import java.util.HashMap; import java.util.Map; import java.util.SortedMap; import java.util.TreeMap; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; 
/** Tests for {@link TreeNodeRepository}. */
@RunWith(JUnit4.class)
public class TreeNodeRepositoryTest {
  private Scratch scratch;       // in-memory scratch file system for creating test inputs
  private DigestUtil digestUtil; // SHA-256 digest helper shared by all tests
  private Path execRoot;         // execution root that all input paths live under
  private ArtifactRoot rootDir;  // source root used when constructing Artifacts

  /** Sets up a fresh in-memory file system rooted at /exec/root before each test. */
  @Before
  public final void setRootDir() throws Exception {
    digestUtil = new DigestUtil(DigestHashFunction.SHA256);
    scratch = new Scratch(new InMemoryFileSystem(BlazeClock.instance(), DigestHashFunction.SHA256));
    execRoot = scratch.getFileSystem().getPath("/exec/root");
    rootDir = ArtifactRoot.asSourceRoot(Root.fromPath(scratch.dir("/exec/root")));
  }

  /** Builds a repository backed by a single-build file cache over the scratch file system. */
  private TreeNodeRepository createTestTreeNodeRepository() {
    MetadataProvider inputFileCache =
        new SingleBuildFileCache(execRoot.getPathString(), scratch.getFileSystem());
    return new TreeNodeRepository(execRoot, inputFileCache, digestUtil);
  }

  /** Sorts the inputs by exec path (the order the repository expects) and builds the tree. */
  private TreeNode buildFromActionInputs(TreeNodeRepository repo, ActionInput... inputs)
      throws IOException {
    TreeMap<PathFragment, ActionInput> sortedMap = new TreeMap<>();
    for (ActionInput input : inputs) {
      sortedMap.put(PathFragment.create(input.getExecPathString()), input);
    }
    return repo.buildFromActionInputs(sortedMap);
  }

  // Two trees sharing the identical "a" subtree must reuse the same TreeNode instance
  // (the repository interns nodes), hence the deliberate reference-equality check.
  @Test
  @SuppressWarnings("ReferenceEquality")
  public void testSubtreeReusage() throws Exception {
    Artifact fooCc = new Artifact(scratch.file("/exec/root/a/foo.cc"), rootDir);
    Artifact fooH = new Artifact(scratch.file("/exec/root/a/foo.h"), rootDir);
    Artifact bar = new Artifact(scratch.file("/exec/root/b/bar.txt"), rootDir);
    Artifact baz = new Artifact(scratch.file("/exec/root/c/baz.txt"), rootDir);
    TreeNodeRepository repo = createTestTreeNodeRepository();
    TreeNode root1 = buildFromActionInputs(repo, fooCc, fooH, bar);
    TreeNode root2 = buildFromActionInputs(repo, fooCc, fooH, baz);
    // Reusing same node for the "a" subtree.
    assertThat(
            root1.getChildEntries().get(0).getChild() == root2.getChildEntries().get(0).getChild())
        .isTrue();
  }

  // Verifies that Merkle digests are computed for every node and that
  // getDataFromDigests returns the matching Directory protos and file inputs.
  @Test
  public void testMerkleDigests() throws Exception {
    Artifact foo = new Artifact(scratch.file("/exec/root/a/foo", "1"), rootDir);
    Artifact bar = new Artifact(scratch.file("/exec/root/a/bar", "11"), rootDir);
    TreeNodeRepository repo = createTestTreeNodeRepository();
    TreeNode root = buildFromActionInputs(repo, foo, bar);
    TreeNode aNode = root.getChildEntries().get(0).getChild();
    TreeNode fooNode = aNode.getChildEntries().get(1).getChild(); // foo > bar in sort order!
    TreeNode barNode = aNode.getChildEntries().get(0).getChild();
    repo.computeMerkleDigests(root);
    ImmutableCollection<Digest> digests = repo.getAllDigests(root);
    Digest rootDigest = repo.getMerkleDigest(root);
    Digest aDigest = repo.getMerkleDigest(aNode);
    Digest fooDigest = repo.getMerkleDigest(fooNode); // The contents digest.
    Digest barDigest = repo.getMerkleDigest(barNode);
    assertThat(digests).containsExactly(rootDigest, aDigest, barDigest, fooDigest);
    Map<Digest, Directory> directories = new HashMap<>();
    Map<Digest, ActionInput> actionInputs = new HashMap<>();
    repo.getDataFromDigests(digests, actionInputs, directories);
    assertThat(actionInputs.values()).containsExactly(bar, foo);
    assertThat(directories).hasSize(2);
    Directory rootDirectory = directories.get(rootDigest);
    assertThat(rootDirectory.getDirectories(0).getName()).isEqualTo("a");
    assertThat(rootDirectory.getDirectories(0).getDigest()).isEqualTo(aDigest);
    Directory aDirectory = directories.get(aDigest);
    assertThat(aDirectory.getFiles(0).getName()).isEqualTo("bar");
    assertThat(aDirectory.getFiles(0).getDigest()).isEqualTo(barDigest);
    assertThat(aDirectory.getFiles(1).getName()).isEqualTo("foo");
    assertThat(aDirectory.getFiles(1).getDigest()).isEqualTo(fooDigest);
  }

  // Three files with identical contents ("1") should dedupe to a single content digest.
  @Test
  public void testGetAllDigests() throws Exception {
    Artifact foo1 = new Artifact(scratch.file("/exec/root/a/foo", "1"), rootDir);
    Artifact foo2 = new Artifact(scratch.file("/exec/root/b/foo", "1"), rootDir);
    Artifact foo3 = new Artifact(scratch.file("/exec/root/c/foo", "1"), rootDir);
    TreeNodeRepository repo = createTestTreeNodeRepository();
    TreeNode root = buildFromActionInputs(repo, foo1, foo2, foo3);
    repo.computeMerkleDigests(root);
    // Reusing same node for the "foo" subtree: only need the root, root child, and foo contents:
    assertThat(repo.getAllDigests(root)).hasSize(3);
  }

  // An empty input map should produce a valid, childless root rather than failing.
  @Test
  public void testEmptyTree() throws Exception {
    SortedMap<PathFragment, ActionInput> inputs = new TreeMap<>();
    TreeNodeRepository repo = createTestTreeNodeRepository();
    TreeNode root = repo.buildFromActionInputs(inputs);
    repo.computeMerkleDigests(root);
    assertThat(root.getChildEntries()).isEmpty();
  }

  // A directory passed as an input is expanded into its files; directory names like
  // "a-client" must sort after "a" ('-' sorts after the implicit end of "a").
  @Test
  public void testDirectoryInput() throws Exception {
    Artifact foo = new Artifact(scratch.dir("/exec/root/a/foo"), rootDir);
    scratch.file("/exec/root/a/foo/foo.h", "1");
    ActionInput fooH = ActionInputHelper.fromPath("/exec/root/a/foo/foo.h");
    scratch.file("/exec/root/a/foo/foo.cc", "2");
    ActionInput fooCc = ActionInputHelper.fromPath("/exec/root/a/foo/foo.cc");
    Artifact bar = new Artifact(scratch.file("/exec/root/a/bar.txt"), rootDir);
    TreeNodeRepository repo = createTestTreeNodeRepository();
    Artifact aClient = new Artifact(scratch.dir("/exec/root/a-client"), rootDir);
    scratch.file("/exec/root/a-client/baz.txt", "3");
    ActionInput baz = ActionInputHelper.fromPath("/exec/root/a-client/baz.txt");
    TreeNode root = buildFromActionInputs(repo, foo, aClient, bar);
    TreeNode aNode = root.getChildEntries().get(0).getChild();
    TreeNode fooNode = aNode.getChildEntries().get(1).getChild(); // foo > bar in sort order!
    TreeNode barNode = aNode.getChildEntries().get(0).getChild();
    TreeNode aClientNode = root.getChildEntries().get(1).getChild(); // a-client > a in sort order
    TreeNode bazNode = aClientNode.getChildEntries().get(0).getChild();
    TreeNode fooHNode = fooNode.getChildEntries().get(1).getChild(); // foo.h > foo.cc in sort order!
    TreeNode fooCcNode = fooNode.getChildEntries().get(0).getChild();
    repo.computeMerkleDigests(root);
    ImmutableCollection<Digest> digests = repo.getAllDigests(root);
    Digest rootDigest = repo.getMerkleDigest(root);
    Digest aDigest = repo.getMerkleDigest(aNode);
    Digest fooDigest = repo.getMerkleDigest(fooNode);
    Digest fooHDigest = repo.getMerkleDigest(fooHNode);
    Digest fooCcDigest = repo.getMerkleDigest(fooCcNode);
    Digest aClientDigest = repo.getMerkleDigest(aClientNode);
    Digest bazDigest = repo.getMerkleDigest(bazNode);
    Digest barDigest = repo.getMerkleDigest(barNode);
    assertThat(digests)
        .containsExactly(
            rootDigest,
            aDigest,
            barDigest,
            fooDigest,
            fooCcDigest,
            fooHDigest,
            aClientDigest,
            bazDigest);
    Map<Digest, Directory> directories = new HashMap<>();
    Map<Digest, ActionInput> actionInputs = new HashMap<>();
    repo.getDataFromDigests(digests, actionInputs, directories);
    assertThat(actionInputs.values()).containsExactly(bar, fooH, fooCc, baz);
    assertThat(directories).hasSize(4); // root, root/a, root/a/foo, and root/a-client
    Directory rootDirectory = directories.get(rootDigest);
    assertThat(rootDirectory.getDirectories(0).getName()).isEqualTo("a");
    assertThat(rootDirectory.getDirectories(0).getDigest()).isEqualTo(aDigest);
    assertThat(rootDirectory.getDirectories(1).getName()).isEqualTo("a-client");
    assertThat(rootDirectory.getDirectories(1).getDigest()).isEqualTo(aClientDigest);
    Directory aDirectory = directories.get(aDigest);
    assertThat(aDirectory.getFiles(0).getName()).isEqualTo("bar.txt");
    assertThat(aDirectory.getFiles(0).getDigest()).isEqualTo(barDigest);
    assertThat(aDirectory.getDirectories(0).getName()).isEqualTo("foo");
    assertThat(aDirectory.getDirectories(0).getDigest()).isEqualTo(fooDigest);
    Directory fooDirectory = directories.get(fooDigest);
    assertThat(fooDirectory.getFiles(0).getName()).isEqualTo("foo.cc");
    assertThat(fooDirectory.getFiles(0).getDigest()).isEqualTo(fooCcDigest);
    assertThat(fooDirectory.getFiles(1).getName()).isEqualTo("foo.h");
    assertThat(fooDirectory.getFiles(1).getDigest()).isEqualTo(fooHDigest);
    Directory aClientDirectory = directories.get(aClientDigest);
    assertThat(aClientDirectory.getFiles(0).getName()).isEqualTo("baz.txt");
    assertThat(aClientDirectory.getFiles(0).getDigest()).isEqualTo(bazDigest);
  }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.percolator; import org.apache.lucene.index.Term; import org.apache.lucene.queries.BlendedTermQuery; import org.apache.lucene.queries.CommonTermsQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.DisjunctionMaxQuery; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.MultiPhraseQuery; import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.SynonymQuery; import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.search.spans.SpanFirstQuery; import org.apache.lucene.search.spans.SpanNearQuery; import org.apache.lucene.search.spans.SpanNotQuery; import org.apache.lucene.search.spans.SpanOrQuery; import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; import 
org.elasticsearch.common.lucene.search.function.RandomScoreFunction; import org.elasticsearch.percolator.QueryAnalyzer.Result; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; import static org.elasticsearch.percolator.QueryAnalyzer.UnsupportedQueryException; import static org.elasticsearch.percolator.QueryAnalyzer.analyze; import static org.elasticsearch.percolator.QueryAnalyzer.selectTermListWithTheLongestShortestTerm; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.sameInstance; public class QueryAnalyzerTests extends ESTestCase { public void testExtractQueryMetadata_termQuery() { TermQuery termQuery = new TermQuery(new Term("_field", "_term")); Result result = analyze(termQuery); assertThat(result.verified, is(true)); List<Term> terms = new ArrayList<>(result.terms); assertThat(terms.size(), equalTo(1)); assertThat(terms.get(0).field(), equalTo(termQuery.getTerm().field())); assertThat(terms.get(0).bytes(), equalTo(termQuery.getTerm().bytes())); } public void testExtractQueryMetadata_termsQuery() { TermInSetQuery termsQuery = new TermInSetQuery("_field", new BytesRef("_term1"), new BytesRef("_term2")); Result result = analyze(termsQuery); assertThat(result.verified, is(true)); List<Term> terms = new ArrayList<>(result.terms); Collections.sort(terms); assertThat(terms.size(), equalTo(2)); assertThat(terms.get(0).field(), equalTo("_field")); assertThat(terms.get(0).text(), equalTo("_term1")); assertThat(terms.get(1).field(), equalTo("_field")); assertThat(terms.get(1).text(), equalTo("_term2")); } public void testExtractQueryMetadata_phraseQuery() { PhraseQuery phraseQuery = new PhraseQuery("_field", "_term1", "term2"); Result result = analyze(phraseQuery); assertThat(result.verified, is(false)); List<Term> terms = new ArrayList<>(result.terms); 
assertThat(terms.size(), equalTo(1)); assertThat(terms.get(0).field(), equalTo(phraseQuery.getTerms()[0].field())); assertThat(terms.get(0).bytes(), equalTo(phraseQuery.getTerms()[0].bytes())); } public void testExtractQueryMetadata_multiPhraseQuery() { MultiPhraseQuery multiPhraseQuery = new MultiPhraseQuery.Builder() .add(new Term("_field", "_long_term")) .add(new Term[] {new Term("_field", "_long_term"), new Term("_field", "_term")}) .add(new Term[] {new Term("_field", "_long_term"), new Term("_field", "_very_long_term")}) .add(new Term[] {new Term("_field", "_very_long_term")}) .build(); Result result = analyze(multiPhraseQuery); assertThat(result.verified, is(false)); List<Term> terms = new ArrayList<>(result.terms); assertThat(terms.size(), equalTo(1)); assertThat(terms.get(0).field(), equalTo("_field")); assertThat(terms.get(0).bytes().utf8ToString(), equalTo("_very_long_term")); } public void testExtractQueryMetadata_booleanQuery() { BooleanQuery.Builder builder = new BooleanQuery.Builder(); TermQuery termQuery1 = new TermQuery(new Term("_field", "_term")); builder.add(termQuery1, BooleanClause.Occur.SHOULD); PhraseQuery phraseQuery = new PhraseQuery("_field", "_term1", "term2"); builder.add(phraseQuery, BooleanClause.Occur.SHOULD); BooleanQuery.Builder subBuilder = new BooleanQuery.Builder(); TermQuery termQuery2 = new TermQuery(new Term("_field1", "_term")); subBuilder.add(termQuery2, BooleanClause.Occur.MUST); TermQuery termQuery3 = new TermQuery(new Term("_field3", "_long_term")); subBuilder.add(termQuery3, BooleanClause.Occur.MUST); builder.add(subBuilder.build(), BooleanClause.Occur.SHOULD); BooleanQuery booleanQuery = builder.build(); Result result = analyze(booleanQuery); assertThat("Should clause with phrase query isn't verified, so entire query can't be verified", result.verified, is(false)); List<Term> terms = new ArrayList<>(result.terms); Collections.sort(terms); assertThat(terms.size(), equalTo(3)); assertThat(terms.get(0).field(), 
equalTo(termQuery1.getTerm().field())); assertThat(terms.get(0).bytes(), equalTo(termQuery1.getTerm().bytes())); assertThat(terms.get(1).field(), equalTo(phraseQuery.getTerms()[0].field())); assertThat(terms.get(1).bytes(), equalTo(phraseQuery.getTerms()[0].bytes())); assertThat(terms.get(2).field(), equalTo(termQuery3.getTerm().field())); assertThat(terms.get(2).bytes(), equalTo(termQuery3.getTerm().bytes())); } public void testExtractQueryMetadata_booleanQuery_onlyShould() { BooleanQuery.Builder builder = new BooleanQuery.Builder(); TermQuery termQuery1 = new TermQuery(new Term("_field", "_term1")); builder.add(termQuery1, BooleanClause.Occur.SHOULD); TermQuery termQuery2 = new TermQuery(new Term("_field", "_term2")); builder.add(termQuery2, BooleanClause.Occur.SHOULD); BooleanQuery.Builder subBuilder = new BooleanQuery.Builder(); TermQuery termQuery3 = new TermQuery(new Term("_field1", "_term")); subBuilder.add(termQuery3, BooleanClause.Occur.SHOULD); TermQuery termQuery4 = new TermQuery(new Term("_field3", "_long_term")); subBuilder.add(termQuery4, BooleanClause.Occur.SHOULD); builder.add(subBuilder.build(), BooleanClause.Occur.SHOULD); BooleanQuery booleanQuery = builder.build(); Result result = analyze(booleanQuery); assertThat(result.verified, is(true)); List<Term> terms = new ArrayList<>(result.terms); Collections.sort(terms); assertThat(terms.size(), equalTo(4)); assertThat(terms.get(0).field(), equalTo(termQuery1.getTerm().field())); assertThat(terms.get(0).bytes(), equalTo(termQuery1.getTerm().bytes())); assertThat(terms.get(1).field(), equalTo(termQuery2.getTerm().field())); assertThat(terms.get(1).bytes(), equalTo(termQuery2.getTerm().bytes())); assertThat(terms.get(2).field(), equalTo(termQuery3.getTerm().field())); assertThat(terms.get(2).bytes(), equalTo(termQuery3.getTerm().bytes())); assertThat(terms.get(3).field(), equalTo(termQuery4.getTerm().field())); assertThat(terms.get(3).bytes(), equalTo(termQuery4.getTerm().bytes())); } public void 
testExtractQueryMetadata_booleanQueryWithMustNot() { BooleanQuery.Builder builder = new BooleanQuery.Builder(); TermQuery termQuery1 = new TermQuery(new Term("_field", "_term")); builder.add(termQuery1, BooleanClause.Occur.MUST_NOT); PhraseQuery phraseQuery = new PhraseQuery("_field", "_term1", "term2"); builder.add(phraseQuery, BooleanClause.Occur.SHOULD); BooleanQuery booleanQuery = builder.build(); Result result = analyze(booleanQuery); assertThat(result.verified, is(false)); List<Term> terms = new ArrayList<>(result.terms); assertThat(terms.size(), equalTo(1)); assertThat(terms.get(0).field(), equalTo(phraseQuery.getTerms()[0].field())); assertThat(terms.get(0).bytes(), equalTo(phraseQuery.getTerms()[0].bytes())); } public void testExactMatch_booleanQuery() { BooleanQuery.Builder builder = new BooleanQuery.Builder(); TermQuery termQuery1 = new TermQuery(new Term("_field", "_term1")); builder.add(termQuery1, BooleanClause.Occur.SHOULD); TermQuery termQuery2 = new TermQuery(new Term("_field", "_term2")); builder.add(termQuery2, BooleanClause.Occur.SHOULD); Result result = analyze(builder.build()); assertThat("All clauses are exact, so candidate matches are verified", result.verified, is(true)); builder = new BooleanQuery.Builder(); builder.add(termQuery1, BooleanClause.Occur.SHOULD); PhraseQuery phraseQuery1 = new PhraseQuery("_field", "_term1", "_term2"); builder.add(phraseQuery1, BooleanClause.Occur.SHOULD); result = analyze(builder.build()); assertThat("Clause isn't exact, so candidate matches are not verified", result.verified, is(false)); builder = new BooleanQuery.Builder(); builder.add(phraseQuery1, BooleanClause.Occur.SHOULD); PhraseQuery phraseQuery2 = new PhraseQuery("_field", "_term3", "_term4"); builder.add(phraseQuery2, BooleanClause.Occur.SHOULD); result = analyze(builder.build()); assertThat("No clause is exact, so candidate matches are not verified", result.verified, is(false)); builder = new BooleanQuery.Builder(); builder.add(termQuery1, 
BooleanClause.Occur.MUST_NOT); builder.add(termQuery2, BooleanClause.Occur.SHOULD); result = analyze(builder.build()); assertThat("There is a must_not clause, so candidate matches are not verified", result.verified, is(false)); builder = new BooleanQuery.Builder(); builder.setMinimumNumberShouldMatch(randomIntBetween(2, 32)); builder.add(termQuery1, BooleanClause.Occur.SHOULD); builder.add(termQuery2, BooleanClause.Occur.SHOULD); result = analyze(builder.build()); assertThat("Minimum match is >= 1, so candidate matches are not verified", result.verified, is(false)); builder = new BooleanQuery.Builder(); builder.add(termQuery1, randomBoolean() ? BooleanClause.Occur.MUST : BooleanClause.Occur.FILTER); result = analyze(builder.build()); assertThat("Single required clause, so candidate matches are verified", result.verified, is(false)); builder = new BooleanQuery.Builder(); builder.add(termQuery1, randomBoolean() ? BooleanClause.Occur.MUST : BooleanClause.Occur.FILTER); builder.add(termQuery2, randomBoolean() ? BooleanClause.Occur.MUST : BooleanClause.Occur.FILTER); result = analyze(builder.build()); assertThat("Two or more required clauses, so candidate matches are not verified", result.verified, is(false)); builder = new BooleanQuery.Builder(); builder.add(termQuery1, randomBoolean() ? 
BooleanClause.Occur.MUST : BooleanClause.Occur.FILTER); builder.add(termQuery2, BooleanClause.Occur.MUST_NOT); result = analyze(builder.build()); assertThat("Required and prohibited clauses, so candidate matches are not verified", result.verified, is(false)); } public void testExtractQueryMetadata_constantScoreQuery() { TermQuery termQuery1 = new TermQuery(new Term("_field", "_term")); ConstantScoreQuery constantScoreQuery = new ConstantScoreQuery(termQuery1); Result result = analyze(constantScoreQuery); assertThat(result.verified, is(true)); List<Term> terms = new ArrayList<>(result.terms); assertThat(terms.size(), equalTo(1)); assertThat(terms.get(0).field(), equalTo(termQuery1.getTerm().field())); assertThat(terms.get(0).bytes(), equalTo(termQuery1.getTerm().bytes())); } public void testExtractQueryMetadata_boostQuery() { TermQuery termQuery1 = new TermQuery(new Term("_field", "_term")); BoostQuery constantScoreQuery = new BoostQuery(termQuery1, 1f); Result result = analyze(constantScoreQuery); assertThat(result.verified, is(true)); List<Term> terms = new ArrayList<>(result.terms); assertThat(terms.size(), equalTo(1)); assertThat(terms.get(0).field(), equalTo(termQuery1.getTerm().field())); assertThat(terms.get(0).bytes(), equalTo(termQuery1.getTerm().bytes())); } public void testExtractQueryMetadata_commonTermsQuery() { CommonTermsQuery commonTermsQuery = new CommonTermsQuery(BooleanClause.Occur.SHOULD, BooleanClause.Occur.SHOULD, 100); commonTermsQuery.add(new Term("_field", "_term1")); commonTermsQuery.add(new Term("_field", "_term2")); Result result = analyze(commonTermsQuery); assertThat(result.verified, is(false)); List<Term> terms = new ArrayList<>(result.terms); Collections.sort(terms); assertThat(terms.size(), equalTo(2)); assertThat(terms.get(0).field(), equalTo("_field")); assertThat(terms.get(0).text(), equalTo("_term1")); assertThat(terms.get(1).field(), equalTo("_field")); assertThat(terms.get(1).text(), equalTo("_term2")); } public void 
testExtractQueryMetadata_blendedTermQuery() { Term[] termsArr = new Term[]{new Term("_field", "_term1"), new Term("_field", "_term2")}; BlendedTermQuery commonTermsQuery = BlendedTermQuery.booleanBlendedQuery(termsArr); Result result = analyze(commonTermsQuery); assertThat(result.verified, is(true)); List<Term> terms = new ArrayList<>(result.terms); Collections.sort(terms); assertThat(terms.size(), equalTo(2)); assertThat(terms.get(0).field(), equalTo("_field")); assertThat(terms.get(0).text(), equalTo("_term1")); assertThat(terms.get(1).field(), equalTo("_field")); assertThat(terms.get(1).text(), equalTo("_term2")); } public void testExtractQueryMetadata_spanTermQuery() { // the following span queries aren't exposed in the query dsl and are therefor not supported: // 1) SpanPositionRangeQuery // 2) PayloadScoreQuery // 3) SpanBoostQuery // The following span queries can't be supported because of how these queries work: // 1) SpanMultiTermQueryWrapper, not supported, because there is no support for MTQ typed queries yet. 
// 2) SpanContainingQuery, is kind of range of spans and we don't know what is between the little and big terms // 3) SpanWithinQuery, same reason as SpanContainingQuery // 4) FieldMaskingSpanQuery is a tricky query so we shouldn't optimize this SpanTermQuery spanTermQuery1 = new SpanTermQuery(new Term("_field", "_short_term")); Result result = analyze(spanTermQuery1); assertThat(result.verified, is(true)); assertTermsEqual(result.terms, spanTermQuery1.getTerm()); } public void testExtractQueryMetadata_spanNearQuery() { SpanTermQuery spanTermQuery1 = new SpanTermQuery(new Term("_field", "_short_term")); SpanTermQuery spanTermQuery2 = new SpanTermQuery(new Term("_field", "_very_long_term")); SpanNearQuery spanNearQuery = new SpanNearQuery.Builder("_field", true) .addClause(spanTermQuery1).addClause(spanTermQuery2).build(); Result result = analyze(spanNearQuery); assertThat(result.verified, is(false)); assertTermsEqual(result.terms, spanTermQuery2.getTerm()); } public void testExtractQueryMetadata_spanOrQuery() { SpanTermQuery spanTermQuery1 = new SpanTermQuery(new Term("_field", "_short_term")); SpanTermQuery spanTermQuery2 = new SpanTermQuery(new Term("_field", "_very_long_term")); SpanOrQuery spanOrQuery = new SpanOrQuery(spanTermQuery1, spanTermQuery2); Result result = analyze(spanOrQuery); assertThat(result.verified, is(false)); assertTermsEqual(result.terms, spanTermQuery1.getTerm(), spanTermQuery2.getTerm()); } public void testExtractQueryMetadata_spanFirstQuery() { SpanTermQuery spanTermQuery1 = new SpanTermQuery(new Term("_field", "_short_term")); SpanFirstQuery spanFirstQuery = new SpanFirstQuery(spanTermQuery1, 20); Result result = analyze(spanFirstQuery); assertThat(result.verified, is(false)); assertTermsEqual(result.terms, spanTermQuery1.getTerm()); } public void testExtractQueryMetadata_spanNotQuery() { SpanTermQuery spanTermQuery1 = new SpanTermQuery(new Term("_field", "_short_term")); SpanTermQuery spanTermQuery2 = new SpanTermQuery(new 
Term("_field", "_very_long_term")); SpanNotQuery spanNotQuery = new SpanNotQuery(spanTermQuery1, spanTermQuery2); Result result = analyze(spanNotQuery); assertThat(result.verified, is(false)); assertTermsEqual(result.terms, spanTermQuery1.getTerm()); } public void testExtractQueryMetadata_matchNoDocsQuery() { Result result = analyze(new MatchNoDocsQuery("sometimes there is no reason at all")); assertThat(result.verified, is(true)); assertEquals(0, result.terms.size()); BooleanQuery.Builder bq = new BooleanQuery.Builder(); bq.add(new TermQuery(new Term("field", "value")), BooleanClause.Occur.MUST); bq.add(new MatchNoDocsQuery("sometimes there is no reason at all"), BooleanClause.Occur.MUST); result = analyze(bq.build()); assertThat(result.verified, is(false)); assertEquals(0, result.terms.size()); bq = new BooleanQuery.Builder(); bq.add(new TermQuery(new Term("field", "value")), BooleanClause.Occur.SHOULD); bq.add(new MatchNoDocsQuery("sometimes there is no reason at all"), BooleanClause.Occur.SHOULD); result = analyze(bq.build()); assertThat(result.verified, is(true)); assertTermsEqual(result.terms, new Term("field", "value")); DisjunctionMaxQuery disjunctionMaxQuery = new DisjunctionMaxQuery( Arrays.asList(new TermQuery(new Term("field", "value")), new MatchNoDocsQuery("sometimes there is no reason at all")), 1f ); result = analyze(disjunctionMaxQuery); assertThat(result.verified, is(true)); assertTermsEqual(result.terms, new Term("field", "value")); } public void testExtractQueryMetadata_matchAllDocsQuery() { expectThrows(UnsupportedQueryException.class, () -> analyze(new MatchAllDocsQuery())); BooleanQuery.Builder builder = new BooleanQuery.Builder(); builder.add(new TermQuery(new Term("field", "value")), BooleanClause.Occur.MUST); builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST); Result result = analyze(builder.build()); assertThat(result.verified, is(false)); assertTermsEqual(result.terms, new Term("field", "value")); builder = new 
BooleanQuery.Builder();
        // NOTE(review): this span continues an expression begun on the previous
        // (unseen) line, which presumably ends with "builder = new ".
        // Three MUST clauses of MatchAllDocsQuery: expected to be rejected by analyze().
        builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
        builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
        builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
        BooleanQuery bq1 = builder.build();
        expectThrows(UnsupportedQueryException.class, () -> analyze(bq1));
        // MUST_NOT mixed with MUST match-all clauses: also unsupported.
        builder = new BooleanQuery.Builder();
        builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST_NOT);
        builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
        builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
        BooleanQuery bq2 = builder.build();
        expectThrows(UnsupportedQueryException.class, () -> analyze(bq2));
        // All-SHOULD match-all clauses: unsupported.
        builder = new BooleanQuery.Builder();
        builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD);
        builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD);
        builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD);
        BooleanQuery bq3 = builder.build();
        expectThrows(UnsupportedQueryException.class, () -> analyze(bq3));
        // MUST_NOT plus SHOULD match-all clauses: unsupported.
        builder = new BooleanQuery.Builder();
        builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST_NOT);
        builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD);
        builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD);
        BooleanQuery bq4 = builder.build();
        expectThrows(UnsupportedQueryException.class, () -> analyze(bq4));
        // Even a concrete term SHOULD clause cannot rescue a SHOULD match-all sibling.
        builder = new BooleanQuery.Builder();
        builder.add(new TermQuery(new Term("field", "value")), BooleanClause.Occur.SHOULD);
        builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD);
        BooleanQuery bq5 = builder.build();
        expectThrows(UnsupportedQueryException.class, () -> analyze(bq5));
    }

    /**
     * An unsupported query type (TermRangeQuery) must raise
     * UnsupportedQueryException carrying the offending query — both standalone
     * and as a SHOULD clause inside a BooleanQuery.
     */
    public void testExtractQueryMetadata_unsupportedQuery() {
        TermRangeQuery termRangeQuery = new TermRangeQuery("_field", null, null, true, false);
        UnsupportedQueryException e = expectThrows(UnsupportedQueryException.class, () -> analyze(termRangeQuery));
        assertThat(e.getUnsupportedQuery(), sameInstance(termRangeQuery));
        TermQuery termQuery1 = new TermQuery(new Term("_field", "_term"));
        BooleanQuery.Builder builder = new BooleanQuery.Builder();
        builder.add(termQuery1, BooleanClause.Occur.SHOULD);
        builder.add(termRangeQuery, BooleanClause.Occur.SHOULD);
        BooleanQuery bq = builder.build();
        e = expectThrows(UnsupportedQueryException.class, () -> analyze(bq));
        assertThat(e.getUnsupportedQuery(), sameInstance(termRangeQuery));
    }

    /**
     * When an unsupported query appears among MUST clauses, analysis can still
     * succeed (unverified) as long as at least one supported MUST clause
     * provides terms; with no supported clause left, the exception propagates.
     */
    public void testExtractQueryMetadata_unsupportedQueryInBoolQueryWithMustClauses() {
        TermRangeQuery unsupportedQuery = new TermRangeQuery("_field", null, null, true, false);
        TermQuery termQuery1 = new TermQuery(new Term("_field", "_term"));
        BooleanQuery.Builder builder = new BooleanQuery.Builder();
        builder.add(termQuery1, BooleanClause.Occur.MUST);
        builder.add(unsupportedQuery, BooleanClause.Occur.MUST);
        BooleanQuery bq1 = builder.build();
        Result result = analyze(bq1);
        // Unsupported sibling forces verified == false; terms come from the supported clause.
        assertThat(result.verified, is(false));
        assertTermsEqual(result.terms, termQuery1.getTerm());
        TermQuery termQuery2 = new TermQuery(new Term("_field", "_longer_term"));
        builder = new BooleanQuery.Builder();
        builder.add(termQuery1, BooleanClause.Occur.MUST);
        builder.add(termQuery2, BooleanClause.Occur.MUST);
        builder.add(unsupportedQuery, BooleanClause.Occur.MUST);
        bq1 = builder.build();
        result = analyze(bq1);
        assertThat(result.verified, is(false));
        // The longer term wins the "longest shortest term" selection among MUST clauses.
        assertTermsEqual(result.terms, termQuery2.getTerm());
        builder = new BooleanQuery.Builder();
        builder.add(unsupportedQuery, BooleanClause.Occur.MUST);
        builder.add(unsupportedQuery, BooleanClause.Occur.MUST);
        BooleanQuery bq2 = builder.build();
        // Only unsupported clauses: nothing to extract, so the exception escapes.
        UnsupportedQueryException e = expectThrows(UnsupportedQueryException.class, () -> analyze(bq2));
        assertThat(e.getUnsupportedQuery(), sameInstance(unsupportedQuery));
    }

    /**
     * DisjunctionMaxQuery contributes the union of its sub-queries' terms;
     * it is verified only when every sub-query is verified.
     */
    public void testExtractQueryMetadata_disjunctionMaxQuery() {
        TermQuery termQuery1 = new TermQuery(new Term("_field", "_term1"));
        TermQuery termQuery2 = new TermQuery(new Term("_field", "_term2"));
        TermQuery termQuery3 = new TermQuery(new Term("_field", "_term3"));
        TermQuery termQuery4 = new TermQuery(new Term("_field", "_term4"));
        DisjunctionMaxQuery disjunctionMaxQuery = new DisjunctionMaxQuery(
            Arrays.asList(termQuery1, termQuery2, termQuery3, termQuery4), 0.1f
        );
        Result result = analyze(disjunctionMaxQuery);
        assertThat(result.verified, is(true));
        List<Term> terms = new ArrayList<>(result.terms);
        Collections.sort(terms);
        assertThat(terms.size(), equalTo(4));
        assertThat(terms.get(0).field(), equalTo(termQuery1.getTerm().field()));
        assertThat(terms.get(0).bytes(), equalTo(termQuery1.getTerm().bytes()));
        assertThat(terms.get(1).field(), equalTo(termQuery2.getTerm().field()));
        assertThat(terms.get(1).bytes(), equalTo(termQuery2.getTerm().bytes()));
        assertThat(terms.get(2).field(), equalTo(termQuery3.getTerm().field()));
        assertThat(terms.get(2).bytes(), equalTo(termQuery3.getTerm().bytes()));
        assertThat(terms.get(3).field(), equalTo(termQuery4.getTerm().field()));
        assertThat(terms.get(3).bytes(), equalTo(termQuery4.getTerm().bytes()));
        // Replacing the 4th clause with a PhraseQuery ("_term4" on the same field)
        // yields the same extracted terms but flips verified to false.
        disjunctionMaxQuery = new DisjunctionMaxQuery(
            Arrays.asList(termQuery1, termQuery2, termQuery3, new PhraseQuery("_field", "_term4")), 0.1f
        );
        result = analyze(disjunctionMaxQuery);
        assertThat(result.verified, is(false));
        terms = new ArrayList<>(result.terms);
        Collections.sort(terms);
        assertThat(terms.size(), equalTo(4));
        assertThat(terms.get(0).field(), equalTo(termQuery1.getTerm().field()));
        assertThat(terms.get(0).bytes(), equalTo(termQuery1.getTerm().bytes()));
        assertThat(terms.get(1).field(), equalTo(termQuery2.getTerm().field()));
        assertThat(terms.get(1).bytes(), equalTo(termQuery2.getTerm().bytes()));
        assertThat(terms.get(2).field(), equalTo(termQuery3.getTerm().field()));
        assertThat(terms.get(2).bytes(), equalTo(termQuery3.getTerm().bytes()));
        assertThat(terms.get(3).field(), equalTo(termQuery4.getTerm().field()));
        assertThat(terms.get(3).bytes(), equalTo(termQuery4.getTerm().bytes()));
    }

    /** Empty SynonymQuery yields no terms; a populated one yields all its terms, verified. */
    public void testSynonymQuery() {
        SynonymQuery query = new SynonymQuery();
        Result result = analyze(query);
        assertThat(result.verified, is(true));
        assertThat(result.terms.isEmpty(), is(true));
        query = new SynonymQuery(new Term("_field", "_value1"), new Term("_field", "_value2"));
        result = analyze(query);
        assertThat(result.verified, is(true));
        assertTermsEqual(result.terms, new Term("_field", "_value1"), new Term("_field", "_value2"));
    }

    /**
     * FunctionScoreQuery delegates to its wrapped query; a min-score
     * (the 10f argument) makes the result unverifiable.
     */
    public void testFunctionScoreQuery() {
        TermQuery termQuery = new TermQuery(new Term("_field", "_value"));
        FunctionScoreQuery functionScoreQuery = new FunctionScoreQuery(termQuery, new RandomScoreFunction());
        Result result = analyze(functionScoreQuery);
        assertThat(result.verified, is(true));
        assertTermsEqual(result.terms, new Term("_field", "_value"));
        functionScoreQuery = new FunctionScoreQuery(termQuery, new RandomScoreFunction(), 1f, null, 10f);
        result = analyze(functionScoreQuery);
        assertThat(result.verified, is(false));
        assertTermsEqual(result.terms, new Term("_field", "_value"));
    }

    /**
     * Property test: selectTermListWithTheLongestShortestTerm picks the set
     * whose SHORTEST term is longest, returning one of the inputs by identity.
     */
    public void testSelectTermsListWithHighestSumOfTermLength() {
        Set<Term> terms1 = new HashSet<>();
        int shortestTerms1Length = Integer.MAX_VALUE;
        int sumTermLength = randomIntBetween(1, 128);
        while (sumTermLength > 0) {
            int length = randomInt(sumTermLength);
            shortestTerms1Length = Math.min(shortestTerms1Length, length);
            terms1.add(new Term("field", randomAlphaOfLength(length)));
            sumTermLength -= length;
        }
        Set<Term> terms2 = new HashSet<>();
        int shortestTerms2Length = Integer.MAX_VALUE;
        sumTermLength = randomIntBetween(1, 128);
        while (sumTermLength > 0) {
            int length = randomInt(sumTermLength);
            shortestTerms2Length = Math.min(shortestTerms2Length, length);
            terms2.add(new Term("field", randomAlphaOfLength(length)));
            sumTermLength -= length;
        }
        Set<Term> result = selectTermListWithTheLongestShortestTerm(terms1, terms2);
        Set<Term> expected = shortestTerms1Length >= shortestTerms2Length ? terms1 : terms2;
        assertThat(result, sameInstance(expected));
    }

    /** Order-insensitive comparison of an extracted term set against expected terms. */
    private static void assertTermsEqual(Set<Term> actual, Term... expected) {
        assertEquals(new HashSet<>(Arrays.asList(expected)), actual);
    }
}
/*
 * Copyright 2013 Red Hat Inc. and/or its affiliates and other contributors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.switchyard.component.bpm.exchange;

import static org.switchyard.component.common.knowledge.operation.KnowledgeOperations.getInput;
import static org.switchyard.component.common.knowledge.operation.KnowledgeOperations.getInputMap;
import static org.switchyard.component.common.knowledge.operation.KnowledgeOperations.setFaults;
import static org.switchyard.component.common.knowledge.operation.KnowledgeOperations.setGlobals;
import static org.switchyard.component.common.knowledge.operation.KnowledgeOperations.setOutputs;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.xml.namespace.QName;

import org.jbpm.workflow.instance.impl.WorkflowProcessInstanceImpl;
import org.kie.api.runtime.manager.RuntimeEngine;
import org.kie.api.runtime.process.ProcessInstance;
import org.kie.internal.KieInternalServices;
import org.kie.internal.process.CorrelationAwareProcessRuntime;
import org.kie.internal.process.CorrelationKey;
import org.kie.internal.process.CorrelationKeyFactory;
import org.switchyard.Context;
import org.switchyard.Exchange;
import org.switchyard.ExchangePattern;
import org.switchyard.HandlerException;
import org.switchyard.Message;
import org.switchyard.ServiceDomain;
import org.switchyard.common.lang.Strings;
import org.switchyard.component.bpm.BPMConstants;
import org.switchyard.component.bpm.BPMMessages;
import org.switchyard.component.bpm.config.model.BPMComponentImplementationModel;
import org.switchyard.component.bpm.operation.BPMOperationType;
import org.switchyard.component.common.knowledge.exchange.KnowledgeExchangeHandler;
import org.switchyard.component.common.knowledge.operation.KnowledgeOperation;
import org.switchyard.component.common.knowledge.runtime.KnowledgeRuntimeEngine;
import org.switchyard.component.common.knowledge.runtime.KnowledgeRuntimeManager;
import org.switchyard.component.common.knowledge.runtime.KnowledgeRuntimeManagerRegistry;
import org.switchyard.component.common.knowledge.transaction.TransactionHelper;

/**
 * A "bpm" implementation of a KnowledgeExchangeHandler.
 *
 * <p>Dispatches SwitchYard exchanges to a jBPM runtime: starting processes,
 * signaling events (one instance or all), and aborting instances. Each
 * operation runs inside a {@link TransactionHelper} begin/commit/rollback
 * envelope; for IN_OUT exchanges the collected globals and process-instance
 * variables are mapped onto the output (or fault) message.</p>
 *
 * @author David Ward &lt;<a href="mailto:dward@jboss.org">dward@jboss.org</a>&gt; &copy; 2012 Red Hat Inc.
 */
public class BPMExchangeHandler extends KnowledgeExchangeHandler {

    // Used when the exchange does not pick an operation: start a new process.
    private static final KnowledgeOperation DEFAULT_OPERATION = new KnowledgeOperation(BPMOperationType.START_PROCESS);

    private final boolean _persistent;
    private final String _processId;
    private final CorrelationKeyFactory _correlationKeyFactory;
    private KnowledgeRuntimeManager _runtimeManager;

    /**
     * Constructs a new BPMExchangeHandler with the specified model, service domain, and service name.
     * @param model the specified model
     * @param serviceDomain the specified service domain
     * @param serviceName the specified service name
     */
    public BPMExchangeHandler(BPMComponentImplementationModel model, ServiceDomain serviceDomain, QName serviceName) {
        super(model, serviceDomain, serviceName);
        _persistent = model.isPersistent();
        _processId = model.getProcessId();
        _correlationKeyFactory = KieInternalServices.Factory.get().newCorrelationKeyFactory();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected void doStart() {
        super.doStart();
        // NOTE(review): always a singleton manager for now, even when persistent;
        // the per-process-instance variant is parked behind SWITCHYARD-1584.
        _runtimeManager = newSingletonRuntimeManager(); // TODO: SWITCHYARD-1584
        //_runtimeManager = newPerProcessInstanceRuntimeManager();
        //_runtimeManager = _persistent ? newPerProcessInstanceRuntimeManager() : newSingletonRuntimeManager();
        KnowledgeRuntimeManagerRegistry.putRuntimeManager(getServiceDomain().getName(), getServiceName(), _runtimeManager);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected void doStop() {
        // Unregister first so no new work is routed here, then close the manager.
        KnowledgeRuntimeManagerRegistry.removeRuntimeManager(getServiceDomain().getName(), getServiceName());
        try {
            _runtimeManager.close();
        } finally {
            _runtimeManager = null;
            super.doStop();
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public KnowledgeOperation getDefaultOperation() {
        return DEFAULT_OPERATION;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void handleOperation(Exchange exchange, KnowledgeOperation operation) throws HandlerException {
        //Long sessionIdentifier = null;
        Long processInstanceId = null;
        Message inputMessage = exchange.getMessage();
        ExchangePattern exchangePattern = exchange.getContract().getProviderOperation().getExchangePattern();
        // Variables gathered for output-mapping on IN_OUT exchanges.
        Map<String, Object> expressionVariables = new HashMap<String, Object>();
        TransactionHelper txh = new TransactionHelper(_persistent);
        BPMOperationType operationType = (BPMOperationType)operation.getType();
        switch (operationType) {
            case START_PROCESS: {
                try {
                    txh.begin();
                    KnowledgeRuntimeEngine runtime = getRuntimeEngine();
                    //sessionIdentifier = runtime.getSessionIdentifier();
                    setGlobals(inputMessage, operation, runtime, true);
                    Map<String, Object> inputMap = getInputMap(inputMessage, operation, runtime);
                    ProcessInstance processInstance;
                    // A correlation key on the exchange routes through the
                    // correlation-aware start; otherwise a plain start.
                    CorrelationKey correlationKey = getCorrelationKey(exchange, inputMessage);
                    if (correlationKey != null) {
                        processInstance = ((CorrelationAwareProcessRuntime)runtime.getKieSession()).startProcess(_processId, correlationKey, inputMap);
                    } else {
                        processInstance = runtime.getKieSession().startProcess(_processId, inputMap);
                    }
                    processInstanceId = Long.valueOf(processInstance.getId());
                    if (ExchangePattern.IN_OUT.equals(exchangePattern)) {
                        expressionVariables.putAll(getGlobalVariables(runtime));
                        expressionVariables.putAll(getProcessInstanceVariables(processInstance));
                    }
                    // Non-persistent sessions are throwaway: dispose before commit.
                    if (!_persistent) {
                        _runtimeManager.disposeRuntimeEngine(runtime);
                    }
                    txh.commit();
                } catch (RuntimeException re) {
                    txh.rollback();
                    throw re;
                }
                break;
            }
            case SIGNAL_EVENT:
            case SIGNAL_EVENT_ALL: {
                try {
                    txh.begin();
                    KnowledgeRuntimeEngine runtime;
                    if (BPMOperationType.SIGNAL_EVENT.equals(operationType)) {
                        // Targeted signal: resolve the engine owning the instance.
                        runtime = getRuntimeEngine(exchange, inputMessage);
                    } else { //BPMOperationType.SIGNAL_EVENT_ALL
                        runtime = getRuntimeEngine();
                    }
                    //sessionIdentifier = runtime.getSessionIdentifier();
                    setGlobals(inputMessage, operation, runtime, true);
                    Object eventObject = getInput(inputMessage, operation, runtime);
                    String eventId = operation.getEventId();
                    if (BPMOperationType.SIGNAL_EVENT.equals(operationType)) {
                        processInstanceId = getProcessInstanceId(exchange, inputMessage, runtime);
                        if (processInstanceId == null) {
                            throw BPMMessages.MESSAGES.cannotSignalEventUnknownProcessInstanceIdOrUnknownunmatchedCorrelationKey();
                        }
                        if (ExchangePattern.IN_OUT.equals(exchangePattern)) {
                            // Signal via the instance directly so its variables
                            // can be harvested for the response afterwards.
                            ProcessInstance processInstance = runtime.getKieSession().getProcessInstance(processInstanceId);
                            processInstance.signalEvent(eventId, eventObject);
                            expressionVariables.putAll(getGlobalVariables(runtime));
                            expressionVariables.putAll(getProcessInstanceVariables(processInstance));
                        } else {
                            runtime.getKieSession().signalEvent(eventId, eventObject, processInstanceId);
                        }
                    } else { //BPMOperationType.SIGNAL_EVENT_ALL
                        // Broadcast: every instance in the session sees the event.
                        runtime.getKieSession().signalEvent(eventId, eventObject);
                        if (ExchangePattern.IN_OUT.equals(exchangePattern)) {
                            expressionVariables.putAll(getGlobalVariables(runtime));
                        }
                    }
                    if (!_persistent) {
                        _runtimeManager.disposeRuntimeEngine(runtime);
                    }
                    txh.commit();
                } catch (RuntimeException re) {
                    txh.rollback();
                    throw re;
                }
                break;
            }
            case ABORT_PROCESS_INSTANCE: {
                try {
                    txh.begin();
                    KnowledgeRuntimeEngine runtime = getRuntimeEngine(exchange, inputMessage);
                    //sessionIdentifier = runtime.getSessionIdentifier();
                    processInstanceId = getProcessInstanceId(exchange, inputMessage, runtime);
                    if (processInstanceId == null) {
                        throw BPMMessages.MESSAGES.cannotAbortProcessInstance();
                    }
                    if (ExchangePattern.IN_OUT.equals(exchangePattern)) {
                        // Snapshot variables BEFORE aborting; afterwards the
                        // instance is gone.
                        expressionVariables.putAll(getGlobalVariables(runtime));
                        ProcessInstance processInstance = runtime.getKieSession().getProcessInstance(processInstanceId);
                        expressionVariables.putAll(getProcessInstanceVariables(processInstance));
                    }
                    runtime.getKieSession().abortProcessInstance(processInstanceId);
                    if (!_persistent) {
                        _runtimeManager.disposeRuntimeEngine(runtime);
                    }
                    txh.commit();
                } catch (RuntimeException re) {
                    txh.rollback();
                    throw re;
                }
                break;
            }
            default: {
                throw BPMMessages.MESSAGES.unsupportedOperationType(operationType);
            }
        }
        if (ExchangePattern.IN_OUT.equals(exchangePattern)) {
            Message outputMessage = exchange.createMessage();
            Context outputContext = exchange.getContext(outputMessage);
            /*
            if (sessionIdentifier != null) {
                outputContext.setProperty(BPMConstants.SESSION_ID_PROPERTY, sessionIdentifier);
            }
            */
            if (processInstanceId != null) {
                outputContext.setProperty(BPMConstants.PROCESSS_INSTANCE_ID_PROPERTY, processInstanceId);
            }
            // Fault expressions are evaluated first; a non-null content means
            // the exchange ends as a fault instead of a normal reply.
            setFaults(outputMessage, operation, expressionVariables);
            if (outputMessage.getContent() != null) {
                exchange.sendFault(outputMessage);
            } else {
                setOutputs(outputMessage, operation, expressionVariables);
                exchange.send(outputMessage);
            }
        }
    }

    /** Returns a runtime engine from the manager without instance targeting. */
    private KnowledgeRuntimeEngine getRuntimeEngine() {
        return (KnowledgeRuntimeEngine)_runtimeManager.getRuntimeEngine();
    }

    /**
     * Resolves the runtime engine for a specific process instance, trying the
     * explicit process-instance id first, then the correlation key.
     * @throws HandlerException if neither lookup yields an engine
     */
    private KnowledgeRuntimeEngine getRuntimeEngine(Exchange exchange, Message message) throws HandlerException {
        RuntimeEngine runtimeEngine = null;
        Long processInstanceId = getProcessInstanceId(exchange, message);
        if (processInstanceId != null) {
            runtimeEngine = _runtimeManager.getRuntimeEngine(processInstanceId);
        }
        if (runtimeEngine == null) {
            CorrelationKey correlationKey = getCorrelationKey(exchange, message);
            if (correlationKey != null) {
                runtimeEngine = _runtimeManager.getRuntimeEngine(correlationKey);
            }
        }
        if (runtimeEngine == null) {
            throw new HandlerException("runtimeEngine == null");
        }
        return (KnowledgeRuntimeEngine)runtimeEngine;
    }

    /**
     * Builds a CorrelationKey from the whitespace-separated correlation-key
     * context property, or returns null when the property is absent/empty.
     */
    private CorrelationKey getCorrelationKey(Exchange exchange, Message message) {
        String ckp = getString(exchange, message, BPMConstants.CORRELATION_KEY_PROPERTY);
        if (ckp != null) {
            List<String> properties = Strings.splitTrimToNull(ckp, " \t\n\r\f");
            if (properties.size() > 0) {
                return _correlationKeyFactory.newCorrelationKey(properties);
            }
        }
        return null;
    }

    /** Reads the process-instance-id context property, if present. */
    private Long getProcessInstanceId(Exchange exchange, Message message) {
        return getLong(exchange, message, BPMConstants.PROCESSS_INSTANCE_ID_PROPERTY);
    }

    /**
     * Resolves a process-instance id from the context property, falling back
     * to a correlation-key lookup in the given session.
     */
    private Long getProcessInstanceId(Exchange exchange, Message message, KnowledgeRuntimeEngine session) {
        Long processInstanceId = getProcessInstanceId(exchange, message);
        if (processInstanceId == null) {
            CorrelationKey correlationKey = getCorrelationKey(exchange, message);
            if (correlationKey != null) {
                processInstanceId = getProcessInstanceId(correlationKey, session);
            }
        }
        return processInstanceId;
    }

    /** Looks up an instance id by correlation key; null when not found. */
    private Long getProcessInstanceId(CorrelationKey correlationKey, KnowledgeRuntimeEngine session) {
        if (correlationKey != null) {
            ProcessInstance processInstance = ((CorrelationAwareProcessRuntime)session.getKieSession()).getProcessInstance(correlationKey);
            if (processInstance != null) {
                return Long.valueOf(processInstance.getId());
            }
        }
        return null;
    }

    /**
     * Copies the variables of a workflow process instance into a fresh map;
     * non-workflow instances yield an empty map.
     */
    private Map<String, Object> getProcessInstanceVariables(ProcessInstance processInstance) {
        Map<String, Object> processInstanceVariables = new HashMap<String, Object>();
        if (processInstance instanceof WorkflowProcessInstanceImpl) {
            Map<String, Object> var = ((WorkflowProcessInstanceImpl)processInstance).getVariables();
            if (var != null) {
                processInstanceVariables.putAll(var);
            }
        }
        return processInstanceVariables;
    }
}
package gov.cdc.epiinfo;

import android.content.ContentValues;
import android.content.Context;
import android.content.SharedPreferences;
import android.database.Cursor;
import android.database.SQLException;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.os.AsyncTask;
import android.preference.PreferenceManager;
import android.util.Log;

import org.json.JSONArray;
import org.json.JSONObject;

import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.Iterator;
import java.util.List;
import java.util.Stack;
import java.util.UUID;

import gov.cdc.epiinfo.cloud.AzureClient;
import gov.cdc.epiinfo.cloud.BoxClient;
import gov.cdc.epiinfo.cloud.CloudFactory;
import gov.cdc.epiinfo.cloud.CouchDbClient;
import gov.cdc.epiinfo.cloud.EpiInfoCloudClient;
import gov.cdc.epiinfo.cloud.ICloudClient;

/**
 * SQLite data-access helper for a single Epi Info form: one database per form
 * (named "&lt;table&gt;DB"), one table per form, columns generated from the
 * form's field metadata. Also synchronizes records with a configurable cloud
 * backend (Azure / Box / CouchDB / Epi Info cloud) via {@link CloudFactory}.
 */
public class EpiDbHelper {

    // Local row id column.
    public static final String KEY_ROWID = "_id";
    // Globally unique record id used to match rows across devices/cloud.
    public static final String GUID = "globalRecordId";
    private static final String TAG = "EpiDbHelper";
    private DatabaseHelper mDbHelper;
    private SQLiteDatabase mDb;
    private String DATABASE_NAME;// = "epiinfo";
    private String DATABASE_TABLE;// = "Survey";
    private int DATABASE_VERSION;
    private FormMetadata formMetadata;
    // True when this table is a child (related) form; such tables carry a FKEY column.
    public boolean isRelatedTable;
    private final Context mCtx;

    /**
     * SQLiteOpenHelper that creates the form table from metadata and, on any
     * version change (up OR down), re-runs ALTER TABLE for every field so new
     * columns get added; pre-existing columns fail the ALTER and are skipped.
     */
    private class DatabaseHelper extends SQLiteOpenHelper {

        DatabaseHelper(Context context) {
            super(context, DATABASE_NAME, null, DATABASE_VERSION);
        }

        @Override
        public void onCreate(SQLiteDatabase db) {
            // Field types "2" and "21" get no column; numeric-ish types map to
            // "real", everything else to "text". Large (>100 values) type
            // 17/19 lists are stored as text.
            String DATABASE_CREATE = "create table "+ DATABASE_TABLE +" (" + "_id integer primary key autoincrement, ";
            for (int x=0;x<formMetadata.Fields.size();x++) {
                if (!formMetadata.Fields.get(x).getType().equals("2") && !formMetadata.Fields.get(x).getType().equals("21")) {
                    String dbType;
                    if (formMetadata.Fields.get(x).getType().equals("5") || formMetadata.Fields.get(x).getType().equals("10") || formMetadata.Fields.get(x).getType().equals("11") || formMetadata.Fields.get(x).getType().equals("12") || formMetadata.Fields.get(x).getType().equals("98") || formMetadata.Fields.get(x).getType().equals("17") || formMetadata.Fields.get(x).getType().equals("18") || formMetadata.Fields.get(x).getType().equals("19"))
                        dbType="real";
                    else
                        dbType="text";
                    if ((formMetadata.Fields.get(x).getType().equals("17") || formMetadata.Fields.get(x).getType().equals("19")) && formMetadata.Fields.get(x).getListValues().size() > 100)
                        dbType="text";
                    DATABASE_CREATE += formMetadata.Fields.get(x).getName() + " " + dbType + " null, ";
                }
            }
            // Trim trailing ", " and append bookkeeping columns.
            DATABASE_CREATE = DATABASE_CREATE.substring(0, DATABASE_CREATE.length() - 2) + ", globalRecordId text null, _updateStamp real null, _syncStatus real null);";
            try {
                db.execSQL(DATABASE_CREATE);
            } catch (Exception ex) {
                // Swallowed: table may already exist. The no-op increments keep
                // a breakpoint target.
                int z=0;
                z++;
            }
        }

        // Adds one column per metadata field; ALTER failing (column exists) is ignored.
        // NOTE(review): unlike onCreate, type "98" is NOT mapped to "real" here —
        // looks like an inconsistency; confirm against the metadata type list.
        private void updateSchema(SQLiteDatabase db) {
            String statement = "ALTER TABLE " + DATABASE_TABLE + " ADD COLUMN ";
            for (int x=0;x<formMetadata.Fields.size();x++) {
                if (!formMetadata.Fields.get(x).getType().equals("2") && !formMetadata.Fields.get(x).getType().equals("21")) {
                    String dbType;
                    if (formMetadata.Fields.get(x).getType().equals("5") || formMetadata.Fields.get(x).getType().equals("10") || formMetadata.Fields.get(x).getType().equals("11") || formMetadata.Fields.get(x).getType().equals("12") || formMetadata.Fields.get(x).getType().equals("17") || formMetadata.Fields.get(x).getType().equals("18") || formMetadata.Fields.get(x).getType().equals("19"))
                        dbType="real";
                    else
                        dbType="text";
                    if ((formMetadata.Fields.get(x).getType().equals("17") || formMetadata.Fields.get(x).getType().equals("19")) && formMetadata.Fields.get(x).getListValues().size() > 100)
                        dbType="text";
                    try {
                        db.execSQL(statement + formMetadata.Fields.get(x).getName() + " " + dbType + " null ");
                    } catch (Exception ex) {
                        // Column already present — expected on repeat upgrades.
                        int z=5;
                        z++;
                    }
                }
            }
        }

        @Override
        public void onDowngrade(SQLiteDatabase db, int oldVersion, int newVersion) {
            Log.w(TAG, "Downgrading database from version " + oldVersion + " to " + newVersion);
            updateSchema(db);
        }

        @Override
        public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
            Log.w(TAG, "Upgrading database from version " + oldVersion + " to " + newVersion);
            updateSchema(db);
        }
    }

    /**
     * @param ctx host context (expected to be the RecordList activity for UI callbacks)
     * @param formMetadata parsed form definition driving the schema
     * @param tableName form/table name; also used to derive the database name
     */
    public EpiDbHelper(Context ctx, FormMetadata formMetadata, String tableName) {
        this.mCtx = ctx;
        this.formMetadata = formMetadata;
        DATABASE_TABLE = tableName;
        DATABASE_NAME = tableName + "DB";
        DATABASE_VERSION = formMetadata.FileVersion;
        //recList = (RecordList)ctx;
    }

    /**
     * Opens (creating/upgrading as needed) the writable database and
     * best-effort adds the bookkeeping columns; related ("_"-prefixed) tables
     * additionally get a FKEY column.
     */
    public EpiDbHelper open() throws SQLException {
        mDbHelper = new DatabaseHelper(mCtx);
        mDb = mDbHelper.getWritableDatabase();
        try {
            mDb.execSQL("ALTER TABLE " + DATABASE_TABLE + " ADD COLUMN _updateStamp real null");
        } catch (Exception ex) {
            // Column already exists — ignore.
        }
        try {
            mDb.execSQL("ALTER TABLE " + DATABASE_TABLE + " ADD COLUMN _syncStatus real null");
        } catch (Exception ex) {
            // Column already exists — ignore.
        }
        if (DATABASE_TABLE.startsWith("_")) {
            isRelatedTable = true;
            try {
                mDb.execSQL("ALTER TABLE " + DATABASE_TABLE + " ADD COLUMN FKEY text null");
            } catch (Exception ex) {
                // Column already exists — ignore.
            }
        }
        return this;
    }

    /** Deletes the whole per-form database file. */
    public void DropDatabase(String tableName) {
        mCtx.deleteDatabase(tableName + "DB");
    }

    public void close() {
        mDbHelper.close();
    }

    /**
     * Inserts a record, stamping GUID/_updateStamp/_syncStatus, and (when
     * enabled by preferences) pushes it to the cloud asynchronously.
     * @param preexistingGuid reuse this GUID instead of generating one (cloud pull)
     * @param fkeyGuid parent record GUID for related tables, or null
     * @return the new local row id (SQLite insert result)
     */
    public long createRecord(ContentValues initialValues, boolean sendToCloud, String preexistingGuid, String fkeyGuid) {
        if (preexistingGuid == null) {
            initialValues.put(GUID, UUID.randomUUID().toString());
        } else {
            initialValues.put(GUID, preexistingGuid);
        }
        if (fkeyGuid != null && fkeyGuid.length() > 0) {
            initialValues.put("FKEY", fkeyGuid);
        }
        initialValues.put("_updateStamp", new Date().getTime());
        if (!initialValues.containsKey("_syncStatus")) {
            initialValues.put("_syncStatus", 0); // 0 = not yet synced
        }
        long retVal = mDb.insert(DATABASE_TABLE, null, initialValues);
        try {
            if (sendToCloud) {
                SharedPreferences sharedPref = PreferenceManager.getDefaultSharedPreferences(mCtx);
                if (sharedPref.getBoolean("cloud_sync_save", false) && !sharedPref.getBoolean("sync_down_only", false)) {
                    // Box requires an authenticated session before pushing.
                    if (!sharedPref.getString("cloud_service", "").equals("Box") || BoxClient.isAuthenticated(mCtx)) {
                        new CloudRecordCreator().executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, retVal, initialValues);
                    }
                }
            }
        } catch (Exception ex) {
            // Cloud push is best-effort; the local insert already succeeded.
            int x =5;
            x++;
        }
        return retVal;
    }

    /** Background push of one freshly created record; marks it synced on success. */
    private class CloudRecordCreator extends AsyncTask<Object,Void, Boolean> {
        private long recordId;

        @Override
        protected Boolean doInBackground(Object... params) {
            recordId = (Long)params[0];
            return createCloudRecord((ContentValues)params[1]);
        }

        @Override
        protected void onPostExecute(Boolean success) {
            if (success) {
                updateSyncStatus(recordId);
                try {
                    // Refresh the list UI if the context really is RecordList.
                    ((RecordList)mCtx).fillData();
                } catch (Exception ex) {
                }
            }
        }
    }

    /**
     * Converts a local ContentValues row to the cloud representation (renames
     * GUID to "id", drops zero-valued 17/18/19 fields, re-formats type-7 dates
     * per backend) and inserts it. Returns false on any failure.
     */
    private boolean createCloudRecord(ContentValues initialValues) {
        String guidValue = initialValues.get(GUID).toString();
        ICloudClient cloudClient = CloudFactory.GetCloudClient(DATABASE_TABLE, formMetadata.GetSurveyId(), this, mCtx);
        try {
            initialValues.put("id", guidValue);
            initialValues.remove(GUID);
            for (int x = 0; x < formMetadata.DataFields.size(); x++) {
                if (formMetadata.DataFields.get(x).getType().equals("17") || formMetadata.DataFields.get(x).getType().equals("18") || formMetadata.DataFields.get(x).getType().equals("19")) {
                    try {
                        int value = initialValues.getAsInteger(formMetadata.DataFields.get(x).getName());
                        if (value == 0) {
                            initialValues.remove(formMetadata.DataFields.get(x).getName());
                        }
                    } catch (Exception ex) {
                        // Field absent or non-integer — leave as-is.
                    }
                }
                if (formMetadata.DataFields.get(x).getType().equals("7")) {
                    String dateValue = initialValues.getAsString(formMetadata.DataFields.get(x).getName());
                    if (!dateValue.equals("")) {
                        String jsonDate = "";
                        try {
                            DateFormat jsonFormat;
                            // Epi Info cloud expects ISO-8601; others expect US-style.
                            if (cloudClient.getClass().equals(EpiInfoCloudClient.class)) {
                                jsonFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
                            } else {
                                jsonFormat = new SimpleDateFormat("M/d/yyyy h:mm:ss a");
                            }
                            // NOTE(review): DateFormat.getDateInstance() is
                            // locale-dependent — parsing assumes the stored
                            // string used the device's default locale; confirm.
                            Date date = DateFormat.getDateInstance().parse(dateValue);
                            jsonDate = jsonFormat.format(date);
                        } catch (Exception ex) {
                            jsonDate = dateValue; // keep raw value if unparseable
                        }
                        initialValues.put(formMetadata.DataFields.get(x).getName(), jsonDate);
                    }
                }
            }
            return cloudClient.insertRecord( initialValues );
        } catch ( Exception exception ) {
            return false;
        }
    }

    /**
     * Pulls all rows from the cloud and merges them into the local table
     * (newer _updateStamp wins). Returns the GUIDs processed, or null on error.
     */
    private ArrayList<String> GetCloudData() {
        ICloudClient cloudClient = CloudFactory.GetCloudClient(DATABASE_TABLE, formMetadata.GetSurveyId(), this, mCtx);
        ArrayList<String> guids = new ArrayList<String>();
        try {
            JSONArray table = cloudClient.getData(formMetadata.HasImageFields, formMetadata.HasMediaFields, this);
            for (int x = 0; x < table.length(); x++) {
                JSONObject row = table.getJSONObject(x);
                if (row != null) {
                    ContentValues values = new ContentValues();
                    String guid = "";
                    Iterator<String> columns = row.keys();
                    while (columns.hasNext()) {
                        String column = columns.next();
                        Object value = row.get(column);
                        if (column.equals("id") || column.equals("_id")) {
                            guid = value.toString();
                        } else if (column.equals("_syncStatus")) {
                            values.put("_syncStatus", 1); // anything from the cloud is "synced"
                        } else if (column.equals("version") || column.equals("createdAt") || column.equals("updatedAt") || column.equals("deleted")) {
                            //ignore
                        } else {
                            if (value instanceof Integer) {
                                values.put(column, (Integer)row.get(column));
                            } else if (value instanceof Double) {
                                values.put(column, (Double)row.get(column));
                            } else if (value instanceof Boolean) {
                                if ((Boolean)value) {
                                    values.put(column, 1);
                                } else {
                                    values.put(column, 0);
                                }
                            } else if (row.isNull(column)) {
                                // POSITIVE_INFINITY is the app's sentinel for "no value".
                                values.put(column, Double.POSITIVE_INFINITY);
                            } else if (formMetadata.GetFieldType(column) == 7) {
                                if (row.get(column).toString().equals("")) {
                                    values.put(column, row.get(column).toString());
                                } else {
                                    try {
                                        SimpleDateFormat jsonFormat;
                                        if (cloudClient.getClass().equals(EpiInfoCloudClient.class)) {
                                            jsonFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
                                        } else {
                                            jsonFormat = new SimpleDateFormat("M/d/yyyy h:mm:ss a");
                                        }
                                        Date date = jsonFormat.parse(row.get(column).toString());
                                        DateFormat dateFormat = DateFormat.getDateInstance();
                                        Calendar cal = GregorianCalendar.getInstance();
                                        cal.setTime(date);
                                        values.put(column, dateFormat.format(cal.getTime()));
                                    } catch (Exception ex) {
                                        values.put(column, row.get(column).toString());
                                    }
                                }
                            } else {
                                values.put(column, row.get(column).toString());
                            }
                        }
                    }
                    // Fill missing numeric fields with the "no value" sentinel.
                    for (int n=0; n<formMetadata.NumericFields.size(); n++) {
                        if (!values.containsKey(formMetadata.NumericFields.get(n).getName())) {
                            values.put(formMetadata.NumericFields.get(n).getName(), Double.POSITIVE_INFINITY);
                        }
                    }
                    if (cloudClient.getClass() != AzureClient.class && cloudClient.getClass() != CouchDbClient.class) {
                        // Backends other than Azure/CouchDB: just record the GUID.
                        // NOTE(review): guid != "" is a reference comparison, not
                        // equals() — works only because "" here is the interned
                        // literal assigned above; fragile, flag for fix.
                        if (guid != null && guid != "") {
                            guids.add(guid);
                        }
                    } else {
                        // Azure/CouchDB: merge by GUID using last-write-wins on _updateStamp.
                        Cursor tempCursor = fetchIdAndStamp(GUID + " = \"" + guid + "\"");
                        if (tempCursor.getCount() > 0) {
                            tempCursor.moveToFirst();
                            int id = tempCursor.getInt(tempCursor.getColumnIndexOrThrow(KEY_ROWID));
                            long localTimeStamp;
                            if (tempCursor.isNull(tempCursor.getColumnIndexOrThrow("_updateStamp"))) {
                                localTimeStamp = 0;
                            } else {
                                localTimeStamp = tempCursor.getLong(tempCursor.getColumnIndexOrThrow("_updateStamp"));
                            }
                            long serverTimeStamp = values.getAsLong("_updateStamp");
                            if (serverTimeStamp > localTimeStamp) {
                                updateRecord(id, values, false);
                                guids.add(guid);
                            }
                        } else {
                            // Unknown GUID locally: insert without re-pushing to cloud.
                            long insertedId = createRecord(values, false, guid, null);
                            if (insertedId < 0) {
                                return null;
                            }
                            updateSyncStatus(insertedId);
                            guids.add(guid);
                        }
                    }
                } else {
                    break;
                }
            }
        } catch (Exception ex) {
            return null;
        }
        return guids;
    }

    /**
     * Merges one pushed-down JSON row into the local table (same
     * last-write-wins rules as GetCloudData). Returns false on failure.
     */
    public boolean SaveRecievedData(JSONObject row) {
        try {
            if (row != null) {
                ContentValues values = new ContentValues();
                String guid = "";
                Iterator<String> columns = row.keys();
                while (columns.hasNext()) {
                    String column = columns.next();
                    Object value = row.get(column);
                    if (column.equals("id")) {
                        guid = value.toString();
                    } else if (column.equals("_syncStatus")) {
                        values.put("_syncStatus", 1);
                    } else if (column.equals("version") || column.equals("createdAt") || column.equals("updatedAt") || column.equals("deleted")) {
                        //ignore
                    } else {
                        if (value instanceof Integer) {
                            values.put(column, (Integer) row.get(column));
                        } else if (value instanceof Double) {
                            values.put(column, (Double) row.get(column));
                        } else if (value instanceof Boolean) {
                            if ((Boolean) value) {
                                values.put(column, 1);
                            } else {
                                values.put(column, 0);
                            }
                        } else if (row.isNull(column)) {
                            values.put(column, Double.POSITIVE_INFINITY);
                        } else {
                            values.put(column, row.get(column).toString());
                        }
                    }
                }
                for (int n = 0; n < formMetadata.NumericFields.size(); n++) {
                    if (!values.containsKey(formMetadata.NumericFields.get(n).getName())) {
                        values.put(formMetadata.NumericFields.get(n).getName(), Double.POSITIVE_INFINITY);
                    }
                }
                Cursor tempCursor = fetchIdAndStamp(GUID + " = \"" + guid + "\"");
                if (tempCursor.getCount() > 0) {
                    tempCursor.moveToFirst();
                    int id = tempCursor.getInt(tempCursor.getColumnIndexOrThrow(KEY_ROWID));
                    long localTimeStamp;
                    if (tempCursor.isNull(tempCursor.getColumnIndexOrThrow("_updateStamp"))) {
                        localTimeStamp = 0;
                    } else {
                        localTimeStamp = tempCursor.getLong(tempCursor.getColumnIndexOrThrow("_updateStamp"));
                    }
                    long serverTimeStamp = values.getAsLong("_updateStamp");
                    if (serverTimeStamp > localTimeStamp) {
                        updateRecord(id, values, false);
                    }
                } else {
                    long insertedId = createRecord(values, false, guid, null);
                    if (insertedId < 0) {
                        return false;
                    }
                    updateSyncStatus(insertedId);
                }
            }
        } catch (Exception ex) {
            int w=5;
            w++;
            return false;
        }
        return true;
    }

    /** Best-effort ALTER TABLE adding a text column (failure = already exists). */
    public void AddTextColumn(String columnName) {
        String statement = "ALTER TABLE " + DATABASE_TABLE + " ADD COLUMN ";
        try {
            mDb.execSQL(statement + columnName + " text null ");
        } catch (Exception ex) {
            int z = 5;
            z++;
        }
    }

    /**
     * Pushes a single local row to the cloud as an update, converting each
     * field by its metadata type (mirrors createCloudRecord's mapping).
     * @return 1 on success, -1 on failure, 0 when the row does not exist
     */
    public int SendRecordToCloud(long id) {
        ICloudClient cloudClient = CloudFactory.GetCloudClient(DATABASE_TABLE, formMetadata.GetSurveyId(), this, mCtx);
        Cursor c = fetchWhere_all(KEY_ROWID + "=" + id);
        double totalSize = c.getCount();
        if (totalSize < 1) {
            return 0;
        }
        int retval = -1;
        if (c.moveToFirst()) {
            ContentValues initialValues = new ContentValues();
            for (int x = 0; x < formMetadata.DataFields.size(); x++) {
                if (!c.isNull((c.getColumnIndexOrThrow(formMetadata.DataFields.get(x).getName())))) {
                    if (formMetadata.DataFields.get(x).getType().equals("11") || formMetadata.DataFields.get(x).getType().equals("12") || formMetadata.DataFields.get(x).getType().equals("18")) {
                        initialValues.put(formMetadata.DataFields.get(x).getName(), c.getInt(c.getColumnIndexOrThrow(formMetadata.DataFields.get(x).getName())));
                    } else if (formMetadata.DataFields.get(x).getType().equals("17") || formMetadata.DataFields.get(x).getType().equals("19")) {
                        if (formMetadata.DataFields.get(x).getListValues().size() > 100) {
                            // Large lists are stored as text locally.
                            initialValues.put(formMetadata.DataFields.get(x).getName(), c.getString(c.getColumnIndexOrThrow(formMetadata.DataFields.get(x).getName())));
                        } else {
                            // Zero means unselected: omit from the payload.
                            if (c.getInt(c.getColumnIndexOrThrow(formMetadata.DataFields.get(x).getName())) > 0) {
                                initialValues.put(formMetadata.DataFields.get(x).getName(), c.getInt(c.getColumnIndexOrThrow(formMetadata.DataFields.get(x).getName())));
                            }
                        }
                    } else if (formMetadata.DataFields.get(x).getType().equals("5")) {
                        initialValues.put(formMetadata.DataFields.get(x).getName(), c.getDouble(c.getColumnIndexOrThrow(formMetadata.DataFields.get(x).getName())));
                    } else if (formMetadata.DataFields.get(x).getType().equals("7")) {
                        if (c.getString(c.getColumnIndexOrThrow(formMetadata.DataFields.get(x).getName())).equals("")) {
                            initialValues.put(formMetadata.DataFields.get(x).getName(), "");
                        } else {
                            String jsonDate = "";
                            try {
                                DateFormat jsonFormat;
                                if (cloudClient.getClass().equals(EpiInfoCloudClient.class)) {
                                    jsonFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
                                } else {
                                    jsonFormat = new SimpleDateFormat("M/d/yyyy h:mm:ss a");
                                }
                                Date date = DateFormat.getDateInstance().parse(c.getString(c.getColumnIndexOrThrow(formMetadata.DataFields.get(x).getName())));
                                jsonDate = jsonFormat.format(date);
                            } catch (Exception ex) {
                                jsonDate = c.getString(c.getColumnIndexOrThrow(formMetadata.DataFields.get(x).getName()));
                            }
                            initialValues.put(formMetadata.DataFields.get(x).getName(), jsonDate);
                        }
                    } else if (formMetadata.DataFields.get(x).getType().equals("10")) {
                        if (c.getInt(c.getColumnIndexOrThrow(formMetadata.DataFields.get(x).getName())) == 1) {
                            initialValues.put(formMetadata.DataFields.get(x).getName(), true);
                        } else {
                            initialValues.put(formMetadata.DataFields.get(x).getName(), false);
                        }
                    } else {
                        initialValues.put(formMetadata.DataFields.get(x).getName(), c.getString(c.getColumnIndexOrThrow(formMetadata.DataFields.get(x).getName())));
                    }
                }
            }
            String guidValue = c.getString(c.getColumnIndexOrThrow(GUID));
            initialValues.put("id", guidValue);
            if (c.isNull(c.getColumnIndexOrThrow("_updateStamp"))) {
                initialValues.put("_updateStamp", new Date().getTime());
            } else {
                initialValues.put("_updateStamp", c.getLong(c.getColumnIndexOrThrow("_updateStamp")));
            }
            if (this.isRelatedTable) {
                initialValues.put("FKEY", c.getString(c.getColumnIndexOrThrow("FKEY")));
            }
            try {
                if (cloudClient.updateRecord(guidValue, initialValues)) {
                    System.out.println("update succeeded");
                    updateSyncStatus(id);
                    retval = 1;
                } else {
                    retval = -1;
                    System.out.println("update failed");
                }
            } catch (Exception ex) {
                retval = -1;
            }
        }
        return retval;
    }

    /** Forwards progress to the RecordList.CloudSynchronizer task, if that's what's running. */
    private void ReportProgress(AsyncTask asyncTask, double progress) {
        try {
            ((RecordList.CloudSynchronizer)asyncTask).ReportProgress(progress);
        } catch (Exception ex) {
        }
    }

    /**
     * Pushes every unsynced row to the cloud, reporting percentage progress.
     * @return result of the LAST push (-1 failure, 1 success), or 0 when nothing to send
     */
    private int SendDataToCloud(AsyncTask asyncTask, ArrayList<String> receivedGuids) {
        Cursor c = fetchAllIds(true);
        double totalSize = c.getCount();
        if (totalSize < 1) {
            return 0;
        }
        int retval = -1;
        int counter = 0;
        if (c.moveToFirst()) {
            do {
                counter++;
                ReportProgress(asyncTask, counter / totalSize * 100);
                //((RecordList.CloudSynchronizer)asyncTask).ReportProgress(counter / totalSize * 100);
                retval = this.SendRecordToCloud(c.getLong(c.getColumnIndexOrThrow(KEY_ROWID)));
            } while (c.moveToNext());
        }
        return retval;
    }

    /**
     * Full sync driven by preferences: optionally pull (sync_up_down /
     * sync_down_only), then push unless down-only.
     * @return -99 when the pull fails; otherwise 1 or the push result
     */
    public int SyncWithCloud(AsyncTask asyncTask) {
        SharedPreferences sharedPref = PreferenceManager.getDefaultSharedPreferences(mCtx);
        boolean getData = sharedPref.getBoolean("sync_up_down", false) || sharedPref.getBoolean("sync_down_only", false);
        boolean dontPush = sharedPref.getBoolean("sync_down_only", false);
        ArrayList<String> receivedGuids = new ArrayList<String>();
        if (getData) {
            ReportProgress(asyncTask,-1); // -1 = indeterminate while pulling
            receivedGuids = GetCloudData();
            if (receivedGuids == null) {
                return -99;
            }
        }
        if (dontPush) {
            return 1;
        } else {
            return SendDataToCloud(asyncTask, receivedGuids);
        }
    }

    /**
     * Deletes a local row; if cloud deletion is enabled, fires an async cloud
     * delete for the row's GUID first.
     */
    public boolean deleteRecord(long rowId) {
        SharedPreferences sharedPref = PreferenceManager.getDefaultSharedPreferences(mCtx);
        boolean allowCloudDeletion = sharedPref.getBoolean("cloud_deletion", false);
        if (allowCloudDeletion) {
            Cursor c = fetchRecord(rowId);
            if (c.moveToFirst()) {
                String guidValue = c.getString(c.getColumnIndexOrThrow(GUID));
                new CloudRecordDeletor().executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, guidValue);
            }
        }
        return mDb.delete(DATABASE_TABLE, KEY_ROWID + "=" + rowId, null) > 0;
    }

    /** Drops the entire per-form database (local only; cloud deletion is disabled below). */
    public boolean deleteAllRecords() {
        /*SharedPreferences sharedPref = PreferenceManager.getDefaultSharedPreferences(mCtx);
        boolean allowCloudDeletion = sharedPref.getBoolean("cloud_deletion", false);
        if (allowCloudDeletion)
        {
            Cursor c = fetchAllRecords();
            if (c.moveToFirst())
            {
                do {
                    String guidValue = c.getString(c.getColumnIndexOrThrow(GUID));
                    new CloudRecordDeletor().executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, guidValue);
                }while (c.moveToNext());
            }
        }*/
        try {
            return mCtx.deleteDatabase(DATABASE_NAME);
            //mDb.execSQL("DROP TABLE IF EXISTS " + DATABASE_TABLE);
        } catch (Exception ex) {
            return false;
        }
    }

    /** Returns all rows with every data field plus _id, GUID, stamp, and status columns. */
    public Cursor fetchAllRecords() {
        String[] columns = new String[formMetadata.DataFields.size() + 4];
        for (int x=0; x<formMetadata.DataFields.size() + 1; x++) {
            if (x==0)
                columns[x]=KEY_ROWID;
            else
                columns[x]=formMetadata.DataFields.get(x-1).getName();
        }
        columns[formMetadata.DataFields.size() + 1]=GUID;
        columns[formMetadata.DataFields.size() + 2]="_updateStamp";
        columns[formMetadata.DataFields.size() + 3]="_syncStatus";
        return mDb.query(DATABASE_TABLE, columns, null, null, null, null, null);
    }

    /** Same as fetchAllRecords but also selects the related-table "fkey" column. */
    public Cursor fetchAllRecordsPlusFkey() {
        String[] columns = new String[formMetadata.DataFields.size() + 5];
        for (int x=0; x<formMetadata.DataFields.size() + 1; x++) {
            if (x==0)
                columns[x]=KEY_ROWID;
            else
                columns[x]=formMetadata.DataFields.get(x-1).getName();
        }
        columns[formMetadata.DataFields.size() + 1]=GUID;
        columns[formMetadata.DataFields.size() + 2]="_updateStamp";
        columns[formMetadata.DataFields.size() + 3]="_syncStatus";
        columns[formMetadata.DataFields.size() + 4]="fkey";
        Cursor c = mDb.query(DATABASE_TABLE, columns, null, null, null, null, null);
        int x = c.getCount();
        System.out.println(x);
        return c;
    }

    /** Returns a single row (query limited to 1) with data fields, _id, and GUID. */
    public Cursor fetchTopOne() {
        String[] columns = new String[formMetadata.DataFields.size() + 2];
        for (int x=0; x<formMetadata.DataFields.size() + 1; x++) {
            if (x==0)
                columns[x]=KEY_ROWID;
            else
                columns[x]=formMetadata.DataFields.get(x-1).getName();
        }
        columns[formMetadata.DataFields.size() + 1]=GUID;
        return mDb.query(DATABASE_TABLE, columns, null, null, null, null, "1");
    }

    /**
     * Line-listing cursor over three fields, each paired with a literal
     * columnNameN alias; honors the reverse_order preference.
     * NOTE(review): field names are concatenated into raw SQL — safe only
     * because they come from form metadata, not user input.
     */
    public Cursor fetchLineListing(String field1, String field2, String field3) {
        String queryString = "SELECT " + KEY_ROWID + ", '" + field1 + "' as columnName1, " + field1 + ", '" + field2 + "' as columnName2, " + field2 + ", '" + field3 + "' as columnName3, " + field3 + ", " + GUID + ", _syncStatus FROM " + DATABASE_TABLE;
        SharedPreferences sharedPref = PreferenceManager.getDefaultSharedPreferences(mCtx);
        if (sharedPref.getBoolean("reverse_order", false)) {
            queryString += " order by " + KEY_ROWID + " desc";
        }
        return mDb.rawQuery(queryString, null);
    }

    /** Two-field variant of fetchLineListing. (Body continues past this view.) */
    public Cursor fetchLineListing(String field1, String field2) {
        String queryString = "SELECT " + KEY_ROWID + ", '" + field1 + "' as columnName1, " + field1 + ", '" + field2 + "' as columnName2, " + field2 + ", " + GUID + ", _syncStatus FROM " + DATABASE_TABLE;
        SharedPreferences sharedPref =
PreferenceManager.getDefaultSharedPreferences(mCtx);
    // "reverse_order" preference flips the listing to newest-first.
    if (sharedPref.getBoolean("reverse_order", false)) {
        queryString += " order by " + KEY_ROWID + " desc";
    }
    return mDb.rawQuery(queryString, null);
}

/** Single-column variant of the "line listing" query. */
public Cursor fetchLineListing(String field1) {
    String queryString = "SELECT " + KEY_ROWID + ", '" + field1 + "' as columnName1, " + field1 + ", " + GUID + ", _syncStatus FROM " + DATABASE_TABLE;
    SharedPreferences sharedPref = PreferenceManager.getDefaultSharedPreferences(mCtx);
    if (sharedPref.getBoolean("reverse_order", false)) {
        queryString += " order by " + KEY_ROWID + " desc";
    }
    return mDb.rawQuery(queryString, null);
}

/**
 * Fetches a single record by row id (row id, data fields, GUID), positioned on
 * the first row when found.
 */
public Cursor fetchRecord(long rowId) throws SQLException {
    String[] columns = new String[formMetadata.DataFields.size() + 2];
    for (int x=0; x<formMetadata.DataFields.size() + 1; x++) {
        if (x==0)
            columns[x]=KEY_ROWID;
        else
            columns[x]=formMetadata.DataFields.get(x-1).getName();
    }
    columns[formMetadata.DataFields.size() + 1]=GUID;
    Cursor mCursor = mDb.query(true, DATABASE_TABLE, columns, KEY_ROWID + "=" + rowId, null, null, null, null, null);
    if (mCursor != null) {
        mCursor.moveToFirst();
    }
    return mCursor;
}

/**
 * Three-field filtered line listing.
 * NOTE(review): whereClause is spliced directly into the SQL -- callers must
 * ensure it is trusted (SQL injection risk otherwise).
 */
public Cursor fetchWhere(String field1, String field2, String field3, String whereClause) {
    String queryString = "SELECT " + KEY_ROWID + ", '" + field1 + "' as columnName1, " + field1 + ", '" + field2 + "' as columnName2, " + field2 + ", '" + field3 + "' as columnName3, " + field3 + ", " + GUID + ", _syncStatus FROM " + DATABASE_TABLE + " WHERE " + whereClause;
    SharedPreferences sharedPref = PreferenceManager.getDefaultSharedPreferences(mCtx);
    if (sharedPref.getBoolean("reverse_order", false)) {
        queryString += " order by " + KEY_ROWID + " desc";
    }
    return mDb.rawQuery(queryString, null);
}

/** Two-field filtered line listing; same caveats as the three-field overload. */
public Cursor fetchWhere(String field1, String field2, String whereClause) {
    String queryString = "SELECT " + KEY_ROWID + ", '" + field1 + "' as columnName1, " + field1 + ", '" + field2 + "' as columnName2, " + field2 + ", " + GUID + ", _syncStatus FROM " +
            DATABASE_TABLE + " WHERE " + whereClause;
    SharedPreferences sharedPref = PreferenceManager.getDefaultSharedPreferences(mCtx);
    if (sharedPref.getBoolean("reverse_order", false)) {
        queryString += " order by " + KEY_ROWID + " desc";
    }
    return mDb.rawQuery(queryString, null);
}

/** Single-field filtered line listing; same caveats as the three-field overload. */
public Cursor fetchWhere(String field1, String whereClause) {
    String queryString = "SELECT " + KEY_ROWID + ", '" + field1 + "' as columnName1, " + field1 + ", " + GUID + ", _syncStatus FROM " + DATABASE_TABLE + " WHERE " + whereClause;
    SharedPreferences sharedPref = PreferenceManager.getDefaultSharedPreferences(mCtx);
    if (sharedPref.getBoolean("reverse_order", false)) {
        queryString += " order by " + KEY_ROWID + " desc";
    }
    return mDb.rawQuery(queryString, null);
}

/**
 * Returns (row id, _updateStamp) pairs matching the given WHERE clause,
 * positioned on the first row. Used to compare local and cloud timestamps.
 */
public Cursor fetchIdAndStamp(String where) throws SQLException {
    String[] columns = new String[] {KEY_ROWID, "_updateStamp"};
    Cursor mCursor = mDb.query(true, DATABASE_TABLE, columns, where, null, null, null, null, null);
    if (mCursor != null) {
        mCursor.moveToFirst();
    }
    return mCursor;
}

/**
 * Returns all row ids, optionally restricted to records not yet synced
 * (_syncStatus != 1), positioned on the first row.
 */
public Cursor fetchAllIds(boolean unsyncedOnly) throws SQLException {
    String[] columns = new String[] {KEY_ROWID};
    String whereClause;
    if (unsyncedOnly) {
        whereClause="_syncStatus != 1";
    } else {
        // Always-true clause: select everything.
        whereClause="1 = 1";
    }
    Cursor mCursor = mDb.query(true, DATABASE_TABLE, columns, whereClause, null, null, null, null, null);
    if (mCursor != null) {
        mCursor.moveToFirst();
    }
    return mCursor;
}

/**
 * Returns every column (data fields, GUID, bookkeeping, and FKEY for related
 * tables) for rows matching the given WHERE clause.
 */
public Cursor fetchWhere_all(String where) throws SQLException {
    String[] columns;
    if (this.isRelatedTable) {
        columns = new String[formMetadata.DataFields.size() + 5];
    } else {
        columns = new String[formMetadata.DataFields.size() + 4];
    }
    for (int x=0; x<formMetadata.DataFields.size() + 1; x++) {
        if (x==0)
            columns[x]=KEY_ROWID;
        else
            columns[x]=formMetadata.DataFields.get(x-1).getName();
    }
    columns[formMetadata.DataFields.size() + 1]=GUID;
    columns[formMetadata.DataFields.size() + 2]="_updateStamp";
    columns[formMetadata.DataFields.size() + 3]="_syncStatus";
    if
(this.isRelatedTable) { columns[formMetadata.DataFields.size() + 4]="FKEY"; } Cursor mCursor = mDb.query(true, DATABASE_TABLE, columns, where, null, null, null, null, null); if (mCursor != null) { mCursor.moveToFirst(); } return mCursor; } public Cursor getFrequencyWhere(String field, String where) throws SQLException { String[] columns = new String[] {field, "COUNT(*)"}; Cursor mCursor = mDb.query(false, DATABASE_TABLE, columns, where, null, field, null, null, null); if (mCursor != null) { mCursor.moveToFirst(); } return mCursor; } public Cursor getFrequency(String field, boolean reverseOrder) throws SQLException { String[] columns = new String[] {field, "COUNT(*)"}; Cursor mCursor; if (reverseOrder) { mCursor = mDb.query(false, DATABASE_TABLE, columns, null, null, field, null, field + " desc", null); } else { mCursor = mDb.query(false, DATABASE_TABLE, columns, null, null, field, null, field + " asc", null); } if (mCursor != null) { mCursor.moveToFirst(); } return mCursor; } public Cursor getFieldValues(String field) throws SQLException { String[] columns = new String[] {field}; Cursor mCursor = mDb.query(false, DATABASE_TABLE, columns, null, null, null, null, null, null); if (mCursor != null) { mCursor.moveToFirst(); } return mCursor; } public List<String> getDistinctFieldValues(String field) throws SQLException { String[] columns = new String[] {field}; List<String> values = new ArrayList<String>(); Stack<String> stack = new Stack<String>(); Cursor mCursor = mDb.query(true, DATABASE_TABLE, columns, null, null, null, null, null, null); if (mCursor != null) { if (mCursor.moveToFirst()) { do { stack.push(mCursor.getString(0)); } while (mCursor.moveToNext()); } } mCursor.close(); while (!stack.empty()) { values.add(stack.pop()); } return values; } public String getFieldValue(String field, String guid) throws SQLException { String[] columns = new String[] {field}; String value = ""; Stack<String> stack = new Stack<String>(); Cursor mCursor = mDb.query(false, 
DATABASE_TABLE, columns, GUID + "= '" + guid + "'", null, null, null, null, null); if (mCursor != null) { if (mCursor.moveToFirst()) { value = mCursor.getString(0); } } mCursor.close(); return value; } public Cursor getNumericValues(String field) throws SQLException { String[] columns = new String[] {field}; Cursor mCursor = mDb.query(false, DATABASE_TABLE, columns, field + " < " + Double.MAX_VALUE, null, null, null, null, null); if (mCursor != null) { mCursor.moveToFirst(); } return mCursor; } public boolean updateRecord(long rowId, ContentValues args, boolean sendToCloud) { args.put("_updateStamp", new Date().getTime()); args.put("_syncStatus", 0); boolean retVal = mDb.update(DATABASE_TABLE, args, KEY_ROWID + "=" + rowId, null) > 0; if (sendToCloud) { SharedPreferences sharedPref = PreferenceManager.getDefaultSharedPreferences(mCtx); if (sharedPref.getBoolean("cloud_sync_save", false) && !sharedPref.getBoolean("sync_down_only", false)) { if (!sharedPref.getString("cloud_service", "").equals("Box") || BoxClient.isAuthenticated(mCtx)) { new CloudRecordUpdator().executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, rowId); } } } return retVal; } public boolean updateSyncStatus(long rowId) { ContentValues args = new ContentValues(); args.put("_syncStatus", 1); boolean retVal = mDb.update(DATABASE_TABLE, args, KEY_ROWID + "=" + rowId, null) > 0; return retVal; } public boolean resetSyncStatus() { ContentValues args = new ContentValues(); args.put("_syncStatus", 0); boolean retVal = mDb.update(DATABASE_TABLE, args, null, null) > 0; return retVal; } public boolean updateRevision(String guid, String rev) { ContentValues args = new ContentValues(); args.put("_rev", rev); boolean retVal = mDb.update(DATABASE_TABLE, args, GUID + "= '" + guid + "'", null) > 0; return retVal; } private class CloudRecordDeletor extends AsyncTask<String,Void, Integer> { @Override protected Integer doInBackground(String... 
params) { deleteCloudRecord(params[0]); return 0; } } private void deleteCloudRecord(String guidValue) { ICloudClient cloudClient = CloudFactory.GetCloudClient(DATABASE_TABLE, formMetadata.GetSurveyId(), this, mCtx); try { cloudClient.deleteRecord(guidValue); } catch ( Exception exception ) { System.out.println( "EXCEPTION = " + exception ); } } private class CloudRecordUpdator extends AsyncTask<Object,Void, Boolean> { private long recordId; @Override protected Boolean doInBackground(Object... params) { recordId = (Long)params[0]; return SendRecordToCloud(recordId) == 1; } @Override protected void onPostExecute(Boolean success) { if (success) { updateSyncStatus(recordId); try { ((RecordList)mCtx).fillData(); } catch (Exception ex) { } } } } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.solr;

import java.io.UnsupportedEncodingException;
import java.net.URL;
import java.net.URLDecoder;

import org.apache.camel.Consumer;
import org.apache.camel.Processor;
import org.apache.camel.Producer;
import org.apache.camel.impl.DefaultEndpoint;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.UriEndpoint;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriPath;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.impl.ConcurrentUpdateSolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;

/**
 * Represents a Solr endpoint.
 */
@UriEndpoint(scheme = "solr,solrs,solrCloud", title = "Solr", syntax = "solr:url", producerOnly = true, label = "monitoring,search")
public class SolrEndpoint extends DefaultEndpoint {

    private String scheme = "http://";

    @UriPath(description = "Hostname and port for the solr server") @Metadata(required = "true")
    private String url;
    @UriParam(defaultValue = "" + SolrConstants.DEFUALT_STREAMING_QUEUE_SIZE)
    private int streamingQueueSize = SolrConstants.DEFUALT_STREAMING_QUEUE_SIZE;
    @UriParam(defaultValue = "" + SolrConstants.DEFAULT_STREAMING_THREAD_COUNT)
    private int streamingThreadCount = SolrConstants.DEFAULT_STREAMING_THREAD_COUNT;
    @UriParam
    private Integer maxRetries;
    @UriParam
    private Integer soTimeout;
    @UriParam
    private Integer connectionTimeout;
    @UriParam
    private Integer defaultMaxConnectionsPerHost;
    @UriParam
    private Integer maxTotalConnections;
    @UriParam
    private Boolean followRedirects;
    @UriParam
    private Boolean allowCompression;
    @UriParam(label = "solrCloud")
    private String zkHost;
    @UriParam(label = "solrCloud")
    private String collection;
    @UriParam
    private String requestHandler;

    public SolrEndpoint(String endpointUri, SolrComponent component, String address) throws Exception {
        super(endpointUri, component);
        // The "solrs" scheme selects TLS.
        if (endpointUri.startsWith("solrs")) {
            scheme = "https://";
        }
        // Constructing a java.net.URL validates the address; the local variable
        // deliberately shadows the "url" field it initializes.
        URL url = new URL(scheme + address);
        this.url = url.toString();
    }

    /**
     * Set the ZooKeeper host information which the solrCloud could use, such as "zkhost=localhost:8123".
     */
    public void setZkHost(String zkHost) throws UnsupportedEncodingException {
        // The value arrives URL-encoded from the endpoint URI; decode it once.
        String decoded = URLDecoder.decode(zkHost, "UTF-8");
        this.zkHost = decoded;
    }

    public String getZkHost() {
        return this.zkHost;
    }

    /**
     * Set the collection name which the solrCloud server could use
     */
    public void setCollection(String collection) {
        this.collection = collection;
    }

    public String getCollection() {
        return this.collection;
    }

    @Override
    public SolrComponent getComponent() {
        return (SolrComponent) super.getComponent();
    }

    // Returns a CloudSolrClient only when BOTH zkHost and collection are set;
    // otherwise null, which callers interpret as "not in solrCloud mode".
    private CloudSolrClient getCloudServer() {
        CloudSolrClient rVal = null;
        if (this.getZkHost() != null && this.getCollection() != null) {
            rVal = new CloudSolrClient(zkHost);
            rVal.setDefaultCollection(this.getCollection());
        }
        return rVal;
    }

    @Override
    public Producer createProducer() throws Exception {
        // do we have servers?
        SolrComponent.SolrServerReference ref = getComponent().getSolrServers(this);
        if (ref == null) {
            // no then create new servers
            ref = new SolrComponent.SolrServerReference();
            CloudSolrClient cloudServer = getCloudServer();
            if (cloudServer == null) {
                // Plain HTTP mode: one synchronous client plus a streaming
                // (concurrent-update) client for batched updates.
                HttpSolrClient solrServer = new HttpSolrClient(url);
                ConcurrentUpdateSolrClient solrStreamingServer = new ConcurrentUpdateSolrClient(url, streamingQueueSize, streamingThreadCount);
                // set the properties on the solr server
                if (maxRetries != null) {
                    solrServer.setMaxRetries(maxRetries);
                }
                if (soTimeout != null) {
                    solrServer.setSoTimeout(soTimeout);
                }
                if (connectionTimeout != null) {
                    solrServer.setConnectionTimeout(connectionTimeout);
                }
                if (defaultMaxConnectionsPerHost != null) {
                    solrServer.setDefaultMaxConnectionsPerHost(defaultMaxConnectionsPerHost);
                }
                if (maxTotalConnections != null) {
                    solrServer.setMaxTotalConnections(maxTotalConnections);
                }
                if (followRedirects != null) {
                    solrServer.setFollowRedirects(followRedirects);
                }
                if (allowCompression != null) {
                    solrServer.setAllowCompression(allowCompression);
                }
                ref.setSolrServer(solrServer);
                ref.setUpdateSolrServer(solrStreamingServer);
            }
            ref.setCloudSolrServer(cloudServer);
            // Register the shared, reference-counted servers with the component.
            getComponent().addSolrServers(this, ref);
        }
        ref.addReference();
        return new SolrProducer(this, ref.getSolrServer(), ref.getUpdateSolrServer(), ref.getCloudSolrServer());
    }

    // Called by SolrProducer on shutdown; releases this producer's reference and
    // shuts the shared servers down once the count drops to zero.
    protected void onProducerShutdown(SolrProducer producer) {
        SolrComponent.SolrServerReference ref = getComponent().getSolrServers(this);
        if (ref != null) {
            int counter = ref.decReference();
            if (counter <= 0) {
                getComponent().shutdownServers(ref, true);
            }
        }
    }

    @Override
    public Consumer createConsumer(Processor processor) throws Exception {
        throw new UnsupportedOperationException("Consumer not supported for Solr endpoint.");
    }

    @Override
    public boolean isSingleton() {
        return true;
    }

    /**
     * Set the request handler to be used
     */
    public void setRequestHandler(String requestHandler) {
        this.requestHandler = requestHandler;
    }

    public String getRequestHandler() {
        return requestHandler;
    }

    public int getStreamingThreadCount() {
        return streamingThreadCount;
    }

    /**
     * Set the number of threads for the StreamingUpdateSolrServer
     */
    public void setStreamingThreadCount(int streamingThreadCount) {
        this.streamingThreadCount = streamingThreadCount;
    }

    public int getStreamingQueueSize() {
        return streamingQueueSize;
    }

    /**
     * Set the queue size for the StreamingUpdateSolrServer
     */
    public void setStreamingQueueSize(int streamingQueueSize) {
        this.streamingQueueSize = streamingQueueSize;
    }

    public Integer getMaxRetries() {
        return maxRetries;
    }

    /**
     * Maximum number of retries to attempt in the event of transient errors
     */
    public void setMaxRetries(Integer maxRetries) {
        this.maxRetries = maxRetries;
    }

    public Integer getSoTimeout() {
        return soTimeout;
    }

    /**
     * Read timeout on the underlying HttpConnectionManager. This is desirable for queries, but probably not for indexing
     */
    public void setSoTimeout(Integer soTimeout) {
        this.soTimeout = soTimeout;
    }

    public Integer getConnectionTimeout() {
        return connectionTimeout;
    }

    /**
     * connectionTimeout on the underlying HttpConnectionManager
     */
    public void setConnectionTimeout(Integer connectionTimeout) {
        this.connectionTimeout = connectionTimeout;
    }

    public Integer getDefaultMaxConnectionsPerHost() {
        return defaultMaxConnectionsPerHost;
    }

    /**
     * maxConnectionsPerHost on the underlying HttpConnectionManager
     */
    public void setDefaultMaxConnectionsPerHost(Integer defaultMaxConnectionsPerHost) {
        this.defaultMaxConnectionsPerHost = defaultMaxConnectionsPerHost;
    }

    public Integer getMaxTotalConnections() {
        return maxTotalConnections;
    }

    /**
     * maxTotalConnection on the underlying HttpConnectionManager
     */
    public void setMaxTotalConnections(Integer maxTotalConnections) {
        this.maxTotalConnections = maxTotalConnections;
    }

    public Boolean getFollowRedirects() {
        return followRedirects;
    }

    /**
     * indicates whether redirects are used to get to the Solr server
     */
    public void setFollowRedirects(Boolean followRedirects) {
        this.followRedirects = followRedirects;
    }

    public Boolean getAllowCompression() {
        return allowCompression;
    }

    /**
     * Server side must support gzip or deflate for this to have any effect
     */
    public void setAllowCompression(Boolean allowCompression) {
        this.allowCompression = allowCompression;
    }
}
/*
 * Copyright 2004-2009 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.compass.core.test.sort;

import org.compass.core.CompassHits;
import org.compass.core.CompassQuery;
import org.compass.core.CompassQuery.SortDirection;
import org.compass.core.CompassQuery.SortImplicitType;
import org.compass.core.CompassSession;
import org.compass.core.CompassTransaction;
import org.compass.core.config.CompassSettings;
import org.compass.core.lucene.LuceneEnvironment;
import org.compass.core.test.AbstractTestCase;

/**
 * Tests for Compass sorting: component-property sorts, implicit relevance/doc
 * sorts, and typed (string/int/float) property sorts, each in both directions.
 */
public class SortTests extends AbstractTestCase {

    @Override
    protected String[] getMappings() {
        return new String[] { "sort/sort.cpm.xml" };
    }

    @Override
    protected void addSettings(CompassSettings settings) {
        // set no concurrent operations so sort by id and relevance will work
        settings.setBooleanSetting(LuceneEnvironment.Transaction.Processor.ReadCommitted.CONCURRENT_OPERATIONS, false);
    }

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        // Index four A instances with varying string/int/float values and nested
        // B components; every test below asserts a specific ordering of ids 1-4.
        CompassSession session = openSession();
        CompassTransaction tr = session.beginTransaction();
        A a = new A(new Long(1), "aab test testA", 1, 1.1f, new B(new Integer(2), "aab"));
        session.save(a);
        a = new A(new Long(2), "aac test testA", 2, 1.0f, new B(new Integer(1), "aac"));
        session.save(a);
        a = new A(new Long(3), "bbc test testB", 10, -1.0f, new B(new Integer(3), "aaa"));
        session.save(a);
        a = new A(new Long(4), "zx test testB", -10, 1.3f, new B(new Integer(4), "aad"));
        session.save(a);
        tr.commit();
        session.close();
    }

    // Sorting on component properties, both on A itself and on the nested B.
    public void testSortComponent() {
        CompassSession session = openSession();
        CompassTransaction tr = session.beginTransaction();

        CompassHits hits = session.queryBuilder().queryString("test").toQuery()
                .addSort("a.id", SortDirection.AUTO).hits();
        assertEquals(4, hits.length());
        assertAId(1, 0, hits);
        assertAId(2, 1, hits);
        assertAId(3, 2, hits);
        assertAId(4, 3, hits);

        hits = session.queryBuilder().queryString("test").toQuery()
                .addSort("a.b.id", SortDirection.AUTO).hits();
        assertEquals(4, hits.length());
        assertAId(2, 0, hits);
        assertAId(1, 1, hits);
        assertAId(3, 2, hits);
        assertAId(4, 3, hits);

        hits = session.queryBuilder().queryString("test").toQuery()
                .addSort("a.value2", SortDirection.AUTO).hits();
        assertEquals(4, hits.length());
        assertAId(1, 0, hits);
        assertAId(2, 1, hits);
        assertAId(3, 2, hits);
        assertAId(4, 3, hits);

        hits = session.queryBuilder().queryString("test").toQuery()
                .addSort("a.b.value2", SortDirection.AUTO).hits();
        assertEquals(4, hits.length());
        assertAId(3, 0, hits);
        assertAId(1, 1, hits);
        assertAId(2, 2, hits);
        assertAId(4, 3, hits);

        tr.commit();
        session.close();
    }

    // Implicit sort by relevance score.
    public void testSortRelevance() {
        CompassSession session = openSession();
        CompassTransaction tr = session.beginTransaction();
        CompassQuery query = session.queryBuilder().queryString("test").toQuery();
        query.addSort(SortImplicitType.SCORE);
        CompassHits hits = query.hits();
        assertEquals(4, hits.length());
        assertAId(1, 0, hits);
        assertAId(2, 1, hits);
        assertAId(3, 2, hits);
        assertAId(4, 3, hits);
        tr.commit();
        session.close();
    }

    public void testSortRelevanceReverse() {
        CompassSession session = openSession();
        CompassTransaction tr = session.beginTransaction();
        // TODO WHY is it not reversed?
        CompassQuery query = session.queryBuilder().queryString("test").toQuery();
        query.addSort(SortImplicitType.SCORE, SortDirection.REVERSE);
        CompassHits hits = query.hits();
        assertEquals(4, hits.length());
        assertAId(1, 0, hits);
        assertAId(2, 1, hits);
        assertAId(3, 2, hits);
        assertAId(4, 3, hits);
        tr.commit();
        session.close();
    }

    // Implicit sort by Lucene document order (matches insertion order here).
    public void testSortDoc() {
        CompassSession session = openSession();
        CompassTransaction tr = session.beginTransaction();
        CompassQuery query = session.queryBuilder().queryString("test").toQuery();
        query.addSort(SortImplicitType.DOC);
        CompassHits hits = query.hits();
        assertEquals(4, hits.length());
        assertAId(1, 0, hits);
        assertAId(2, 1, hits);
        assertAId(3, 2, hits);
        assertAId(4, 3, hits);
        tr.commit();
        session.close();
    }

    public void testSortDocReverse() {
        CompassSession session = openSession();
        CompassTransaction tr = session.beginTransaction();
        CompassQuery query = session.queryBuilder().queryString("test").toQuery();
        query.addSort(SortImplicitType.DOC, SortDirection.REVERSE);
        CompassHits hits = query.hits();
        assertEquals(4, hits.length());
        assertAId(4, 0, hits);
        assertAId(3, 1, hits);
        assertAId(2, 2, hits);
        assertAId(1, 3, hits);
        tr.commit();
        session.close();
    }

    // Typed property sorts: string, int and float, each forward and reversed.
    public void testSortString() {
        CompassSession session = openSession();
        CompassTransaction tr = session.beginTransaction();
        CompassQuery query = session.queryBuilder().queryString("test").toQuery();
        query.addSort("value", CompassQuery.SortPropertyType.STRING);
        CompassHits hits = query.hits();
        assertEquals(4, hits.length());
        assertAId(1, 0, hits);
        assertAId(2, 1, hits);
        assertAId(3, 2, hits);
        assertAId(4, 3, hits);
        tr.commit();
        session.close();
    }

    public void testSortStringReverse() {
        CompassSession session = openSession();
        CompassTransaction tr = session.beginTransaction();
        CompassQuery query = session.queryBuilder().queryString("test").toQuery();
        query.addSort("value", CompassQuery.SortPropertyType.STRING, CompassQuery.SortDirection.REVERSE);
        CompassHits hits = query.hits();
        assertEquals(4, hits.length());
        assertAId(4, 0, hits);
        assertAId(3, 1, hits);
        assertAId(2, 2, hits);
        assertAId(1, 3, hits);
        tr.commit();
        session.close();
    }

    public void testSortInt() {
        CompassSession session = openSession();
        CompassTransaction tr = session.beginTransaction();
        CompassQuery query = session.queryBuilder().queryString("test").toQuery();
        query.addSort("intValue", CompassQuery.SortPropertyType.INT);
        CompassHits hits = query.hits();
        assertEquals(4, hits.length());
        assertAId(4, 0, hits);
        assertAId(1, 1, hits);
        assertAId(2, 2, hits);
        assertAId(3, 3, hits);
        tr.commit();
        session.close();
    }

    public void testSortIntReverse() {
        CompassSession session = openSession();
        CompassTransaction tr = session.beginTransaction();
        CompassQuery query = session.queryBuilder().queryString("test").toQuery();
        query.addSort("intValue", CompassQuery.SortPropertyType.INT, CompassQuery.SortDirection.REVERSE);
        CompassHits hits = query.hits();
        assertEquals(4, hits.length());
        assertAId(3, 0, hits);
        assertAId(2, 1, hits);
        assertAId(1, 2, hits);
        assertAId(4, 3, hits);
        tr.commit();
        session.close();
    }

    public void testSortFloat() {
        CompassSession session = openSession();
        CompassTransaction tr = session.beginTransaction();
        CompassQuery query = session.queryBuilder().queryString("test").toQuery();
        query.addSort("floatValue", CompassQuery.SortPropertyType.FLOAT);
        CompassHits hits = query.hits();
        assertEquals(4, hits.length());
        assertAId(3, 0, hits);
        assertAId(2, 1, hits);
        assertAId(1, 2, hits);
        assertAId(4, 3, hits);
        tr.commit();
        session.close();
    }

    public void testSortFloatReverse() {
        CompassSession session = openSession();
        CompassTransaction tr = session.beginTransaction();
        CompassQuery query = session.queryBuilder().queryString("test").toQuery();
        query.addSort("floatValue", CompassQuery.SortPropertyType.FLOAT, CompassQuery.SortDirection.REVERSE);
        CompassHits hits = query.hits();
        assertEquals(4, hits.length());
        assertAId(4, 0, hits);
        assertAId(1, 1, hits);
        assertAId(2, 2, hits);
        assertAId(3, 3, hits);
        tr.commit();
        session.close();
    }

    // Asserts that the hit at position hitNum is the A entity with the given id.
    private void assertAId(long id, int hitNum, CompassHits hits) {
        A a = (A) hits.data(hitNum);
        assertEquals(id, a.getId().longValue());
    }
}
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: grpc/job_master.proto
//
// NOTE(review): machine-generated protobuf message class. Do not hand-edit —
// any change here is overwritten by the next protoc run. To change the
// message, edit grpc/job_master.proto and regenerate.

package alluxio.grpc;

/**
 * Protobuf type {@code alluxio.grpc.job.CancelTaskCommand}
 *
 * <p>Immutable message with two optional scalar fields: jobId (int64, field 1)
 * and taskId (int32, field 2). Field presence is tracked in {@code bitField0_}
 * (bit 0 = jobId, bit 1 = taskId). Unrecognized wire fields are preserved in
 * {@code unknownFields}.
 */
public final class CancelTaskCommand extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:alluxio.grpc.job.CancelTaskCommand)
    CancelTaskCommandOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use CancelTaskCommand.newBuilder() to construct.
  private CancelTaskCommand(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default-instance constructor: both fields absent, zero-valued.
  private CancelTaskCommand() {
    jobId_ = 0L;
    taskId_ = 0;
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Parsing constructor: consumes tag/value pairs until end-of-stream (tag 0),
  // storing known fields and routing everything else into unknownFields.
  private CancelTaskCommand(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!parseUnknownField(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
          case 8: {  // field 1, varint: jobId
            bitField0_ |= 0x00000001;
            jobId_ = input.readInt64();
            break;
          }
          case 16: {  // field 2, varint: taskId
            bitField0_ |= 0x00000002;
            taskId_ = input.readInt32();
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return alluxio.grpc.JobMasterProto.internal_static_alluxio_grpc_job_CancelTaskCommand_descriptor;
  }

  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return alluxio.grpc.JobMasterProto.internal_static_alluxio_grpc_job_CancelTaskCommand_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            alluxio.grpc.CancelTaskCommand.class, alluxio.grpc.CancelTaskCommand.Builder.class);
  }

  private int bitField0_;

  public static final int JOBID_FIELD_NUMBER = 1;
  private long jobId_;
  /**
   * <code>optional int64 jobId = 1;</code>
   */
  public boolean hasJobId() {
    return ((bitField0_ & 0x00000001) == 0x00000001);
  }
  /**
   * <code>optional int64 jobId = 1;</code>
   */
  public long getJobId() {
    return jobId_;
  }

  public static final int TASKID_FIELD_NUMBER = 2;
  private int taskId_;
  /**
   * <code>optional int32 taskId = 2;</code>
   */
  public boolean hasTaskId() {
    return ((bitField0_ & 0x00000002) == 0x00000002);
  }
  /**
   * <code>optional int32 taskId = 2;</code>
   */
  public int getTaskId() {
    return taskId_;
  }

  // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields, so the message is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  public void writeTo(com.google.protobuf.CodedOutputStream output)
      throws java.io.IOException {
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      output.writeInt64(1, jobId_);
    }
    if (((bitField0_ & 0x00000002) == 0x00000002)) {
      output.writeInt32(2, taskId_);
    }
    unknownFields.writeTo(output);
  }

  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;  // cached from a previous call
    size = 0;
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      size += com.google.protobuf.CodedOutputStream
          .computeInt64Size(1, jobId_);
    }
    if (((bitField0_ & 0x00000002) == 0x00000002)) {
      size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(2, taskId_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof alluxio.grpc.CancelTaskCommand)) {
      return super.equals(obj);
    }
    alluxio.grpc.CancelTaskCommand other = (alluxio.grpc.CancelTaskCommand) obj;
    // Fields compare equal only when presence AND value match.
    boolean result = true;
    result = result && (hasJobId() == other.hasJobId());
    if (hasJobId()) {
      result = result && (getJobId() == other.getJobId());
    }
    result = result && (hasTaskId() == other.hasTaskId());
    if (hasTaskId()) {
      result = result && (getTaskId() == other.getTaskId());
    }
    result = result && unknownFields.equals(other.unknownFields);
    return result;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasJobId()) {
      hash = (37 * hash) + JOBID_FIELD_NUMBER;
      hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
          getJobId());
    }
    if (hasTaskId()) {
      hash = (37 * hash) + TASKID_FIELD_NUMBER;
      hash = (53 * hash) + getTaskId();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // ---- Static parse entry points (delegate to PARSER). ----

  public static alluxio.grpc.CancelTaskCommand parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static alluxio.grpc.CancelTaskCommand parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static alluxio.grpc.CancelTaskCommand parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static alluxio.grpc.CancelTaskCommand parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static alluxio.grpc.CancelTaskCommand parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static alluxio.grpc.CancelTaskCommand parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static alluxio.grpc.CancelTaskCommand parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static alluxio.grpc.CancelTaskCommand parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static alluxio.grpc.CancelTaskCommand parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static alluxio.grpc.CancelTaskCommand parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static alluxio.grpc.CancelTaskCommand parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static alluxio.grpc.CancelTaskCommand parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(alluxio.grpc.CancelTaskCommand prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() {
    // Avoid a needless mergeFrom when starting from the default instance.
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * Protobuf type {@code alluxio.grpc.job.CancelTaskCommand}
   *
   * <p>Mutable builder for {@link CancelTaskCommand}; mirrors the message's
   * presence bits in its own {@code bitField0_}.
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:alluxio.grpc.job.CancelTaskCommand)
      alluxio.grpc.CancelTaskCommandOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return alluxio.grpc.JobMasterProto.internal_static_alluxio_grpc_job_CancelTaskCommand_descriptor;
    }

    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return alluxio.grpc.JobMasterProto.internal_static_alluxio_grpc_job_CancelTaskCommand_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              alluxio.grpc.CancelTaskCommand.class, alluxio.grpc.CancelTaskCommand.Builder.class);
    }

    // Construct using alluxio.grpc.CancelTaskCommand.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    public Builder clear() {
      super.clear();
      jobId_ = 0L;
      bitField0_ = (bitField0_ & ~0x00000001);
      taskId_ = 0;
      bitField0_ = (bitField0_ & ~0x00000002);
      return this;
    }

    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return alluxio.grpc.JobMasterProto.internal_static_alluxio_grpc_job_CancelTaskCommand_descriptor;
    }

    public alluxio.grpc.CancelTaskCommand getDefaultInstanceForType() {
      return alluxio.grpc.CancelTaskCommand.getDefaultInstance();
    }

    public alluxio.grpc.CancelTaskCommand build() {
      alluxio.grpc.CancelTaskCommand result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    public alluxio.grpc.CancelTaskCommand buildPartial() {
      alluxio.grpc.CancelTaskCommand result = new alluxio.grpc.CancelTaskCommand(this);
      // Copy builder presence bits into the message's bitField0_.
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
        to_bitField0_ |= 0x00000001;
      }
      result.jobId_ = jobId_;
      if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
        to_bitField0_ |= 0x00000002;
      }
      result.taskId_ = taskId_;
      result.bitField0_ = to_bitField0_;
      onBuilt();
      return result;
    }

    public Builder clone() {
      return (Builder) super.clone();
    }
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return (Builder) super.setField(field, value);
    }
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof alluxio.grpc.CancelTaskCommand) {
        return mergeFrom((alluxio.grpc.CancelTaskCommand)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(alluxio.grpc.CancelTaskCommand other) {
      if (other == alluxio.grpc.CancelTaskCommand.getDefaultInstance()) return this;
      // Only set fields carry over; absent fields leave this builder untouched.
      if (other.hasJobId()) {
        setJobId(other.getJobId());
      }
      if (other.hasTaskId()) {
        setTaskId(other.getTaskId());
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    public final boolean isInitialized() {
      return true;
    }

    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      alluxio.grpc.CancelTaskCommand parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Keep whatever parsed before the failure, then rethrow.
        parsedMessage = (alluxio.grpc.CancelTaskCommand) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    private int bitField0_;

    private long jobId_ ;
    /**
     * <code>optional int64 jobId = 1;</code>
     */
    public boolean hasJobId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional int64 jobId = 1;</code>
     */
    public long getJobId() {
      return jobId_;
    }
    /**
     * <code>optional int64 jobId = 1;</code>
     */
    public Builder setJobId(long value) {
      bitField0_ |= 0x00000001;
      jobId_ = value;
      onChanged();
      return this;
    }
    /**
     * <code>optional int64 jobId = 1;</code>
     */
    public Builder clearJobId() {
      bitField0_ = (bitField0_ & ~0x00000001);
      jobId_ = 0L;
      onChanged();
      return this;
    }

    private int taskId_ ;
    /**
     * <code>optional int32 taskId = 2;</code>
     */
    public boolean hasTaskId() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional int32 taskId = 2;</code>
     */
    public int getTaskId() {
      return taskId_;
    }
    /**
     * <code>optional int32 taskId = 2;</code>
     */
    public Builder setTaskId(int value) {
      bitField0_ |= 0x00000002;
      taskId_ = value;
      onChanged();
      return this;
    }
    /**
     * <code>optional int32 taskId = 2;</code>
     */
    public Builder clearTaskId() {
      bitField0_ = (bitField0_ & ~0x00000002);
      taskId_ = 0;
      onChanged();
      return this;
    }
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:alluxio.grpc.job.CancelTaskCommand)
  }

  // @@protoc_insertion_point(class_scope:alluxio.grpc.job.CancelTaskCommand)
  private static final alluxio.grpc.CancelTaskCommand DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new alluxio.grpc.CancelTaskCommand();
  }

  public static alluxio.grpc.CancelTaskCommand getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  @java.lang.Deprecated public static final com.google.protobuf.Parser<CancelTaskCommand>
      PARSER = new com.google.protobuf.AbstractParser<CancelTaskCommand>() {
    public CancelTaskCommand parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
        return new CancelTaskCommand(input, extensionRegistry);
    }
  };

  public static com.google.protobuf.Parser<CancelTaskCommand> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<CancelTaskCommand> getParserForType() {
    return PARSER;
  }

  public alluxio.grpc.CancelTaskCommand getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/**
 * Autogenerated by Thrift Compiler (0.9.3)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 * @generated
 */
// NOTE(review): Thrift-generated struct with a single optional field,
// storm_assignment: map<string, Assignment>. Regenerate from the .thrift IDL
// instead of hand-editing; edits here are lost on regeneration.
package org.apache.storm.generated;

import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;
import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.protocol.TProtocolException;
import org.apache.thrift.EncodingUtils;
import org.apache.thrift.TException;
import org.apache.thrift.async.AsyncMethodCallback;
import org.apache.thrift.server.AbstractNonblockingServer.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import javax.annotation.Generated;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
public class SupervisorAssignments implements org.apache.thrift.TBase<SupervisorAssignments, SupervisorAssignments._Fields>, java.io.Serializable, Cloneable, Comparable<SupervisorAssignments> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("SupervisorAssignments");

  private static final org.apache.thrift.protocol.TField STORM_ASSIGNMENT_FIELD_DESC = new org.apache.thrift.protocol.TField("storm_assignment", org.apache.thrift.protocol.TType.MAP, (short)1);

  // Maps each scheme class to a factory for this struct's (de)serializer.
  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new SupervisorAssignmentsStandardSchemeFactory());
    schemes.put(TupleScheme.class, new SupervisorAssignmentsTupleSchemeFactory());
  }

  private Map<String,Assignment> storm_assignment; // optional

  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    STORM_ASSIGNMENT((short)1, "storm_assignment");

    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();

    static {
      for (_Fields field : EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // STORM_ASSIGNMENT
          return STORM_ASSIGNMENT;
        default:
          return null;
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }

    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(String name) {
      return byName.get(name);
    }

    private final short _thriftId;
    private final String _fieldName;

    _Fields(short thriftId, String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }

    public short getThriftFieldId() {
      return _thriftId;
    }

    public String getFieldName() {
      return _fieldName;
    }
  }

  // isset id assignments
  private static final _Fields optionals[] = {_Fields.STORM_ASSIGNMENT};
  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.STORM_ASSIGNMENT, new org.apache.thrift.meta_data.FieldMetaData("storm_assignment", org.apache.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP,
            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING),
            new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, Assignment.class))));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(SupervisorAssignments.class, metaDataMap);
  }

  public SupervisorAssignments() {
    this.storm_assignment = new HashMap<String,Assignment>();
  }

  /**
   * Performs a deep copy on <i>other</i>.
   */
  public SupervisorAssignments(SupervisorAssignments other) {
    if (other.is_set_storm_assignment()) {
      // Deep-copy each Assignment value; String keys are immutable and shared.
      Map<String,Assignment> __this__storm_assignment = new HashMap<String,Assignment>(other.storm_assignment.size());
      for (Map.Entry<String, Assignment> other_element : other.storm_assignment.entrySet()) {
        String other_element_key = other_element.getKey();
        Assignment other_element_value = other_element.getValue();
        String __this__storm_assignment_copy_key = other_element_key;
        Assignment __this__storm_assignment_copy_value = new Assignment(other_element_value);
        __this__storm_assignment.put(__this__storm_assignment_copy_key, __this__storm_assignment_copy_value);
      }
      this.storm_assignment = __this__storm_assignment;
    }
  }

  public SupervisorAssignments deepCopy() {
    return new SupervisorAssignments(this);
  }

  @Override
  public void clear() {
    this.storm_assignment = new HashMap<String,Assignment>();
  }

  public int get_storm_assignment_size() {
    return (this.storm_assignment == null) ? 0 : this.storm_assignment.size();
  }

  public void put_to_storm_assignment(String key, Assignment val) {
    if (this.storm_assignment == null) {
      this.storm_assignment = new HashMap<String,Assignment>();
    }
    this.storm_assignment.put(key, val);
  }

  public Map<String,Assignment> get_storm_assignment() {
    return this.storm_assignment;
  }

  public void set_storm_assignment(Map<String,Assignment> storm_assignment) {
    this.storm_assignment = storm_assignment;
  }

  public void unset_storm_assignment() {
    this.storm_assignment = null;
  }

  /** Returns true if field storm_assignment is set (has been assigned a value) and false otherwise */
  public boolean is_set_storm_assignment() {
    return this.storm_assignment != null;
  }

  public void set_storm_assignment_isSet(boolean value) {
    if (!value) {
      this.storm_assignment = null;
    }
  }

  public void setFieldValue(_Fields field, Object value) {
    switch (field) {
    case STORM_ASSIGNMENT:
      if (value == null) {
        unset_storm_assignment();
      } else {
        set_storm_assignment((Map<String,Assignment>)value);
      }
      break;
    }
  }

  public Object getFieldValue(_Fields field) {
    switch (field) {
    case STORM_ASSIGNMENT:
      return get_storm_assignment();
    }
    throw new IllegalStateException();
  }

  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    }
    switch (field) {
    case STORM_ASSIGNMENT:
      return is_set_storm_assignment();
    }
    throw new IllegalStateException();
  }

  @Override
  public boolean equals(Object that) {
    if (that == null)
      return false;
    if (that instanceof SupervisorAssignments)
      return this.equals((SupervisorAssignments)that);
    return false;
  }

  public boolean equals(SupervisorAssignments that) {
    if (that == null)
      return false;
    // Equal when both unset, or both set with equal map contents.
    boolean this_present_storm_assignment = true && this.is_set_storm_assignment();
    boolean that_present_storm_assignment = true && that.is_set_storm_assignment();
    if (this_present_storm_assignment || that_present_storm_assignment) {
      if (!(this_present_storm_assignment && that_present_storm_assignment))
        return false;
      if (!this.storm_assignment.equals(that.storm_assignment))
        return false;
    }
    return true;
  }

  @Override
  public int hashCode() {
    List<Object> list = new ArrayList<Object>();
    boolean present_storm_assignment = true && (is_set_storm_assignment());
    list.add(present_storm_assignment);
    if (present_storm_assignment)
      list.add(storm_assignment);
    return list.hashCode();
  }

  @Override
  public int compareTo(SupervisorAssignments other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }
    int lastComparison = 0;
    // Order first by isset flag, then by field value.
    lastComparison = Boolean.valueOf(is_set_storm_assignment()).compareTo(other.is_set_storm_assignment());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (is_set_storm_assignment()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.storm_assignment, other.storm_assignment);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }

  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }

  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }

  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("SupervisorAssignments(");
    boolean first = true;
    if (is_set_storm_assignment()) {
      sb.append("storm_assignment:");
      if (this.storm_assignment == null) {
        sb.append("null");
      } else {
        sb.append(this.storm_assignment);
      }
      first = false;
    }
    sb.append(")");
    return sb.toString();
  }

  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    // check for sub-struct validity
  }

  // Java serialization is delegated to the Thrift compact protocol.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private static class SupervisorAssignmentsStandardSchemeFactory implements SchemeFactory {
    public SupervisorAssignmentsStandardScheme getScheme() {
      return new SupervisorAssignmentsStandardScheme();
    }
  }

  private static class SupervisorAssignmentsStandardScheme extends StandardScheme<SupervisorAssignments> {

    public void read(org.apache.thrift.protocol.TProtocol iprot, SupervisorAssignments struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true) {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 1: // STORM_ASSIGNMENT
            if (schemeField.type == org.apache.thrift.protocol.TType.MAP) {
              {
                org.apache.thrift.protocol.TMap _map886 = iprot.readMapBegin();
                struct.storm_assignment = new HashMap<String,Assignment>(2*_map886.size);
                String _key887;
                Assignment _val888;
                for (int _i889 = 0; _i889 < _map886.size; ++_i889) {
                  _key887 = iprot.readString();
                  _val888 = new Assignment();
                  _val888.read(iprot);
                  struct.storm_assignment.put(_key887, _val888);
                }
                iprot.readMapEnd();
              }
              struct.set_storm_assignment_isSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      struct.validate();
    }

    public void write(org.apache.thrift.protocol.TProtocol oprot, SupervisorAssignments struct) throws org.apache.thrift.TException {
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      if (struct.storm_assignment != null) {
        if (struct.is_set_storm_assignment()) {
          oprot.writeFieldBegin(STORM_ASSIGNMENT_FIELD_DESC);
          {
            oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRUCT, struct.storm_assignment.size()));
            for (Map.Entry<String, Assignment> _iter890 : struct.storm_assignment.entrySet()) {
              oprot.writeString(_iter890.getKey());
              _iter890.getValue().write(oprot);
            }
            oprot.writeMapEnd();
          }
          oprot.writeFieldEnd();
        }
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }

  }

  private static class SupervisorAssignmentsTupleSchemeFactory implements SchemeFactory {
    public SupervisorAssignmentsTupleScheme getScheme() {
      return new SupervisorAssignmentsTupleScheme();
    }
  }

  private static class SupervisorAssignmentsTupleScheme extends TupleScheme<SupervisorAssignments> {

    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, SupervisorAssignments struct) throws org.apache.thrift.TException {
      TTupleProtocol oprot = (TTupleProtocol) prot;
      // One-bit presence bitmap precedes the optional field.
      BitSet optionals = new BitSet();
      if (struct.is_set_storm_assignment()) {
        optionals.set(0);
      }
      oprot.writeBitSet(optionals, 1);
      if (struct.is_set_storm_assignment()) {
        {
          oprot.writeI32(struct.storm_assignment.size());
          for (Map.Entry<String, Assignment> _iter891 : struct.storm_assignment.entrySet()) {
            oprot.writeString(_iter891.getKey());
            _iter891.getValue().write(oprot);
          }
        }
      }
    }

    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, SupervisorAssignments struct) throws org.apache.thrift.TException {
      TTupleProtocol iprot = (TTupleProtocol) prot;
      BitSet incoming = iprot.readBitSet(1);
      if (incoming.get(0)) {
        {
          org.apache.thrift.protocol.TMap _map892 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
          struct.storm_assignment = new HashMap<String,Assignment>(2*_map892.size);
          String _key893;
          Assignment _val894;
          for (int _i895 = 0; _i895 < _map892.size; ++_i895) {
            _key893 = iprot.readString();
            _val894 = new Assignment();
            _val894.read(iprot);
            struct.storm_assignment.put(_key893, _val894);
          }
        }
        struct.set_storm_assignment_isSet(true);
      }
    }
  }

}
package uk.ac.ebi.phis.jaxb;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.datatype.XMLGregorianCalendar;
import javax.xml.bind.annotation.XmlType;


/**
 * <p>Java class for Roi complex type.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType name="Roi">
 *   &lt;complexContent>
 *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *       &lt;all>
 *         &lt;element name="id" type="{http://www.w3.org/2001/XMLSchema}string"/>
 *         &lt;element name="associated_image" type="{http://www.w3.org/2001/XMLSchema}string"/>
 *         &lt;element name="associated_channel" type="{http://www.example.org/phisSchema}StringArray" minOccurs="0"/>
 *         &lt;element name="coordinates" type="{http://www.example.org/phisSchema}Coordinates"/>
 *         &lt;element name="user_owner" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
 *         &lt;element name="user_group" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
 *         &lt;element name="creation_date" type="{http://www.w3.org/2001/XMLSchema}dateTime" minOccurs="0"/>
 *         &lt;element name="edit_date" type="{http://www.w3.org/2001/XMLSchema}dateTime" minOccurs="0"/>
 *         &lt;element name="depicted_anatomical_structure" type="{http://www.example.org/phisSchema}ExpressionAnnotationArray" minOccurs="0"/>
 *         &lt;element name="is_expression_pattern" type="{http://www.w3.org/2001/XMLSchema}boolean" minOccurs="0"/>
 *         &lt;element name="abnormality_in_anatomical_structure" type="{http://www.example.org/phisSchema}AnnotationArray" minOccurs="0"/>
 *         &lt;element name="phenotype_annotations" type="{http://www.example.org/phisSchema}AnnotationArray" minOccurs="0"/>
 *         &lt;element name="observations" type="{http://www.example.org/phisSchema}StringArray" minOccurs="0"/>
 *       &lt;/all>
 *     &lt;/restriction>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 *
 * NOTE(review): this looks like a JAXB (xjc-style) generated binding class —
 * presumably regenerated from the phisSchema XSD; verify before hand-editing.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "Roi", namespace = "http://www.example.org/phisSchema", propOrder = {

})
public class Roi {

    // Required XML elements.
    @XmlElement(required = true)
    protected String id;
    @XmlElement(name = "associated_image", required = true)
    protected String associatedImage;
    // Optional elements (minOccurs="0" in the schema): null when absent.
    @XmlElement(name = "associated_channel")
    protected StringArray associatedChannel;
    @XmlElement(required = true)
    protected Coordinates coordinates;
    @XmlElement(name = "user_owner")
    protected String userOwner;
    @XmlElement(name = "user_group")
    protected String userGroup;
    @XmlElement(name = "creation_date")
    @XmlSchemaType(name = "dateTime")
    protected XMLGregorianCalendar creationDate;
    @XmlElement(name = "edit_date")
    @XmlSchemaType(name = "dateTime")
    protected XMLGregorianCalendar editDate;
    @XmlElement(name = "depicted_anatomical_structure")
    protected ExpressionAnnotationArray depictedAnatomicalStructure;
    @XmlElement(name = "is_expression_pattern")
    protected Boolean isExpressionPattern;
    @XmlElement(name = "abnormality_in_anatomical_structure")
    protected AnnotationArray abnormalityInAnatomicalStructure;
    @XmlElement(name = "phenotype_annotations")
    protected AnnotationArray phenotypeAnnotations;
    // Mapped by field name; no @XmlElement customization needed.
    protected StringArray observations;

    /** Gets the value of the id property (possible object is {@link String }). */
    public String getId() {
        return id;
    }

    /** Sets the value of the id property (allowed object is {@link String }). */
    public void setId(String value) {
        this.id = value;
    }

    /** Gets the value of the associatedImage property (possible object is {@link String }). */
    public String getAssociatedImage() {
        return associatedImage;
    }

    /** Sets the value of the associatedImage property (allowed object is {@link String }). */
    public void setAssociatedImage(String value) {
        this.associatedImage = value;
    }

    /** Gets the value of the associatedChannel property (possible object is {@link StringArray }). */
    public StringArray getAssociatedChannel() {
        return associatedChannel;
    }

    /** Sets the value of the associatedChannel property (allowed object is {@link StringArray }). */
    public void setAssociatedChannel(StringArray value) {
        this.associatedChannel = value;
    }

    /** Gets the value of the coordinates property (possible object is {@link Coordinates }). */
    public Coordinates getCoordinates() {
        return coordinates;
    }

    /** Sets the value of the coordinates property (allowed object is {@link Coordinates }). */
    public void setCoordinates(Coordinates value) {
        this.coordinates = value;
    }

    /** Gets the value of the userOwner property (possible object is {@link String }). */
    public String getUserOwner() {
        return userOwner;
    }

    /** Sets the value of the userOwner property (allowed object is {@link String }). */
    public void setUserOwner(String value) {
        this.userOwner = value;
    }

    /** Gets the value of the userGroup property (possible object is {@link String }). */
    public String getUserGroup() {
        return userGroup;
    }

    /** Sets the value of the userGroup property (allowed object is {@link String }). */
    public void setUserGroup(String value) {
        this.userGroup = value;
    }

    /** Gets the value of the creationDate property (possible object is {@link XMLGregorianCalendar }). */
    public XMLGregorianCalendar getCreationDate() {
        return creationDate;
    }

    /** Sets the value of the creationDate property (allowed object is {@link XMLGregorianCalendar }). */
    public void setCreationDate(XMLGregorianCalendar value) {
        this.creationDate = value;
    }

    /** Gets the value of the editDate property (possible object is {@link XMLGregorianCalendar }). */
    public XMLGregorianCalendar getEditDate() {
        return editDate;
    }

    /** Sets the value of the editDate property (allowed object is {@link XMLGregorianCalendar }). */
    public void setEditDate(XMLGregorianCalendar value) {
        this.editDate = value;
    }

    /** Gets the value of the depictedAnatomicalStructure property (possible object is {@link ExpressionAnnotationArray }). */
    public ExpressionAnnotationArray getDepictedAnatomicalStructure() {
        return depictedAnatomicalStructure;
    }

    /** Sets the value of the depictedAnatomicalStructure property (allowed object is {@link ExpressionAnnotationArray }). */
    public void setDepictedAnatomicalStructure(ExpressionAnnotationArray value) {
        this.depictedAnatomicalStructure = value;
    }

    /** Gets the value of the isExpressionPattern property (possible object is {@link Boolean }). */
    public Boolean isIsExpressionPattern() {
        return isExpressionPattern;
    }

    /** Sets the value of the isExpressionPattern property (allowed object is {@link Boolean }). */
    public void setIsExpressionPattern(Boolean value) {
        this.isExpressionPattern = value;
    }

    /** Gets the value of the abnormalityInAnatomicalStructure property (possible object is {@link AnnotationArray }). */
    public AnnotationArray getAbnormalityInAnatomicalStructure() {
        return abnormalityInAnatomicalStructure;
    }

    /** Sets the value of the abnormalityInAnatomicalStructure property (allowed object is {@link AnnotationArray }). */
    public void setAbnormalityInAnatomicalStructure(AnnotationArray value) {
        this.abnormalityInAnatomicalStructure = value;
    }

    /** Gets the value of the phenotypeAnnotations property (possible object is {@link AnnotationArray }). */
    public AnnotationArray getPhenotypeAnnotations() {
        return phenotypeAnnotations;
    }

    /** Sets the value of the phenotypeAnnotations property (allowed object is {@link AnnotationArray }). */
    public void setPhenotypeAnnotations(AnnotationArray value) {
        this.phenotypeAnnotations = value;
    }

    /** Gets the value of the observations property (possible object is {@link StringArray }). */
    public StringArray getObservations() {
        return observations;
    }

    /** Sets the value of the observations property (allowed object is {@link StringArray }). */
    public void setObservations(StringArray value) {
        this.observations = value;
    }

}
package gnu.kawa.reflect;
import gnu.bytecode.*;
import gnu.mapping.*;
import gnu.mapping.Location;  // As opposed to gnu.bytecode.Location.
import gnu.expr.*;

/** A Location whose value is backed by a (possibly static) Java field.
 * The field may itself hold a Location, in which case reads/writes go
 * through an extra level of indirection.  Kind information (procedure,
 * syntax, constant, indirect) is computed lazily and cached in
 * <code>flags</code>; constant values are cached in <code>value</code>. */
public class FieldLocation extends ClassMemberLocation
{
  Declaration decl;

  /** The cached location of the field, if final.
   * This is the value of this Location.  However, if INDIRECT_LOCATION is
   * set and CONSTANT is cleared, then we need to do an extra indirection. */
  Object value;

  static final int SETUP_DONE = 1; // FIXME - do we still need this?

  /** Flag that indicates that field value has type Location.
   * Hence <code>get</code> of this Location requires an extra indirection. */
  static final int INDIRECT_LOCATION = 2;

  /** The actual value (following any indirection) is constant.
   * I.e. if INDIRECT_LOCATION is set, then that Location has isConstant set,
   * Otherwise the actual value is a final field. */
  static final int CONSTANT = 4;

  /** Flag that indicates that the value field has been set.
   * If INDIRECT_LOCATION has been set, but not CONSTANT, then
   * the <code>value</code> is a Location we need to indirect.
   * If CONSTANT is set, then this is the actual (final) value.
   * Not set unless at least one of INDIRECT_LOCATION or CONSTANT are set. */
  static final int VALUE_SET = 8;

  // The PROCEDURE and SYNTAX flags aren't current used by getDeclaration,
  // but probably should be, assuming we can count on them.
  public static final int PROCEDURE = 16;
  public static final int SYNTAX = 32;

  /** True if the flags <code>PROCEDURE|SYNTAX|INDIRECT_LOCATION|CONSTANT</code>
   * are valid. */
  public static final int KIND_FLAGS_SET = 64;

  private int flags;

  public boolean isIndirectLocation ()
  { return (flags & INDIRECT_LOCATION) != 0; }

  // Caller asserts the field holds a Procedure; mark kind flags as computed.
  public void setProcedure ()
  {
    flags |= PROCEDURE|CONSTANT|KIND_FLAGS_SET;
  }

  // Caller asserts the field holds a Syntax; mark kind flags as computed.
  public void setSyntax ()
  {
    flags |= SYNTAX|CONSTANT|KIND_FLAGS_SET;
  }

  /** Lazily compute PROCEDURE/SYNTAX/INDIRECT_LOCATION/CONSTANT flags
   * by reflecting on the backing field (and, for an indirect Location,
   * possibly recursing into the Location it holds).
   * Always sets KIND_FLAGS_SET as a final step. */
  void setKindFlags ()
  {
    String fname = getMemberName();
    gnu.bytecode.Field fld = getDeclaringClass().getDeclaredField(fname);
    int fflags = fld.getModifiers();
    Type ftype = fld.getType();
    if (ftype.isSubtype(Compilation.typeLocation))
      flags |= INDIRECT_LOCATION;
    if ((fflags & Access.FINAL) != 0)
      {
        if ((flags & INDIRECT_LOCATION) == 0)
          {
            // A final, non-Location field: the field value itself is the
            // constant value of this Location.
            flags |= CONSTANT;
            if (ftype.isSubtype(Compilation.typeProcedure))
              flags |= PROCEDURE;
            if (ftype instanceof ClassType
                && ((ClassType) ftype).isSubclass("kawa.lang.Syntax"))
              flags |= SYNTAX;
          }
        else
          {
            // A final field holding a Location: inspect that Location.
            Location loc = (Location) getFieldValue();
            if (loc instanceof FieldLocation)
              {
                FieldLocation floc = (FieldLocation) loc;
                if ((floc.flags & KIND_FLAGS_SET) == 0)
                  floc.setKindFlags();
                // Inherit kind flags from the nested FieldLocation.
                flags |= (floc.flags & (SYNTAX|PROCEDURE|CONSTANT));
                if ((floc.flags & CONSTANT) != 0)
                  {
                    if ((floc.flags & VALUE_SET) != 0)
                      {
                        // Nested location's constant value already known;
                        // cache it directly here.
                        value = floc.value;
                        flags |= VALUE_SET;
                      }
                  }
                else
                  {
                    // Not constant: cache the Location itself for later
                    // indirection in get()/set().
                    value = floc;
                    flags |= VALUE_SET;
                  }
              }
            else if (loc.isConstant())
              {
                Object val = loc.get(null);
                // if (val == null) ????;
                if (val instanceof Procedure)
                  flags |= PROCEDURE;
                if (val instanceof kawa.lang.Syntax) // FIXME
                  flags |= SYNTAX;
                flags |= CONSTANT|VALUE_SET;
                value = val;
              }
          }
      }
    flags |= KIND_FLAGS_SET;
  }

  public boolean isProcedureOrSyntax ()
  {
    if ((flags & KIND_FLAGS_SET) == 0)
      setKindFlags();
    return (flags & (PROCEDURE+SYNTAX)) != 0;
  }

  public FieldLocation(Object instance, String cname, String fname)
  {
    super(instance, ClassType.make(cname), fname);
  }

  public FieldLocation(Object instance, ClassType type, String mname)
  {
    super(instance, type, mname);
  }

  public FieldLocation (Object instance, java.lang.reflect.Field field)
  {
    super(instance, field);
    type = (ClassType) Type.make(field.getDeclaringClass());
  }

  public void setDeclaration (Declaration decl)
  {
    this.decl = decl;
  }

  public Field getField ()
  {
    return type.getDeclaredField(mname);
  }

  /** Get the type of the field. */
  public Type getFType ()
  {
    return type.getDeclaredField(mname).getType();
  }

  /** Return (lazily resolving and caching) the Declaration for this field.
   * Searches the declaring module's ModuleExp for a Declaration whose
   * field name matches this field.
   * @throws RuntimeException if the module has no matching Declaration. */
  public synchronized Declaration getDeclaration ()
  {
    if ((flags & KIND_FLAGS_SET) == 0)
      setKindFlags();
    Declaration d = decl;
    if (d == null)
      {
        String fname = getMemberName();
        ClassType t = getDeclaringClass();
        gnu.bytecode.Field procField = t.getDeclaredField(fname);
        if (procField == null)
          return null;
        ModuleInfo info = ModuleInfo.find(t);
        ModuleExp mexp = info.getModuleExp();
        for (d = mexp.firstDecl();  d != null;  d = d.nextDecl())
          {
            if (d.field != null && d.field.getName().equals(fname))
              break;
          }
        if (d == null)
          throw new RuntimeException("no field found for "+this);
        decl = d;
      }
    return d;
  }

  // Idempotent: resolves the reflective field and kind flags exactly once.
  void setup ()
  {
    synchronized (this)
      {
        if ((flags & SETUP_DONE) != 0)
          return;
        super.setup();
        if ((flags & KIND_FLAGS_SET) == 0)
          setKindFlags();
        flags |= SETUP_DONE;
      }
  }

  /** Get the current value, following one Location indirection if needed.
   * Returns defaultValue if setup fails or the indirect Location is unbound.
   * Caches the value (and possibly CONSTANT) for final fields. */
  public Object get (Object defaultValue)
  {
    try
      {
        setup();
      }
    catch (Throwable ex)
      {
        return defaultValue;
      }
    Object v;
    if ((flags & VALUE_SET) != 0)
      {
        v = value;
        if ((flags & CONSTANT) != 0)
          return v;
      }
    else
      {
        v = getFieldValue();
        if ((type.getDeclaredField(mname).getModifiers() & Access.FINAL) != 0)
          {
            // Final field: safe to cache what we just read.
            flags |= VALUE_SET;
            if ((flags & INDIRECT_LOCATION) == 0)
              flags |= CONSTANT;
            value = v;
          }
      }
    if ((flags & INDIRECT_LOCATION) != 0)
      {
        Object unb = Location.UNBOUND;
        Location loc = (Location) v;
        v = loc.get(unb);
        if (v == unb)
          return defaultValue;
        if (loc.isConstant())
          {
            // The indirect Location turned out to be constant;
            // collapse the indirection from now on.
            flags |= CONSTANT;
            value = v;
          }
      }
    return v;
  }

  // Raw reflective read of the backing field (no Location indirection).
  private Object getFieldValue ()
  {
    super.setup();  // Set rfield, if needed.
    try
      {
        return rfield.get(instance);
      }
    catch (Throwable ex)
      {
        throw WrappedException.wrapIfNeeded(ex);
      }
  }

  /** Store a new value, writing through a Location indirection if present. */
  public void set (Object newValue)
  {
    setup();
    if ((flags & INDIRECT_LOCATION) == 0)
      {
        try
          {
            rfield.set(instance, newValue);
          }
        catch (Throwable ex)
          {
            throw WrappedException.wrapIfNeeded(ex);
          }
      }
    else
      {
        Object v;
        if ((flags & VALUE_SET) != 0)
          v = value;
        else
          {
            flags |= VALUE_SET;
            v = getFieldValue();
            value = v;
          }
        ((Location) v).set(newValue);
      }
  }

  /** Like set, but returns restore state; delegates through indirection. */
  public Object setWithSave (Object newValue)
  {
    if ((flags & KIND_FLAGS_SET) == 0)
      setKindFlags();
    if ((flags & INDIRECT_LOCATION) == 0)
      return super.setWithSave(newValue);
    else
      {
        Object v;
        if ((flags & VALUE_SET) != 0)
          v = value;
        else
          {
            flags |= VALUE_SET;
            v = getFieldValue();
            value = v;
          }
        return ((Location) v).setWithSave(newValue);
      }
  }

  public void setRestore (Object oldValue)
  {
    // Note: assumes setWithSave was called first, so value is valid
    // in the INDIRECT_LOCATION case.
    if ((flags & INDIRECT_LOCATION) == 0)
      super.setRestore(oldValue);
    else
      ((Location) value).setRestore(oldValue);
  }

  public boolean isConstant ()
  {
    if ((flags & KIND_FLAGS_SET) == 0)
      setKindFlags();
    if ((flags & CONSTANT) != 0)
      return true;
    if (isIndirectLocation())
      {
        Object v;
        if ((flags & VALUE_SET) != 0)
          v = value;
        else
          {
            try
              {
                setup();
              }
            catch (Throwable ex)
              {
                return false;
              }
            v = getFieldValue();
            flags |= VALUE_SET;
            value = v;
          }
        return ((Location) v).isConstant();
      }
    return false;
  }

  public boolean isBound ()
  {
    if ((flags & KIND_FLAGS_SET) == 0)
      setKindFlags();
    // A plain (non-indirect) field is always "bound"; an indirect one
    // delegates the question to the Location it holds.
    if ((flags & CONSTANT) != 0 || (flags & INDIRECT_LOCATION) == 0)
      return true;
    Object v;
    if ((flags & VALUE_SET) != 0)
      v = value;
    else
      {
        try
          {
            setup();
          }
        catch (Throwable ex)
          {
            return false;
          }
        v = getFieldValue();
        flags |= VALUE_SET;
        value = v;
      }
    return ((Location) v).isBound();
  }

  public static FieldLocation make (Object instance, Declaration decl)
  {
    gnu.bytecode.Field fld = decl.field;
    ClassType ctype = fld.getDeclaringClass();
    FieldLocation loc = new FieldLocation(instance, ctype, fld.getName());
    loc.setDeclaration(decl);
    //maybe setKindFlags();
    return loc;
  }

  public static FieldLocation make (/*Object name,*/ Object instance, String cname, String fldName)
  {
    return new FieldLocation(/*name,*/ instance, ClassType.make(cname), fldName);
  }

  public String toString()
  {
    StringBuffer sbuf = new StringBuffer();
    sbuf.append("FieldLocation[");
    if (instance != null)
      {
        sbuf.append(instance);
        sbuf.append(' ');
      }
    sbuf.append(type == null ? "(null)" : type.getName());
    sbuf.append('.');
    sbuf.append(mname);
    /* DEBUGGING:
    sbuf.append(" #:");
    sbuf.append(id);
    */
    sbuf.append(']');
    return sbuf.toString();
  }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.geo;

import com.spatial4j.core.context.SpatialContext;
import com.spatial4j.core.distance.DistanceUtils;
import com.spatial4j.core.exception.InvalidShapeException;
import com.spatial4j.core.shape.Shape;
import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy;
import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
import org.apache.lucene.spatial.query.SpatialArgs;
import org.apache.lucene.spatial.query.SpatialOperation;
import org.apache.lucene.spatial.query.UnsupportedSpatialOperation;
import org.apache.lucene.util.XGeoHashUtils;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.geo.builders.MultiPolygonBuilder;
import org.elasticsearch.common.geo.builders.PolygonBuilder;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.GeohashCellQuery;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.test.ESIntegTestCase;
import org.junit.BeforeClass;
import org.junit.Test;

import java.io.ByteArrayOutputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.zip.GZIPInputStream;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.geoBoundingBoxQuery;
import static org.elasticsearch.index.query.QueryBuilders.geoDistanceQuery;
import static org.elasticsearch.index.query.QueryBuilders.geoHashCellQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFirstHit;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId;
import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.lessThanOrEqualTo;

/**
 * Integration tests for geo filters: shape-builder validation, shape
 * relations (intersects/disjoint/within), bulk geo indexing, geohash_cell
 * filters and geohash neighbor computation.
 */
public class GeoFilterIT extends ESIntegTestCase {

    private static boolean intersectSupport;
    private static boolean disjointSupport;
    private static boolean withinSupport;

    /** Probe which spatial relations the Lucene strategy supports before any test runs. */
    @BeforeClass
    public static void createNodes() throws Exception {
        intersectSupport = testRelationSupport(SpatialOperation.Intersects);
        disjointSupport = testRelationSupport(SpatialOperation.IsDisjointTo);
        withinSupport = testRelationSupport(SpatialOperation.IsWithin);
    }

    /**
     * Read and gunzip a classpath resource into a byte array.
     *
     * Fixed: the original leaked both streams when an exception occurred
     * (the explicit close() calls were only reached on success), and never
     * closed the GZIPInputStream at all. try-with-resources now guarantees
     * cleanup; closing the GZIPInputStream also closes the underlying stream.
     *
     * @param path classpath resource path of the gzipped data
     * @return the decompressed bytes
     * @throws FileNotFoundException if the resource is not on the classpath
     * @throws IOException on read or decompression failure
     */
    private static byte[] unZipData(String path) throws IOException {
        InputStream is = Streams.class.getResourceAsStream(path);
        if (is == null) {
            throw new FileNotFoundException("Resource [" + path + "] not found in classpath");
        }
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        // The outer resource ensures "is" is closed even if the
        // GZIPInputStream constructor throws (e.g. bad gzip header).
        try (InputStream raw = is;
             GZIPInputStream in = new GZIPInputStream(raw)) {
            Streams.copy(in, out);
        }
        return out.toByteArray();
    }

    /** Verify the shape builders reject invalid polygons and accept valid ones. */
    @Test
    public void testShapeBuilders() {
        try {
            // self intersection polygon
            ShapeBuilder.newPolygon()
                    .point(-10, -10)
                    .point(10, 10)
                    .point(-10, 10)
                    .point(10, -10)
                    .close().build();
            fail("Self intersection not detected");
        } catch (InvalidShapeException e) {
            // expected
        }

        // polygon with hole
        ShapeBuilder.newPolygon()
                .point(-10, -10).point(-10, 10).point(10, 10).point(10, -10)
                .hole()
                .point(-5, -5).point(-5, 5).point(5, 5).point(5, -5)
                .close().close().build();

        try {
            // polygon with overlapping hole
            ShapeBuilder.newPolygon()
                    .point(-10, -10).point(-10, 10).point(10, 10).point(10, -10)
                    .hole()
                    .point(-5, -5).point(-5, 11).point(5, 11).point(5, -5)
                    .close().close().build();
            fail("Self intersection not detected");
        } catch (InvalidShapeException e) {
            // expected
        }

        try {
            // polygon with intersection holes
            ShapeBuilder.newPolygon()
                    .point(-10, -10).point(-10, 10).point(10, 10).point(10, -10)
                    .hole()
                    .point(-5, -5).point(-5, 5).point(5, 5).point(5, -5)
                    .close()
                    .hole()
                    .point(-5, -6).point(5, -6).point(5, -4).point(-5, -4)
                    .close()
                    .close().build();
            fail("Intersection of holes not detected");
        } catch (InvalidShapeException e) {
            // expected
        }

        try {
            // Common line in polygon
            ShapeBuilder.newPolygon()
                    .point(-10, -10)
                    .point(-10, 10)
                    .point(-5, 10)
                    .point(-5, -5)
                    .point(-5, 20)
                    .point(10, 20)
                    .point(10, -10)
                    .close().build();
            fail("Self intersection not detected");
        } catch (InvalidShapeException e) {
            // expected
        }

        // Not specified
        // try {
        //     // two overlapping polygons within a multipolygon
        //     ShapeBuilder.newMultiPolygon()
        //         .polygon()
        //             .point(-10, -10)
        //             .point(-10, 10)
        //             .point(10, 10)
        //             .point(10, -10)
        //         .close()
        //         .polygon()
        //             .point(-5, -5).point(-5, 5).point(5, 5).point(5, -5)
        //         .close().build();
        //     fail("Polygon intersection not detected";
        // } catch (InvalidShapeException e) {}

        // Multipolygon: polygon with hole and polygon within the whole
        ShapeBuilder.newMultiPolygon()
                .polygon()
                .point(-10, -10).point(-10, 10).point(10, 10).point(10, -10)
                .hole()
                .point(-5, -5).point(-5, 5).point(5, 5).point(5, -5)
                .close()
                .close()
                .polygon()
                .point(-4, -4).point(-4, 4).point(4, 4).point(4, -4)
                .close()
                .build();

        // Not supported
        // try {
        //     // Multipolygon: polygon with hole and polygon within the hole but overlapping
        //     ShapeBuilder.newMultiPolygon()
        //         .polygon()
        //             .point(-10, -10).point(-10, 10).point(10, 10).point(10, -10)
        //             .hole()
        //                 .point(-5, -5).point(-5, 5).point(5, 5).point(5, -5)
        //             .close()
        //         .close()
        //         .polygon()
        //             .point(-4, -4).point(-4, 6).point(4, 6).point(4, -4)
        //         .close()
        //         .build();
        //     fail("Polygon intersection not detected";
        // } catch (InvalidShapeException e) {}
    }

    /** Index polygons with holes (including dateline-crossing ones) and check
     * intersects/disjoint/within semantics against points. */
    @Test
    public void testShapeRelations() throws Exception {

        assertTrue( "Intersect relation is not supported", intersectSupport);
        assertTrue("Disjoint relation is not supported", disjointSupport);
        assertTrue("within relation is not supported", withinSupport);

        String mapping = XContentFactory.jsonBuilder()
                .startObject()
                .startObject("polygon")
                .startObject("properties")
                .startObject("area")
                .field("type", "geo_shape")
                .field("tree", "geohash")
                .endObject()
                .endObject()
                .endObject()
                .endObject().string();

        CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("shapes").addMapping("polygon", mapping);
        mappingRequest.execute().actionGet();
        client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();

        // Create a multipolygon with two polygons. The first is an rectangle of size 10x10
        // with a hole of size 5x5 equidistant from all sides. This hole in turn contains
        // the second polygon of size 4x4 equidistant from all sites
        MultiPolygonBuilder polygon = ShapeBuilder.newMultiPolygon()
                .polygon()
                .point(-10, -10).point(-10, 10).point(10, 10).point(10, -10)
                .hole()
                .point(-5, -5).point(-5, 5).point(5, 5).point(5, -5)
                .close()
                .close()
                .polygon()
                .point(-4, -4).point(-4, 4).point(4, 4).point(4, -4)
                .close();

        BytesReference data = jsonBuilder().startObject().field("area", polygon).endObject().bytes();

        client().prepareIndex("shapes", "polygon", "1").setSource(data).execute().actionGet();
        client().admin().indices().prepareRefresh().execute().actionGet();

        // Point in polygon
        SearchResponse result = client().prepareSearch()
                .setQuery(matchAllQuery())
                .setPostFilter(QueryBuilders.geoIntersectionQuery("area", ShapeBuilder.newPoint(3, 3)))
                .execute().actionGet();
        assertHitCount(result, 1);
        assertFirstHit(result, hasId("1"));

        // Point in polygon hole
        result = client().prepareSearch()
                .setQuery(matchAllQuery())
                .setPostFilter(QueryBuilders.geoIntersectionQuery("area", ShapeBuilder.newPoint(4.5, 4.5)))
                .execute().actionGet();
        assertHitCount(result, 0);

        // by definition the border of a polygon belongs to the inner
        // so the border of a polygons hole also belongs to the inner
        // of the polygon NOT the hole

        // Point on polygon border
        result = client().prepareSearch()
                .setQuery(matchAllQuery())
                .setPostFilter(QueryBuilders.geoIntersectionQuery("area", ShapeBuilder.newPoint(10.0, 5.0)))
                .execute().actionGet();
        assertHitCount(result, 1);
        assertFirstHit(result, hasId("1"));

        // Point on hole border
        result = client().prepareSearch()
                .setQuery(matchAllQuery())
                .setPostFilter(QueryBuilders.geoIntersectionQuery("area", ShapeBuilder.newPoint(5.0, 2.0)))
                .execute().actionGet();
        assertHitCount(result, 1);
        assertFirstHit(result, hasId("1"));

        if (disjointSupport) {
            // Point not in polygon
            result = client().prepareSearch()
                    .setQuery(matchAllQuery())
                    .setPostFilter(QueryBuilders.geoDisjointQuery("area", ShapeBuilder.newPoint(3, 3)))
                    .execute().actionGet();
            assertHitCount(result, 0);

            // Point in polygon hole
            result = client().prepareSearch()
                    .setQuery(matchAllQuery())
                    .setPostFilter(QueryBuilders.geoDisjointQuery("area", ShapeBuilder.newPoint(4.5, 4.5)))
                    .execute().actionGet();
            assertHitCount(result, 1);
            assertFirstHit(result, hasId("1"));
        }

        // Create a polygon that fills the empty area of the polygon defined above
        PolygonBuilder inverse = ShapeBuilder.newPolygon()
                .point(-5, -5).point(-5, 5).point(5, 5).point(5, -5)
                .hole()
                .point(-4, -4).point(-4, 4).point(4, 4).point(4, -4)
                .close()
                .close();

        data = jsonBuilder().startObject().field("area", inverse).endObject().bytes();
        client().prepareIndex("shapes", "polygon", "2").setSource(data).execute().actionGet();
        client().admin().indices().prepareRefresh().execute().actionGet();

        // re-check point on polygon hole
        result = client().prepareSearch()
                .setQuery(matchAllQuery())
                .setPostFilter(QueryBuilders.geoIntersectionQuery("area", ShapeBuilder.newPoint(4.5, 4.5)))
                .execute().actionGet();
        assertHitCount(result, 1);
        assertFirstHit(result, hasId("2"));

        // Create Polygon with hole and common edge
        PolygonBuilder builder = ShapeBuilder.newPolygon()
                .point(-10, -10).point(-10, 10).point(10, 10).point(10, -10)
                .hole()
                .point(-5, -5).point(-5, 5).point(10, 5).point(10, -5)
                .close()
                .close();

        if (withinSupport) {
            // Polygon WithIn Polygon
            builder = ShapeBuilder.newPolygon()
                    .point(-30, -30).point(-30, 30).point(30, 30).point(30, -30).close();

            result = client().prepareSearch()
                    .setQuery(matchAllQuery())
                    .setPostFilter(QueryBuilders.geoWithinQuery("area", builder))
                    .execute().actionGet();
            assertHitCount(result, 2);
        }

        // Create a polygon crossing longitude 180.
        builder = ShapeBuilder.newPolygon()
                .point(170, -10).point(190, -10).point(190, 10).point(170, 10)
                .close();

        data = jsonBuilder().startObject().field("area", builder).endObject().bytes();
        client().prepareIndex("shapes", "polygon", "1").setSource(data).execute().actionGet();
        client().admin().indices().prepareRefresh().execute().actionGet();

        // Create a polygon crossing longitude 180 with hole.
        builder = ShapeBuilder.newPolygon()
                .point(170, -10).point(190, -10).point(190, 10).point(170, 10)
                .hole().point(175, -5).point(185, -5).point(185, 5).point(175, 5).close()
                .close();

        data = jsonBuilder().startObject().field("area", builder).endObject().bytes();
        client().prepareIndex("shapes", "polygon", "1").setSource(data).execute().actionGet();
        client().admin().indices().prepareRefresh().execute().actionGet();

        result = client().prepareSearch()
                .setQuery(matchAllQuery())
                .setPostFilter(QueryBuilders.geoIntersectionQuery("area", ShapeBuilder.newPoint(174, -4)))
                .execute().actionGet();
        assertHitCount(result, 1);

        result = client().prepareSearch()
                .setQuery(matchAllQuery())
                .setPostFilter(QueryBuilders.geoIntersectionQuery("area", ShapeBuilder.newPoint(-174, -4)))
                .execute().actionGet();
        assertHitCount(result, 1);

        result = client().prepareSearch()
                .setQuery(matchAllQuery())
                .setPostFilter(QueryBuilders.geoIntersectionQuery("area", ShapeBuilder.newPoint(180, -4)))
                .execute().actionGet();
        assertHitCount(result, 0);

        result = client().prepareSearch()
                .setQuery(matchAllQuery())
                .setPostFilter(QueryBuilders.geoIntersectionQuery("area", ShapeBuilder.newPoint(180, -6)))
                .execute().actionGet();
        assertHitCount(result, 1);
    }

    /** Bulk-index a gzipped world map and verify id, bounding-box and distance queries. */
    @Test
    public void bulktest() throws Exception {
        byte[] bulkAction = unZipData("/org/elasticsearch/search/geo/gzippedmap.gz");

        String mapping = XContentFactory.jsonBuilder()
                .startObject()
                .startObject("country")
                .startObject("properties")
                .startObject("pin")
                .field("type", "geo_point")
                .field("lat_lon", true)
                .field("store", true)
                .endObject()
                .startObject("location")
                .field("type", "geo_shape")
                .endObject()
                .endObject()
                .endObject()
                .endObject()
                .string();

        client().admin().indices().prepareCreate("countries").addMapping("country", mapping).execute().actionGet();
        BulkResponse bulk = client().prepareBulk().add(bulkAction, 0, bulkAction.length, null, null).execute().actionGet();

        for (BulkItemResponse item : bulk.getItems()) {
            assertFalse("unable to index data", item.isFailed());
        }

        client().admin().indices().prepareRefresh().execute().actionGet();
        String key = "DE";

        SearchResponse searchResponse = client().prepareSearch()
                .setQuery(matchQuery("_id", key))
                .execute().actionGet();

        assertHitCount(searchResponse, 1);

        for (SearchHit hit : searchResponse.getHits()) {
            assertThat(hit.getId(), equalTo(key));
        }

        SearchResponse world = client().prepareSearch().addField("pin").setQuery(
                geoBoundingBoxQuery("pin").setCorners(90, -179.99999, -90, 179.99999)
        ).execute().actionGet();

        assertHitCount(world, 53);

        SearchResponse distance = client().prepareSearch().addField("pin").setQuery(
                geoDistanceQuery("pin").distance("425km").point(51.11, 9.851)
        ).execute().actionGet();

        assertHitCount(distance, 5);
        GeoPoint point = new GeoPoint();
        for (SearchHit hit : distance.getHits()) {
            String name = hit.getId();
            point.resetFromString(hit.fields().get("pin").getValue().toString());
            double dist = distance(point.getLat(), point.getLon(), 51.11, 9.851);

            assertThat("distance to '" + name + "'", dist, lessThanOrEqualTo(425000d));
            assertThat(name, anyOf(equalTo("CZ"), equalTo("DE"), equalTo("BE"), equalTo("NL"), equalTo("LU")));
            if (key.equals(name)) {
                assertThat(dist, equalTo(0d));
            }
        }
    }

    /** Exercise the geohash_cell filter with builder, lat/lon, string and array point formats. */
    @Test
    public void testGeohashCellFilter() throws IOException {
        String geohash = randomhash(10);
        logger.info("Testing geohash_cell filter for [{}]", geohash);

        Collection<? extends CharSequence> neighbors = XGeoHashUtils.neighbors(geohash);
        Collection<? extends CharSequence> parentNeighbors = XGeoHashUtils.neighbors(geohash.substring(0, geohash.length() - 1));

        logger.info("Neighbors {}", neighbors);
        logger.info("Parent Neighbors {}", parentNeighbors);

        ensureYellow();

        client().admin().indices().prepareCreate("locations").addMapping("location", "pin", "type=geo_point,geohash_prefix=true,lat_lon=false").execute().actionGet();

        // Index a pin
        client().prepareIndex("locations", "location", "1").setCreate(true).setSource("pin", geohash).execute().actionGet();

        // index neighbors
        Iterator<? extends CharSequence> iterator = neighbors.iterator();
        for (int i = 0; iterator.hasNext(); i++) {
            client().prepareIndex("locations", "location", "N" + i).setCreate(true).setSource("pin", iterator.next()).execute().actionGet();
        }

        // Index parent cell
        client().prepareIndex("locations", "location", "p").setCreate(true).setSource("pin", geohash.substring(0, geohash.length() - 1)).execute().actionGet();

        // index neighbors
        iterator = parentNeighbors.iterator();
        for (int i = 0; iterator.hasNext(); i++) {
            client().prepareIndex("locations", "location", "p" + i).setCreate(true).setSource("pin", iterator.next()).execute().actionGet();
        }

        client().admin().indices().prepareRefresh("locations").execute().actionGet();

        Map<GeohashCellQuery.Builder, Long> expectedCounts = new HashMap<>();
        Map<GeohashCellQuery.Builder, String[]> expectedResults = new HashMap<>();

        expectedCounts.put(geoHashCellQuery("pin", geohash, false), 1L);

        expectedCounts.put(geoHashCellQuery("pin", geohash.substring(0, geohash.length() - 1), true), 2L + neighbors.size() + parentNeighbors.size());

        // Testing point formats and precision
        GeoPoint point = GeoPoint.fromGeohash(geohash);
        int precision = geohash.length();

        expectedCounts.put(geoHashCellQuery("pin", point).neighbors(true).precision(precision), 1L + neighbors.size());

        logger.info("random testing of setting");
        List<GeohashCellQuery.Builder> filterBuilders = new ArrayList<>(expectedCounts.keySet());
        for (int j = filterBuilders.size() * 2 * randomIntBetween(1, 5); j > 0; j--) {
            Collections.shuffle(filterBuilders, getRandom());
            for (GeohashCellQuery.Builder builder : filterBuilders) {
                try {
                    long expectedCount = expectedCounts.get(builder);
                    SearchResponse response = client().prepareSearch("locations").setQuery(QueryBuilders.matchAllQuery())
                            .setPostFilter(builder).setSize((int) expectedCount).get();
                    assertHitCount(response, expectedCount);
                    String[] expectedIds = expectedResults.get(builder);
                    if (expectedIds == null) {
                        // First run with this builder: record the ids it returned
                        // so later (shuffled) runs can assert the same result set.
                        ArrayList<String> ids = new ArrayList<>();
                        for (SearchHit hit : response.getHits()) {
                            ids.add(hit.id());
                        }
                        expectedResults.put(builder, ids.toArray(Strings.EMPTY_ARRAY));
                        continue;
                    }

                    assertSearchHits(response, expectedIds);
                } catch (AssertionError error) {
                    throw new AssertionError(error.getMessage() + "\n geohash_cell filter:" + builder, error);
                }
            }
        }

        logger.info("Testing lat/lon format");
        String pointTest1 = "{\"geohash_cell\": {\"pin\": {\"lat\": " + point.lat() + ",\"lon\": " + point.lon() + "},\"precision\": " + precision + ",\"neighbors\": true}}";
        SearchResponse results3 = client().prepareSearch("locations").setQuery(QueryBuilders.matchAllQuery()).setPostFilter(pointTest1).execute().actionGet();
        assertHitCount(results3, neighbors.size() + 1);

        logger.info("Testing String format");
        String pointTest2 = "{\"geohash_cell\": {\"pin\": \"" + point.lat() + "," + point.lon() + "\",\"precision\": " + precision + ",\"neighbors\": true}}";
        SearchResponse results4 = client().prepareSearch("locations").setQuery(QueryBuilders.matchAllQuery()).setPostFilter(pointTest2).execute().actionGet();
        assertHitCount(results4, neighbors.size() + 1);

        logger.info("Testing Array format");
        String pointTest3 = "{\"geohash_cell\": {\"pin\": [" + point.lon() + "," + point.lat() + "],\"precision\": " + precision + ",\"neighbors\": true}}";
        SearchResponse results5 = client().prepareSearch("locations").setQuery(QueryBuilders.matchAllQuery()).setPostFilter(pointTest3).execute().actionGet();
        assertHitCount(results5, neighbors.size() + 1);
    }

    /** Pin geohash neighbor computation for root cells, edge cells and two-level cells. */
    @Test
    public void testNeighbors() {
        // Simple root case
        assertThat(XGeoHashUtils.addNeighbors("7", new ArrayList<String>()), containsInAnyOrder("4", "5", "6", "d", "e", "h", "k", "s"));

        // Root cases (Outer cells)
        assertThat(XGeoHashUtils.addNeighbors("0", new ArrayList<String>()), containsInAnyOrder("1", "2", "3", "p", "r"));
        assertThat(XGeoHashUtils.addNeighbors("b", new ArrayList<String>()), containsInAnyOrder("8", "9", "c", "x", "z"));
        assertThat(XGeoHashUtils.addNeighbors("p", new ArrayList<String>()), containsInAnyOrder("n", "q", "r", "0", "2"));
        assertThat(XGeoHashUtils.addNeighbors("z", new ArrayList<String>()), containsInAnyOrder("8", "b", "w", "x", "y"));

        // Root crossing dateline
        assertThat(XGeoHashUtils.addNeighbors("2", new ArrayList<String>()), containsInAnyOrder("0", "1", "3", "8", "9", "p", "r", "x"));
        assertThat(XGeoHashUtils.addNeighbors("r", new ArrayList<String>()), containsInAnyOrder("0", "2", "8", "n", "p", "q", "w", "x"));

        // level1: simple case
        assertThat(XGeoHashUtils.addNeighbors("dk", new ArrayList<String>()), containsInAnyOrder("d5", "d7", "de", "dh", "dj", "dm", "ds", "dt"));

        // Level1: crossing cells
        assertThat(XGeoHashUtils.addNeighbors("d5", new ArrayList<String>()), containsInAnyOrder("d4", "d6", "d7", "dh", "dk", "9f", "9g", "9u"));
        assertThat(XGeoHashUtils.addNeighbors("d0", new ArrayList<String>()), containsInAnyOrder("d1", "d2", "d3", "9b", "9c", "6p", "6r", "3z"));
    }

    /**
     * Great-circle (haversine) distance between two lat/lon points, in meters.
     */
    public static double distance(double lat1, double lon1, double lat2, double lon2) {
        return GeoUtils.EARTH_SEMI_MAJOR_AXIS * DistanceUtils.distHaversineRAD(
                DistanceUtils.toRadians(lat1),
                DistanceUtils.toRadians(lon1),
                DistanceUtils.toRadians(lat2),
                DistanceUtils.toRadians(lon2)
        );
    }

    /** Probe whether the current Lucene spatial strategy supports the given relation. */
    protected static boolean testRelationSupport(SpatialOperation relation) {
        if (relation == SpatialOperation.IsDisjointTo) {
            // disjoint works in terms of intersection
            relation = SpatialOperation.Intersects;
        }
        try {
            GeohashPrefixTree tree = new GeohashPrefixTree(SpatialContext.GEO, 3);
            RecursivePrefixTreeStrategy strategy = new RecursivePrefixTreeStrategy(tree, "area");
            Shape shape = SpatialContext.GEO.makePoint(0, 0);
            SpatialArgs args = new SpatialArgs(relation, shape);
            strategy.makeFilter(args);
            return true;
        } catch (UnsupportedSpatialOperation e) {
            e.printStackTrace();
            return false;
        }
    }

    protected static String randomhash(int length) {
        return randomhash(getRandom(), length);
    }

    protected static String randomhash(Random random) {
        return randomhash(random, 2 + random.nextInt(10));
    }

    protected static String randomhash() {
        return randomhash(getRandom());
    }

    /** Build a random geohash string of the given length from the base-32 geohash alphabet. */
    protected static String randomhash(Random random, int length) {
        final char[] BASE_32 = {
                '0', '1', '2', '3', '4', '5', '6', '7',
                '8', '9', 'b', 'c', 'd', 'e', 'f', 'g',
                'h', 'j', 'k', 'm', 'n', 'p', 'q', 'r',
                's', 't', 'u', 'v', 'w', 'x', 'y', 'z'};

        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < length; i++) {
            sb.append(BASE_32[random.nextInt(BASE_32.length)]);
        }

        return sb.toString();
    }
}
/**
 * Copyright 2015 MKL Software
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.mkl.websuites.internal.command;

import java.util.Arrays;
import java.util.List;

import junitparams.JUnitParamsRunner;
import junitparams.Parameters;
import mockit.Deencapsulation;

import org.junit.Test;
import org.junit.runner.RunWith;

import com.mkl.websuites.command.Command;
import com.mkl.websuites.internal.WebSuitesException;
import com.mkl.websuites.internal.command.CommandPostProcessor;
import com.mkl.websuites.internal.command.impl.flow.EndControlFlowHandler;
import com.mkl.websuites.internal.command.impl.flow.RepeatControlFlowHandler;
import com.mkl.websuites.itests.cmd.NoArgCommand;
import com.mkl.websuites.itests.web.core.ServiceBasedTest;

import static junitparams.JUnitParamsRunner.$;
import static org.junit.Assert.*;

/**
 * Tests for the standard {@link CommandPostProcessor} service implementation: verifies
 * that a flat command list parsed from a scenario is folded into nested control-flow
 * blocks (repeat ... end) and that the maximum nesting depth is computed correctly.
 */
@RunWith(JUnitParamsRunner.class)
public class StandardCommandPostProcessorTest extends ServiceBasedTest<CommandPostProcessor> {

    @Override
    protected Class<CommandPostProcessor> getServiceUnderTestClass() {
        return CommandPostProcessor.class;
    }

    // Parameter sets for testMaxDepthFindingAlgorithm, matched by naming convention
    // ("parametersFor" + test name) per JUnitParams. Each entry is
    // (flat command list, expected maximum nesting depth).
    @SuppressWarnings("unused")
    private Object[] parametersForTestMaxDepthFindingAlgorithm() {
        return $(
                // no control-flow handlers at all -> depth 0
                $(Arrays.asList((Command) new NoArgCommand()), 0),
                $(Arrays.asList((Command) new NoArgCommand(), new NoArgCommand(),
                        new NoArgCommand()), 0),
                // a single repeat/end pair -> depth 1
                $(Arrays.asList((Command) new RepeatControlFlowHandler(), new NoArgCommand(),
                        new EndControlFlowHandler()), 1),
                // one repeat nested inside another -> depth 2
                $(Arrays.asList((Command) new RepeatControlFlowHandler(), new NoArgCommand(),
                        new RepeatControlFlowHandler(), new EndControlFlowHandler(),
                        new EndControlFlowHandler()), 2),
                // four nested repeats -> depth 4
                $(Arrays.asList((Command) new RepeatControlFlowHandler(),
                        new RepeatControlFlowHandler(), new RepeatControlFlowHandler(),
                        new RepeatControlFlowHandler(), new NoArgCommand(),
                        new EndControlFlowHandler(), new EndControlFlowHandler(),
                        new EndControlFlowHandler(), new EndControlFlowHandler()), 4));
    }

    @Test
    @Parameters
    public void testMaxDepthFindingAlgorithm(List<Command> input, int expectedDepth) {
        // checkMaxNestingDepths is private in the processor; invoked reflectively via JMockit.
        int depth = Deencapsulation.invoke(logic(), "checkMaxNestingDepths", input);
        assertEquals(expectedDepth, depth);
    }

    // An opened repeat block with no matching end must be rejected.
    @Test(expected = WebSuitesException.class)
    public void testWrongControlFlowBlock() {
        List<Command> commandsFromScn = Arrays.asList((Command) new RepeatControlFlowHandler(),
                new NoArgCommand(), new NoArgCommand(), new NoArgCommand()
        // no end, error
        );
        Deencapsulation.invoke(logic(), "checkMaxNestingDepths", commandsFromScn);
    }

    // A list with no control flow passes through unchanged.
    @Test
    public void testNoControlFlowBlock() {
        List<Command> commandsFromScn = Arrays.asList((Command) new NoArgCommand(),
                new NoArgCommand(), new NoArgCommand(), new NoArgCommand());
        List<Command> processed = logic().postProcessCommands(commandsFromScn);
        assertEquals(4, processed.size());
    }

    @Test
    public void testOneExternalControlFlowBlock() {
        List<Command> commandsFromScn = Arrays.asList((Command) new RepeatControlFlowHandler(),
                new NoArgCommand(), new NoArgCommand(), new NoArgCommand(), new NoArgCommand(),
                new EndControlFlowHandler());
        List<Command> processed = logic().postProcessCommands(commandsFromScn);
        // the whole scenario collapses into the single repeat handler...
        assertEquals(1, processed.size());
        Command first = processed.get(0);
        assertTrue(first instanceof RepeatControlFlowHandler);
        // ...which now owns the four commands between repeat and end
        assertEquals(4, ((RepeatControlFlowHandler) first).getNestedCommands().size());
    }

    @Test
    public void testThreeFirstLevelControlFlowBlocks() {
        // three sibling repeat blocks holding 1, 2 and 3 commands respectively
        List<Command> commandsFromScn = Arrays.asList((Command) new RepeatControlFlowHandler(),
                new NoArgCommand(), new EndControlFlowHandler(),
                new RepeatControlFlowHandler(), new NoArgCommand(), new NoArgCommand(),
                new EndControlFlowHandler(),
                new RepeatControlFlowHandler(), new NoArgCommand(), new NoArgCommand(),
                new NoArgCommand(), new EndControlFlowHandler());
        List<Command> processed = logic().postProcessCommands(commandsFromScn);
        assertEquals(3, processed.size());
        Command first = processed.get(0);
        assertTrue(first instanceof RepeatControlFlowHandler);
        assertEquals(1, ((RepeatControlFlowHandler) first).getNestedCommands().size());
        Command second = processed.get(1);
        assertTrue(second instanceof RepeatControlFlowHandler);
        assertEquals(2, ((RepeatControlFlowHandler) second).getNestedCommands().size());
        Command third = processed.get(2);
        assertTrue(third instanceof RepeatControlFlowHandler);
        assertEquals(3, ((RepeatControlFlowHandler) third).getNestedCommands().size());
    }

    @Test
    public void testThreeLevelNestingWithTwoFirstLevelCommands() {
        List<Command> commandsFromScn = Arrays.asList((Command) new NoArgCommand(),
                new RepeatControlFlowHandler(), new RepeatControlFlowHandler(),
                new RepeatControlFlowHandler(), new NoArgCommand(), new NoArgCommand(),
                new NoArgCommand(), new EndControlFlowHandler(), new EndControlFlowHandler(),
                new EndControlFlowHandler(), new NoArgCommand());
        List<Command> processed = logic().postProcessCommands(commandsFromScn);
        assertEquals(3, processed.size());
        assertTrue(processed.get(0) instanceof NoArgCommand);
        assertTrue(processed.get(1) instanceof RepeatControlFlowHandler);
        assertTrue(processed.get(2) instanceof NoArgCommand);
        // walk down through the three nested repeat levels
        List<Command> secondLevel = ((RepeatControlFlowHandler) processed.get(1)).getNestedCommands();
        assertEquals(1, secondLevel.size());
        List<Command> thirdLevel = ((RepeatControlFlowHandler) secondLevel.get(0)).getNestedCommands();
        assertEquals(1, thirdLevel.size());
        List<Command> fourthLevel = ((RepeatControlFlowHandler) thirdLevel.get(0)).getNestedCommands();
        assertEquals(3, fourthLevel.size());
        for (Command command : fourthLevel) {
            assertTrue(command instanceof NoArgCommand);
        }
    }

    @Test
    public void testComplexNesting() {
        // mixed siblings and nesting: plain, repeat(2), repeat(cmd, repeat(2), repeat(1), cmd), plain
        List<Command> commandsFromScn = Arrays.asList((Command) new NoArgCommand(),
                new RepeatControlFlowHandler(), new NoArgCommand(), new NoArgCommand(),
                new EndControlFlowHandler(),
                new RepeatControlFlowHandler(), new NoArgCommand(),
                new RepeatControlFlowHandler(), new NoArgCommand(), new NoArgCommand(),
                new EndControlFlowHandler(),
                new RepeatControlFlowHandler(), new NoArgCommand(), new EndControlFlowHandler(),
                new NoArgCommand(),
                new EndControlFlowHandler(),
                new NoArgCommand());
        List<Command> processed = logic().postProcessCommands(commandsFromScn);
        assertEquals(4, processed.size());
        assertTrue(processed.get(0) instanceof NoArgCommand);
        assertTrue(processed.get(1) instanceof RepeatControlFlowHandler);
        assertTrue(processed.get(2) instanceof RepeatControlFlowHandler);
        assertTrue(processed.get(3) instanceof NoArgCommand);
    }
}
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: BceRequestFriendInfo.proto package com.xinqihd.sns.gameserver.proto; public final class XinqiBceRequestFriendInfo { private XinqiBceRequestFriendInfo() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } public interface BceRequestFriendInfoOrBuilder extends com.google.protobuf.MessageOrBuilder { // required string username = 1; boolean hasUsername(); String getUsername(); // optional int32 type = 2 [default = 0]; boolean hasType(); int getType(); } public static final class BceRequestFriendInfo extends com.google.protobuf.GeneratedMessage implements BceRequestFriendInfoOrBuilder { // Use BceRequestFriendInfo.newBuilder() to construct. private BceRequestFriendInfo(Builder builder) { super(builder); } private BceRequestFriendInfo(boolean noInit) {} private static final BceRequestFriendInfo defaultInstance; public static BceRequestFriendInfo getDefaultInstance() { return defaultInstance; } public BceRequestFriendInfo getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.xinqihd.sns.gameserver.proto.XinqiBceRequestFriendInfo.internal_static_com_xinqihd_sns_gameserver_proto_BceRequestFriendInfo_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return com.xinqihd.sns.gameserver.proto.XinqiBceRequestFriendInfo.internal_static_com_xinqihd_sns_gameserver_proto_BceRequestFriendInfo_fieldAccessorTable; } private int bitField0_; // required string username = 1; public static final int USERNAME_FIELD_NUMBER = 1; private java.lang.Object username_; public boolean hasUsername() { return ((bitField0_ & 0x00000001) == 0x00000001); } public String getUsername() { java.lang.Object ref = username_; if (ref instanceof String) { return (String) ref; } else { com.google.protobuf.ByteString bs = 
(com.google.protobuf.ByteString) ref; String s = bs.toStringUtf8(); if (com.google.protobuf.Internal.isValidUtf8(bs)) { username_ = s; } return s; } } private com.google.protobuf.ByteString getUsernameBytes() { java.lang.Object ref = username_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((String) ref); username_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // optional int32 type = 2 [default = 0]; public static final int TYPE_FIELD_NUMBER = 2; private int type_; public boolean hasType() { return ((bitField0_ & 0x00000002) == 0x00000002); } public int getType() { return type_; } private void initFields() { username_ = ""; type_ = 0; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasUsername()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, getUsernameBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeInt32(2, type_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, getUsernameBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeInt32Size(2, type_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException 
{ return super.writeReplace(); } public static com.xinqihd.sns.gameserver.proto.XinqiBceRequestFriendInfo.BceRequestFriendInfo parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static com.xinqihd.sns.gameserver.proto.XinqiBceRequestFriendInfo.BceRequestFriendInfo parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static com.xinqihd.sns.gameserver.proto.XinqiBceRequestFriendInfo.BceRequestFriendInfo parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static com.xinqihd.sns.gameserver.proto.XinqiBceRequestFriendInfo.BceRequestFriendInfo parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static com.xinqihd.sns.gameserver.proto.XinqiBceRequestFriendInfo.BceRequestFriendInfo parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static com.xinqihd.sns.gameserver.proto.XinqiBceRequestFriendInfo.BceRequestFriendInfo parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static com.xinqihd.sns.gameserver.proto.XinqiBceRequestFriendInfo.BceRequestFriendInfo parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static 
com.xinqihd.sns.gameserver.proto.XinqiBceRequestFriendInfo.BceRequestFriendInfo parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static com.xinqihd.sns.gameserver.proto.XinqiBceRequestFriendInfo.BceRequestFriendInfo parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static com.xinqihd.sns.gameserver.proto.XinqiBceRequestFriendInfo.BceRequestFriendInfo parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(com.xinqihd.sns.gameserver.proto.XinqiBceRequestFriendInfo.BceRequestFriendInfo prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements com.xinqihd.sns.gameserver.proto.XinqiBceRequestFriendInfo.BceRequestFriendInfoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.xinqihd.sns.gameserver.proto.XinqiBceRequestFriendInfo.internal_static_com_xinqihd_sns_gameserver_proto_BceRequestFriendInfo_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return 
com.xinqihd.sns.gameserver.proto.XinqiBceRequestFriendInfo.internal_static_com_xinqihd_sns_gameserver_proto_BceRequestFriendInfo_fieldAccessorTable; } // Construct using com.xinqihd.sns.gameserver.proto.XinqiBceRequestFriendInfo.BceRequestFriendInfo.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); username_ = ""; bitField0_ = (bitField0_ & ~0x00000001); type_ = 0; bitField0_ = (bitField0_ & ~0x00000002); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.xinqihd.sns.gameserver.proto.XinqiBceRequestFriendInfo.BceRequestFriendInfo.getDescriptor(); } public com.xinqihd.sns.gameserver.proto.XinqiBceRequestFriendInfo.BceRequestFriendInfo getDefaultInstanceForType() { return com.xinqihd.sns.gameserver.proto.XinqiBceRequestFriendInfo.BceRequestFriendInfo.getDefaultInstance(); } public com.xinqihd.sns.gameserver.proto.XinqiBceRequestFriendInfo.BceRequestFriendInfo build() { com.xinqihd.sns.gameserver.proto.XinqiBceRequestFriendInfo.BceRequestFriendInfo result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private com.xinqihd.sns.gameserver.proto.XinqiBceRequestFriendInfo.BceRequestFriendInfo buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { com.xinqihd.sns.gameserver.proto.XinqiBceRequestFriendInfo.BceRequestFriendInfo result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public 
com.xinqihd.sns.gameserver.proto.XinqiBceRequestFriendInfo.BceRequestFriendInfo buildPartial() { com.xinqihd.sns.gameserver.proto.XinqiBceRequestFriendInfo.BceRequestFriendInfo result = new com.xinqihd.sns.gameserver.proto.XinqiBceRequestFriendInfo.BceRequestFriendInfo(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.username_ = username_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.type_ = type_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.xinqihd.sns.gameserver.proto.XinqiBceRequestFriendInfo.BceRequestFriendInfo) { return mergeFrom((com.xinqihd.sns.gameserver.proto.XinqiBceRequestFriendInfo.BceRequestFriendInfo)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.xinqihd.sns.gameserver.proto.XinqiBceRequestFriendInfo.BceRequestFriendInfo other) { if (other == com.xinqihd.sns.gameserver.proto.XinqiBceRequestFriendInfo.BceRequestFriendInfo.getDefaultInstance()) return this; if (other.hasUsername()) { setUsername(other.getUsername()); } if (other.hasType()) { setType(other.getType()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasUsername()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { 
this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } case 10: { bitField0_ |= 0x00000001; username_ = input.readBytes(); break; } case 16: { bitField0_ |= 0x00000002; type_ = input.readInt32(); break; } } } } private int bitField0_; // required string username = 1; private java.lang.Object username_ = ""; public boolean hasUsername() { return ((bitField0_ & 0x00000001) == 0x00000001); } public String getUsername() { java.lang.Object ref = username_; if (!(ref instanceof String)) { String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); username_ = s; return s; } else { return (String) ref; } } public Builder setUsername(String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; username_ = value; onChanged(); return this; } public Builder clearUsername() { bitField0_ = (bitField0_ & ~0x00000001); username_ = getDefaultInstance().getUsername(); onChanged(); return this; } void setUsername(com.google.protobuf.ByteString value) { bitField0_ |= 0x00000001; username_ = value; onChanged(); } // optional int32 type = 2 [default = 0]; private int type_ ; public boolean hasType() { return ((bitField0_ & 0x00000002) == 0x00000002); } public int getType() { return type_; } public Builder setType(int value) { bitField0_ |= 0x00000002; type_ = value; onChanged(); return this; } public Builder clearType() { bitField0_ = (bitField0_ & ~0x00000002); type_ = 0; onChanged(); return this; } // @@protoc_insertion_point(builder_scope:com.xinqihd.sns.gameserver.proto.BceRequestFriendInfo) } static { defaultInstance = new BceRequestFriendInfo(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:com.xinqihd.sns.gameserver.proto.BceRequestFriendInfo) } private static com.google.protobuf.Descriptors.Descriptor internal_static_com_xinqihd_sns_gameserver_proto_BceRequestFriendInfo_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable 
internal_static_com_xinqihd_sns_gameserver_proto_BceRequestFriendInfo_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n\032BceRequestFriendInfo.proto\022 com.xinqih" + "d.sns.gameserver.proto\"9\n\024BceRequestFrie" + "ndInfo\022\020\n\010username\030\001 \002(\t\022\017\n\004type\030\002 \001(\005:\001" + "0B\033B\031XinqiBceRequestFriendInfo" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { public com.google.protobuf.ExtensionRegistry assignDescriptors( com.google.protobuf.Descriptors.FileDescriptor root) { descriptor = root; internal_static_com_xinqihd_sns_gameserver_proto_BceRequestFriendInfo_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_com_xinqihd_sns_gameserver_proto_BceRequestFriendInfo_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_com_xinqihd_sns_gameserver_proto_BceRequestFriendInfo_descriptor, new java.lang.String[] { "Username", "Type", }, com.xinqihd.sns.gameserver.proto.XinqiBceRequestFriendInfo.BceRequestFriendInfo.class, com.xinqihd.sns.gameserver.proto.XinqiBceRequestFriendInfo.BceRequestFriendInfo.Builder.class); return null; } }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { }, assigner); } // @@protoc_insertion_point(outer_class_scope) }
/*
 * Copyright (c) 2015 LingoChamp Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.liulishuo.filedownloader;

import com.liulishuo.filedownloader.message.MessageSnapshot;
import com.liulishuo.filedownloader.message.MessageSnapshotTaker;
import com.liulishuo.filedownloader.model.FileDownloadStatus;
import com.liulishuo.filedownloader.util.FileDownloadLog;

import java.util.ArrayList;
import java.util.List;

/**
 * Storing all tasks in processing in the Main-Process.
 */
@SuppressWarnings("UnusedReturnValue")
public class FileDownloadList {

    // Initialization-on-demand holder idiom: INSTANCE is created lazily and
    // thread-safely when HolderClass is first loaded via getImpl().
    private final static class HolderClass {
        private final static FileDownloadList INSTANCE = new FileDownloadList();
    }

    public static FileDownloadList getImpl() {
        return HolderClass.INSTANCE;
    }

    // The registry of running tasks. Compound operations (iteration,
    // check-then-act) are guarded by synchronized (mList).
    private final ArrayList<BaseDownloadTask.IRunningTask> mList;

    private FileDownloadList() {
        mList = new ArrayList<>();
    }

    boolean isEmpty() {
        return mList.isEmpty();
    }

    int size() {
        return mList.size();
    }

    /**
     * @param id download id
     * @return the number of tasks in this list that have the given id
     */
    int count(final int id) {
        int size = 0;
        synchronized (mList) {
            for (BaseDownloadTask.IRunningTask task : mList) {
                if (task.is(id)) {
                    size++;
                }
            }
        }
        return size;
    }

    /**
     * Finds the first task with the given download id.
     *
     * @param id download id
     * @return the first matching task, or {@code null} if none is registered
     */
    public BaseDownloadTask.IRunningTask get(final int id) {
        synchronized (mList) {
            for (BaseDownloadTask.IRunningTask task : mList) {
                // when FileDownloadMgr#isDownloading
                if (task.is(id)) {
                    return task;
                }
            }
        }
        return null;
    }

    /**
     * Collects the not-yet-over tasks with the given id that are eligible to
     * receive messages from the download service (i.e. whose status is neither
     * INVALID_STATUS nor toLaunchPool).
     */
    List<BaseDownloadTask.IRunningTask> getReceiveServiceTaskList(final int id) {
        final List<BaseDownloadTask.IRunningTask> list = new ArrayList<>();
        synchronized (this.mList) {
            for (BaseDownloadTask.IRunningTask task : this.mList) {
                if (task.is(id) && !task.isOver()) {
                    final byte status = task.getOrigin().getStatus();
                    if (status != FileDownloadStatus.INVALID_STATUS
                            && status != FileDownloadStatus.toLaunchPool) {
                        list.add(task);
                    }
                }
            }
        }
        return list;
    }

    /** Collects every not-yet-over task with the given id, regardless of status. */
    List<BaseDownloadTask.IRunningTask> getDownloadingList(final int id) {
        final List<BaseDownloadTask.IRunningTask> list = new ArrayList<>();
        synchronized (this.mList) {
            for (BaseDownloadTask.IRunningTask task : this.mList) {
                if (task.is(id) && !task.isOver()) {
                    list.add(task);
                }
            }
        }
        return list;
    }

    // True when the given task is not registered in this list.
    boolean isNotContains(final BaseDownloadTask.IRunningTask download) {
        return mList.isEmpty() || !mList.contains(download);
    }

    /** Snapshots all tasks currently bound to the given listener. */
    List<BaseDownloadTask.IRunningTask> copy(final FileDownloadListener listener) {
        final List<BaseDownloadTask.IRunningTask> targetList = new ArrayList<>();
        synchronized (mList) {
            // Prevent size changing
            for (BaseDownloadTask.IRunningTask task : mList) {
                if (task.is(listener)) {
                    targetList.add(task);
                }
            }
            return targetList;
        }
    }

    /**
     * Attaches every unattached task of the given listener to the queue
     * identified by {@code attachKey} and returns those tasks for starting.
     */
    List<BaseDownloadTask.IRunningTask> assembleTasksToStart(int attachKey,
                                                             FileDownloadListener listener) {
        final List<BaseDownloadTask.IRunningTask> targetList = new ArrayList<>();
        synchronized (mList) {
            // Prevent size changing
            for (BaseDownloadTask.IRunningTask task : mList) {
                if (task.getOrigin().getListener() == listener
                        && !task.getOrigin().isAttached()) {
                    task.setAttachKeyByQueue(attachKey);
                    targetList.add(task);
                }
            }
            return targetList;
        }
    }

    /** Snapshots the whole list into an array. */
    BaseDownloadTask.IRunningTask[] copy() {
        synchronized (mList) {
            // Prevent size changing
            BaseDownloadTask.IRunningTask[] copy =
                    new BaseDownloadTask.IRunningTask[mList.size()];
            return mList.toArray(copy);
        }
    }

    /**
     * Divert all data in list 2 destination list: moves every task into
     * {@code destination} (skipping duplicates) and clears this list.
     */
    void divertAndIgnoreDuplicate(
            @SuppressWarnings("SameParameterValue")
            final List<BaseDownloadTask.IRunningTask> destination) {
        synchronized (mList) {
            for (BaseDownloadTask.IRunningTask iRunningTask : mList) {
                if (!destination.contains(iRunningTask)) {
                    destination.add(iRunningTask);
                }
            }
            mList.clear();
        }
    }

    /**
     * Removes the task from this list and notifies its messenger according to
     * the terminal status carried by {@code snapshot}.
     *
     * @param willRemoveDownload the task to remove
     * @param snapshot           the message snapshot whose status drives the notification
     * @return whether the task was actually present and removed
     */
    public boolean remove(final BaseDownloadTask.IRunningTask willRemoveDownload,
                          MessageSnapshot snapshot) {
        final byte removeByStatus = snapshot.getStatus();
        boolean succeed;
        synchronized (mList) {
            succeed = mList.remove(willRemoveDownload);
        }
        if (FileDownloadLog.NEED_LOG) {
            // NOTE(review): this logs only when the list has just become empty;
            // if the intent was to log every removal, the condition looks
            // suspicious — confirm before changing.
            if (mList.size() == 0) {
                FileDownloadLog.v(this, "remove %s left %d %d",
                        willRemoveDownload, removeByStatus, mList.size());
            }
        }
        if (succeed) {
            final IFileDownloadMessenger messenger = willRemoveDownload.getMessageHandler().
                    getMessenger();
            // Notify 2 Listener. Only warn/error/paused/completed are dispatched;
            // any other status is removed silently (no default branch) — presumably
            // those are the only terminal statuses reaching here; verify at callers.
            switch (removeByStatus) {
                case FileDownloadStatus.warn:
                    messenger.notifyWarn(snapshot);
                    break;
                case FileDownloadStatus.error:
                    messenger.notifyError(snapshot);
                    break;
                case FileDownloadStatus.paused:
                    messenger.notifyPaused(snapshot);
                    break;
                case FileDownloadStatus.completed:
                    messenger.notifyBlockComplete(
                            MessageSnapshotTaker.takeBlockCompleted(snapshot));
                    break;
            }
        } else {
            FileDownloadLog.e(this, "remove error, not exist: %s %d",
                    willRemoveDownload, removeByStatus);
        }
        return succeed;
    }

    /**
     * Registers a task, assigning it an isolated attach key if it is not yet
     * attached to any queue, and adds it only if its messenger accepts "begin".
     */
    void add(final BaseDownloadTask.IRunningTask task) {
        if (!task.getOrigin().isAttached()) {
            // if this task didn't attach to any key, this task must be an isolated task, so we
            // generate a key and attache it to this task, make sure this task not be assembled by
            // a queue.
            task.setAttachKeyDefault();
        }
        if (task.getMessageHandler().getMessenger().notifyBegin()) {
            addUnchecked(task);
        }
    }

    /**
     * This method generally used for enqueuing the task which will be assembled by a queue.
     *
     * @see BaseDownloadTask.InQueueTask#enqueue()
     */
    void addUnchecked(final BaseDownloadTask.IRunningTask task) {
        if (task.isMarkedAdded2List()) {
            return;
        }
        synchronized (mList) {
            if (mList.contains(task)) {
                FileDownloadLog.w(this, "already has %s", task);
            } else {
                task.markAdded2List();
                mList.add(task);
                if (FileDownloadLog.NEED_LOG) {
                    FileDownloadLog.v(this, "add list in all %s %d %d", task,
                            task.getOrigin().getStatus(), mList.size());
                }
            }
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/* $Id: NamedColorSpace.java 1051421 2010-12-21 08:54:25Z jeremias $ */

package org.apache.xmlgraphics.java2d.color;

import java.awt.Color;
import java.awt.color.ColorSpace;
import java.util.Arrays;

/**
 * Implements a pseudo color space for a named color which is defined in the CIE
 * XYZ color space. At the moment, this color space always returns the fully
 * opaque color regardless of the single component value (tint) given to its
 * conversion methods.
 */
public class NamedColorSpace extends ColorSpace implements ColorSpaceOrigin {

    private static final long serialVersionUID = -8957543225908514658L;

    /** The color name (trimmed, never empty). */
    private final String name;
    /** The CIE XYZ coordinates of the full-tint color (always length 3). */
    private final float[] xyz;
    /** Optional profile name this color originates from; may be null. */
    private final String profileName;
    /** Optional profile URI this color originates from; may be null. */
    private final String profileURI;

    /**
     * Creates a new named color.
     *
     * @param name
     *            the color name
     * @param xyz
     *            the CIE XYZ coordinates (valid values: 0.0f to 1.0f, although
     *            values slightly larger than 1.0f are common)
     */
    public NamedColorSpace(final String name, final float[] xyz) {
        this(name, xyz, null, null);
    }

    /**
     * Creates a new named color.
     *
     * @param name
     *            the color name
     * @param xyz
     *            the CIE XYZ coordinates (valid values: 0.0f to 1.0f, although
     *            values slightly larger than 1.0f are common)
     * @param profileName
     *            Optional profile name associated with this color space
     * @param profileURI
     *            Optional profile URI associated with this color space
     */
    public NamedColorSpace(final String name, final float[] xyz,
            final String profileName, final String profileURI) {
        // One-component "gray-like" color space: the single component is the tint.
        super(ColorSpace.TYPE_GRAY, 1);
        checkNumComponents(xyz, 3);
        if (name == null || name.trim().length() == 0) {
            throw new IllegalArgumentException(
                    "No name provided for named color space");
        }
        this.name = name.trim();
        // Defensive copy so later mutation of the caller's array cannot affect us.
        this.xyz = new float[3];
        System.arraycopy(xyz, 0, this.xyz, 0, 3);
        this.profileName = profileName;
        this.profileURI = profileURI;
    }

    /**
     * Creates a new named color.
     *
     * @param name
     *            the color name
     * @param color
     *            the color to use when the named color's specific color
     *            properties are not available.
     * @param profileName
     *            Optional profile name associated with this color space
     * @param profileURI
     *            Optional profile URI associated with this color space
     */
    public NamedColorSpace(final String name, final Color color,
            final String profileName, final String profileURI) {
        this(name, color.getColorSpace().toCIEXYZ(
                color.getColorComponents(null)), profileName, profileURI);
    }

    /**
     * Creates a new named color.
     *
     * @param name
     *            the color name
     * @param color
     *            the color to use when the named color's specific color
     *            properties are not available.
     */
    public NamedColorSpace(final String name, final Color color) {
        this(name, color.getColorSpace().toCIEXYZ(
                color.getColorComponents(null)), null, null);
    }

    // Validates that the color value array is non-null and has the expected length.
    private void checkNumComponents(final float[] colorvalue, final int expected) {
        if (colorvalue == null) {
            throw new NullPointerException("color value may not be null");
        }
        if (colorvalue.length != expected) {
            throw new IllegalArgumentException("Expected " + expected
                    + " components, but got " + colorvalue.length);
        }
    }

    /**
     * Returns the color name.
     *
     * @return the color name
     */
    public String getColorName() {
        return this.name;
    }

    /** {@inheritDoc} */
    @Override
    public String getProfileName() {
        return this.profileName;
    }

    /** {@inheritDoc} */
    @Override
    public String getProfileURI() {
        return this.profileURI;
    }

    /**
     * Returns the XYZ coordinates of the named color.
     *
     * @return the XYZ coordinates of the named color
     */
    public float[] getXYZ() {
        // Defensive copy: callers must not be able to mutate internal state.
        final float[] result = new float[this.xyz.length];
        System.arraycopy(this.xyz, 0, result, 0, this.xyz.length);
        return result;
    }

    /**
     * Returns an sRGB-based color representing the full-tint color defined by
     * this named color space.
     *
     * @return the sRGB color
     */
    public Color getRGBColor() {
        final float[] comps = toRGB(this.xyz);
        return new Color(comps[0], comps[1], comps[2]);
    }

    /** {@inheritDoc} */
    @Override
    public float getMinValue(final int component) {
        return getMaxValue(component); // same as min, i.e. always 1.0
    }

    /** {@inheritDoc} */
    @Override
    public float getMaxValue(final int component) {
        switch (component) {
        case 0:
            return 1f;
        default:
            throw new IllegalArgumentException(
                    "A named color space only has 1 component!");
        }
    }

    /** {@inheritDoc} */
    @Override
    public String getName(final int component) {
        switch (component) {
        case 0:
            return "Tint";
        default:
            throw new IllegalArgumentException(
                    "A named color space only has 1 component!");
        }
    }

    /** {@inheritDoc} */
    @Override
    public float[] fromCIEXYZ(final float[] colorvalue) {
        // ignore the given color values as this is a fixed color.
        return new float[] { 1.0f }; // Return value for full tint
    }

    /** {@inheritDoc} */
    @Override
    public float[] fromRGB(final float[] rgbvalue) {
        // ignore the given color values as this is a fixed color.
        return new float[] { 1.0f }; // Return value for full tint
    }

    /** {@inheritDoc} */
    @Override
    public float[] toCIEXYZ(final float[] colorvalue) {
        final float[] ret = new float[3];
        System.arraycopy(this.xyz, 0, ret, 0, this.xyz.length);
        return ret;
    }

    /** {@inheritDoc} */
    @Override
    public float[] toRGB(final float[] colorvalue) {
        final ColorSpace sRGB = ColorSpace.getInstance(ColorSpace.CS_sRGB);
        return sRGB.fromCIEXYZ(this.xyz);
    }

    /** {@inheritDoc} */
    @Override
    public boolean equals(final Object obj) {
        if (!(obj instanceof NamedColorSpace)) {
            return false;
        }
        final NamedColorSpace other = (NamedColorSpace) obj;
        if (!this.name.equals(other.name)) {
            return false;
        }
        for (int i = 0, c = this.xyz.length; i < c; ++i) {
            if (this.xyz[i] != other.xyz[i]) {
                return false;
            }
        }
        return true;
    }

    /** {@inheritDoc} */
    @Override
    public int hashCode() {
        // Fix: must be consistent with equals(), which compares only name and xyz.
        // The previous implementation hashed (profileName + name), so two equal
        // instances with different profile names produced different hash codes,
        // violating the Object.hashCode() contract for hash-based collections.
        return 31 * this.name.hashCode() + Arrays.hashCode(this.xyz);
    }

    /** {@inheritDoc} */
    @Override
    public String toString() {
        return "Named Color Space: " + getColorName();
    }

}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.mapper.externalvalues;

import com.spatial4j.core.shape.Point;
import org.apache.lucene.document.Field;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Iterators;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.builders.ShapeBuilders;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.BinaryFieldMapper;
import org.elasticsearch.index.mapper.core.BooleanFieldMapper;
import org.elasticsearch.index.mapper.geo.BaseGeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapperLegacy;
import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper;

import java.io.IOException;
import java.nio.charset.Charset;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.index.mapper.MapperBuilders.stringField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField;

/**
 * Test-fixture mapper that, when parsed, ignores the document's own value and
 * instead indexes fixed "external" values into a family of sub-fields:
 * <ul>
 *   <li>{@code .bin} — Binary type</li>
 *   <li>{@code .bool} — Boolean type</li>
 *   <li>{@code .point} — GeoPoint type</li>
 *   <li>{@code .shape} — GeoShape type</li>
 * </ul>
 * plus a string field carrying a caller-supplied generated value.
 */
public class ExternalMapper extends FieldMapper {

    /** Suffix names of the sub-fields this mapper creates. */
    public static class Names {
        public static final String FIELD_BIN = "bin";
        public static final String FIELD_BOOL = "bool";
        public static final String FIELD_POINT = "point";
        public static final String FIELD_SHAPE = "shape";
    }

    /** Builder wiring up the sub-field builders and the string field. */
    public static class Builder extends FieldMapper.Builder<Builder, ExternalMapper> {

        private BinaryFieldMapper.Builder binBuilder = new BinaryFieldMapper.Builder(Names.FIELD_BIN);
        private BooleanFieldMapper.Builder boolBuilder = new BooleanFieldMapper.Builder(Names.FIELD_BOOL);
        private GeoPointFieldMapper.Builder pointBuilder = new GeoPointFieldMapper.Builder(Names.FIELD_POINT);
        private GeoPointFieldMapperLegacy.Builder legacyPointBuilder = new GeoPointFieldMapperLegacy.Builder(Names.FIELD_POINT);
        private GeoShapeFieldMapper.Builder shapeBuilder = new GeoShapeFieldMapper.Builder(Names.FIELD_SHAPE);
        private Mapper.Builder stringBuilder;
        private String generatedValue;
        private String mapperName;

        public Builder(String name, String generatedValue, String mapperName) {
            super(name, new ExternalFieldType(), new ExternalFieldType());
            this.builder = this;
            this.stringBuilder = stringField(name).store(false);
            this.generatedValue = generatedValue;
            this.mapperName = mapperName;
        }

        /** Replaces the default (non-stored) string sub-field builder. */
        public Builder string(Mapper.Builder content) {
            this.stringBuilder = content;
            return this;
        }

        @Override
        public ExternalMapper build(BuilderContext context) {
            // Sub-fields are built under this mapper's name on the content path.
            context.path().add(name);
            BinaryFieldMapper binMapper = binBuilder.build(context);
            BooleanFieldMapper boolMapper = boolBuilder.build(context);
            // Indices created before 2.2.0 use the legacy geo-point format.
            BaseGeoPointFieldMapper pointMapper = (context.indexCreatedVersion().before(Version.V_2_2_0)) ?
                legacyPointBuilder.build(context) : pointBuilder.build(context);
            GeoShapeFieldMapper shapeMapper = shapeBuilder.build(context);
            FieldMapper stringMapper = (FieldMapper)stringBuilder.build(context);
            context.path().remove();

            setupFieldType(context);

            return new ExternalMapper(name, fieldType, generatedValue, mapperName, binMapper, boolMapper, pointMapper, shapeMapper, stringMapper,
                context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
        }
    }

    /** Parses a mapping definition into a {@link Builder}, honouring multi-fields. */
    public static class TypeParser implements Mapper.TypeParser {

        private String generatedValue;
        private String mapperName;

        TypeParser(String mapperName, String generatedValue) {
            this.mapperName = mapperName;
            this.generatedValue = generatedValue;
        }

        @SuppressWarnings({"unchecked"})
        @Override
        public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
            ExternalMapper.Builder builder = new ExternalMapper.Builder(name, generatedValue, mapperName);
            parseField(builder, name, node, parserContext);
            // Consume any multi-field definitions; handled entries are removed
            // so unknown properties can be detected by the caller.
            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
                Map.Entry<String, Object> entry = iterator.next();
                String propName = Strings.toUnderscoreCase(entry.getKey());
                Object propNode = entry.getValue();
                if (parseMultiField(builder, name, parserContext, propName, propNode)) {
                    iterator.remove();
                }
            }

            return builder;
        }
    }

    /** Minimal fake field type; only exists so the mapper has a type name. */
    static class ExternalFieldType extends MappedFieldType {

        public ExternalFieldType() {}

        protected ExternalFieldType(ExternalFieldType ref) {
            super(ref);
        }

        @Override
        public MappedFieldType clone() {
            return new ExternalFieldType(this);
        }

        @Override
        public String typeName() {
            return "faketype";
        }
    }

    private final String generatedValue;
    private final String mapperName;

    private final BinaryFieldMapper binMapper;
    private final BooleanFieldMapper boolMapper;
    private final BaseGeoPointFieldMapper pointMapper;
    private final GeoShapeFieldMapper shapeMapper;
    private final FieldMapper stringMapper;

    public ExternalMapper(String simpleName, MappedFieldType fieldType,
                          String generatedValue, String mapperName,
                          BinaryFieldMapper binMapper, BooleanFieldMapper boolMapper, BaseGeoPointFieldMapper pointMapper,
                          GeoShapeFieldMapper shapeMapper, FieldMapper stringMapper, Settings indexSettings,
                          MultiFields multiFields, CopyTo copyTo) {
        super(simpleName, fieldType, new ExternalFieldType(), indexSettings, multiFields, copyTo);
        this.generatedValue = generatedValue;
        this.mapperName = mapperName;
        this.binMapper = binMapper;
        this.boolMapper = boolMapper;
        this.pointMapper = pointMapper;
        this.shapeMapper = shapeMapper;
        this.stringMapper = stringMapper;
    }

    @Override
    public Mapper parse(ParseContext context) throws IOException {
        // Ignore the actual document value: index fixed dummy values instead.
        byte[] bytes = "Hello world".getBytes(Charset.defaultCharset());
        binMapper.parse(context.createExternalValueContext(bytes));

        boolMapper.parse(context.createExternalValueContext(true));

        // Let's add a Dummy Point
        Double lat = 42.0;
        Double lng = 51.0;
        GeoPoint point = new GeoPoint(lat, lng);
        pointMapper.parse(context.createExternalValueContext(point));

        // Let's add a Dummy Shape
        Point shape = ShapeBuilders.newPoint(-100, 45).build();
        shapeMapper.parse(context.createExternalValueContext(shape));

        context = context.createExternalValueContext(generatedValue);

        // Let's add a Original String
        stringMapper.parse(context);

        // Multi-fields see the generated-value context set just above.
        multiFields.parse(this, context);
        return null;
    }

    @Override
    protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
        // Never called: parse(ParseContext) is overridden above and does all the work.
        throw new UnsupportedOperationException();
    }

    @Override
    protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
        // ignore this for now
    }

    @Override
    public Iterator<Mapper> iterator() {
        // Expose the sub-field mappers so they are registered/traversed with this mapper.
        return Iterators.concat(super.iterator(), Arrays.asList(binMapper, boolMapper, pointMapper, shapeMapper, stringMapper).iterator());
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(simpleName());
        builder.field("type", mapperName);
        multiFields.toXContent(builder, params);
        builder.endObject();
        return builder;
    }

    @Override
    protected String contentType() {
        return mapperName;
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.test.store;

import com.carrotsearch.randomizedtesting.SeedUtils;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import org.apache.lucene.index.CheckIndex;
import org.apache.lucene.store.BaseDirectoryWrapper;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.LockFactory;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.StoreRateLimiting;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestRuleMarkFailure;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardPath;
import org.elasticsearch.index.store.FsDirectoryService;
import org.elasticsearch.index.store.IndexStore;
import org.elasticsearch.index.store.Store;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESTestCase;
import org.junit.Assert;

import java.io.Closeable;
import java.io.IOException;
import java.io.PrintStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Random;

/**
 * A test {@link FsDirectoryService} that wraps every directory it creates in a
 * {@link MockDirectoryWrapper}, injecting seeded random I/O failures, crash
 * simulation and open-file checks for integration tests.
 */
public class MockFSDirectoryService extends FsDirectoryService {

    /** Probability of a random IOException when opening a file. */
    public static final Setting<Double> RANDOM_IO_EXCEPTION_RATE_ON_OPEN_SETTING =
        Setting.doubleSetting("index.store.mock.random.io_exception_rate_on_open", 0.0d, 0.0d, Property.IndexScope, Property.NodeScope);
    /** Probability of a random IOException on regular I/O operations. */
    public static final Setting<Double> RANDOM_IO_EXCEPTION_RATE_SETTING =
        Setting.doubleSetting("index.store.mock.random.io_exception_rate", 0.0d, 0.0d, Property.IndexScope, Property.NodeScope);
    public static final Setting<Boolean> RANDOM_PREVENT_DOUBLE_WRITE_SETTING =
        Setting.boolSetting("index.store.mock.random.prevent_double_write", true, Property.IndexScope, Property.NodeScope);// true is default in MDW
    public static final Setting<Boolean> RANDOM_NO_DELETE_OPEN_FILE_SETTING =
        Setting.boolSetting("index.store.mock.random.no_delete_open_file", true, Property.IndexScope, Property.NodeScope);// true is default in MDW
    public static final Setting<Boolean> CRASH_INDEX_SETTING =
        Setting.boolSetting("index.store.mock.random.crash_index", true, Property.IndexScope, Property.NodeScope);// true is default in MDW

    /** The real directory service we delegate to; its type is chosen randomly per shard. */
    private final FsDirectoryService delegateService;
    private final Random random;
    private final double randomIOExceptionRate;
    private final double randomIOExceptionRateOnOpen;
    private final MockDirectoryWrapper.Throttling throttle;
    private final boolean preventDoubleWrite;
    private final boolean noDeleteOpenFile;
    private final boolean crashIndex;

    @Inject
    public MockFSDirectoryService(IndexSettings idxSettings, IndexStore indexStore, final ShardPath path) {
        super(idxSettings, indexStore, path);
        Settings indexSettings = idxSettings.getSettings();
        final long seed = idxSettings.getValue(ESIntegTestCase.INDEX_TEST_SEED_SETTING);
        this.random = new Random(seed);

        randomIOExceptionRate = RANDOM_IO_EXCEPTION_RATE_SETTING.get(indexSettings);
        randomIOExceptionRateOnOpen = RANDOM_IO_EXCEPTION_RATE_ON_OPEN_SETTING.get(indexSettings);
        preventDoubleWrite = RANDOM_PREVENT_DOUBLE_WRITE_SETTING.get(indexSettings);
        // If not configured explicitly, pick the open-file-delete behavior at random.
        noDeleteOpenFile = RANDOM_NO_DELETE_OPEN_FILE_SETTING.exists(indexSettings) ?
            RANDOM_NO_DELETE_OPEN_FILE_SETTING.get(indexSettings) : random.nextBoolean();
        random.nextInt(shardId.getId() + 1); // some randomness per shard
        throttle = MockDirectoryWrapper.Throttling.NEVER;
        crashIndex = CRASH_INDEX_SETTING.get(indexSettings);

        if (logger.isDebugEnabled()) {
            logger.debug("Using MockDirWrapper with seed [{}] throttle: [{}] crashIndex: [{}]", SeedUtils.formatSeed(seed),
                throttle, crashIndex);
        }
        delegateService = randomDirectorService(indexStore, path);
    }

    @Override
    public Directory newDirectory() throws IOException {
        return wrap(delegateService.newDirectory());
    }

    @Override
    protected synchronized Directory newFSDirectory(Path location, LockFactory lockFactory) throws IOException {
        // All directory creation goes through the delegate service via newDirectory().
        throw new UnsupportedOperationException();
    }

    /**
     * Runs Lucene's {@link CheckIndex} against the given store's directory and
     * records a global failure flag if corruption is found. No-op if the store's
     * reference could not be incremented or no index exists yet.
     */
    public static void checkIndex(ESLogger logger, Store store, ShardId shardId) {
        if (store.tryIncRef()) {
            logger.info("start check index");
            try {
                Directory dir = store.directory();
                if (!Lucene.indexExists(dir)) {
                    return;
                }
                try (CheckIndex checkIndex = new CheckIndex(dir)) {
                    BytesStreamOutput os = new BytesStreamOutput();
                    PrintStream out = new PrintStream(os, false, StandardCharsets.UTF_8.name());
                    checkIndex.setInfoStream(out);
                    out.flush();
                    CheckIndex.Status status = checkIndex.checkIndex();
                    if (!status.clean) {
                        ESTestCase.checkIndexFailed = true;
                        logger.warn("check index [failure] index files={}\n{}", Arrays.toString(dir.listAll()), os.bytes().utf8ToString());
                        throw new IOException("index check failure");
                    } else {
                        if (logger.isDebugEnabled()) {
                            logger.debug("check index [success]\n{}", os.bytes().utf8ToString());
                        }
                    }
                } catch (LockObtainFailedException e) {
                    // A held write lock means an IndexWriter was left open by the test.
                    ESTestCase.checkIndexFailed = true;
                    throw new IllegalStateException("IndexWriter is still open on shard " + shardId, e);
                }
            } catch (Exception e) {
                // Swallowed deliberately: the checkIndexFailed flag (set above) is
                // what fails the suite; here we only log.
                logger.warn("failed to check index", e);
            } finally {
                logger.info("end check index");
                store.decRef();
            }
        }
    }

    @Override
    public void onPause(long nanos) {
        delegateService.onPause(nanos);
    }

    @Override
    public StoreRateLimiting rateLimiting() {
        return delegateService.rateLimiting();
    }

    @Override
    public long throttleTimeInNanos() {
        return delegateService.throttleTimeInNanos();
    }

    /** Wraps a directory with the configured mock failure behavior and registers it for suite-close checking. */
    private Directory wrap(Directory dir) {
        final ElasticsearchMockDirectoryWrapper w = new ElasticsearchMockDirectoryWrapper(random, dir, this.crashIndex);
        w.setRandomIOExceptionRate(randomIOExceptionRate);
        w.setRandomIOExceptionRateOnOpen(randomIOExceptionRateOnOpen);
        w.setThrottling(throttle);
        w.setCheckIndexOnClose(false); // we do this on the index level
        w.setPreventDoubleWrite(preventDoubleWrite);
        // TODO: make this test robust to virus scanner
        w.setAssertNoDeleteOpenFile(false);
        w.setUseSlowOpenClosers(false);
        LuceneTestCase.closeAfterSuite(new CloseableDirectory(w));
        return w;
    }

    /** Builds a delegate {@link FsDirectoryService} with a randomly chosen store type. */
    private FsDirectoryService randomDirectorService(IndexStore indexStore, ShardPath path) {
        final IndexSettings indexSettings = indexStore.getIndexSettings();
        final IndexMetaData build = IndexMetaData.builder(indexSettings.getIndexMetaData()).settings(Settings.builder().put(indexSettings.getSettings()).put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), RandomPicks.randomFrom(random, IndexModule.Type.values()).getSettingsKey())).build();
        final IndexSettings newIndexSettings = new IndexSettings(build, indexSettings.getNodeSettings());
        return new FsDirectoryService(newIndexSettings, indexStore, path);
    }

    /** A {@link MockDirectoryWrapper} whose crash() can be disabled via settings. */
    public static final class ElasticsearchMockDirectoryWrapper extends MockDirectoryWrapper {

        private final boolean crash;

        public ElasticsearchMockDirectoryWrapper(Random random, Directory delegate, boolean crash) {
            super(random, delegate);
            this.crash = crash;
        }

        @Override
        public synchronized void crash() throws IOException {
            if (crash) {
                super.crash();
            }
        }
    }

    /**
     * Asserts at suite close that the wrapped directory was closed (unless the
     * suite already failed for another reason).
     *
     * Declared {@code static}: the original version was a non-static inner class,
     * which held a hidden reference to the enclosing MockFSDirectoryService and —
     * because instances are registered via closeAfterSuite — kept the whole
     * directory service reachable for the lifetime of the suite. It uses no
     * enclosing-instance state, so static is a pure improvement.
     */
    static final class CloseableDirectory implements Closeable {
        private final BaseDirectoryWrapper dir;
        private final TestRuleMarkFailure failureMarker;

        public CloseableDirectory(BaseDirectoryWrapper dir) {
            this.dir = dir;
            this.failureMarker = ESTestCase.getSuiteFailureMarker();
        }

        @Override
        public void close() {
            // We only attempt to check open/closed state if there were no other test
            // failures.
            try {
                if (failureMarker.wasSuccessful() && dir.isOpen()) {
                    Assert.fail("Directory not closed: " + dir);
                }
            } finally {
                // TODO: perform real close of the delegate: LUCENE-4058
                // dir.close();
            }
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.rya.streams.api.queries;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.HashSet;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

import org.apache.rya.streams.api.entity.StreamsQuery;
import org.apache.rya.streams.api.queries.QueryChangeLog.QueryChangeLogException;
import org.junit.Test;

import com.google.common.collect.Sets;
import com.google.common.util.concurrent.AbstractScheduledService.Scheduler;

/**
 * Unit tests the methods of {@link InMemoryQueryRepository}.
 */
public class InMemoryQueryRepositoryTest {

    /** Polling schedule used by every repository under test. */
    private static final Scheduler SCHEDULE = Scheduler.newFixedRateSchedule(0L, 100, TimeUnit.MILLISECONDS);

    @Test
    public void canReadAddedQueries() throws Exception {
        // Setup a totally in memory QueryRepository.
        final QueryRepository queries = new InMemoryQueryRepository( new InMemoryQueryChangeLog(), SCHEDULE );

        // Add some queries to it.
        final Set<StreamsQuery> expected = new HashSet<>();
        expected.add( queries.add("query 1", true, true) );
        expected.add( queries.add("query 2", false, true) );
        expected.add( queries.add("query 3", true, false) );

        // Show they are in the list of all queries.
        final Set<StreamsQuery> stored = queries.list();
        assertEquals(expected, stored);
    }

    @Test
    public void deletedQueriesDisappear() throws Exception {
        // Setup a totally in memory QueryRepository.
        final QueryRepository queries = new InMemoryQueryRepository( new InMemoryQueryChangeLog(), SCHEDULE );

        // Add some queries to it. The second one we will delete.
        final Set<StreamsQuery> expected = new HashSet<>();
        expected.add( queries.add("query 1", true, true) );
        final UUID deletedMeId = queries.add("query 2", false, true).getQueryId();
        expected.add( queries.add("query 3", true, false) );

        // Delete the second query.
        queries.delete( deletedMeId );

        // Show only queries 1 and 3 are in the list.
        final Set<StreamsQuery> stored = queries.list();
        assertEquals(expected, stored);
    }

    @Test
    public void initializedWithPopulatedChangeLog() throws Exception {
        // Setup a totally in memory QueryRepository. Hold onto the change log so that we can use it again later.
        final QueryChangeLog changeLog = new InMemoryQueryChangeLog();
        final QueryRepository queries = new InMemoryQueryRepository( changeLog, SCHEDULE );
        try {
            queries.startAndWait();

            // Add some queries and deletes to it.
            final Set<StreamsQuery> expected = new HashSet<>();
            expected.add( queries.add("query 1", true, true) );
            final UUID deletedMeId = queries.add("query 2", false, true).getQueryId();
            expected.add( queries.add("query 3", true, false) );
            queries.delete( deletedMeId );

            // Create a new totally in memory QueryRepository.
            final QueryRepository initializedQueries = new InMemoryQueryRepository( changeLog, SCHEDULE );
            try {
                // Listing the queries should work using an initialized change log.
                final Set<StreamsQuery> stored = initializedQueries.list();
                assertEquals(expected, stored);
            } finally {
                // BUGFIX: the inner finally previously stopped `queries` (which the
                // outer finally stops anyway), leaving `initializedQueries` running.
                initializedQueries.stop();
            }
        } finally {
            queries.stop();
        }
    }

    @Test(expected = RuntimeException.class)
    public void changeLogThrowsExceptions() throws Exception {
        // Create a mock change log that throws an exception when you try to list what is in it.
        final QueryChangeLog changeLog = mock(QueryChangeLog.class);
        when(changeLog.readFromStart()).thenThrow(new QueryChangeLogException("Mocked exception."));

        // Create the QueryRepository and invoke one of the methods.
        final QueryRepository queries = new InMemoryQueryRepository( changeLog, SCHEDULE );
        queries.list();
    }

    @Test
    public void get_present() throws Exception {
        // Setup a totally in memory QueryRepository.
        final QueryRepository queries = new InMemoryQueryRepository( new InMemoryQueryChangeLog(), SCHEDULE );

        // Add a query to it.
        final StreamsQuery query = queries.add("query 1", true, false);

        // Show the fetched query matches the expected ones.
        final Optional<StreamsQuery> fetched = queries.get(query.getQueryId());
        assertEquals(query, fetched.get());
    }

    @Test
    public void get_notPresent() throws Exception {
        // Setup a totally in memory QueryRepository.
        final QueryRepository queries = new InMemoryQueryRepository( new InMemoryQueryChangeLog(), SCHEDULE );

        // Fetch a query that was never added to the repository.
        final Optional<StreamsQuery> query = queries.get(UUID.randomUUID());

        // Show it could not be found.
        assertFalse(query.isPresent());
    }

    @Test
    public void update() throws Exception {
        // Setup a totally in memory QueryRepository.
        final QueryRepository queries = new InMemoryQueryRepository( new InMemoryQueryChangeLog(), SCHEDULE );

        // Add a query to it.
        final StreamsQuery query = queries.add("query 1", true, false);

        // Change the isActive state of that query.
        queries.updateIsActive(query.getQueryId(), false);

        // Show the fetched query matches the expected one.
        final Optional<StreamsQuery> fetched = queries.get(query.getQueryId());
        final StreamsQuery expected = new StreamsQuery(query.getQueryId(), query.getSparql(), false, false);
        assertEquals(expected, fetched.get());
    }

    @Test
    public void updateListenerNotify() throws Exception {
        // Setup a totally in memory QueryRepository.
        final QueryRepository queries = new InMemoryQueryRepository( new InMemoryQueryChangeLog(), SCHEDULE );
        try {
            queries.startAndWait();

            // Add a query to it.
            final StreamsQuery query = queries.add("query 1", true, false);

            // Subscribe; the listener asserts on the next change event it observes.
            final Set<StreamsQuery> existing = queries.subscribe((queryChangeEvent, newQueryState) -> {
                final ChangeLogEntry<QueryChange> expected = new ChangeLogEntry<>(1L,
                        QueryChange.create(queryChangeEvent.getEntry().getQueryId(), "query 2", true, false));
                final Optional<StreamsQuery> expectedQueryState = Optional.of(
                        new StreamsQuery(queryChangeEvent.getEntry().getQueryId(), "query 2", true, false));

                assertEquals(expected, queryChangeEvent);
                assertEquals(expectedQueryState, newQueryState);
            });

            // Subscribing must return the queries that already existed.
            assertEquals(Sets.newHashSet(query), existing);

            queries.add("query 2", true, false);
        } finally {
            queries.stop();
        }
    }

    @Test
    public void updateListenerNotify_multiClient() throws Exception {
        // Setup a totally in memory QueryRepository.
        final QueryChangeLog changeLog = new InMemoryQueryChangeLog();
        final QueryRepository queries = new InMemoryQueryRepository( changeLog, SCHEDULE );
        final QueryRepository queries2 = new InMemoryQueryRepository( changeLog, SCHEDULE );
        try {
            queries.startAndWait();
            queries2.startAndWait();

            //show listener on repo that query was added to is being notified of the new query.
            final CountDownLatch repo1Latch = new CountDownLatch(1);
            queries.subscribe((queryChangeEvent, newQueryState) -> {
                final ChangeLogEntry<QueryChange> expected = new ChangeLogEntry<>(0L,
                        QueryChange.create(queryChangeEvent.getEntry().getQueryId(), "query 2", true, false));
                final Optional<StreamsQuery> expectedQueryState = Optional.of(
                        new StreamsQuery(queryChangeEvent.getEntry().getQueryId(), "query 2", true, false));

                assertEquals(expected, queryChangeEvent);
                assertEquals(expectedQueryState, newQueryState);
                repo1Latch.countDown();
            });

            //show listener not on the repo that query was added to is being notified as well.
            final CountDownLatch repo2Latch = new CountDownLatch(1);
            queries2.subscribe((queryChangeEvent, newQueryState) -> {
                final ChangeLogEntry<QueryChange> expected = new ChangeLogEntry<>(0L,
                        QueryChange.create(queryChangeEvent.getEntry().getQueryId(), "query 2", true, false));
                final Optional<StreamsQuery> expectedQueryState = Optional.of(
                        new StreamsQuery(queryChangeEvent.getEntry().getQueryId(), "query 2", true, false));

                assertEquals(expected, queryChangeEvent);
                assertEquals(expectedQueryState, newQueryState);
                repo2Latch.countDown();
            });

            queries.add("query 2", true, false);

            // BUGFIX: InterruptedException was previously swallowed by an empty
            // catch block, which silently skipped these assertions. The test method
            // declares `throws Exception`, so an interrupt now fails the test loudly.
            assertTrue(repo1Latch.await(5, TimeUnit.SECONDS));
            assertTrue(repo2Latch.await(5, TimeUnit.SECONDS));
        } finally {
            queries.stop();
            queries2.stop();
        }
    }

    @Test(expected = IllegalStateException.class)
    public void subscribe_notStarted() throws Exception {
        // Setup a totally in memory QueryRepository.
        final QueryRepository queries = new InMemoryQueryRepository(new InMemoryQueryChangeLog(), SCHEDULE);

        // Subscribing before start() must be rejected.
        queries.subscribe((queryChangeEvent, newQueryState) -> {});

        queries.add("query 2", true, false);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.cache.query; import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.Objects; import java.util.stream.Stream; import javax.cache.Cache; import org.apache.ignite.Ignite; import org.apache.ignite.IgniteCache; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteException; import org.apache.ignite.cache.CacheAtomicityMode; import org.apache.ignite.cache.query.annotations.QuerySqlField; import org.apache.ignite.cluster.ClusterTopologyException; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.internal.cache.query.index.IndexName; import org.apache.ignite.internal.processors.cache.GatewayProtectedCacheProxy; import org.apache.ignite.internal.processors.cache.GridCacheContext; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.testframework.GridTestUtils; import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import static 
org.apache.ignite.cache.query.IndexQueryCriteriaBuilder.between; import static org.apache.ignite.cache.query.IndexQueryCriteriaBuilder.eq; import static org.apache.ignite.cache.query.IndexQueryCriteriaBuilder.gt; import static org.apache.ignite.cache.query.IndexQueryCriteriaBuilder.lt; /** */ @RunWith(Parameterized.class) public class IndexQueryFailoverTest extends GridCommonAbstractTest { /** */ private static final String CACHE = "TEST_CACHE"; /** */ private static final String IDX = "TEST_IDX"; /** */ private static final int CNT = 10_000; /** */ private static IgniteCache<Long, Person> cache; /** Query index, {@code null} or index name. */ @Parameterized.Parameter public String qryIdx; /** */ @Parameterized.Parameters(name = "qryIdx={0}") public static List<String> params() { return F.asList(null, IDX); } /** {@inheritDoc} */ @Override protected void beforeTest() throws Exception { Ignite crd = startGrids(2); cache = crd.cache(CACHE); } /** {@inheritDoc} */ @Override protected void afterTest() throws Exception { stopAllGrids(); } /** {@inheritDoc} */ @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception { IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName); CacheConfiguration<Long, Person> ccfg = new CacheConfiguration<Long, Person>() .setName(CACHE) .setAtomicityMode(CacheAtomicityMode.TRANSACTIONAL) .setIndexedTypes(Long.class, Person.class); cfg.setCacheConfiguration(ccfg); return cfg; } /** */ @Test public void testQueryWithWrongCriteria() { GridTestUtils.assertThrowsAnyCause(null, () -> { IndexQuery<Long, Person> qryNullCriteria = new IndexQuery<Long, Person>(Person.class, qryIdx) .setCriteria(lt(null, 12)); return cache.query(qryNullCriteria); }, NullPointerException.class, "Ouch! Argument cannot be null: field"); } /** */ @Test public void testQueryWrongType() { GridTestUtils.assertThrows(null, () -> new IndexQuery<Long, Integer>((String)null, qryIdx), NullPointerException.class, "Ouch! 
Argument cannot be null: valType"); GridTestUtils.assertThrows(null, () -> new IndexQuery<Long, Integer>("", qryIdx), IllegalArgumentException.class, "Ouch! Argument is invalid: valType must not be empty"); GridTestUtils.assertThrowsAnyCause(null, () -> { IndexQuery<Long, Integer> qry = new IndexQuery<Long, Integer>(Integer.class, qryIdx) .setCriteria(lt("id", Integer.MAX_VALUE)); return cache.query(qry).getAll(); }, IgniteCheckedException.class, "No table found for type: " + Integer.class.getName()); } /** */ @Test public void testQueryWrongIndexName() { GridTestUtils.assertThrowsAnyCause(null, () -> { IndexQuery<Long, Person> qry = new IndexQuery<Long, Person>(Person.class, "") .setCriteria(lt("id", Integer.MAX_VALUE)); return cache.query(qry).getAll(); }, IllegalArgumentException.class, "Ouch! Argument is invalid: idxName must not be empty."); GridTestUtils.assertThrowsAnyCause(null, () -> { IndexQuery<Long, Person> qry = new IndexQuery<Long, Person>(Person.class, "DUMMY") .setCriteria(lt("id", Integer.MAX_VALUE)); return cache.query(qry).getAll(); }, IgniteCheckedException.class, "No index found for name: DUMMY"); } /** */ @Test public void testQueryWrongQuery() { String errMsg = qryIdx != null ? "Index doesn't match criteria." 
: "No index found for criteria."; GridTestUtils.assertThrowsAnyCause(null, () -> { IndexQuery<Long, Person> qry = new IndexQuery<Long, Person>(Person.class, qryIdx) .setCriteria(lt("dummy", Integer.MAX_VALUE)); return cache.query(qry).getAll(); }, IgniteCheckedException.class, errMsg); GridTestUtils.assertThrowsAnyCause(null, () -> { IndexQuery<Long, Person> qry = new IndexQuery<Long, Person>(Person.class, qryIdx) .setCriteria( lt("id", Integer.MAX_VALUE), lt("nonExistedField", Integer.MAX_VALUE)); return cache.query(qry).getAll(); }, IgniteCheckedException.class, errMsg); GridTestUtils.assertThrowsAnyCause(null, () -> { IndexQuery<Long, Person> qry = new IndexQuery<Long, Person>(Person.class, qryIdx) .setCriteria(between("id", 432, 40)); return cache.query(qry).getAll(); }, IgniteCheckedException.class, "Illegal criterion: lower boundary is greater than the upper boundary: " + "ID[432; 40]"); Stream.of( Arrays.asList(lt("id", 100), gt("id", 101)), Arrays.asList(eq("id", 100), eq("id", 101)), Arrays.asList(eq("id", 101), eq("id", 100)), Arrays.asList(eq("id", 101), between("id", 19, 40)) ).forEach(crit -> { String msg = "Failed to merge criterion " + crit.get(1).toString().replace("id", "ID") + " with previous criteria range " + crit.get(0).toString().replace("id", "ID"); GridTestUtils.assertThrowsAnyCause(null, () -> { IndexQuery<Long, Person> qry = new IndexQuery<Long, Person>(Person.class, qryIdx) .setCriteria(crit); return cache.query(qry).getAll(); }, IgniteCheckedException.class, msg); }); } /** */ @Test public void testStopNode() { insertData(0, CNT); IndexQuery<Long, Person> qry = new IndexQuery<Long, Person>(Person.class, qryIdx) .setCriteria(lt("id", CNT)); QueryCursor<Cache.Entry<Long, Person>> cursor = cache.query(qry); stopGrid(1); GridTestUtils.assertThrows(null, () -> cursor.getAll(), ClusterTopologyException.class, null); } /** */ @Test public void testDestroyIndex() { insertData(0, CNT); IndexQuery<Long, Person> qry = new IndexQuery<Long, 
Person>(Person.class, qryIdx) .setCriteria(lt("id", CNT)); Iterator<Cache.Entry<Long, Person>> cursor = cache.query(qry).iterator(); for (int i = 0; i < 10; i++) cursor.next(); destroyIndex(); // SQL doesn't lock index for querying. SQL is eager and fetch all data from index before return it to user by pages. // IndexQuery doesn't lock to, but IndexQuery is lazy and concurrent index operations will affect result of this query. GridTestUtils.assertThrows(null, () -> { while (cursor.hasNext()) cursor.next(); }, IgniteException.class, null); } /** */ @Test public void testConcurrentUpdateIndex() { insertData(0, CNT); IndexQuery<Long, Person> qry = new IndexQuery<Long, Person>(Person.class, qryIdx) .setCriteria(between("id", CNT / 2, CNT + CNT / 2)); Iterator<Cache.Entry<Long, Person>> cursor = cache.query(qry).iterator(); for (int i = 0; i < CNT / 10; i++) cursor.next(); insertData(CNT, CNT * 2); int size = CNT / 10; while (cursor.hasNext()) { cursor.next(); size++; } assertEquals(CNT + 1, size); } /** */ private void destroyIndex() { IndexName idxName = new IndexName(CACHE, CACHE, Person.class.getSimpleName().toUpperCase(), IDX); GridCacheContext cctx = ((GatewayProtectedCacheProxy)cache).context(); cctx.kernalContext().indexProcessor() .removeIndex(cctx, idxName, false); } /** */ private void insertData(int from, int to) { for (int i = from; i < to; i++) cache.put((long)i, new Person(i)); } /** */ private static class Person { /** */ @QuerySqlField(orderedGroups = @QuerySqlField.Group(name = IDX, order = 0)) final int id; /** */ @QuerySqlField final int descId; /** */ Person(int id) { this.id = id; this.descId = id; } /** {@inheritDoc} */ @Override public String toString() { return "Person[id=" + id + "]"; } /** {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Person person = (Person)o; return Objects.equals(id, person.id) && Objects.equals(descId, person.descId); } 
        /** {@inheritDoc} */
        @Override public int hashCode() {
            // Combines exactly the fields compared in equals() (id and descId),
            // preserving the equals/hashCode contract for this test value object.
            return Objects.hash(id, descId);
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.db.filter;

import java.io.IOException;
import java.util.*;

import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.ColumnDefinition;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.rows.*;
import org.apache.cassandra.db.partitions.*;
import org.apache.cassandra.io.sstable.format.SSTableReader;
import org.apache.cassandra.io.util.DataInputPlus;
import org.apache.cassandra.io.util.DataOutputPlus;
import org.apache.cassandra.utils.SearchIterator;
import org.apache.cassandra.utils.btree.BTreeSet;

/**
 * A filter selecting rows given their clustering value.
 *
 * The filter carries an explicit set of requested clusterings; a row is
 * selected iff its clustering is a member of that set. The static row is
 * handled separately (the set may even be empty when only static columns
 * are selected).
 */
public class ClusteringIndexNamesFilter extends AbstractClusteringIndexFilter
{
    static final InternalDeserializer deserializer = new NamesDeserializer();

    // This could be empty if selectedColumns only has static columns (in which case the filter still
    // selects the static row)
    private final NavigableSet<Clustering> clusterings;

    // clusterings is always in clustering order (because we need it that way in some methods), but we also
    // sometimes need those clustering in "query" order (i.e. in reverse clustering order if the query is
    // reversed), so we keep that too for simplicity.
    private final NavigableSet<Clustering> clusteringsInQueryOrder;

    /**
     * @param clusterings the requested clusterings, in clustering order; must not
     *                    contain the static clustering (asserted below).
     * @param reversed    whether the query reads in reverse clustering order.
     */
    public ClusteringIndexNamesFilter(NavigableSet<Clustering> clusterings, boolean reversed)
    {
        super(reversed);
        assert !clusterings.contains(Clustering.STATIC_CLUSTERING);
        this.clusterings = clusterings;
        // descendingSet() is a view, so both sets share the same backing data.
        this.clusteringsInQueryOrder = reversed ? clusterings.descendingSet() : clusterings;
    }

    /**
     * The set of requested rows.
     *
     * Please note that this can be empty if only the static row is requested.
     *
     * @return the set of requested clustering in clustering order (note that
     * this is always in clustering order even if the query is reversed).
     */
    public NavigableSet<Clustering> requestedRows()
    {
        return clusterings;
    }

    // A names filter enumerates specific rows, so it never selects a whole partition.
    public boolean selectsAllPartition()
    {
        return false;
    }

    // A clustering is selected iff it is one of the explicitly requested ones.
    public boolean selects(Clustering clustering)
    {
        return clusterings.contains(clustering);
    }

    /**
     * Returns a filter restricted to the clusterings at or after (in query order)
     * the last clustering already returned to the client.
     */
    public ClusteringIndexNamesFilter forPaging(ClusteringComparator comparator, Clustering lastReturned, boolean inclusive)
    {
        // In reverse queries paging moves "down" the clustering order, hence headSet;
        // forward queries move "up", hence tailSet.
        NavigableSet<Clustering> newClusterings = reversed ? clusterings.headSet(lastReturned, inclusive) : clusterings.tailSet(lastReturned, inclusive);
        return new ClusteringIndexNamesFilter(newClusterings, reversed);
    }

    public boolean isFullyCoveredBy(CachedPartition partition)
    {
        if (partition.isEmpty())
            return false;

        // 'partition' contains all columns, so it covers our filter if our last clustering
        // is smaller than or equal to the last one in the cache.
        return clusterings.comparator().compare(clusterings.last(), partition.lastRow().clustering()) <= 0;
    }

    public boolean isHeadFilter()
    {
        return false;
    }

    // Given another iterator, only return the rows that match this filter
    public UnfilteredRowIterator filterNotIndexed(ColumnFilter columnFilter, UnfilteredRowIterator iterator)
    {
        // Note that we don't filter markers because that's a bit trickier (we don't know in advance until when
        // the range extends) and it's harmless to leave them.
        return new AlteringUnfilteredRowIterator(iterator)
        {
            @Override
            public Row computeNextStatic(Row row)
            {
                // Drop the static row entirely when no static column is fetched.
                return columnFilter.fetchedColumns().statics.isEmpty() ? null : row.filter(columnFilter, iterator.metadata());
            }

            @Override
            public Row computeNext(Row row)
            {
                // Returning null suppresses rows whose clustering was not requested.
                return clusterings.contains(row.clustering()) ? row.filter(columnFilter, iterator.metadata()) : null;
            }
        };
    }

    public UnfilteredRowIterator filter(final SliceableUnfilteredRowIterator iter)
    {
        // Please note that this method assumes that rows from 'iter' already have their columns filtered, i.e. that
        // they only include columns that we select.
        return new WrappingUnfilteredRowIterator(iter)
        {
            // Iterate requested clusterings in the order rows must be produced.
            private final Iterator<Clustering> clusteringIter = clusteringsInQueryOrder.iterator();
            // Unfiltereds for the clustering currently being sliced (a single-row slice
            // may still yield multiple unfiltereds, e.g. range tombstone markers).
            private Iterator<Unfiltered> currentClustering;
            // One-element lookahead buffer filled by hasNext(), drained by next().
            private Unfiltered next;

            @Override
            public boolean hasNext()
            {
                if (next != null)
                    return true;

                if (currentClustering != null && currentClustering.hasNext())
                {
                    next = currentClustering.next();
                    return true;
                }

                // Advance to the next requested clustering until one produces output.
                while (clusteringIter.hasNext())
                {
                    Clustering nextClustering = clusteringIter.next();
                    currentClustering = iter.slice(Slice.make(nextClustering));
                    if (currentClustering.hasNext())
                    {
                        next = currentClustering.next();
                        return true;
                    }
                }
                return false;
            }

            @Override
            public Unfiltered next()
            {
                if (next == null && !hasNext())
                    throw new NoSuchElementException();

                Unfiltered toReturn = next;
                next = null;
                return toReturn;
            }
        };
    }

    public UnfilteredRowIterator getUnfilteredRowIterator(final ColumnFilter columnFilter, final Partition partition)
    {
        final SearchIterator<Clustering, Row> searcher = partition.searchIterator(columnFilter, reversed);
        return new AbstractUnfilteredRowIterator(partition.metadata(),
                                                 partition.partitionKey(),
                                                 partition.partitionLevelDeletion(),
                                                 columnFilter.fetchedColumns(),
                                                 searcher.next(Clustering.STATIC_CLUSTERING),
                                                 reversed,
                                                 partition.stats())
        {
            private final Iterator<Clustering> clusteringIter = clusteringsInQueryOrder.iterator();

            protected Unfiltered computeNext()
            {
                // searcher.next() may return null when the partition has no row for
                // the requested clustering; keep probing until a row is found or
                // either iterator is exhausted.
                while (clusteringIter.hasNext() && searcher.hasNext())
                {
                    Row row = searcher.next(clusteringIter.next());
                    if (row != null)
                        return row;
                }
                return endOfData();
            }
        };
    }

    public boolean shouldInclude(SSTableReader sstable)
    {
        // TODO: we could actually exclude some sstables
        return true;
    }

    public String toString(CFMetaData metadata)
    {
        StringBuilder sb = new StringBuilder();
        sb.append("names(");
        int i = 0;
        for (Clustering clustering : clusterings)
            sb.append(i++ == 0 ? "" : ", ").append(clustering.toString(metadata));
        if (reversed)
            sb.append(", reversed");
        return sb.append(')').toString();
    }

    public String toCQLString(CFMetaData metadata)
    {
        // Nothing to render for a single (or no) clustering.
        if (metadata.clusteringColumns().isEmpty() || clusterings.size() <= 1)
            return "";

        StringBuilder sb = new StringBuilder();
        sb.append('(').append(ColumnDefinition.toCQLString(metadata.clusteringColumns())).append(')');
        // NOTE(review): given the early return above, clusterings.size() is always > 1
        // here, so the "size() == 1" branches of the two ternaries below are dead code.
        sb.append(clusterings.size() == 1 ? " = " : " IN (");
        int i = 0;
        for (Clustering clustering : clusterings)
            sb.append(i++ == 0 ? "" : ", ").append("(").append(clustering.toCQLString(metadata)).append(")");
        sb.append(clusterings.size() == 1 ? "" : ")");

        appendOrderByToCQLString(metadata, sb);
        return sb.toString();
    }

    public Kind kind()
    {
        return Kind.NAMES;
    }

    protected void serializeInternal(DataOutputPlus out, int version) throws IOException
    {
        ClusteringComparator comparator = (ClusteringComparator)clusterings.comparator();
        // Format: vint count, then each clustering serialized with the comparator's subtypes.
        out.writeUnsignedVInt(clusterings.size());
        for (Clustering clustering : clusterings)
            Clustering.serializer.serialize(clustering, out, version, comparator.subtypes());
    }

    protected long serializedSizeInternal(int version)
    {
        ClusteringComparator comparator = (ClusteringComparator)clusterings.comparator();
        // Must mirror serializeInternal() exactly.
        long size = TypeSizes.sizeofUnsignedVInt(clusterings.size());
        for (Clustering clustering : clusterings)
            size += Clustering.serializer.serializedSize(clustering, version, comparator.subtypes());
        return size;
    }

    // Deserializes the payload written by serializeInternal() back into a filter.
    private static class NamesDeserializer implements InternalDeserializer
    {
        public ClusteringIndexFilter deserialize(DataInputPlus in, int version, CFMetaData metadata, boolean reversed) throws IOException
        {
            ClusteringComparator comparator = metadata.comparator;
            BTreeSet.Builder<Clustering> clusterings = BTreeSet.builder(comparator);
            int size = (int)in.readUnsignedVInt();
            for (int i = 0; i < size; i++)
                clusterings.add(Clustering.serializer.deserialize(in, version, comparator.subtypes()));

            return new ClusteringIndexNamesFilter(clusterings.build(), reversed);
        }
    }
}
package edu.cmu.hcii.whyline.bytecode;

import java.io.DataInputStream;
import java.io.IOException;
import java.util.ArrayList;

/**
 * Parses the raw bytes of a JVM method's code attribute into structured
 * {@link Instruction} objects, one per bytecode instruction.
 *
 * @author Andrew J. Ko
 */
public final class BytecodeParser {

	/**
	 * Reads instructions from the given stream until the declared number of code
	 * bytes has been consumed.
	 *
	 * @param reader stream positioned at the start of the code array
	 * @param numberOfBytesInCodeArray length in bytes of the code array (NOT the
	 *        instruction count; instructions vary in length)
	 * @param owner the code attribute owning these instructions; may be null, in
	 *        which case constant-pool operands cannot be resolved
	 * @return the parsed instructions, trimmed to the actual count
	 * @throws IOException if the stream fails or an unknown opcode is encountered
	 */
	public static Instruction[] parse(DataInputStream reader, int numberOfBytesInCodeArray, CodeAttribute owner) throws IOException {

		ConstantPool pool = owner == null ? null : owner.getMethod().getClassfile().getConstantPool();

		// Oversized on purpose: there are at most as many instructions as code bytes.
		Instruction[] instructions = new Instruction[numberOfBytesInCodeArray];
		int index = 0;

		int bytesRemaining = numberOfBytesInCodeArray;

		int remainder = 0;
		int padding = 0;

		// Construct a structured version of the bytecode
		while(bytesRemaining > 0) {

			int opcode = reader.readUnsignedByte();
			Instruction instruction = null;
			// Offset of this instruction from the start of the code array; needed by
			// the switch instructions, whose operands are 4-byte-aligned relative to it.
			int byteIndex = numberOfBytesInCodeArray - bytesRemaining;
			// 0 means "constant length, compute from the instruction afterwards";
			// the variable-length switch cases set it explicitly.
			int byteLength = 0;

			switch(opcode) {

				case Opcodes.AALOAD: instruction = new AALOAD(owner); break;
				case Opcodes.AASTORE: instruction = new AASTORE(owner); break;
				case Opcodes.ACONST_NULL: instruction = new ACONST_NULL(owner); break;
				case Opcodes.ALOAD: instruction = new ALOAD(owner, reader.readUnsignedByte()); break;
				case Opcodes.ALOAD_0: instruction = new ALOAD_0(owner); break;
				case Opcodes.ALOAD_1: instruction = new ALOAD_1(owner); break;
				case Opcodes.ALOAD_2: instruction = new ALOAD_2(owner); break;
				case Opcodes.ALOAD_3: instruction = new ALOAD_3(owner); break;
				case Opcodes.ANEWARRAY: instruction = new ANEWARRAY(owner, (ClassInfo)pool.get(reader.readUnsignedShort())); break;
				case Opcodes.ARETURN: instruction = new ARETURN(owner); break;
				case Opcodes.ARRAYLENGTH: instruction = new ARRAYLENGTH(owner); break;
				case Opcodes.ASTORE: instruction = new ASTORE(owner, reader.readUnsignedByte()); break;
				case Opcodes.ASTORE_0: instruction = new ASTORE_0(owner); break;
				case Opcodes.ASTORE_1: instruction = new ASTORE_1(owner); break;
				case Opcodes.ASTORE_2: instruction = new ASTORE_2(owner); break;
				case Opcodes.ASTORE_3: instruction = new ASTORE_3(owner); break;
				case Opcodes.ATHROW: instruction = new ATHROW(owner); break;
				case Opcodes.BALOAD: instruction = new BALOAD(owner); break;
				case Opcodes.BASTORE: instruction = new BASTORE(owner); break;
				case Opcodes.BIPUSH: instruction = new BIPUSH(owner, reader.readByte()); break;
				case Opcodes.CALOAD: instruction = new CALOAD(owner); break;
				case Opcodes.CASTORE: instruction = new CASTORE(owner); break;
				case Opcodes.CHECKCAST: instruction = new CHECKCAST(owner, (ClassInfo)pool.get(reader.readUnsignedShort())); break;
				case Opcodes.D2F: instruction = new D2F(owner); break;
				case Opcodes.D2I: instruction = new D2I(owner); break;
				case Opcodes.D2L: instruction = new D2L(owner); break;
				case Opcodes.DADD: instruction = new DADD(owner); break;
				case Opcodes.DALOAD: instruction = new DALOAD(owner); break;
				case Opcodes.DASTORE: instruction = new DASTORE(owner); break;
				case Opcodes.DCMPG: instruction = new DCMPG(owner); break;
				case Opcodes.DCMPL: instruction = new DCMPL(owner); break;
				case Opcodes.DCONST_0: instruction = new DCONST_0(owner); break;
				case Opcodes.DCONST_1: instruction = new DCONST_1(owner); break;
				case Opcodes.DDIV: instruction = new DDIV(owner); break;
				case Opcodes.DLOAD: instruction = new DLOAD(owner, reader.readUnsignedByte()); break;
				case Opcodes.DLOAD_0: instruction = new DLOAD_0(owner); break;
				case Opcodes.DLOAD_1: instruction = new DLOAD_1(owner); break;
				case Opcodes.DLOAD_2: instruction = new DLOAD_2(owner); break;
				case Opcodes.DLOAD_3: instruction = new DLOAD_3(owner); break;
				case Opcodes.DMUL: instruction = new DMUL(owner); break;
				case Opcodes.DNEG: instruction = new DNEG(owner); break;
				case Opcodes.DREM: instruction = new DREM(owner); break;
				case Opcodes.DRETURN: instruction = new DRETURN(owner); break;
				case Opcodes.DSTORE: instruction = new DSTORE(owner, reader.readUnsignedByte()); break;
				case Opcodes.DSTORE_0: instruction = new DSTORE_0(owner); break;
				case Opcodes.DSTORE_1: instruction = new DSTORE_1(owner); break;
				case Opcodes.DSTORE_2: instruction = new DSTORE_2(owner); break;
				case Opcodes.DSTORE_3: instruction = new DSTORE_3(owner); break;
				case Opcodes.DSUB: instruction = new DSUB(owner); break;
				case Opcodes.DUP: instruction = new DUP(owner); break;
				case Opcodes.DUP2: instruction = new DUP2(owner); break;
				case Opcodes.DUP2_X1: instruction = new DUP2_X1(owner); break;
				case Opcodes.DUP2_X2: instruction = new DUP2_X2(owner); break;
				case Opcodes.DUP_X1: instruction = new DUP_X1(owner); break;
				case Opcodes.DUP_X2: instruction = new DUP_X2(owner); break;
				case Opcodes.F2D: instruction = new F2D(owner); break;
				case Opcodes.F2I: instruction = new F2I(owner); break;
				case Opcodes.F2L: instruction = new F2L(owner); break;
				case Opcodes.FADD: instruction = new FADD(owner); break;
				case Opcodes.FALOAD: instruction = new FALOAD(owner); break;
				case Opcodes.FASTORE: instruction = new FASTORE(owner); break;
				case Opcodes.FCMPG: instruction = new FCMPG(owner); break;
				case Opcodes.FCMPL: instruction = new FCMPL(owner); break;
				case Opcodes.FCONST_0: instruction = new FCONST_0(owner); break;
				case Opcodes.FCONST_1: instruction = new FCONST_1(owner); break;
				case Opcodes.FCONST_2: instruction = new FCONST_2(owner); break;
				case Opcodes.FDIV: instruction = new FDIV(owner); break;
				case Opcodes.FLOAD: instruction = new FLOAD(owner, reader.readUnsignedByte()); break;
				case Opcodes.FLOAD_0: instruction = new FLOAD_0(owner); break;
				case Opcodes.FLOAD_1: instruction = new FLOAD_1(owner); break;
				case Opcodes.FLOAD_2: instruction = new FLOAD_2(owner); break;
				case Opcodes.FLOAD_3: instruction = new FLOAD_3(owner); break;
				case Opcodes.FMUL: instruction = new FMUL(owner); break;
				case Opcodes.FNEG: instruction = new FNEG(owner); break;
				case Opcodes.FREM: instruction = new FREM(owner); break;
				case Opcodes.FRETURN: instruction = new FRETURN(owner); break;
				case Opcodes.FSTORE: instruction = new FSTORE(owner, reader.readUnsignedByte()); break;
				case Opcodes.FSTORE_0: instruction = new FSTORE_0(owner); break;
				case Opcodes.FSTORE_1: instruction = new FSTORE_1(owner); break;
				case Opcodes.FSTORE_2: instruction = new FSTORE_2(owner); break;
				case Opcodes.FSTORE_3: instruction = new FSTORE_3(owner); break;
				case Opcodes.FSUB: instruction = new FSUB(owner); break;
				case Opcodes.GETFIELD: instruction = new GETFIELD(owner, (FieldrefInfo)pool.get(reader.readUnsignedShort())); break;
				case Opcodes.GETSTATIC: instruction = new GETSTATIC(owner, (FieldrefInfo)pool.get(reader.readUnsignedShort())); break;
				case Opcodes.GOTO: instruction = new GOTO(owner, reader.readShort()); break;
				case Opcodes.GOTO_W: instruction = new GOTO_W(owner, reader.readInt()); break;
				case Opcodes.I2B: instruction = new I2B(owner); break;
				case Opcodes.I2C: instruction = new I2C(owner); break;
				case Opcodes.I2D: instruction = new I2D(owner); break;
				case Opcodes.I2F: instruction = new I2F(owner); break;
				case Opcodes.I2L: instruction = new I2L(owner); break;
				case Opcodes.I2S: instruction = new I2S(owner); break;
				case Opcodes.IADD: instruction = new IADD(owner); break;
				case Opcodes.IALOAD: instruction = new IALOAD(owner); break;
				case Opcodes.IAND: instruction = new IAND(owner); break;
				case Opcodes.IASTORE: instruction = new IASTORE(owner); break;
				case Opcodes.ICONST_0: instruction = new ICONST_0(owner); break;
				case Opcodes.ICONST_1: instruction = new ICONST_1(owner); break;
				case Opcodes.ICONST_2: instruction = new ICONST_2(owner); break;
				case Opcodes.ICONST_3: instruction = new ICONST_3(owner); break;
				case Opcodes.ICONST_4: instruction = new ICONST_4(owner); break;
				case Opcodes.ICONST_5: instruction = new ICONST_5(owner); break;
				case Opcodes.ICONST_M1: instruction = new ICONST_M1(owner); break;
				case Opcodes.IDIV: instruction = new IDIV(owner); break;
				case Opcodes.IFEQ: instruction = new IFEQ(owner, reader.readShort()); break;
				case Opcodes.IFGE: instruction = new IFGE(owner, reader.readShort()); break;
				case Opcodes.IFGT: instruction = new IFGT(owner, reader.readShort()); break;
				case Opcodes.IFLE: instruction = new IFLE(owner, reader.readShort()); break;
				case Opcodes.IFLT: instruction = new IFLT(owner, reader.readShort()); break;
				case Opcodes.IFNE: instruction = new IFNE(owner, reader.readShort()); break;
				case Opcodes.IFNONNULL: instruction = new IFNONNULL(owner, reader.readShort()); break;
				case Opcodes.IFNULL: instruction = new IFNULL(owner, reader.readShort()); break;
				case Opcodes.IF_ACMPEQ: instruction = new IF_ACMPEQ(owner, reader.readShort()); break;
				case Opcodes.IF_ACMPNE: instruction = new IF_ACMPNE(owner, reader.readShort()); break;
				case Opcodes.IF_ICMPEQ: instruction = new IF_ICMPEQ(owner, reader.readShort()); break;
				case Opcodes.IF_ICMPGE: instruction = new IF_ICMPGE(owner, reader.readShort()); break;
				case Opcodes.IF_ICMPGT: instruction = new IF_ICMPGT(owner, reader.readShort()); break;
				case Opcodes.IF_ICMPLE: instruction = new IF_ICMPLE(owner, reader.readShort()); break;
				case Opcodes.IF_ICMPLT: instruction = new IF_ICMPLT(owner, reader.readShort()); break;
				case Opcodes.IF_ICMPNE: instruction = new IF_ICMPNE(owner, reader.readShort()); break;
				case Opcodes.IINC: instruction = new IINC(owner, reader.readUnsignedByte(), reader.readByte()); break;
				case Opcodes.ILOAD: instruction = new ILOAD(owner, reader.readUnsignedByte()); break;
				case Opcodes.ILOAD_0: instruction = new ILOAD_0(owner); break;
				case Opcodes.ILOAD_1: instruction = new ILOAD_1(owner); break;
				case Opcodes.ILOAD_2: instruction = new ILOAD_2(owner); break;
				case Opcodes.ILOAD_3: instruction = new ILOAD_3(owner); break;
				case Opcodes.IMUL: instruction = new IMUL(owner); break;
				case Opcodes.INEG: instruction = new INEG(owner); break;
				case Opcodes.INSTANCEOF: instruction = new INSTANCEOF(owner, (ClassInfo)pool.get(reader.readUnsignedShort())); break;
				case Opcodes.INVOKEINTERFACE:
					instruction = new INVOKEINTERFACE(owner, (InterfaceMethodrefInfo)pool.get(reader.readUnsignedShort()), reader.readUnsignedByte());
					// invokeinterface carries a trailing byte that must always be zero; consume it.
					reader.readByte();
					break;
				case Opcodes.INVOKESPECIAL: instruction = new INVOKESPECIAL(owner, (MethodrefInfo)pool.get(reader.readUnsignedShort())); break;
				case Opcodes.INVOKESTATIC: instruction = new INVOKESTATIC(owner, (MethodrefInfo)pool.get(reader.readUnsignedShort())); break;
				case Opcodes.INVOKEVIRTUAL: instruction = new INVOKEVIRTUAL(owner, (MethodrefInfo)pool.get(reader.readUnsignedShort())); break;
				case Opcodes.IOR: instruction = new IOR(owner); break;
				case Opcodes.IREM: instruction = new IREM(owner); break;
				case Opcodes.IRETURN: instruction = new IRETURN(owner); break;
				case Opcodes.ISHL: instruction = new ISHL(owner); break;
				case Opcodes.ISHR: instruction = new ISHR(owner); break;
				case Opcodes.ISTORE: instruction = new ISTORE(owner, reader.readUnsignedByte()); break;
				case Opcodes.ISTORE_0: instruction = new ISTORE_0(owner); break;
				case Opcodes.ISTORE_1: instruction = new ISTORE_1(owner); break;
				case Opcodes.ISTORE_2: instruction = new ISTORE_2(owner); break;
				case Opcodes.ISTORE_3: instruction = new ISTORE_3(owner); break;
				case Opcodes.ISUB: instruction = new ISUB(owner); break;
				case Opcodes.IUSHR: instruction = new IUSHR(owner); break;
				case Opcodes.IXOR: instruction = new IXOR(owner); break;
				case Opcodes.JSR: instruction = new JSR(owner, reader.readShort()); break;
				case Opcodes.JSR_W: instruction = new JSR_W(owner, reader.readInt()); break;
				case Opcodes.L2D: instruction = new L2D(owner); break;
				case Opcodes.L2F: instruction = new L2F(owner); break;
				case Opcodes.L2I: instruction = new L2I(owner); break;
				case Opcodes.LADD: instruction = new LADD(owner); break;
				case Opcodes.LALOAD: instruction = new LALOAD(owner); break;
				case Opcodes.LAND: instruction = new LAND(owner); break;
				case Opcodes.LASTORE: instruction = new LASTORE(owner); break;
				case Opcodes.LCMP: instruction = new LCMP(owner); break;
				case Opcodes.LCONST_0: instruction = new LCONST_0(owner); break;
				case Opcodes.LCONST_1: instruction = new LCONST_1(owner); break;
				case Opcodes.LDC: instruction = new LDC(owner, pool.get(reader.readUnsignedByte())); break;
				case Opcodes.LDC2_W: instruction = new LDC2_W(owner, pool.get(reader.readUnsignedShort())); break;
				case Opcodes.LDC_W: instruction = new LDC_W(owner, pool.get(reader.readUnsignedShort())); break;
				case Opcodes.LDIV: instruction = new LDIV(owner); break;
				case Opcodes.LLOAD: instruction = new LLOAD(owner, reader.readUnsignedByte()); break;
				case Opcodes.LLOAD_0: instruction = new LLOAD_0(owner); break;
				case Opcodes.LLOAD_1: instruction = new LLOAD_1(owner); break;
				case Opcodes.LLOAD_2: instruction = new LLOAD_2(owner); break;
				case Opcodes.LLOAD_3: instruction = new LLOAD_3(owner); break;
				case Opcodes.LMUL: instruction = new LMUL(owner); break;
				case Opcodes.LNEG: instruction = new LNEG(owner); break;
				case Opcodes.LOOKUPSWITCH:

					// Read zeroed bytes so that the default is a multiple of four from the start of this code array.
					remainder = (byteIndex + 1) % 4;
					padding = remainder == 0 ? 0 : 4 - remainder;
					for(int i = 0; i < padding; i++) reader.readUnsignedByte();

					// Read the default branch offset
					int defaultOffset = reader.readInt();
					// Read the number of match/offset pairs
					int numberOfPairs = reader.readInt();
					// Read all of the pairs
					instruction = new LOOKUPSWITCH(owner, defaultOffset, numberOfPairs);
					for(int i = 0; i < numberOfPairs; i++)
						((LOOKUPSWITCH)instruction).setPair(i, reader.readInt(), reader.readInt());

					// Variable length: opcode + padding + default + pair count + 8 bytes per pair.
					byteLength = 1 + padding + 4 + 4 + numberOfPairs * 8;
					break;

				case Opcodes.LOR: instruction = new LOR(owner); break;
				case Opcodes.LREM: instruction = new LREM(owner); break;
				case Opcodes.LRETURN: instruction = new LRETURN(owner); break;
				case Opcodes.LSHL: instruction = new LSHL(owner); break;
				case Opcodes.LSHR: instruction = new LSHR(owner); break;
				case Opcodes.LSTORE: instruction = new LSTORE(owner, reader.readUnsignedByte()); break;
				case Opcodes.LSTORE_0: instruction = new LSTORE_0(owner); break;
				case Opcodes.LSTORE_1: instruction = new LSTORE_1(owner); break;
				case Opcodes.LSTORE_2: instruction = new LSTORE_2(owner); break;
				case Opcodes.LSTORE_3: instruction = new LSTORE_3(owner); break;
				case Opcodes.LSUB: instruction = new LSUB(owner); break;
				case Opcodes.LUSHR: instruction = new LUSHR(owner); break;
				case Opcodes.LXOR: instruction = new LXOR(owner); break;
				case Opcodes.MONITORENTER: instruction = new MONITORENTER(owner); break;
				case Opcodes.MONITOREXIT: instruction = new MONITOREXIT(owner); break;
				case Opcodes.MULTIANEWARRAY: instruction = new MULTIANEWARRAY(owner, (ClassInfo)pool.get(reader.readUnsignedShort()), reader.readUnsignedByte()); break;
				case Opcodes.NEW: instruction = new NEW(owner, (ClassInfo)pool.get(reader.readUnsignedShort())); break;
				case Opcodes.NEWARRAY: instruction = new NEWARRAY(owner, reader.readUnsignedByte()); break;
				case Opcodes.NOP: instruction = new NOP(owner); break;
				case Opcodes.POP: instruction = new POP(owner); break;
				case Opcodes.POP2: instruction = new POP2(owner); break;
				case Opcodes.PUTFIELD: instruction = new PUTFIELD(owner, (FieldrefInfo)pool.get(reader.readUnsignedShort())); break;
				case Opcodes.PUTSTATIC: instruction = new PUTSTATIC(owner, (FieldrefInfo)pool.get(reader.readUnsignedShort())); break;
				case Opcodes.RET: instruction = new RET(owner, reader.readUnsignedByte()); break;
				case Opcodes.RETURN: instruction = new RETURN(owner); break;
				case Opcodes.SALOAD: instruction = new SALOAD(owner); break;
				case Opcodes.SASTORE: instruction = new SASTORE(owner); break;
				case Opcodes.SIPUSH: instruction = new SIPUSH(owner, reader.readShort()); break;
				case Opcodes.SWAP: instruction = new SWAP(owner); break;
				case Opcodes.TABLESWITCH:

					// Read zeroed bytes so that the default is a multiple of four from the start of this code array.
					remainder = (byteIndex + 1) % 4;
					padding = remainder == 0 ? 0 : 4 - remainder;
					for(int i = 0; i < padding; i++) reader.readUnsignedByte();

					// Read the default branch offset
					int def = reader.readInt();
					// Read the low/high bounds of the jump table
					int low = reader.readInt();
					int high = reader.readInt();
					int number = high - low + 1;

					// Read all of the offsets
					ArrayList<Integer> offsets = new ArrayList<Integer>(number);
					for(int i = 0; i < number; i++)
						offsets.add(reader.readInt());

					instruction = new TABLESWITCH(owner, def, low, high, offsets);

					// Variable length: opcode + padding + default + low + high + 4 bytes per offset.
					byteLength = 1 + padding + 4 + 4 + 4 + number * 4;
					break;

				case Opcodes.WIDE:

					int nextOpcode = reader.readUnsignedByte();
					int localID = reader.readUnsignedByte() << 8 | reader.readUnsignedByte();
					// Only the wide form of IINC carries an extra 16-bit increment operand.
					instruction = new WIDE(owner, nextOpcode, localID, nextOpcode == Opcodes.IINC ? reader.readShort() : 0);
					break;

				default:
					// Previously this was only "assert false", so with assertions disabled
					// (the JVM default) parsing continued with a null instruction and
					// crashed below with an uninformative NullPointerException. Fail fast
					// with a descriptive exception instead; IOException is already part
					// of this method's contract.
					throw new IOException(
						"Couldn't find an opcode for opcode value " + opcode +
						(owner == null ? "" : " in code for " + owner.getMethod() + " of class " + owner.getMethod().getInternalName()));

			}

			// If we assigned the byte length above (for a TABLESWITCH or LOOKUPSWITCH), then we don't
			// compute the instruction length here because it depends on the byte index of the instruction, which is not
			// yet set. Otherwise, it's constant and we can determine it here.
			if(byteLength == 0)
				byteLength = instruction.byteLength();

			bytesRemaining -= byteLength;

			instructions[index] = instruction;
			index++;

		}

		// Trim the oversized array down to the actual instruction count.
		Instruction[] array = new Instruction[index];
		System.arraycopy(instructions, 0, array, 0, index);
		return array;

	}

}
package com.techlords.crown.mvc.settings;

import java.util.ArrayList;
import java.util.List;

import javax.faces.bean.ManagedBean;
import javax.faces.bean.SessionScoped;

import org.apache.log4j.Logger;

import com.techlords.crown.CrownServiceLocator;
import com.techlords.crown.business.model.CrownUserBO;
import com.techlords.crown.business.model.DepartmentBO;
import com.techlords.crown.business.model.DesignationBO;
import com.techlords.crown.business.model.LocationBO;
import com.techlords.crown.business.model.RoleBO;
import com.techlords.crown.mvc.CrownModelController;
import com.techlords.crown.mvc.auth.CrownUserDetailsService;
import com.techlords.crown.mvc.util.CrownMVCHelper;
import com.techlords.crown.mvc.util.FacesUtil;
import com.techlords.crown.mvc.validators.CrownUserValidator;
import com.techlords.crown.service.CrownUserService;
import com.techlords.infra.CrownConstants;

/**
 * Session-scoped JSF controller for user administration: listing, creation,
 * update, deletion, viewing, and password management (change/reset).
 *
 * <p>List and association data are loaded lazily and cached for the session;
 * the inherited {@code isListLoaded} flag invalidates the user list after
 * mutating operations so the next render reloads it.
 */
@ManagedBean
@SessionScoped
public class CrownUserController extends CrownModelController {

    private static final long serialVersionUID = 1L;

    private static final Logger LOGGER = Logger.getLogger(CrownUserController.class);

    // Backing list for the user table; reloaded during the render phase
    // whenever isListLoaded has been cleared.
    private final List<CrownUserBO> userBOs = new ArrayList<CrownUserBO>();

    // User currently being created/edited/viewed, or null when no form is open.
    private CrownUserBO currentUser;

    // Association data for form dropdowns, loaded once per session
    // (see loadAssociations()).
    private final List<LocationBO> locationBOs = new ArrayList<LocationBO>();
    private final List<RoleBO> roleBOs = new ArrayList<RoleBO>();
    private final List<DesignationBO> designationBOs = new ArrayList<DesignationBO>();
    private final List<DepartmentBO> departmentBOs = new ArrayList<DepartmentBO>();
    private boolean areAssociationsLoaded = false;

    // Stored (already-hashed) password of the logged-in user, captured when
    // the change-password form is opened; compared against the hash of the
    // old password the user types in (see updatePassword()).
    private String oldPassword;

    /**
     * Returns all users, reloading them from the service layer during the
     * render phase when the cached list has been invalidated.
     *
     * @return the (possibly cached) list of all users
     */
    public List<CrownUserBO> getUserBOs() {
        LOGGER.debug("Getting all users...");
        long startMillis = System.currentTimeMillis();
        if (FacesUtil.isRenderPhase() && !isListLoaded) {
            userBOs.clear();
            setCurrentUser(null);
            CrownUserService service = CrownServiceLocator.INSTANCE
                    .getCrownService(CrownUserService.class);
            userBOs.addAll(service.findAllUsers());
            isListLoaded = true;
        }
        // Guarded so the timing string is only built when debug logging is on.
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("Time taken to load users (ms): "
                    + (System.currentTimeMillis() - startMillis));
        }
        return userBOs;
    }

    /**
     * Opens the user form in create mode when requested.
     *
     * @param isCreateMode true to open the form with a fresh user
     * @return always null (navigation is done via navigationBean)
     */
    public String setupForm(boolean isCreateMode) {
        return (isCreateMode) ? setupForm(new CrownUserBO()) : null;
    }

    /**
     * Opens the update form for the given user, resetting the uniqueness
     * indicator and making sure dropdown associations are available.
     *
     * @param bo the user to edit (a new instance for create mode)
     * @return always null (navigation is done via navigationBean)
     */
    public String setupForm(CrownUserBO bo) {
        setCurrentUser(bo);
        setFieldAvailability(EMPTY_STRING);
        loadAssociations();
        isListLoaded = false;
        navigationBean.setNavigationUrl("config/UpdateUser.xhtml");
        return null;
    }

    /** @return the currently authenticated user */
    public CrownUserBO getLoggedInUser() {
        return CrownUserDetailsService.getCurrentUser();
    }

    /** @return the user currently bound to the open form, or null */
    public CrownUserBO getCurrentUser() {
        return currentUser;
    }

    public void setCurrentUser(CrownUserBO currentUser) {
        this.currentUser = currentUser;
    }

    /**
     * Persists the form's user: resolves the selected role to its BO, then
     * dispatches to {@link #create()} or {@link #update()}.
     *
     * @return always null (navigation is done via navigationBean)
     */
    public String save() {
        currentUser.setRoleBO(getAppModel(currentUser.getRole(), roleBOs));
        return (currentUser.isNew()) ? create() : update();
    }

    /** Ajax check: flags whether the entered username is still unused. */
    public void checkUniqueUsername() {
        setFieldAvailability(CrownMVCHelper.checkUniqueness("username",
                currentUser.getUsername()) ? AVAILABLE : UNAVAILABLE);
    }

    /** Ajax check: flags whether the entered email is still unused. */
    public void checkUniqueEmail() {
        setFieldAvailability(CrownMVCHelper.checkUniqueness("email",
                currentUser.getEmail()) ? AVAILABLE : UNAVAILABLE);
    }

    /** Ajax check: flags whether the entered employee number is still unused. */
    public void checkUniqueEmployeeNo() {
        setFieldAvailability(CrownMVCHelper.checkUniqueness("employee_no",
                currentUser.getEmployeeNo()) ? AVAILABLE : UNAVAILABLE);
    }

    /**
     * Validates and persists a brand-new user. Validation or service errors
     * are surfaced as Faces messages and leave the form open.
     *
     * @return always null (navigation is done via navigationBean)
     */
    public String create() {
        CrownUserValidator validator = new CrownUserValidator();
        try {
            validator.validateUserCreation(currentUser);
        } catch (Exception e) {
            FacesUtil.addExceptionMessages(e);
            return null;
        }
        // NOTE(review): MD5 is a weak password hash; a salted adaptive hash
        // (bcrypt/scrypt) would be preferable — kept for compatibility.
        currentUser.setPassword(CrownUserDetailsService
                .createMD5Password(currentUser.getPassword()));
        CrownUserService service = CrownServiceLocator.INSTANCE
                .getCrownService(CrownUserService.class);
        try {
            service.createUser(currentUser, CrownUserDetailsService
                    .getCurrentUser().getId());
        } catch (Exception e) {
            LOGGER.error(e.getMessage(), e);
            FacesUtil.addExceptionMessages(e);
            return null;
        }
        currentUser = null;
        navigationBean.setNavigationUrl("config/CrownUser.xhtml");
        return null;
    }

    /**
     * Persists changes to an existing user. Service errors are surfaced as
     * Faces messages and leave the form open.
     *
     * @return always null (navigation is done via navigationBean)
     */
    public String update() {
        CrownUserService service = CrownServiceLocator.INSTANCE
                .getCrownService(CrownUserService.class);
        try {
            service.updateUser(currentUser, CrownUserDetailsService
                    .getCurrentUser().getId());
        } catch (Exception e) {
            LOGGER.error(e.getMessage(), e);
            FacesUtil.addExceptionMessages(e);
            return null;
        }
        currentUser = null;
        navigationBean.setNavigationUrl("config/CrownUser.xhtml");
        return null;
    }

    /**
     * Deletes the given user and invalidates the cached list on success.
     *
     * @param bo the user to delete
     * @return always null (navigation is done via navigationBean)
     */
    public String delete(CrownUserBO bo) {
        CrownUserService service = CrownServiceLocator.INSTANCE
                .getCrownService(CrownUserService.class);
        try {
            service.deleteUser(bo, CrownUserDetailsService.getCurrentUser().getId());
        } catch (Exception e) {
            LOGGER.error(e.getMessage(), e);
            FacesUtil.addExceptionMessages(e);
            return null;
        }
        isListLoaded = false;
        navigationBean.setNavigationUrl("config/CrownUser.xhtml");
        return null;
    }

    /**
     * Opens the read-only view page for the given user.
     *
     * @param bo the user to display
     * @return always null (navigation is done via navigationBean)
     */
    public String view(CrownUserBO bo) {
        setCurrentUser(bo);
        isListLoaded = false;
        navigationBean.setNavigationUrl("config/ViewUser.xhtml");
        return null;
    }

    /**
     * Opens the change-password page for the logged-in user, remembering the
     * stored password hash so the old password can be verified on submit.
     */
    public void changePassword() {
        setCurrentUser(CrownUserDetailsService.getCurrentUser());
        oldPassword = currentUser.getPassword();
        navigationBean.setNavigationUrl("config/ChangePassword.xhtml");
    }

    /**
     * Applies the password change after verifying the typed old password
     * against the stored hash; navigates home and notifies listeners on
     * success.
     */
    public void updatePassword() {
        if (!oldPassword.equals(CrownUserDetailsService
                .createMD5Password(currentUser.getPassword()))) {
            FacesUtil.addErrorFlashMessage("Please enter correct old Password");
            return;
        }
        currentUser.setPassword(CrownUserDetailsService
                .createMD5Password(currentUser.getChangedPassword()));
        CrownUserService service = CrownServiceLocator.INSTANCE
                .getCrownService(CrownUserService.class);
        try {
            service.changePassword(currentUser, currentUser.getId());
        } catch (Exception e) {
            LOGGER.error(e.getMessage(), e);
            FacesUtil.addExceptionMessages(e);
            return;
        }
        navigationBean.setNavigationUrl("home.xhtml");
        navigationBean.notifyHomePageListeners();
    }

    /**
     * Resets the given user's password to the system default, recorded
     * against the administrator performing the reset.
     *
     * @param userBO the user whose password is reset
     */
    public void resetPassword(CrownUserBO userBO) {
        userBO.setPassword(CrownUserDetailsService
                .createMD5Password(CrownConstants.RESET_PASSWORD));
        CrownUserService service = CrownServiceLocator.INSTANCE
                .getCrownService(CrownUserService.class);
        try {
            service.changePassword(userBO, CrownUserDetailsService.getCurrentUser().getId());
        } catch (Exception e) {
            LOGGER.error(e.getMessage(), e);
            FacesUtil.addExceptionMessages(e);
            return;
        }
    }

    // Loads dropdown association data (designations, roles, locations,
    // departments) once per session.
    private void loadAssociations() {
        if (areAssociationsLoaded) {
            return;
        }
        designationBOs.clear();
        locationBOs.clear();
        departmentBOs.clear();
        roleBOs.clear();
        designationBOs.addAll(CrownMVCHelper.getDesignationBos());
        roleBOs.addAll(CrownMVCHelper.getRoleBos());
        locationBOs.addAll(CrownMVCHelper.getLocationBos());
        departmentBOs.addAll(CrownMVCHelper.getDepartmentBos());
        areAssociationsLoaded = true;
    }

    public List<LocationBO> getLocationBOs() {
        return locationBOs;
    }

    public List<RoleBO> getRoleBOs() {
        return roleBOs;
    }

    public List<DesignationBO> getDesignationBOs() {
        return designationBOs;
    }

    public List<DepartmentBO> getDepartmentBOs() {
        return departmentBOs;
    }
}
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.activiti.engine.impl.db; import java.io.BufferedReader; import java.io.IOException; import java.sql.Connection; import java.sql.DatabaseMetaData; import java.sql.ResultSet; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.activiti.engine.ActivitiException; import org.activiti.engine.ActivitiOptimisticLockingException; import org.activiti.engine.ActivitiWrongDbException; import org.activiti.engine.ProcessEngine; import org.activiti.engine.delegate.event.impl.ActivitiEventBuilder; import org.activiti.engine.impl.DeploymentQueryImpl; import org.activiti.engine.impl.ExecutionQueryImpl; import org.activiti.engine.impl.HistoricActivityInstanceQueryImpl; import org.activiti.engine.impl.HistoricDetailQueryImpl; import org.activiti.engine.impl.HistoricProcessInstanceQueryImpl; import org.activiti.engine.impl.HistoricTaskInstanceQueryImpl; import org.activiti.engine.impl.HistoricVariableInstanceQueryImpl; import org.activiti.engine.impl.JobQueryImpl; import org.activiti.engine.impl.ModelQueryImpl; import org.activiti.engine.impl.Page; import org.activiti.engine.impl.ProcessDefinitionQueryImpl; import org.activiti.engine.impl.ProcessInstanceQueryImpl; import org.activiti.engine.impl.TaskQueryImpl; import 
org.activiti.engine.impl.context.Context; import org.activiti.engine.impl.interceptor.Session; import org.activiti.engine.impl.persistence.entity.VariableInstanceEntity; import org.activiti.engine.impl.variable.DeserializedObject; import org.apache.ibatis.session.SqlSession; import org.flowable.engine.common.api.delegate.event.FlowableEventDispatcher; import org.flowable.engine.delegate.event.FlowableEngineEventType; import org.flowable.engine.delegate.event.FlowableVariableEvent; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Responsibilities: - delayed flushing of inserts updates and deletes - optional dirty checking - db specific statement name mapping * * @author Tom Baeyens * @author Joram Barrez */ public class DbSqlSession implements Session { private static final Logger log = LoggerFactory.getLogger(DbSqlSession.class); protected static final Pattern CLEAN_VERSION_REGEX = Pattern.compile("\\d\\.\\d*"); protected SqlSession sqlSession; protected DbSqlSessionFactory dbSqlSessionFactory; protected Map<Class<? extends PersistentObject>, List<PersistentObject>> insertedObjects = new HashMap<Class<? 
extends PersistentObject>, List<PersistentObject>>(); protected Map<Class<?>, Map<String, CachedObject>> cachedObjects = new HashMap<Class<?>, Map<String, CachedObject>>(); protected List<DeleteOperation> deleteOperations = new ArrayList<DeleteOperation>(); protected List<DeserializedObject> deserializedObjects = new ArrayList<DeserializedObject>(); protected String connectionMetadataDefaultCatalog; protected String connectionMetadataDefaultSchema; public DbSqlSession(DbSqlSessionFactory dbSqlSessionFactory) { this.dbSqlSessionFactory = dbSqlSessionFactory; this.sqlSession = dbSqlSessionFactory .getSqlSessionFactory() .openSession(); } public DbSqlSession(DbSqlSessionFactory dbSqlSessionFactory, Connection connection, String catalog, String schema) { this.dbSqlSessionFactory = dbSqlSessionFactory; this.sqlSession = dbSqlSessionFactory .getSqlSessionFactory() .openSession(connection); this.connectionMetadataDefaultCatalog = catalog; this.connectionMetadataDefaultSchema = schema; } // insert /////////////////////////////////////////////////////////////////// public void insert(PersistentObject persistentObject) { if (persistentObject.getId() == null) { String id = dbSqlSessionFactory.getIdGenerator().getNextId(); persistentObject.setId(id); } Class<? 
extends PersistentObject> clazz = persistentObject.getClass(); if (!insertedObjects.containsKey(clazz)) { insertedObjects.put(clazz, new ArrayList<PersistentObject>()); } insertedObjects.get(clazz).add(persistentObject); cachePut(persistentObject, false); } // update /////////////////////////////////////////////////////////////////// public void update(PersistentObject persistentObject) { cachePut(persistentObject, false); } public int update(String statement, Object parameters) { String updateStatement = dbSqlSessionFactory.mapStatement(statement); return getSqlSession().update(updateStatement, parameters); } // delete /////////////////////////////////////////////////////////////////// public void delete(String statement, Object parameter) { deleteOperations.add(new BulkDeleteOperation(statement, parameter)); } public void delete(PersistentObject persistentObject) { for (DeleteOperation deleteOperation : deleteOperations) { if (deleteOperation.sameIdentity(persistentObject)) { log.debug("skipping redundant delete: {}", persistentObject); return; // Skip this delete. It was already added. } } deleteOperations.add(new CheckedDeleteOperation(persistentObject)); } public interface DeleteOperation { /** * @return The persistent object class that is being deleted. Null in case there are multiple objects of different types! */ Class<? extends PersistentObject> getPersistentObjectClass(); boolean sameIdentity(PersistentObject other); void clearCache(); void execute(); } /** * Use this {@link DeleteOperation} to execute a dedicated delete statement. It is important to note there won't be any optimistic locking checks done for these kind of delete operations! * * For example, a usage of this operation would be to delete all variables for a certain execution, when that certain execution is removed. The optimistic locking happens on the execution, but the * variables can be removed by a simple 'delete from var_table where execution_id is xxx'. 
It could very well be there are no variables, which would also work with this query, but not with the * regular {@link CheckedDeleteOperation}. */ public class BulkDeleteOperation implements DeleteOperation { private String statement; private Object parameter; public BulkDeleteOperation(String statement, Object parameter) { this.statement = dbSqlSessionFactory.mapStatement(statement); this.parameter = parameter; } @Override public Class<? extends PersistentObject> getPersistentObjectClass() { return null; } @Override public boolean sameIdentity(PersistentObject other) { // this implementation is unable to determine what the identity of the removed object(s) will be. return false; } @Override public void clearCache() { // this implementation cannot clear the object(s) to be removed from the cache. } @Override public void execute() { sqlSession.delete(statement, parameter); } @Override public String toString() { return "bulk delete: " + statement + "(" + parameter + ")"; } } /** * A {@link DeleteOperation} that checks for concurrent modifications if the persistent object implements {@link HasRevision}. That is, it employs optimizing concurrency control. Used when the * persistent object has been fetched already. */ public class CheckedDeleteOperation implements DeleteOperation { protected final PersistentObject persistentObject; public CheckedDeleteOperation(PersistentObject persistentObject) { this.persistentObject = persistentObject; } @Override public Class<? 
extends PersistentObject> getPersistentObjectClass() { return persistentObject.getClass(); } @Override public boolean sameIdentity(PersistentObject other) { return persistentObject.getClass().equals(other.getClass()) && persistentObject.getId().equals(other.getId()); } @Override public void clearCache() { cacheRemove(persistentObject.getClass(), persistentObject.getId()); } @Override public void execute() { String deleteStatement = dbSqlSessionFactory.getDeleteStatement(persistentObject.getClass()); deleteStatement = dbSqlSessionFactory.mapStatement(deleteStatement); if (deleteStatement == null) { throw new ActivitiException("no delete statement for " + persistentObject.getClass() + " in the ibatis mapping files"); } // It only makes sense to check for optimistic locking exceptions for objects that actually have a revision if (persistentObject instanceof HasRevision) { int nrOfRowsDeleted = sqlSession.delete(deleteStatement, persistentObject); if (nrOfRowsDeleted == 0) { throw new ActivitiOptimisticLockingException(persistentObject + " was updated by another transaction concurrently"); } } else { sqlSession.delete(deleteStatement, persistentObject); } } public PersistentObject getPersistentObject() { return persistentObject; } @Override public String toString() { return "delete " + persistentObject; } } /** * A bulk version of the {@link CheckedDeleteOperation}. */ public class BulkCheckedDeleteOperation implements DeleteOperation { protected Class<? extends PersistentObject> persistentObjectClass; protected List<PersistentObject> persistentObjects = new ArrayList<PersistentObject>(); public BulkCheckedDeleteOperation(Class<? 
extends PersistentObject> persistentObjectClass) { this.persistentObjectClass = persistentObjectClass; } public void addPersistentObject(PersistentObject persistentObject) { persistentObjects.add(persistentObject); } @Override public boolean sameIdentity(PersistentObject other) { for (PersistentObject persistentObject : persistentObjects) { if (persistentObject.getClass().equals(other.getClass()) && persistentObject.getId().equals(other.getId())) { return true; } } return false; } @Override public void clearCache() { for (PersistentObject persistentObject : persistentObjects) { cacheRemove(persistentObject.getClass(), persistentObject.getId()); } } @Override public void execute() { if (persistentObjects.isEmpty()) { return; } String bulkDeleteStatement = dbSqlSessionFactory.getBulkDeleteStatement(persistentObjectClass); bulkDeleteStatement = dbSqlSessionFactory.mapStatement(bulkDeleteStatement); if (bulkDeleteStatement == null) { throw new ActivitiException("no bulk delete statement for " + persistentObjectClass + " in the mapping files"); } // It only makes sense to check for optimistic locking exceptions for objects that actually have a revision if (persistentObjects.get(0) instanceof HasRevision) { int nrOfRowsDeleted = sqlSession.delete(bulkDeleteStatement, persistentObjects); if (nrOfRowsDeleted < persistentObjects.size()) { throw new ActivitiOptimisticLockingException("One of the entities " + persistentObjectClass + " was updated by another transaction concurrently while trying to do a bulk delete"); } } else { sqlSession.delete(bulkDeleteStatement, persistentObjects); } } public Class<? extends PersistentObject> getPersistentObjectClass() { return persistentObjectClass; } public void setPersistentObjectClass( Class<? 
extends PersistentObject> persistentObjectClass) { this.persistentObjectClass = persistentObjectClass; } public List<PersistentObject> getPersistentObjects() { return persistentObjects; } public void setPersistentObjects(List<PersistentObject> persistentObjects) { this.persistentObjects = persistentObjects; } @Override public String toString() { return "bulk delete of " + persistentObjects.size() + (!persistentObjects.isEmpty() ? " entities of " + persistentObjects.get(0).getClass() : 0); } } // select /////////////////////////////////////////////////////////////////// @SuppressWarnings({ "rawtypes" }) public List selectList(String statement) { return selectList(statement, null, 0, Integer.MAX_VALUE); } @SuppressWarnings("rawtypes") public List selectList(String statement, Object parameter) { return selectList(statement, parameter, 0, Integer.MAX_VALUE); } @SuppressWarnings("rawtypes") public List selectList(String statement, Object parameter, Page page) { if (page != null) { return selectList(statement, parameter, page.getFirstResult(), page.getMaxResults()); } else { return selectList(statement, parameter, 0, Integer.MAX_VALUE); } } @SuppressWarnings("rawtypes") public List selectList(String statement, ListQueryParameterObject parameter, Page page) { if (page != null) { parameter.setFirstResult(page.getFirstResult()); parameter.setMaxResults(page.getMaxResults()); } return selectList(statement, parameter); } @SuppressWarnings("rawtypes") public List selectList(String statement, Object parameter, int firstResult, int maxResults) { return selectList(statement, new ListQueryParameterObject(parameter, firstResult, maxResults)); } @SuppressWarnings("rawtypes") public List selectList(String statement, ListQueryParameterObject parameter) { return selectListWithRawParameter(statement, parameter, parameter.getFirstResult(), parameter.getMaxResults()); } @SuppressWarnings({ "rawtypes", "unchecked" }) public List selectListWithRawParameter(String statement, Object 
parameter, int firstResult, int maxResults) { statement = dbSqlSessionFactory.mapStatement(statement); if (firstResult == -1 || maxResults == -1) { return Collections.EMPTY_LIST; } List loadedObjects = sqlSession.selectList(statement, parameter); return filterLoadedObjects(loadedObjects); } @SuppressWarnings({ "rawtypes" }) public List selectListWithRawParameterWithoutFilter(String statement, Object parameter, int firstResult, int maxResults) { statement = dbSqlSessionFactory.mapStatement(statement); if (firstResult == -1 || maxResults == -1) { return Collections.EMPTY_LIST; } return sqlSession.selectList(statement, parameter); } public Object selectOne(String statement, Object parameter) { statement = dbSqlSessionFactory.mapStatement(statement); Object result = sqlSession.selectOne(statement, parameter); if (result instanceof PersistentObject) { PersistentObject loadedObject = (PersistentObject) result; result = cacheFilter(loadedObject); } return result; } @SuppressWarnings("unchecked") public <T extends PersistentObject> T selectById(Class<T> entityClass, String id) { T persistentObject = cacheGet(entityClass, id); if (persistentObject != null) { return persistentObject; } String selectStatement = dbSqlSessionFactory.getSelectStatement(entityClass); selectStatement = dbSqlSessionFactory.mapStatement(selectStatement); persistentObject = (T) sqlSession.selectOne(selectStatement, id); if (persistentObject == null) { return null; } cachePut(persistentObject, true); return persistentObject; } // internal session cache /////////////////////////////////////////////////// @SuppressWarnings("rawtypes") protected List filterLoadedObjects(List<Object> loadedObjects) { if (loadedObjects.isEmpty()) { return loadedObjects; } if (!(loadedObjects.get(0) instanceof PersistentObject)) { return loadedObjects; } List<PersistentObject> filteredObjects = new ArrayList<PersistentObject>(loadedObjects.size()); for (Object loadedObject : loadedObjects) { PersistentObject 
cachedPersistentObject = cacheFilter((PersistentObject) loadedObject); filteredObjects.add(cachedPersistentObject); } return filteredObjects; } protected CachedObject cachePut(PersistentObject persistentObject, boolean storeState) { Map<String, CachedObject> classCache = cachedObjects.get(persistentObject.getClass()); if (classCache == null) { classCache = new HashMap<String, CachedObject>(); cachedObjects.put(persistentObject.getClass(), classCache); } CachedObject cachedObject = new CachedObject(persistentObject, storeState); classCache.put(persistentObject.getId(), cachedObject); return cachedObject; } /** * returns the object in the cache. if this object was loaded before, then the original object is returned. if this is the first time this object is loaded, then the loadedObject is added to the * cache. */ protected PersistentObject cacheFilter(PersistentObject persistentObject) { PersistentObject cachedPersistentObject = cacheGet(persistentObject.getClass(), persistentObject.getId()); if (cachedPersistentObject != null) { return cachedPersistentObject; } cachePut(persistentObject, true); return persistentObject; } @SuppressWarnings("unchecked") protected <T> T cacheGet(Class<T> entityClass, String id) { CachedObject cachedObject = null; Map<String, CachedObject> classCache = cachedObjects.get(entityClass); if (classCache != null) { cachedObject = classCache.get(id); } if (cachedObject != null) { return (T) cachedObject.getPersistentObject(); } return null; } protected void cacheRemove(Class<?> persistentObjectClass, String persistentObjectId) { Map<String, CachedObject> classCache = cachedObjects.get(persistentObjectClass); if (classCache == null) { return; } classCache.remove(persistentObjectId); } @SuppressWarnings("unchecked") public <T> List<T> findInCache(Class<T> entityClass) { Map<String, CachedObject> classCache = cachedObjects.get(entityClass); if (classCache != null) { List<T> entities = new ArrayList<T>(classCache.size()); for (CachedObject 
cachedObject : classCache.values()) { entities.add((T) cachedObject.getPersistentObject()); } return entities; } return Collections.emptyList(); } public <T> T findInCache(Class<T> entityClass, String id) { return cacheGet(entityClass, id); } public static class CachedObject { protected PersistentObject persistentObject; protected Object persistentObjectState; public CachedObject(PersistentObject persistentObject, boolean storeState) { this.persistentObject = persistentObject; if (storeState) { this.persistentObjectState = persistentObject.getPersistentState(); } } public PersistentObject getPersistentObject() { return persistentObject; } public Object getPersistentObjectState() { return persistentObjectState; } } // deserialized objects ///////////////////////////////////////////////////// public void addDeserializedObject(DeserializedObject deserializedObject) { deserializedObjects.add(deserializedObject); } // flush //////////////////////////////////////////////////////////////////// @Override public void flush() { List<DeleteOperation> removedOperations = removeUnnecessaryOperations(); flushDeserializedObjects(); List<PersistentObject> updatedObjects = getUpdatedObjects(); if (log.isDebugEnabled()) { Collection<List<PersistentObject>> insertedObjectLists = insertedObjects.values(); int nrOfInserts = 0; int nrOfUpdates = 0; int nrOfDeletes = 0; for (List<PersistentObject> insertedObjectList : insertedObjectLists) { for (PersistentObject insertedObject : insertedObjectList) { log.debug(" insert {}", insertedObject); nrOfInserts++; } } for (PersistentObject updatedObject : updatedObjects) { log.debug(" update {}", updatedObject); nrOfUpdates++; } for (DeleteOperation deleteOperation : deleteOperations) { log.debug(" {}", deleteOperation); nrOfDeletes++; } log.debug("flush summary: {} insert, {} update, {} delete.", nrOfInserts, nrOfUpdates, nrOfDeletes); log.debug("now executing flush..."); } flushInserts(); flushUpdates(updatedObjects); 
flushDeletes(removedOperations); } /** * Clears all deleted and inserted objects from the cache, and removes inserts and deletes that cancel each other. */ protected List<DeleteOperation> removeUnnecessaryOperations() { List<DeleteOperation> removedDeleteOperations = new ArrayList<DeleteOperation>(); for (Iterator<DeleteOperation> deleteIterator = deleteOperations.iterator(); deleteIterator.hasNext();) { DeleteOperation deleteOperation = deleteIterator.next(); Class<? extends PersistentObject> deletedPersistentObjectClass = deleteOperation.getPersistentObjectClass(); List<PersistentObject> insertedObjectsOfSameClass = insertedObjects.get(deletedPersistentObjectClass); if (insertedObjectsOfSameClass != null && insertedObjectsOfSameClass.size() > 0) { for (Iterator<PersistentObject> insertIterator = insertedObjectsOfSameClass.iterator(); insertIterator.hasNext();) { PersistentObject insertedObject = insertIterator.next(); // if the deleted object is inserted, if (deleteOperation.sameIdentity(insertedObject)) { // remove the insert and the delete, they cancel each other insertIterator.remove(); deleteIterator.remove(); // add removed operations to be able to fire events removedDeleteOperations.add(deleteOperation); } } if (insertedObjects.get(deletedPersistentObjectClass).size() == 0) { insertedObjects.remove(deletedPersistentObjectClass); } } // in any case, remove the deleted object from the cache deleteOperation.clearCache(); } for (Class<? extends PersistentObject> persistentObjectClass : insertedObjects.keySet()) { for (PersistentObject insertedObject : insertedObjects.get(persistentObjectClass)) { cacheRemove(insertedObject.getClass(), insertedObject.getId()); } } return removedDeleteOperations; } // // [Joram] Put this in comments. Had all kinds of errors. 
// //
// /**
//  * Optimizes the given delete operations:
//  * for example, if there are two deletes for two different variables, merges this into
//  * one bulk delete which improves performance
//  */
// protected List<DeleteOperation> optimizeDeleteOperations(List<DeleteOperation> deleteOperations) {
//
//   // No optimization possible for 0 or 1 operations
//   if (!isOptimizeDeleteOperationsEnabled || deleteOperations.size() <= 1) {
//     return deleteOperations;
//   }
//
//   List<DeleteOperation> optimizedDeleteOperations = new ArrayList<DbSqlSession.DeleteOperation>();
//   boolean[] checkedIndices = new boolean[deleteOperations.size()];
//   for (int i=0; i<deleteOperations.size(); i++) {
//
//     if (checkedIndices[i] == true) {
//       continue;
//     }
//
//     DeleteOperation deleteOperation = deleteOperations.get(i);
//     boolean couldOptimize = false;
//     if (deleteOperation instanceof CheckedDeleteOperation) {
//
//       PersistentObject persistentObject = ((CheckedDeleteOperation) deleteOperation).getPersistentObject();
//       if (persistentObject instanceof BulkDeleteable) {
//         String bulkDeleteStatement = dbSqlSessionFactory.getBulkDeleteStatement(persistentObject.getClass());
//         bulkDeleteStatement = dbSqlSessionFactory.mapStatement(bulkDeleteStatement);
//         if (bulkDeleteStatement != null) {
//           BulkCheckedDeleteOperation bulkCheckedDeleteOperation = null;
//
//           // Find all objects of the same type
//           for (int j=0; j<deleteOperations.size(); j++) {
//             DeleteOperation otherDeleteOperation = deleteOperations.get(j);
//             if (j != i && checkedIndices[j] == false && otherDeleteOperation instanceof CheckedDeleteOperation) {
//               PersistentObject otherPersistentObject = ((CheckedDeleteOperation) otherDeleteOperation).getPersistentObject();
//               if (otherPersistentObject.getClass().equals(persistentObject.getClass())) {
//                 if (bulkCheckedDeleteOperation == null) {
//                   bulkCheckedDeleteOperation = new BulkCheckedDeleteOperation(persistentObject.getClass());
//                   bulkCheckedDeleteOperation.addPersistentObject(persistentObject);
//                   optimizedDeleteOperations.add(bulkCheckedDeleteOperation);
//                 }
//                 couldOptimize = true;
//                 bulkCheckedDeleteOperation.addPersistentObject(otherPersistentObject);
//                 checkedIndices[j] = true;
//               } else {
//                 // We may only optimize subsequent delete operations of the same type, to prevent messing up
//                 // the order of deletes of related entities which may depend on the referenced entity being deleted before
//                 break;
//               }
//             }
//
//           }
//         }
//       }
//     }
//
//     if (!couldOptimize) {
//       optimizedDeleteOperations.add(deleteOperation);
//     }
//     checkedIndices[i]=true;
//
//   }
//   return optimizedDeleteOperations;
// }

  /** Re-serializes all registered deserialized objects back into their owning entities. */
  protected void flushDeserializedObjects() {
    for (DeserializedObject deserializedObject : deserializedObjects) {
      deserializedObject.flush();
    }
  }

  /**
   * Dirty-checks the cache: returns every cached entity whose current persistent state
   * differs from the state snapshot taken when it was loaded, skipping entities that
   * are scheduled for deletion.
   */
  public List<PersistentObject> getUpdatedObjects() {
    List<PersistentObject> updatedObjects = new ArrayList<PersistentObject>();
    for (Class<?> clazz : cachedObjects.keySet()) {
      Map<String, CachedObject> classCache = cachedObjects.get(clazz);
      for (CachedObject cachedObject : classCache.values()) {
        PersistentObject persistentObject = cachedObject.getPersistentObject();
        if (!isPersistentObjectDeleted(persistentObject)) {
          Object originalState = cachedObject.getPersistentObjectState();
          // a null current state means the entity is not dirty-checkable; it is never reported as updated
          if (persistentObject.getPersistentState() != null && !persistentObject.getPersistentState().equals(originalState)) {
            updatedObjects.add(persistentObject);
          } else {
            log.trace("loaded object '{}' was not updated", persistentObject);
          }
        }
      }
    }
    return updatedObjects;
  }

  /** Returns true when a pending delete operation targets the given entity. */
  protected boolean isPersistentObjectDeleted(PersistentObject persistentObject) {
    for (DeleteOperation deleteOperation : deleteOperations) {
      if (deleteOperation.sameIdentity(persistentObject)) {
        return true;
      }
    }
    return false;
  }

  /**
   * Returns a copy of the given list with every entity that has a pending delete removed.
   * The input list is not modified.
   */
  public <T extends PersistentObject> List<T> pruneDeletedEntities(List<T> listToPrune) {
    List<T> prunedList = new ArrayList<T>(listToPrune);
    for (T potentiallyDeleted : listToPrune) {
      for
(DeleteOperation deleteOperation : deleteOperations) {
        if (deleteOperation.sameIdentity(potentiallyDeleted)) {
          prunedList.remove(potentiallyDeleted);
        }
      }
    }
    return prunedList;
  }

  /**
   * Executes all pending inserts. Entity classes listed in
   * {@code EntityDependencyOrder.INSERT_ORDER} are flushed first (so dependencies are
   * written before dependents); any remaining (custom / unordered) entity classes are
   * flushed afterwards. {@code insertedObjects} is empty when this method returns.
   */
  protected void flushInserts() {

    // Handle in entity dependency order
    for (Class<? extends PersistentObject> persistentObjectClass : EntityDependencyOrder.INSERT_ORDER) {
      if (insertedObjects.containsKey(persistentObjectClass)) {
        flushPersistentObjects(persistentObjectClass, insertedObjects.get(persistentObjectClass));
      }
    }

    // Next, in case of custom entities or we've screwed up and forgotten some entity.
    // FIX: flushPersistentObjects() removes the flushed class from insertedObjects, so
    // iterating the live keySet() view here threw a ConcurrentModificationException when
    // more than one unordered class remained. Iterate over a snapshot of the keys instead.
    if (insertedObjects.size() > 0) {
      List<Class<? extends PersistentObject>> remainingClasses =
          new ArrayList<Class<? extends PersistentObject>>(insertedObjects.keySet());
      for (Class<? extends PersistentObject> persistentObjectClass : remainingClasses) {
        flushPersistentObjects(persistentObjectClass, insertedObjects.get(persistentObjectClass));
      }
    }

    insertedObjects.clear();
  }

  /**
   * Flushes the pending inserts for one entity class: a single entity or a
   * non-bulk-insertable class goes through individual INSERT statements, everything else
   * through a bulk insert. The class is removed from {@code insertedObjects} once flushed.
   */
  protected void flushPersistentObjects(Class<? extends PersistentObject> persistentObjectClass, List<PersistentObject> persistentObjectsToInsert) {
    if (persistentObjectsToInsert.size() == 1) {
      flushRegularInsert(persistentObjectsToInsert.get(0), persistentObjectClass);
    } else if (Boolean.FALSE.equals(dbSqlSessionFactory.isBulkInsertable(persistentObjectClass))) {
      for (PersistentObject persistentObject : persistentObjectsToInsert) {
        flushRegularInsert(persistentObject, persistentObjectClass);
      }
    } else {
      flushBulkInsert(insertedObjects.get(persistentObjectClass), persistentObjectClass);
    }
    insertedObjects.remove(persistentObjectClass);
  }

  /**
   * Inserts a single entity via its mapped INSERT statement.
   *
   * @throws ActivitiException when no insert statement is mapped for the entity class
   */
  protected void flushRegularInsert(PersistentObject persistentObject, Class<? extends PersistentObject> clazz) {
    String insertStatement = dbSqlSessionFactory.getInsertStatement(persistentObject);
    insertStatement = dbSqlSessionFactory.mapStatement(insertStatement);

    if (insertStatement == null) {
      throw new ActivitiException("no insert statement for " + persistentObject.getClass() + " in the ibatis mapping files");
    }

    log.debug("inserting: {}", persistentObject);
    sqlSession.insert(insertStatement, persistentObject);

    // See https://activiti.atlassian.net/browse/ACT-1290
    if (persistentObject instanceof HasRevision) {
      HasRevision revisionEntity = (HasRevision) persistentObject;
      if (revisionEntity.getRevision() == 0) {
        revisionEntity.setRevision(revisionEntity.getRevisionNext());
      }
    }
  }

  /**
   * Inserts a batch of entities of the same class via the mapped bulk-insert statement,
   * chunking the list when it exceeds the configured maximum number of statements per
   * bulk insert.
   *
   * @throws ActivitiException when no bulk-insert statement is mapped for the entity class
   */
  protected void flushBulkInsert(List<PersistentObject> persistentObjectList, Class<? extends PersistentObject> clazz) {
    String insertStatement = dbSqlSessionFactory.getBulkInsertStatement(clazz);
    insertStatement = dbSqlSessionFactory.mapStatement(insertStatement);

    if (insertStatement == null) {
      throw new ActivitiException("no insert statement for " + persistentObjectList.get(0).getClass() + " in the ibatis mapping files");
    }

    if (persistentObjectList.size() <= dbSqlSessionFactory.getMaxNrOfStatementsInBulkInsert()) {
      sqlSession.insert(insertStatement, persistentObjectList);
    } else {
      // split into chunks of at most maxNrOfStatementsInBulkInsert entities
      for (int start = 0; start < persistentObjectList.size(); start += dbSqlSessionFactory.getMaxNrOfStatementsInBulkInsert()) {
        List<PersistentObject> subList = persistentObjectList.subList(start,
            Math.min(start + dbSqlSessionFactory.getMaxNrOfStatementsInBulkInsert(), persistentObjectList.size()));
        sqlSession.insert(insertStatement, subList);
      }
    }

    // See https://activiti.atlassian.net/browse/ACT-1290
    if (persistentObjectList.get(0) instanceof HasRevision) {
      for (PersistentObject insertedObject : persistentObjectList) {
        HasRevision revisionEntity = (HasRevision) insertedObject;
        if (revisionEntity.getRevision() == 0) {
          revisionEntity.setRevision(revisionEntity.getRevisionNext());
        }
      }
    }
  }

  protected void flushUpdates(List<PersistentObject>
updatedObjects) {
    for (PersistentObject updatedObject : updatedObjects) {
      String updateStatement = dbSqlSessionFactory.getUpdateStatement(updatedObject);
      updateStatement = dbSqlSessionFactory.mapStatement(updateStatement);

      if (updateStatement == null) {
        throw new ActivitiException("no update statement for " + updatedObject.getClass() + " in the ibatis mapping files");
      }

      log.debug("updating: {}", updatedObject);
      // exactly one row must be affected; anything else indicates a concurrent
      // modification of the same entity (optimistic locking)
      int updatedRecords = sqlSession.update(updateStatement, updatedObject);
      if (updatedRecords != 1) {
        throw new ActivitiOptimisticLockingException(updatedObject + " was updated by another transaction concurrently");
      }

      // See https://activiti.atlassian.net/browse/ACT-1290
      if (updatedObject instanceof HasRevision) {
        ((HasRevision) updatedObject).setRevision(((HasRevision) updatedObject).getRevisionNext());
      }
    }
    updatedObjects.clear();
  }

  /**
   * Executes all pending delete operations and clears them. When an event dispatcher is
   * enabled, VARIABLE_DELETED events are also fired — including for deletes that were
   * cancelled against a pending insert (removedOperations) and thus never hit the database.
   */
  protected void flushDeletes(List<DeleteOperation> removedOperations) {
    boolean dispatchEvent = false;
    FlowableEventDispatcher eventDispatcher = Context.getProcessEngineConfiguration().getEventDispatcher();
    if (eventDispatcher != null && eventDispatcher.isEnabled()) {
      dispatchEvent = eventDispatcher.isEnabled();
    }

    flushRegularDeletes(dispatchEvent);

    if (dispatchEvent) {
      dispatchEventsForRemovedOperations(removedOperations);
    }

    deleteOperations.clear();
  }

  /**
   * Fires a VARIABLE_DELETED event for every cancelled (insert+delete pair)
   * variable-instance delete: the entity never reaches the database, but listeners still
   * expect the delete event.
   */
  protected void dispatchEventsForRemovedOperations(List<DeleteOperation> removedOperations) {
    for (DeleteOperation delete : removedOperations) {
      // dispatch removed delete events
      if (delete instanceof CheckedDeleteOperation) {
        CheckedDeleteOperation checkedDeleteOperation = (CheckedDeleteOperation) delete;
        PersistentObject persistentObject = checkedDeleteOperation.getPersistentObject();
        if (persistentObject instanceof VariableInstanceEntity) {
          VariableInstanceEntity variableInstance = (VariableInstanceEntity) persistentObject;
          Context.getProcessEngineConfiguration().getEventDispatcher().dispatchEvent(
              createVariableDeleteEvent(variableInstance));
        }
      }
    }
  }

  /** Builds the VARIABLE_DELETED engine event for the given variable instance. */
  protected static FlowableVariableEvent createVariableDeleteEvent(VariableInstanceEntity variableInstance) {
    return ActivitiEventBuilder.createVariableEvent(FlowableEngineEventType.VARIABLE_DELETED,
        variableInstance.getName(), null, variableInstance.getType(),
        variableInstance.getTaskId(), variableInstance.getExecutionId(), variableInstance.getProcessInstanceId(), null);
  }

  /**
   * Executes every queued delete operation in order, optionally firing VARIABLE_DELETED
   * events for variable-instance deletes.
   */
  protected void flushRegularDeletes(boolean dispatchEvent) {
    for (DeleteOperation delete : deleteOperations) {
      log.debug("executing: {}", delete);

      delete.execute();

      // fire event for variable delete operation. (BulkDeleteOperation is not taken into account)
      if (dispatchEvent) {
        // prepare delete event to fire for variable delete operation. (BulkDeleteOperation is not taken into account)
        if (delete instanceof CheckedDeleteOperation) {
          CheckedDeleteOperation checkedDeleteOperation = (CheckedDeleteOperation) delete;
          PersistentObject persistentObject = checkedDeleteOperation.getPersistentObject();
          if (persistentObject instanceof VariableInstanceEntity) {
            VariableInstanceEntity variableInstance = (VariableInstanceEntity) persistentObject;
            Context.getProcessEngineConfiguration().getEventDispatcher().dispatchEvent(
                createVariableDeleteEvent(variableInstance));
          }
        } else if (delete instanceof BulkCheckedDeleteOperation) {
          BulkCheckedDeleteOperation bulkCheckedDeleteOperation = (BulkCheckedDeleteOperation) delete;
          if (VariableInstanceEntity.class.isAssignableFrom(bulkCheckedDeleteOperation.getPersistentObjectClass())) {
            for (PersistentObject persistentObject : bulkCheckedDeleteOperation.getPersistentObjects()) {
              VariableInstanceEntity variableInstance = (VariableInstanceEntity) persistentObject;
              Context.getProcessEngineConfiguration().getEventDispatcher().dispatchEvent(
                  createVariableDeleteEvent(variableInstance));
            }
          }
        }
      }
    }
  }

  @Override
  public void close() {
    sqlSession.close();
  }

  public void commit() {
    sqlSession.commit();
  }

  public void rollback() {
    sqlSession.rollback();
  }

  // schema operations
//////////////////////////////////////////////////////// public void dbSchemaCheckVersion() { try { String dbVersion = getDbVersion(); if (!ProcessEngine.VERSION.equals(dbVersion)) { throw new ActivitiWrongDbException(ProcessEngine.VERSION, dbVersion); } String errorMessage = null; if (!isEngineTablePresent()) { errorMessage = addMissingComponent(errorMessage, "engine"); } if (dbSqlSessionFactory.isDbHistoryUsed() && !isHistoryTablePresent()) { errorMessage = addMissingComponent(errorMessage, "history"); } if (errorMessage != null) { throw new ActivitiException("Activiti database problem: " + errorMessage); } } catch (Exception e) { if (isMissingTablesException(e)) { throw new ActivitiException( "no activiti tables in db. set <property name=\"databaseSchemaUpdate\" to value=\"true\" or value=\"create-drop\" (use create-drop for testing only!) in bean processEngineConfiguration in flowable.cfg.xml for automatic schema creation", e); } else { if (e instanceof RuntimeException) { throw (RuntimeException) e; } else { throw new ActivitiException("couldn't get db schema version", e); } } } log.debug("activiti db schema check successful"); } protected String addMissingComponent(String missingComponents, String component) { if (missingComponents == null) { return "Tables missing for component(s) " + component; } return missingComponents + ", " + component; } protected String getDbVersion() { String selectSchemaVersionStatement = dbSqlSessionFactory.mapStatement("selectDbSchemaVersion"); return (String) sqlSession.selectOne(selectSchemaVersionStatement); } public static String[] JDBC_METADATA_TABLE_TYPES = { "TABLE" }; public boolean isEngineTablePresent() { return isTablePresent("ACT_RU_EXECUTION"); } public boolean isHistoryTablePresent() { return isTablePresent("ACT_HI_PROCINST"); } public boolean isTablePresent(String tableName) { // ACT-1610: in case the prefix IS the schema itself, we don't add the prefix, since the // check is already aware of the schema if 
(!dbSqlSessionFactory.isTablePrefixIsSchema()) { tableName = prependDatabaseTablePrefix(tableName); } Connection connection = null; try { connection = sqlSession.getConnection(); DatabaseMetaData databaseMetaData = connection.getMetaData(); ResultSet tables = null; String catalog = this.connectionMetadataDefaultCatalog; if (dbSqlSessionFactory.getDatabaseCatalog() != null && dbSqlSessionFactory.getDatabaseCatalog().length() > 0) { catalog = dbSqlSessionFactory.getDatabaseCatalog(); } String schema = this.connectionMetadataDefaultSchema; if (dbSqlSessionFactory.getDatabaseSchema() != null && dbSqlSessionFactory.getDatabaseSchema().length() > 0) { schema = dbSqlSessionFactory.getDatabaseSchema(); } String databaseType = dbSqlSessionFactory.getDatabaseType(); if ("postgres".equals(databaseType)) { tableName = tableName.toLowerCase(); } try { tables = databaseMetaData.getTables(catalog, schema, tableName, JDBC_METADATA_TABLE_TYPES); return tables.next(); } finally { try { tables.close(); } catch (Exception e) { log.error("Error closing meta data tables", e); } } } catch (Exception e) { throw new ActivitiException("couldn't check if tables are already present using metadata: " + e.getMessage(), e); } } protected String getCleanVersion(String versionString) { Matcher matcher = CLEAN_VERSION_REGEX.matcher(versionString); if (!matcher.find()) { throw new ActivitiException("Illegal format for version: " + versionString); } String cleanString = matcher.group(); try { Double.parseDouble(cleanString); // try to parse it, to see if it is really a number return cleanString; } catch (NumberFormatException nfe) { throw new ActivitiException("Illegal format for version: " + versionString); } } protected String prependDatabaseTablePrefix(String tableName) { return dbSqlSessionFactory.getDatabaseTablePrefix() + tableName; } protected String readNextTrimmedLine(BufferedReader reader) throws IOException { String line = reader.readLine(); if (line != null) { line = line.trim(); } return 
line; } protected boolean isMissingTablesException(Exception e) { String exceptionMessage = e.getMessage(); if (e.getMessage() != null) { // Matches message returned from H2 if ((exceptionMessage.indexOf("Table") != -1) && (exceptionMessage.indexOf("not found") != -1)) { return true; } // Message returned from MySQL and Oracle if (((exceptionMessage.indexOf("Table") != -1 || exceptionMessage.indexOf("table") != -1)) && (exceptionMessage.indexOf("doesn't exist") != -1)) { return true; } // Message returned from Postgres if (((exceptionMessage.indexOf("relation") != -1 || exceptionMessage.indexOf("table") != -1)) && (exceptionMessage.indexOf("does not exist") != -1)) { return true; } } return false; } public <T> T getCustomMapper(Class<T> type) { return sqlSession.getMapper(type); } // query factory methods //////////////////////////////////////////////////// public DeploymentQueryImpl createDeploymentQuery() { return new DeploymentQueryImpl(); } public ModelQueryImpl createModelQueryImpl() { return new ModelQueryImpl(); } public ProcessDefinitionQueryImpl createProcessDefinitionQuery() { return new ProcessDefinitionQueryImpl(); } public ProcessInstanceQueryImpl createProcessInstanceQuery() { return new ProcessInstanceQueryImpl(); } public ExecutionQueryImpl createExecutionQuery() { return new ExecutionQueryImpl(); } public TaskQueryImpl createTaskQuery() { return new TaskQueryImpl(); } public JobQueryImpl createJobQuery() { return new JobQueryImpl(); } public HistoricProcessInstanceQueryImpl createHistoricProcessInstanceQuery() { return new HistoricProcessInstanceQueryImpl(); } public HistoricActivityInstanceQueryImpl createHistoricActivityInstanceQuery() { return new HistoricActivityInstanceQueryImpl(); } public HistoricTaskInstanceQueryImpl createHistoricTaskInstanceQuery() { return new HistoricTaskInstanceQueryImpl(); } public HistoricDetailQueryImpl createHistoricDetailQuery() { return new HistoricDetailQueryImpl(); } public HistoricVariableInstanceQueryImpl 
createHistoricVariableInstanceQuery() { return new HistoricVariableInstanceQueryImpl(); } // getters and setters ////////////////////////////////////////////////////// public SqlSession getSqlSession() { return sqlSession; } public DbSqlSessionFactory getDbSqlSessionFactory() { return dbSqlSessionFactory; } }
/** * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. */ package com.microsoft.azure.management.network.implementation; import java.util.Collections; import java.util.Map; import java.util.TreeMap; import com.microsoft.azure.SubResource; import com.microsoft.azure.management.apigeneration.LangDefinition; import com.microsoft.azure.management.network.Frontend; import com.microsoft.azure.management.network.IPAllocationMethod; import com.microsoft.azure.management.network.InboundNatPool; import com.microsoft.azure.management.network.InboundNatRule; import com.microsoft.azure.management.network.PrivateFrontend; import com.microsoft.azure.management.network.PublicFrontend; import com.microsoft.azure.management.network.LoadBalancer; import com.microsoft.azure.management.network.LoadBalancingRule; import com.microsoft.azure.management.network.Network; import com.microsoft.azure.management.network.PublicIpAddress; import com.microsoft.azure.management.resources.fluentcore.arm.ResourceUtils; import com.microsoft.azure.management.resources.fluentcore.arm.models.implementation.ChildResourceImpl; /** * Implementation for {@link PublicFrontend}. 
*/ @LangDefinition class FrontendImpl extends ChildResourceImpl<FrontendIPConfigurationInner, LoadBalancerImpl, LoadBalancer> implements Frontend, PrivateFrontend, PrivateFrontend.Definition<LoadBalancer.DefinitionStages.WithPrivateFrontendOrBackend>, PrivateFrontend.UpdateDefinition<LoadBalancer.Update>, PrivateFrontend.Update, PublicFrontend, PublicFrontend.Definition<LoadBalancer.DefinitionStages.WithPublicFrontendOrBackend>, PublicFrontend.UpdateDefinition<LoadBalancer.Update>, PublicFrontend.Update { FrontendImpl(FrontendIPConfigurationInner inner, LoadBalancerImpl parent) { super(inner, parent); } // Getters @Override public String networkId() { SubResource subnetRef = this.inner().subnet(); if (subnetRef != null) { return ResourceUtils.parentResourcePathFromResourceId(subnetRef.id()); } else { return null; } } @Override public String subnetName() { SubResource subnetRef = this.inner().subnet(); if (subnetRef != null) { return ResourceUtils.nameFromResourceId(subnetRef.id()); } else { return null; } } @Override public String privateIpAddress() { return this.inner().privateIPAddress(); } @Override public IPAllocationMethod privateIpAllocationMethod() { return this.inner().privateIPAllocationMethod(); } @Override public String name() { return this.inner().name(); } @Override public String publicIpAddressId() { return this.inner().publicIPAddress().id(); } @Override public boolean isPublic() { return (this.inner().publicIPAddress() != null); } @Override public Map<String, LoadBalancingRule> loadBalancingRules() { final Map<String, LoadBalancingRule> rules = new TreeMap<>(); if (this.inner().loadBalancingRules() != null) { for (SubResource innerRef : this.inner().loadBalancingRules()) { String name = ResourceUtils.nameFromResourceId(innerRef.id()); LoadBalancingRule rule = this.parent().loadBalancingRules().get(name); if (rule != null) { rules.put(name, rule); } } } return Collections.unmodifiableMap(rules); } @Override public Map<String, InboundNatPool> 
inboundNatPools() { final Map<String, InboundNatPool> pools = new TreeMap<>(); if (this.inner().inboundNatPools() != null) { for (SubResource innerRef : this.inner().inboundNatPools()) { String name = ResourceUtils.nameFromResourceId(innerRef.id()); InboundNatPool pool = this.parent().inboundNatPools().get(name); if (pool != null) { pools.put(name, pool); } } } return Collections.unmodifiableMap(pools); } @Override public Map<String, InboundNatRule> inboundNatRules() { final Map<String, InboundNatRule> rules = new TreeMap<>(); if (this.inner().inboundNatRules() != null) { for (SubResource innerRef : this.inner().inboundNatRules()) { String name = ResourceUtils.nameFromResourceId(innerRef.id()); InboundNatRule rule = this.parent().inboundNatRules().get(name); if (rule != null) { rules.put(name, rule); } } } return Collections.unmodifiableMap(rules); } // Fluent setters @Override public FrontendImpl withExistingSubnet(Network network, String subnetName) { return this.withExistingSubnet(network.id(), subnetName); } @Override public FrontendImpl withExistingSubnet(String parentNetworkResourceId, String subnetName) { SubResource subnetRef = new SubResource() .withId(parentNetworkResourceId + "/subnets/" + subnetName); this.inner() .withSubnet(subnetRef) .withPublicIPAddress(null); // Ensure no conflicting public and private settings return this; } @Override public FrontendImpl withExistingPublicIpAddress(PublicIpAddress pip) { return this.withExistingPublicIpAddress(pip.id()); } @Override public FrontendImpl withExistingPublicIpAddress(String resourceId) { SubResource pipRef = new SubResource().withId(resourceId); this.inner() .withPublicIPAddress(pipRef) // Ensure no conflicting public and private settings .withSubnet(null) .withPrivateIPAddress(null) .withPrivateIPAllocationMethod(null); return this; } @Override public FrontendImpl withoutPublicIpAddress() { this.inner().withPublicIPAddress(null); return this; } @Override public FrontendImpl 
withPrivateIpAddressDynamic() { this.inner() .withPrivateIPAddress(null) .withPrivateIPAllocationMethod(IPAllocationMethod.DYNAMIC) // Ensure no conflicting public and private settings .withPublicIPAddress(null); return this; } @Override public FrontendImpl withPrivateIpAddressStatic(String ipAddress) { this.inner() .withPrivateIPAddress(ipAddress) .withPrivateIPAllocationMethod(IPAllocationMethod.STATIC) // Ensure no conflicting public and private settings .withPublicIPAddress(null); return this; } // Verbs @Override public LoadBalancerImpl attach() { return this.parent().withFrontend(this); } @Override public PublicIpAddress getPublicIpAddress() { final String pipId = this.publicIpAddressId(); if (pipId == null) { return null; } else { return this.parent().manager().publicIpAddresses().getById(pipId); } } }
package com.wear.ubiqlog;

import android.app.Service;
import android.content.Intent;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.IBinder;
import android.util.Log;
import android.widget.EditText;

import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.GooglePlayServicesUtil;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.common.api.ResultCallback;
import com.google.android.gms.wearable.MessageApi;
import com.google.android.gms.wearable.MessageEvent;
import com.google.android.gms.wearable.Node;
import com.google.android.gms.wearable.NodeApi;
import com.google.android.gms.wearable.Wearable;

import java.util.Date;

/**
 * Class will take 3 lux readings every SensorConstant.LIGHT_SENSOR_INTERVAL
 * and write to file.
 *
 * Started (not bound) service: registers a light-sensor listener, averages every
 * 3 samples, and sends the average to the connected wearable node over the
 * Wearable MessageApi on MESSAGE2_PATH.
 */
public class LightSensor extends Service implements SensorEventListener {

    private static final String LOG_TAG = LightSensor.class.getSimpleName();
    private Sensor mLight;
    private SensorManager mSensorManager;
    int count; // store number of samples
    float totalSum; // store sum of 3 samplings; divided by count after the 3rd sample to get the average
    private final String MESSAGE1_PATH = "/message1";
    private final String MESSAGE2_PATH = "/message2";
    private GoogleApiClient apiClient;
    private EditText receivedMessagesEditText; // NOTE(review): never assigned or used in this service — likely dead
    // private NodeApi.NodeListener nodeListener;
    private MessageApi.MessageListener messageListener;
    private String remoteNodeId; // id of the first connected wearable node; null until connection succeeds

    public LightSensor() {
    }

    // Started service only — binding is not supported.
    @Override
    public IBinder onBind(Intent intent) {
        return null;
    }

    @Override
    public void onCreate() {
        super.onCreate();
        count = 0;
        totalSum = 0f;
        mSensorManager = (SensorManager) getSystemService(SENSOR_SERVICE);
        mLight = mSensorManager.getDefaultSensor(Sensor.TYPE_LIGHT);
        Log.d(LOG_TAG, "Light sensor started");

        // Create NodeListener that enables buttons when a node is connected and disables
        // buttons when a node is disconnected
        // nodeListener = new NodeApi.NodeListener() {
        //     @Override
        //     public void onPeerConnected(Node node) {
        //
        //     }
        //
        //     @Override
        //     public void onPeerDisconnected(Node node) {
        //
        //     }
        // };

        // Create MessageListener that receives messages sent from a mobile
        messageListener = new MessageApi.MessageListener() {
            @Override
            public void onMessageReceived(MessageEvent messageEvent) {
                // NOTE(review): both message paths are currently handled as no-ops
                if (messageEvent.getPath().equals(MESSAGE1_PATH)) {
                } else if (messageEvent.getPath().equals(MESSAGE2_PATH)) {
                }
            }
        };

        // Create GoogleApiClient
        apiClient = new GoogleApiClient.Builder(getApplicationContext()).addConnectionCallbacks(new GoogleApiClient.ConnectionCallbacks() {
            @Override
            public void onConnected(Bundle bundle) {
                // Register Node and Message listeners
                // Wearable.NodeApi.addListener(apiClient, nodeListener);
                Wearable.MessageApi.addListener(apiClient, messageListener);
                // If there is a connected node, get it's id that is used when sending messages
                Wearable.NodeApi.getConnectedNodes(apiClient).setResultCallback(new ResultCallback<NodeApi.GetConnectedNodesResult>() {
                    @Override
                    public void onResult(NodeApi.GetConnectedNodesResult getConnectedNodesResult) {
                        if (getConnectedNodesResult.getStatus().isSuccess() && getConnectedNodesResult.getNodes().size() > 0) {
                            remoteNodeId = getConnectedNodesResult.getNodes().get(0).getId();
                        }
                    }
                });
            }

            @Override
            public void onConnectionSuspended(int i) {
            }
        }).addApi(Wearable.API).build();
    }

    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        //SensorDelayNormal is 200,000 ms
        // NOTE(review): SENSOR_DELAY_FASTEST is requested here, not SENSOR_DELAY_NORMAL —
        // the comment above describes the normal-delay constant, not what is used.
        mSensorManager.registerListener(LightSensor.this, mLight, SensorManager.SENSOR_DELAY_FASTEST);

        // Check is Google Play Services available
        int connectionResult = GooglePlayServicesUtil.isGooglePlayServicesAvailable(getApplicationContext());

        if (connectionResult != ConnectionResult.SUCCESS) {
            // NOTE(review): unavailability is silently ignored — apiClient stays
            // disconnected and later sendMessage calls will go nowhere.
        } else {
            apiClient.connect();
        }

        return START_NOT_STICKY; // If process died, it will <<NOT>> start again
    }

    // Each reading is handed to a background task; the listener is unregistered
    // immediately so only one sample per registration is processed.
    @Override
    public void onSensorChanged(SensorEvent event) {
        new SensorEventLoggerTask().execute(event);
        mSensorManager.unregisterListener(this);
    }

    /**
     * Accumulates lux samples off the main thread; every 3rd sample the average is sent
     * to the remote node, otherwise the task sleeps and re-registers the listener for
     * the next sample.
     */
    private class SensorEventLoggerTask extends AsyncTask<SensorEvent, Void, Void> {
        @Override
        protected Void doInBackground(SensorEvent... events) {
            SensorEvent event = events[0];
            float lux = event.values[0];
            totalSum += lux;
            count++;
            //Log.d(LOG_TAG, "Sample count:" + count + ", lux:" + lux + ", total:" + totalSum);
            if (count >= 3) {
                Date date = new Date();
                float avg = totalSum / count;
                Log.e("LIGHT",""+avg);
                //Encode the lux value and date
                //String encoded = JSONUtil.encodeLight(avg, date);
                //Log.d(LOG_TAG, encoded);
                //add encoded string to buffer
                //mDataBuffer.insert(encoded, true, Setting.bufferMaxSize); // 1 for BufferMaxSize causes to flush Buffer automatically after inserting value
                //String encoded_SA = SemanticTempCSVUtil.encodeLight(avg, date);
                //mSA_lightBuffer.insert(encoded_SA, true, Setting.bufferMaxSize); // 1 for BufferMaxSize causes to flush Buffer automatically after inserting value
                String s = ""+avg;
                // NOTE(review): remoteNodeId may still be null if no node connected yet — verify MessageApi tolerates that.
                Wearable.MessageApi.sendMessage(apiClient, remoteNodeId, MESSAGE2_PATH, s.getBytes()).setResultCallback(new ResultCallback<MessageApi.SendMessageResult>() {
                    @Override
                    public void onResult(MessageApi.SendMessageResult sendMessageResult) {
                    }
                });
                totalSum = 0;
                count = 0;
                // stop the service. The service will run after 15min by ServiceMonitor class(AlarmService)
                //stopSelf();
                try {
                    //sleep current thread for about 30sec to get a new sample
                    // NOTE(review): 300000 ms is 5 minutes, not ~30 s as the comment above claims — confirm intent.
                    // NOTE(review): unlike the else-branch below, this branch never re-registers the sensor
                    // listener after sleeping, so no further samples arrive until the service is restarted
                    // externally — looks unintentional; verify against ServiceMonitor behavior.
                    Thread.sleep(300000); // register a listener to waiting for onSensorChanged() event
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            } else {
                try {
                    //sleep current thread for about 30sec to get a new sample
                    Thread.sleep(30000);
                    // register a listener to waiting for onSensorChanged() event
                    mSensorManager.registerListener(LightSensor.this, mLight, SensorManager.SENSOR_DELAY_FASTEST);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                    //ErrorCollector.Log("1501", "InterruptedException:" + count + "," + e.getMessage());
                }
            }
            return null;
        }
    }

    @Override
    public void onAccuracyChanged(Sensor sensor, int accuracy) {
    }

    @Override
    public void onDestroy() {
        //mDataBuffer.flush(true);
        //Unregister the listener
        // Wearable.NodeApi.removeListener(apiClient, nodeListener);
        Wearable.MessageApi.removeListener(apiClient, messageListener);
        apiClient.disconnect();
        mSensorManager.unregisterListener(this);
        //Log.d(LOG_TAG, "Light sensor stopped");
        super.onDestroy();
    }
}
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package edacc.satinstances; import edacc.EDACCApp; import edacc.model.ComputationMethod; import edacc.model.ComputationMethodAlreadyExistsException; import edacc.model.ComputationMethodDAO; import edacc.model.ComputationMethodSameMD5AlreadyExists; import edacc.model.ComputationMethodSameNameAlreadyExists; import edacc.model.NoComputationMethodBinarySpecifiedException; import edacc.model.NoConnectionToDBException; import edacc.model.Property; import edacc.model.PropertyDAO; import edacc.model.PropertyIsUsedException; import edacc.model.PropertyType; import edacc.model.PropertyTypeDoesNotExistException; import edacc.properties.PropertySource; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.lang.reflect.Modifier; import java.net.URL; import java.net.URLClassLoader; import java.sql.SQLException; import java.util.ArrayList; import java.util.Hashtable; import java.util.LinkedList; import java.util.List; import java.util.Vector; import java.util.jar.JarEntry; import java.util.jar.JarFile; import java.util.jar.JarInputStream; /** * * @author dgall */ public class DefaultPropertiesManager { private static DefaultPropertiesManager instance; public static DefaultPropertiesManager getInstance() { if (instance == null) { instance = new DefaultPropertiesManager(); } return instance; } public void addDefaultToDB() throws NoConnectionToDBException, SQLException, IOException, ComputationMethodAlreadyExistsException, NoComputationMethodBinarySpecifiedException, FileNotFoundException, ComputationMethodSameNameAlreadyExists, ComputationMethodSameMD5AlreadyExists, PropertyIsUsedException, PropertyTypeDoesNotExistException { addSATPC(); } private void addSATPC() throws 
FileNotFoundException, IOException, NoConnectionToDBException, SQLException, ComputationMethodAlreadyExistsException, NoComputationMethodBinarySpecifiedException, ComputationMethodSameNameAlreadyExists, ComputationMethodSameMD5AlreadyExists, PropertyIsUsedException, PropertyTypeDoesNotExistException { ComputationMethod satpc = new ComputationMethod(); satpc.setName("satpc"); satpc.setDescription("A generic program that can calculate a lot of instance properties."); satpc.setBinaryName("SATPC.jar"); org.jdesktop.application.ResourceMap resourceMap = org.jdesktop.application.Application.getInstance(EDACCApp.class).getContext().getResourceMap(); File file = new File(edacc.experiment.Util.getPath()); file = new File(file, "tmp"); if (file.exists() && !file.isDirectory()) { if (!file.delete()) { EDACCApp.getLogger().logError("Could not remove file: " + file.getPath()); return; } } if (!file.exists() && !file.mkdir()) { EDACCApp.getLogger().logError("Could not create directory: " + file.getPath()); return; } file = new File(file, satpc.getBinaryName()); byte[] buffer = new byte[2048]; int read = 0; InputStream is = resourceMap.getClassLoader().getResourceAsStream("edacc/resources/SATPC.jar"); FileOutputStream os = new FileOutputStream(file); while ((read = is.read(buffer)) > 0) { os.write(buffer, 0, read); } satpc.setBinary(file); ComputationMethodDAO.save(satpc); addSATPCProperties(file, satpc); } private void addSATPCProperties(File f, ComputationMethod satpc) throws IOException, NoConnectionToDBException, SQLException, PropertyIsUsedException, PropertyTypeDoesNotExistException { URL url = f.toURI().toURL(); URL[] urls = new URL[]{url}; List<Class<SATInstanceProperty>> instancePropertyClasses = extractClassesFromJAR(f, new URLClassLoader(urls, SATInstanceProperty.class.getClassLoader())); List<Property> properties = createSATPCPropertyObjects(instancePropertyClasses, satpc); System.out.println("Hi!"); for (Property p : properties) { System.out.println("prop: " + 
p.getName()); PropertyDAO.save(p); } } /** * This method creates the PropertyValueType objects of all found ProeprtyValueTypes and adds them * to the list. */ private List<Property> createSATPCPropertyObjects( List<Class<SATInstanceProperty>> propertyClasses, ComputationMethod satpc) { List<Property> properties = new LinkedList<Property>(); for (Class<SATInstanceProperty> propertyClass : propertyClasses) { try { SATInstanceProperty pInst = propertyClass.newInstance(); Property p = new Property(); p.setName(propertyClass.getSimpleName()); p.setDescription(pInst.getName()); p.setComputationMethod(satpc); p.setComputationMethodParameters(propertyClass.getSimpleName()); p.setPropertyValueTypeName(pInst.getPropertyValueType().getName()); p.setType(PropertyType.InstanceProperty); p.setSource(PropertySource.Instance); p.setIsDefault(true); properties.add(p); } catch (InstantiationException e) { /*System.err.println("Can't instantiate Property: " + propertyClass.getName()); e.printStackTrace();*/ } catch (IllegalAccessException e) { System.err.println("IllegalAccess for PropertyValueType: " + propertyClass.getName()); e.printStackTrace(); } } System.out.println("PROPS: " + properties.size()); return properties; } /** * Extracts all pluggable PropertyValueType classes from a list of files. * * @param files * @param cl * @return List * @throws IOException */ /*@SuppressWarnings("unchecked") private List<Class<SATInstanceProperty>> getClassesFromFiles(List<File> files, ClassLoader cl) throws IOException { List<Class<SATInstanceProperty>> classes = new ArrayList<Class<SATInstanceProperty>>(); for (File f : files) { if (f.getName().toLowerCase().endsWith(".class")) { try { //System.out.println("Absolute Path: " + f.getAbsolutePath()); Class<?> cls = cl.loadClass("edacc.satinstances." 
+ f.getName().substring(0, f.getName().length() - 6).replace('/', '.')); if (isPluggableClass(cls)) { classes.add((Class<SATInstanceProperty>) cls); } } catch (ClassNotFoundException e) { System.err.println("Can't load Class " + f.getName()); e.printStackTrace(); } } } return classes; }*/ /** * Checks if a given class is pluggable, which means: has the type PropertyValueType. * * @param cls * @return true if class is pluggable */ private boolean isPluggableClass(Class<?> cls) { System.out.println(Modifier.isAbstract(cls.getModifiers())); for (Class<?> i : cls.getInterfaces()) {; if (i.equals(SATInstanceProperty.class)) { return true; } } if (cls.getSuperclass() == null) return false; return isPluggableClass(cls.getSuperclass()) && !Modifier.isAbstract(cls.getModifiers()); } /** * Creates the file of the given JarEntry out from the given JarFile. * @param jf JarFile which contains the JarEntry * @param ent JarEntry from which the file is requested * @param root File which is the JarFile * @return File of the given JarEntry * @throws IOException * @author rretz */ private File getFileOfJarEntry(JarFile jf, JarEntry ent, File root) throws IOException { File input = new File(ent.getName()); BufferedInputStream bis = new BufferedInputStream(jf.getInputStream(ent)); File dir = new File(root.getParent()); dir.mkdirs(); BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(input)); for (int c; (c = bis.read()) != -1;) { bos.write((byte) c); } bos.close(); return input; } @SuppressWarnings("unchecked") private List<Class<SATInstanceProperty>> extractClassesFromJAR(File jar, ClassLoader cl) throws IOException { List<Class<SATInstanceProperty>> classes = new ArrayList<Class<SATInstanceProperty>>(); JarInputStream jaris = new JarInputStream(new FileInputStream(jar)); JarEntry ent = null; while ((ent = jaris.getNextJarEntry()) != null) { if (ent.getName().toLowerCase().endsWith(".class")) { try { Class<?> cls = cl.loadClass(ent.getName().substring(0, 
ent.getName().length() - 6).replace('/', '.')); if (isPluggableClass(cls)) { classes.add((Class<SATInstanceProperty>) cls); } } catch (ClassNotFoundException e) { System.err.println("Can't load Class " + ent.getName()); e.printStackTrace(); } } } jaris.close(); System.out.println(classes.size()); return classes; } }
/* * Copyright 2008 Android4ME * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.apkplugin.android.axmlprinter; import java.io.File; import java.io.InputStream; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; import org.xmlpull.v1.XmlPullParser; import android.util.TypedValue; /** * This is example usage of AXMLParser class. * * Prints xml document from Android's binary xml file. */ public class AXMLPrinter { private static final String DEFAULT_XML = "AndroidManifest.xml"; /* * public static void main(String[] arguments) { if (arguments.length<1) { * System.out.println("Usage: AXMLPrinter <APK FILE PATH>"); return; } // * String apkPath = "E:/apk_file/201008300227127991.apk"; String apkPath = * arguments[0]; System.out.println(getManifestXMLFromAPK(apkPath)); } */ public static String getManifestXMLFromAPK(InputStream inputstream) { StringBuilder xmlSb = new StringBuilder(100); try { AXmlResourceParser parser = new AXmlResourceParser(); parser.open(inputstream); StringBuilder sb = new StringBuilder(10); final String indentStep = " "; int type; while ((type = parser.next()) != XmlPullParser.END_DOCUMENT) { switch (type) { case XmlPullParser.START_DOCUMENT: { log(xmlSb, "<?xml version=\"1.0\" encoding=\"utf-8\"?>"); break; } case XmlPullParser.START_TAG: { log(false, xmlSb, "%s<%s%s", sb, getNamespacePrefix(parser.getPrefix()), parser.getName()); sb.append(indentStep); int namespaceCountBefore = parser.getNamespaceCount(parser .getDepth() - 1); int 
namespaceCount = parser.getNamespaceCount(parser .getDepth()); for (int i = namespaceCountBefore; i != namespaceCount; ++i) { log(xmlSb, "%sxmlns:%s=\"%s\"", i == namespaceCountBefore ? " " : sb, parser.getNamespacePrefix(i), parser.getNamespaceUri(i)); } for (int i = 0, size = parser.getAttributeCount(); i != size; ++i) { log(false, xmlSb, "%s%s%s=\"%s\"", " ", getNamespacePrefix(parser.getAttributePrefix(i)), parser.getAttributeName(i), getAttributeValue(parser, i)); } // log("%s>",sb); log(xmlSb, ">"); break; } case XmlPullParser.END_TAG: { sb.setLength(sb.length() - indentStep.length()); log(xmlSb, "%s</%s%s>", sb, getNamespacePrefix(parser.getPrefix()), parser.getName()); break; } case XmlPullParser.TEXT: { log(xmlSb, "%s%s", sb, parser.getText()); break; } } } parser.close(); } catch (Exception e) { e.printStackTrace(); } return xmlSb.toString(); } public static String getManifestXMLFromAPK(String apkPath) { ZipFile file = null; StringBuilder xmlSb = new StringBuilder(100); try { File apkFile = new File(apkPath); file = new ZipFile(apkFile, ZipFile.OPEN_READ); ZipEntry entry = file.getEntry(DEFAULT_XML); AXmlResourceParser parser = new AXmlResourceParser(); parser.open(file.getInputStream(entry)); StringBuilder sb = new StringBuilder(10); final String indentStep = " "; int type; while ((type = parser.next()) != XmlPullParser.END_DOCUMENT) { switch (type) { case XmlPullParser.START_DOCUMENT: { log(xmlSb, "<?xml version=\"1.0\" encoding=\"utf-8\"?>"); break; } case XmlPullParser.START_TAG: { log(false, xmlSb, "%s<%s%s", sb, getNamespacePrefix(parser.getPrefix()), parser.getName()); sb.append(indentStep); int namespaceCountBefore = parser.getNamespaceCount(parser .getDepth() - 1); int namespaceCount = parser.getNamespaceCount(parser .getDepth()); for (int i = namespaceCountBefore; i != namespaceCount; ++i) { log(xmlSb, "%sxmlns:%s=\"%s\"", i == namespaceCountBefore ? 
" " : sb, parser.getNamespacePrefix(i), parser.getNamespaceUri(i)); } for (int i = 0, size = parser.getAttributeCount(); i != size; ++i) { log(false, xmlSb, "%s%s%s=\"%s\"", " ", getNamespacePrefix(parser.getAttributePrefix(i)), parser.getAttributeName(i), getAttributeValue(parser, i)); } // log("%s>",sb); log(xmlSb, ">"); break; } case XmlPullParser.END_TAG: { sb.setLength(sb.length() - indentStep.length()); log(xmlSb, "%s</%s%s>", sb, getNamespacePrefix(parser.getPrefix()), parser.getName()); break; } case XmlPullParser.TEXT: { log(xmlSb, "%s%s", sb, parser.getText()); break; } } } parser.close(); } catch (Exception e) { e.printStackTrace(); } return xmlSb.toString(); } private static String getNamespacePrefix(String prefix) { if (prefix == null || prefix.length() == 0) { return ""; } return prefix + ":"; } private static String getAttributeValue(AXmlResourceParser parser, int index) { int type = parser.getAttributeValueType(index); int data = parser.getAttributeValueData(index); if (type == TypedValue.TYPE_STRING) { return parser.getAttributeValue(index); } if (type == TypedValue.TYPE_ATTRIBUTE) { return String.format("?%s%08X", getPackage(data), data); } if (type == TypedValue.TYPE_REFERENCE) { return String.format("@%s%08X", getPackage(data), data); } if (type == TypedValue.TYPE_FLOAT) { return String.valueOf(Float.intBitsToFloat(data)); } if (type == TypedValue.TYPE_INT_HEX) { return String.format("0x%08X", data); } if (type == TypedValue.TYPE_INT_BOOLEAN) { return data != 0 ? 
"true" : "false"; } if (type == TypedValue.TYPE_DIMENSION) { return Float.toString(complexToFloat(data)) + DIMENSION_UNITS[data & TypedValue.COMPLEX_UNIT_MASK]; } if (type == TypedValue.TYPE_FRACTION) { return Float.toString(complexToFloat(data)) + FRACTION_UNITS[data & TypedValue.COMPLEX_UNIT_MASK]; } if (type >= TypedValue.TYPE_FIRST_COLOR_INT && type <= TypedValue.TYPE_LAST_COLOR_INT) { return String.format("#%08X", data); } if (type >= TypedValue.TYPE_FIRST_INT && type <= TypedValue.TYPE_LAST_INT) { return String.valueOf(data); } return String.format("<0x%X, type 0x%02X>", data, type); } private static String getPackage(int id) { if (id >>> 24 == 1) { return "android:"; } return ""; } private static void log(StringBuilder xmlSb, String format, Object... arguments) { log(true, xmlSb, format, arguments); } private static void log(boolean newLine, StringBuilder xmlSb, String format, Object... arguments) { // System.out.printf(format,arguments); // if(newLine) System.out.println(); xmlSb.append(String.format(format, arguments)); if (newLine) xmlSb.append("\n"); } // ///////////////////////////////// ILLEGAL STUFF, DONT LOOK :) public static float complexToFloat(int complex) { return (float) (complex & 0xFFFFFF00) * RADIX_MULTS[(complex >> 4) & 3]; } private static final float RADIX_MULTS[] = { 0.00390625F, 3.051758E-005F, 1.192093E-007F, 4.656613E-010F }; private static final String DIMENSION_UNITS[] = { "px", "dip", "sp", "pt", "in", "mm", "", "" }; private static final String FRACTION_UNITS[] = { "%", "%p", "", "", "", "", "", "" }; }
package com.braintreegateway.testhelpers; import com.braintreegateway.*; import com.braintreegateway.Transaction.Status; import com.braintreegateway.exceptions.UnexpectedException; import com.braintreegateway.util.Sha1Hasher; import com.braintreegateway.util.Http; import com.braintreegateway.util.NodeWrapper; import com.braintreegateway.util.QueryString; import com.braintreegateway.EuropeBankAccount.MandateType; import com.braintreegateway.org.apache.commons.codec.binary.Base64; import org.junit.Ignore; import java.io.IOException; import java.net.HttpURLConnection; import java.net.URL; import java.net.URLEncoder; import java.net.URLDecoder; import java.nio.charset.Charset; import java.util.Calendar; import java.util.Comparator; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.*; import java.io.UnsupportedEncodingException; import static org.junit.Assert.*; @Ignore("Testing utility class") public abstract class TestHelper { public static final class CompareModificationsById implements Comparator<Modification> { public int compare(Modification left, Modification right) { return left.getId().compareTo(right.getId()); } } public static void assertDatesEqual(Calendar first, Calendar second) { if (first == null && second != null) { throw new AssertionError("dates are not equal. first is null, second is not"); } else if (first != null && second == null) { throw new AssertionError("dates are not equal. second is null, first is not"); } boolean yearsNotEqual = first.get(Calendar.YEAR) != second.get(Calendar.YEAR); boolean monthsNotEqual = first.get(Calendar.MONTH) != second.get(Calendar.MONTH); boolean daysNotEqual = first.get(Calendar.DAY_OF_MONTH) != second.get(Calendar.DAY_OF_MONTH); if (yearsNotEqual || monthsNotEqual || daysNotEqual) { StringBuffer buffer = new StringBuffer("dates are not equal. 
"); if (yearsNotEqual) { buffer.append("years (" + first.get(Calendar.YEAR) + ", " + second.get(Calendar.YEAR) + ") not equal."); } if (monthsNotEqual) { buffer.append("months (" + first.get(Calendar.MONTH) + ", " + second.get(Calendar.MONTH) + ") not equal."); } if (daysNotEqual) { buffer.append("days (" + first.get(Calendar.DAY_OF_MONTH) + ", " + second.get(Calendar.DAY_OF_MONTH) + ") not equal."); } throw new AssertionError(buffer.toString()); } } public static void assertIncludes(String expected, String all) { assertTrue("Expected:\n" + all + "\nto include:\n" + expected, all.indexOf(expected) >= 0); } public static void assertValidTrData(Configuration configuration, String trData) { String[] dataSections = trData.split("\\|"); String trHash = dataSections[0]; String trContent = dataSections[1]; assertEquals(trHash, new Sha1Hasher().hmacHash(configuration.getPrivateKey(), trContent)); } public static boolean listIncludes(List<? extends Object> list, Object expectedItem) { for (Object item : list) { if (item.equals(expectedItem)) { return true; } } return false; } public static boolean includesSubscription(ResourceCollection<Subscription> collection, Subscription item) { for (Subscription subscription : collection) { if (subscription.getId().equals(item.getId())) { return true; } } return false; } public static boolean includesStatus(ResourceCollection<Transaction> collection, Status status) { for (Transaction transaction : collection) { if (transaction.getStatus().equals(status)) { return true; } } return false; } public static Result<Transaction> settle(BraintreeGateway gateway, String transactionId) { return gateway.testing().settle(transactionId); } public static Result<Transaction> settlement_confirm(BraintreeGateway gateway, String transactionId) { return gateway.testing().settlementConfirm(transactionId); } public static Result<Transaction> settlement_decline(BraintreeGateway gateway, String transactionId) { return 
gateway.testing().settlementDecline(transactionId); } public static void escrow(BraintreeGateway gateway, String transactionId) { NodeWrapper response = new Http(gateway.getConfiguration()).put(gateway.getConfiguration().getMerchantPath() + "/transactions/" + transactionId + "/escrow"); assertTrue(response.isSuccess()); } public static String createTest3DS(BraintreeGateway gateway, String merchantAccountId, ThreeDSecureRequestForTests request) { String url = gateway.getConfiguration().getMerchantPath() + "/three_d_secure/create_verification/" + merchantAccountId; NodeWrapper response = new Http(gateway.getConfiguration()).post(url, request); assertTrue(response.isSuccess()); String token = response.findString("three-d-secure-token"); assertNotNull(token); return token; } public static String simulateFormPostForTR(BraintreeGateway gateway, Request trParams, Request request, String postUrl) { String response = ""; try { String trData = gateway.transparentRedirect().trData(trParams, "http://example.com"); StringBuilder postData = new StringBuilder("tr_data=") .append(URLEncoder.encode(trData, "UTF-8")) .append("&") .append(request.toQueryString()); URL url = new URL(postUrl); HttpURLConnection connection = (HttpURLConnection) url.openConnection(); connection.setInstanceFollowRedirects(false); connection.setDoOutput(true); connection.setRequestMethod("POST"); connection.addRequestProperty("Accept", "application/xml"); connection.addRequestProperty("Content-Type", "application/x-www-form-urlencoded"); connection.getOutputStream().write(postData.toString().getBytes("UTF-8")); connection.getOutputStream().close(); if (connection.getResponseCode() == 422) { connection.getErrorStream(); } else { connection.getInputStream(); } response = new URL(connection.getHeaderField("Location")).getQuery(); } catch (IOException e) { throw new UnexpectedException(e.getMessage()); } return response; } public static String generateUnlockedNonce(BraintreeGateway gateway, String customerId, 
String creditCardNumber) { ClientTokenRequest request = new ClientTokenRequest(); if (customerId != null) { request = request.customerId(customerId); } String encodedClientToken = gateway.clientToken().generate(request); String clientToken = TestHelper.decodeClientToken(encodedClientToken); String authorizationFingerprint = extractParamFromJson("authorizationFingerprint", clientToken); Configuration configuration = gateway.getConfiguration(); String url = configuration.getBaseURL() + configuration.getMerchantPath() + "/client_api/nonces.json"; QueryString payload = new QueryString(); payload.append("authorization_fingerprint", authorizationFingerprint). append("shared_customer_identifier_type", "testing"). append("shared_customer_identifier", "test-identifier"). append("credit_card[number]", creditCardNumber). append("credit_card[expiration_month]", "11"). append("share", "true"). append("credit_card[expiration_year]", "2099"); String responseBody; String nonce = ""; try { responseBody = HttpHelper.post(url, payload.toString()); nonce = extractParamFromJson("nonce", responseBody); } catch (Exception e) { throw new RuntimeException(e); } return nonce; } public static String decodeClientToken(String rawClientToken) { String decodedClientToken = new String(Base64.decodeBase64(rawClientToken), Charset.forName("UTF-8")); return decodedClientToken.replace("\\u0026", "&"); } public static String generateOneTimePayPalNonce(BraintreeGateway gateway) { String encodedClientToken = gateway.clientToken().generate(); String clientToken = TestHelper.decodeClientToken(encodedClientToken); String authorizationFingerprint = extractParamFromJson("authorizationFingerprint", clientToken); Configuration configuration = gateway.getConfiguration(); String url = configuration.getBaseURL() + configuration.getMerchantPath() + "/client_api/v1/payment_methods/paypal_accounts"; QueryString payload = new QueryString(); payload.append("authorization_fingerprint", authorizationFingerprint). 
append("shared_customer_identifier_type", "testing"). append("shared_customer_identifier", "test-identifier"). append("paypal_account[access_token]", "access"). append("paypal_account[options][validate]", "false"); String responseBody; String nonce = ""; try { responseBody = HttpHelper.post(url, payload.toString()); nonce = extractParamFromJson("nonce", responseBody); } catch (Exception e) { throw new RuntimeException(e); } return nonce; } public static String generateNonceForCreditCard(BraintreeGateway gateway, CreditCardRequest creditCardRequest, String customerId, boolean validate) { ClientTokenRequest clientTokenRequest = new ClientTokenRequest(). customerId(customerId); String encodedClientToken = gateway.clientToken().generate(clientTokenRequest); String clientToken = TestHelper.decodeClientToken(encodedClientToken); String authorizationFingerprint = extractParamFromJson("authorizationFingerprint", clientToken); Configuration configuration = gateway.getConfiguration(); String url = configuration.getBaseURL() + configuration.getMerchantPath() + "/client_api/v1/payment_methods/credit_cards"; QueryString payload = new QueryString(); payload.append("authorization_fingerprint", authorizationFingerprint). append("shared_customer_identifier_type", "testing"). append("shared_customer_identifier", "fake_identifier"). 
append("credit_card[options][validate]", new Boolean(validate).toString()); String responseBody; String nonce = ""; try { String payloadString = payload.toString(); payloadString += "&" + creditCardRequest.toQueryString(); responseBody = HttpHelper.post(url, payloadString); nonce = extractParamFromJson("nonce", responseBody); } catch (Exception e) { throw new RuntimeException(e); } return nonce; } public static String generateEuropeBankAccountNonce(BraintreeGateway gateway, Customer customer) { SEPAClientTokenRequest request = new SEPAClientTokenRequest(); request.customerId(customer.getId()); request.mandateType(EuropeBankAccount.MandateType.BUSINESS); request.mandateAcceptanceLocation("Rostock, Germany"); String encodedClientToken = gateway.clientToken().generate(request); String clientToken = TestHelper.decodeClientToken(encodedClientToken); String authorizationFingerprint = extractParamFromJson("authorizationFingerprint", clientToken); Configuration configuration = gateway.getConfiguration(); String url = configuration.getBaseURL() + configuration.getMerchantPath() + "/client_api/v1/sepa_mandates"; QueryString payload = new QueryString(); payload.append("authorization_fingerprint", authorizationFingerprint) .append("sepa_mandate[locale]", "de-DE") .append("sepa_mandate[bic]", "DEUTDEFF") .append("sepa_mandate[iban]", "DE89370400440532013000") .append("sepa_mandate[accountHolderName]", "Bob Holder") .append("sepa_mandate[billingAddress][streetAddress]", "123 Currywurst Way") .append("sepa_mandate[billingAddress][extendedAddress]", "Lager Suite") .append("sepa_mandate[billingAddress][firstName]", "Wilhelm") .append("sepa_mandate[billingAddress][lastName]", "Dix") .append("sepa_mandate[billingAddress][locality]", "Frankfurt") .append("sepa_mandate[billingAddress][postalCode]", "60001") .append("sepa_mandate[billingAddress][countryCodeAlpha2]", "DE") .append("sepa_mandate[billingAddress][region]", "Hesse"); String responseBody; String nonce = ""; try { responseBody 
= HttpHelper.post(url, payload.toString()); nonce = extractParamFromJson("nonce", responseBody); } catch (Exception e) { throw new RuntimeException(e); } return nonce; } public static String getNonceForPayPalAccount(BraintreeGateway gateway, String consentCode) { return getNonceForPayPalAccount(gateway, consentCode, null); } public static String getNonceForPayPalAccount(BraintreeGateway gateway, String consentCode, String token) { String encodedClientToken = gateway.clientToken().generate(); String clientToken = TestHelper.decodeClientToken(encodedClientToken); String authorizationFingerprint = extractParamFromJson("authorizationFingerprint", clientToken); Configuration configuration = gateway.getConfiguration(); String url = configuration.getBaseURL() + configuration.getMerchantPath() + "/client_api/v1/payment_methods/paypal_accounts"; QueryString payload = new QueryString(); payload.append("authorization_fingerprint", authorizationFingerprint). append("shared_customer_identifier_type", "testing"). append("shared_customer_identifier", "test-identifier"). append("paypal_account[consent_code]", consentCode). append("paypal_account[token]", token). 
append("paypal_account[options][validate]", "false"); String responseBody; String nonce = ""; try { responseBody = HttpHelper.post(url, payload.toString()); nonce = extractParamFromJson("nonce", responseBody); } catch (Exception e) { throw new RuntimeException(e); } return nonce; } public static String generateFuturePaymentPayPalNonce(BraintreeGateway gateway) { QueryString payload = new QueryString(); payload.append("paypal_account[consent_code]", "consent"); return generatePayPalNonce(gateway, payload); } public static String generateBillingAgreementPayPalNonce(BraintreeGateway gateway) { QueryString payload = new QueryString(); payload.append("paypal_account[billing_agreement_token]", "fake_ba_token"); return generatePayPalNonce(gateway, payload); } private static String generatePayPalNonce(BraintreeGateway gateway, QueryString payload) { String encodedClientToken = gateway.clientToken().generate(); String clientToken = TestHelper.decodeClientToken(encodedClientToken); String authorizationFingerprint = extractParamFromJson("authorizationFingerprint", clientToken); Configuration configuration = gateway.getConfiguration(); String url = configuration.getBaseURL() + configuration.getMerchantPath() + "/client_api/v1/payment_methods/paypal_accounts"; payload.append("authorization_fingerprint", authorizationFingerprint). append("shared_customer_identifier_type", "testing"). append("shared_customer_identifier", "test-identifier"). 
append("paypal_account[options][validate]", "false"); String responseBody; String nonce = ""; try { responseBody = HttpHelper.post(url, payload.toString()); nonce = extractParamFromJson("nonce", responseBody); } catch (Exception e) { throw new RuntimeException(e); } return nonce; } public static String generateUnlockedNonce(BraintreeGateway gateway) { return generateUnlockedNonce(gateway, null, "4111111111111111"); } public static String extractParamFromJson(String keyName, String json) { String regex = "\"" + keyName + "\":\\s*\"([^\"]+)\""; Pattern keyPattern = Pattern.compile(regex); Matcher m = keyPattern.matcher(json); String value = ""; if (m.find()) { value = m.group(1); } return value; } public static int extractIntParamFromJson(String keyName, String json) { String regex = "\"" + keyName + "\":\\s*(\\d+)"; Pattern keyPattern = Pattern.compile(regex); Matcher m = keyPattern.matcher(json); int value = 0; if (m.find()) { value = Integer.parseInt(m.group(1)); } return value; } public static final class OAuthGrantRequest extends Request { private String scope; private String merchantId; public OAuthGrantRequest scope(String scope) { this.scope = scope; return this; } public OAuthGrantRequest merchantId(String merchantId) { this.merchantId = merchantId; return this; } @Override public String toXML() { return new RequestBuilder("grant"). addElement("scope", scope). addElement("merchant_public_id", merchantId). toXML(); } } public static String createOAuthGrant(BraintreeGateway gateway, String merchantId, String scope) { Http http = new Http(gateway.getConfiguration()); OAuthGrantRequest request = new OAuthGrantRequest(). scope(scope). 
merchantId(merchantId); NodeWrapper node = http.post("/oauth_testing/grants", request); return node.findString("code"); } /* http://stackoverflow.com/questions/13592236/parse-the-uri-string-into-name-value-collection-in-java */ public static Map<String, String> splitQuery(URL url) throws UnsupportedEncodingException { Map<String, String> queryPairs = new LinkedHashMap<String, String>(); String query = url.getQuery(); String[] pairs = query.split("&"); for (String pair : pairs) { int idx = pair.indexOf("="); String key = URLDecoder.decode(pair.substring(0, idx), "UTF-8"); if (queryPairs.get(key) == null) { queryPairs.put(key, URLDecoder.decode(pair.substring(idx + 1), "UTF-8")); } else { queryPairs.put(key, queryPairs.get(key) + ", " + URLDecoder.decode(pair.substring(idx + 1), "UTF-8")); } } return queryPairs; } }
/*
 * Copyright 2012 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package io.netty.buffer;

import io.netty.buffer.CompositeByteBuf.ByteWrapper;
import io.netty.util.internal.PlatformDependent;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.CharBuffer;
import java.nio.charset.Charset;
import java.util.Arrays;

/**
 * Creates a new {@link ByteBuf} by allocating new space or by wrapping
 * or copying existing byte arrays, byte buffers and a string.
 *
 * <h3>Use static import</h3>
 * This class is intended to be used with Java 5 static import statement:
 *
 * <pre>
 * import static io.netty.buffer.{@link Unpooled}.*;
 *
 * {@link ByteBuf} heapBuffer    = buffer(128);
 * {@link ByteBuf} directBuffer  = directBuffer(256);
 * {@link ByteBuf} wrappedBuffer = wrappedBuffer(new byte[128], new byte[256]);
 * {@link ByteBuf} copiedBuffer  = copiedBuffer({@link ByteBuffer}.allocate(128));
 * </pre>
 *
 * <h3>Allocating a new buffer</h3>
 *
 * Two buffer types are provided out of the box.
 *
 * <ul>
 * <li>{@link #buffer(int)} allocates a new fixed-capacity heap buffer.</li>
 * <li>{@link #directBuffer(int)} allocates a new fixed-capacity direct buffer.</li>
 * </ul>
 *
 * <h3>Creating a wrapped buffer</h3>
 *
 * Wrapped buffer is a buffer which is a view of one or more existing
 * byte arrays and byte buffers.  Any changes in the content of the original
 * array or buffer will be visible in the wrapped buffer.  Various wrapper
 * methods are provided and their name is all {@code wrappedBuffer()}.
 * You might want to take a look at the methods that accept varargs closely if
 * you want to create a buffer which is composed of more than one array to
 * reduce the number of memory copy.
 *
 * <h3>Creating a copied buffer</h3>
 *
 * Copied buffer is a deep copy of one or more existing byte arrays, byte
 * buffers or a string.  Unlike a wrapped buffer, there's no shared data
 * between the original data and the copied buffer.  Various copy methods are
 * provided and their name is all {@code copiedBuffer()}.  It is also convenient
 * to use this operation to merge multiple buffers into one buffer.
 */
public final class Unpooled {

    // All factory methods delegate to this unpooled allocator.
    private static final ByteBufAllocator ALLOC = UnpooledByteBufAllocator.DEFAULT;

    /**
     * Big endian byte order.
     */
    public static final ByteOrder BIG_ENDIAN = ByteOrder.BIG_ENDIAN;

    /**
     * Little endian byte order.
     */
    public static final ByteOrder LITTLE_ENDIAN = ByteOrder.LITTLE_ENDIAN;

    /**
     * A buffer whose capacity is {@code 0}.
     */
    public static final ByteBuf EMPTY_BUFFER = ALLOC.buffer(0, 0);

    static {
        // Several methods below return EMPTY_BUFFER without retaining it;
        // that is only safe because an EmptyByteBuf is never actually released.
        assert EMPTY_BUFFER instanceof EmptyByteBuf: "EMPTY_BUFFER must be an EmptyByteBuf.";
    }

    /**
     * Creates a new big-endian Java heap buffer with reasonably small initial capacity, which
     * expands its capacity boundlessly on demand.
     */
    public static ByteBuf buffer() {
        return ALLOC.heapBuffer();
    }

    /**
     * Creates a new big-endian direct buffer with reasonably small initial capacity, which
     * expands its capacity boundlessly on demand.
     */
    public static ByteBuf directBuffer() {
        return ALLOC.directBuffer();
    }

    /**
     * Creates a new big-endian Java heap buffer with the specified {@code capacity}, which
     * expands its capacity boundlessly on demand.  The new buffer's {@code readerIndex} and
     * {@code writerIndex} are {@code 0}.
     */
    public static ByteBuf buffer(int initialCapacity) {
        return ALLOC.heapBuffer(initialCapacity);
    }

    /**
     * Creates a new big-endian direct buffer with the specified {@code capacity}, which
     * expands its capacity boundlessly on demand.  The new buffer's {@code readerIndex} and
     * {@code writerIndex} are {@code 0}.
     */
    public static ByteBuf directBuffer(int initialCapacity) {
        return ALLOC.directBuffer(initialCapacity);
    }

    /**
     * Creates a new big-endian Java heap buffer with the specified
     * {@code initialCapacity}, that may grow up to {@code maxCapacity}
     * The new buffer's {@code readerIndex} and {@code writerIndex} are
     * {@code 0}.
     */
    public static ByteBuf buffer(int initialCapacity, int maxCapacity) {
        return ALLOC.heapBuffer(initialCapacity, maxCapacity);
    }

    /**
     * Creates a new big-endian direct buffer with the specified
     * {@code initialCapacity}, that may grow up to {@code maxCapacity}.
     * The new buffer's {@code readerIndex} and {@code writerIndex} are
     * {@code 0}.
     */
    public static ByteBuf directBuffer(int initialCapacity, int maxCapacity) {
        return ALLOC.directBuffer(initialCapacity, maxCapacity);
    }

    /**
     * Creates a new big-endian buffer which wraps the specified {@code array}.
     * A modification on the specified array's content will be visible to the
     * returned buffer.
     */
    public static ByteBuf wrappedBuffer(byte[] array) {
        if (array.length == 0) {
            return EMPTY_BUFFER;
        }
        return new UnpooledHeapByteBuf(ALLOC, array, array.length);
    }

    /**
     * Creates a new big-endian buffer which wraps the sub-region of the
     * specified {@code array}.  A modification on the specified array's
     * content will be visible to the returned buffer.
     */
    public static ByteBuf wrappedBuffer(byte[] array, int offset, int length) {
        if (length == 0) {
            return EMPTY_BUFFER;
        }
        if (offset == 0 && length == array.length) {
            return wrappedBuffer(array);
        }
        return wrappedBuffer(array).slice(offset, length);
    }

    /**
     * Creates a new buffer which wraps the specified NIO buffer's current
     * slice.  A modification on the specified buffer's content will be
     * visible to the returned buffer.
     */
    public static ByteBuf wrappedBuffer(ByteBuffer buffer) {
        if (!buffer.hasRemaining()) {
            return EMPTY_BUFFER;
        }
        if (!buffer.isDirect() && buffer.hasArray()) {
            // Heap buffer with an accessible backing array: wrap the array
            // region directly and keep the source buffer's byte order.
            return wrappedBuffer(
                    buffer.array(),
                    buffer.arrayOffset() + buffer.position(),
                    buffer.remaining()).order(buffer.order());
        } else if (PlatformDependent.hasUnsafe()) {
            if (buffer.isReadOnly()) {
                if (buffer.isDirect()) {
                    return new ReadOnlyUnsafeDirectByteBuf(ALLOC, buffer);
                } else {
                    return new ReadOnlyByteBufferBuf(ALLOC, buffer);
                }
            } else {
                return new UnpooledUnsafeDirectByteBuf(ALLOC, buffer, buffer.remaining());
            }
        } else {
            if (buffer.isReadOnly()) {
                return new ReadOnlyByteBufferBuf(ALLOC, buffer);
            } else {
                return new UnpooledDirectByteBuf(ALLOC, buffer, buffer.remaining());
            }
        }
    }

    /**
     * Creates a new buffer which wraps the specified memory address. If {@code doFree} is true the
     * memoryAddress will automatically be freed once the reference count of the {@link ByteBuf} reaches {@code 0}.
     */
    public static ByteBuf wrappedBuffer(long memoryAddress, int size, boolean doFree) {
        return new WrappedUnpooledUnsafeDirectByteBuf(ALLOC, memoryAddress, size, doFree);
    }

    /**
     * Creates a new buffer which wraps the specified buffer's readable bytes.
     * A modification on the specified buffer's content will be visible to the
     * returned buffer.
     * @param buffer The buffer to wrap. Reference count ownership of this variable is transferred to this method.
     * @return The readable portion of the {@code buffer}, or an empty buffer if there is no readable portion.
     * The caller is responsible for releasing this buffer.
     */
    public static ByteBuf wrappedBuffer(ByteBuf buffer) {
        if (buffer.isReadable()) {
            return buffer.slice();
        } else {
            // Ownership was transferred to this method, so an unreadable
            // buffer must be released before the shared EMPTY_BUFFER is returned.
            buffer.release();
            return EMPTY_BUFFER;
        }
    }

    /**
     * Creates a new big-endian composite buffer which wraps the specified
     * arrays without copying them.  A modification on the specified arrays'
     * content will be visible to the returned buffer.
     */
    public static ByteBuf wrappedBuffer(byte[]... arrays) {
        return wrappedBuffer(arrays.length, arrays);
    }

    /**
     * Creates a new big-endian composite buffer which wraps the readable bytes of the
     * specified buffers without copying them.  A modification on the content
     * of the specified buffers will be visible to the returned buffer.
     * @param buffers The buffers to wrap. Reference count ownership of all variables is transferred to this method.
     * @return The readable portion of the {@code buffers}. The caller is responsible for releasing this buffer.
     */
    public static ByteBuf wrappedBuffer(ByteBuf... buffers) {
        return wrappedBuffer(buffers.length, buffers);
    }

    /**
     * Creates a new big-endian composite buffer which wraps the slices of the specified
     * NIO buffers without copying them.  A modification on the content of the
     * specified buffers will be visible to the returned buffer.
     */
    public static ByteBuf wrappedBuffer(ByteBuffer... buffers) {
        return wrappedBuffer(buffers.length, buffers);
    }

    // Shared implementation for the byte[]-array and ByteBuffer-array wrappers:
    // returns EMPTY_BUFFER for no/empty components, unwraps a single non-empty
    // component, and builds a CompositeByteBuf starting at the first non-empty one.
    // NOTE(review): a null element in a multi-element array yields EMPTY_BUFFER
    // rather than throwing — presumably intentional upstream behavior; verify.
    static <T> ByteBuf wrappedBuffer(int maxNumComponents, ByteWrapper<T> wrapper, T[] array) {
        switch (array.length) {
        case 0:
            break;
        case 1:
            if (!wrapper.isEmpty(array[0])) {
                return wrapper.wrap(array[0]);
            }
            break;
        default:
            for (int i = 0, len = array.length; i < len; i++) {
                T bytes = array[i];
                if (bytes == null) {
                    return EMPTY_BUFFER;
                }
                if (!wrapper.isEmpty(bytes)) {
                    return new CompositeByteBuf(ALLOC, false, maxNumComponents, wrapper, array, i);
                }
            }
        }
        return EMPTY_BUFFER;
    }

    /**
     * Creates a new big-endian composite buffer which wraps the specified
     * arrays without copying them.  A modification on the specified arrays'
     * content will be visible to the returned buffer.
     */
    public static ByteBuf wrappedBuffer(int maxNumComponents, byte[]... arrays) {
        return wrappedBuffer(maxNumComponents, CompositeByteBuf.BYTE_ARRAY_WRAPPER, arrays);
    }

    /**
     * Creates a new big-endian composite buffer which wraps the readable bytes of the
     * specified buffers without copying them.  A modification on the content
     * of the specified buffers will be visible to the returned buffer.
     * @param maxNumComponents Advisement as to how many independent buffers are allowed to exist before
     * consolidation occurs.
     * @param buffers The buffers to wrap. Reference count ownership of all variables is transferred to this method.
     * @return The readable portion of the {@code buffers}. The caller is responsible for releasing this buffer.
     */
    public static ByteBuf wrappedBuffer(int maxNumComponents, ByteBuf... buffers) {
        switch (buffers.length) {
        case 0:
            break;
        case 1:
            ByteBuf buffer = buffers[0];
            if (buffer.isReadable()) {
                return wrappedBuffer(buffer.order(BIG_ENDIAN));
            } else {
                buffer.release();
            }
            break;
        default:
            for (int i = 0; i < buffers.length; i++) {
                ByteBuf buf = buffers[i];
                if (buf.isReadable()) {
                    // The composite takes over the remaining buffers (from index i);
                    // earlier, unreadable ones were already released above.
                    return new CompositeByteBuf(ALLOC, false, maxNumComponents, buffers, i);
                }
                buf.release();
            }
            break;
        }
        return EMPTY_BUFFER;
    }

    /**
     * Creates a new big-endian composite buffer which wraps the slices of the specified
     * NIO buffers without copying them.  A modification on the content of the
     * specified buffers will be visible to the returned buffer.
     */
    public static ByteBuf wrappedBuffer(int maxNumComponents, ByteBuffer... buffers) {
        return wrappedBuffer(maxNumComponents, CompositeByteBuf.BYTE_BUFFER_WRAPPER, buffers);
    }

    /**
     * Returns a new big-endian composite buffer with no components.
     */
    public static CompositeByteBuf compositeBuffer() {
        return compositeBuffer(AbstractByteBufAllocator.DEFAULT_MAX_COMPONENTS);
    }

    /**
     * Returns a new big-endian composite buffer with no components.
     */
    public static CompositeByteBuf compositeBuffer(int maxNumComponents) {
        return new CompositeByteBuf(ALLOC, false, maxNumComponents);
    }

    /**
     * Creates a new big-endian buffer whose content is a copy of the
     * specified {@code array}.  The new buffer's {@code readerIndex} and
     * {@code writerIndex} are {@code 0} and {@code array.length} respectively.
     */
    public static ByteBuf copiedBuffer(byte[] array) {
        if (array.length == 0) {
            return EMPTY_BUFFER;
        }
        return wrappedBuffer(array.clone());
    }

    /**
     * Creates a new big-endian buffer whose content is a copy of the
     * specified {@code array}'s sub-region.  The new buffer's
     * {@code readerIndex} and {@code writerIndex} are {@code 0} and
     * the specified {@code length} respectively.
     */
    public static ByteBuf copiedBuffer(byte[] array, int offset, int length) {
        if (length == 0) {
            return EMPTY_BUFFER;
        }
        byte[] copy = PlatformDependent.allocateUninitializedArray(length);
        System.arraycopy(array, offset, copy, 0, length);
        return wrappedBuffer(copy);
    }

    /**
     * Creates a new buffer whose content is a copy of the specified
     * {@code buffer}'s current slice.  The new buffer's {@code readerIndex}
     * and {@code writerIndex} are {@code 0} and {@code buffer.remaining}
     * respectively.
     */
    public static ByteBuf copiedBuffer(ByteBuffer buffer) {
        int length = buffer.remaining();
        if (length == 0) {
            return EMPTY_BUFFER;
        }
        byte[] copy = PlatformDependent.allocateUninitializedArray(length);
        // Duplicate the buffer so we not adjust the position during our get operation.
        // See https://github.com/netty/netty/issues/3896
        ByteBuffer duplicate = buffer.duplicate();
        duplicate.get(copy);
        return wrappedBuffer(copy).order(duplicate.order());
    }

    /**
     * Creates a new buffer whose content is a copy of the specified
     * {@code buffer}'s readable bytes.  The new buffer's {@code readerIndex}
     * and {@code writerIndex} are {@code 0} and {@code buffer.readableBytes}
     * respectively.
     */
    public static ByteBuf copiedBuffer(ByteBuf buffer) {
        int readable = buffer.readableBytes();
        if (readable > 0) {
            ByteBuf copy = buffer(readable);
            copy.writeBytes(buffer, buffer.readerIndex(), readable);
            return copy;
        } else {
            return EMPTY_BUFFER;
        }
    }

    /**
     * Creates a new big-endian buffer whose content is a merged copy of
     * the specified {@code arrays}.  The new buffer's {@code readerIndex}
     * and {@code writerIndex} are {@code 0} and the sum of all arrays'
     * {@code length} respectively.
     */
    public static ByteBuf copiedBuffer(byte[]... arrays) {
        switch (arrays.length) {
        case 0:
            return EMPTY_BUFFER;
        case 1:
            if (arrays[0].length == 0) {
                return EMPTY_BUFFER;
            } else {
                return copiedBuffer(arrays[0]);
            }
        }

        // Merge the specified arrays into one array.
        int length = 0;
        for (byte[] a: arrays) {
            if (Integer.MAX_VALUE - length < a.length) {
                throw new IllegalArgumentException(
                        "The total length of the specified arrays is too big.");
            }
            length += a.length;
        }

        if (length == 0) {
            return EMPTY_BUFFER;
        }

        byte[] mergedArray = PlatformDependent.allocateUninitializedArray(length);
        for (int i = 0, j = 0; i < arrays.length; i ++) {
            byte[] a = arrays[i];
            System.arraycopy(a, 0, mergedArray, j, a.length);
            j += a.length;
        }

        return wrappedBuffer(mergedArray);
    }

    /**
     * Creates a new buffer whose content is a merged copy of the specified
     * {@code buffers}' readable bytes.  The new buffer's {@code readerIndex}
     * and {@code writerIndex} are {@code 0} and the sum of all buffers'
     * {@code readableBytes} respectively.
     *
     * @throws IllegalArgumentException
     *         if the specified buffers' endianness are different from each
     *         other
     */
    public static ByteBuf copiedBuffer(ByteBuf... buffers) {
        switch (buffers.length) {
        case 0:
            return EMPTY_BUFFER;
        case 1:
            return copiedBuffer(buffers[0]);
        }

        // Merge the specified buffers into one buffer.
        ByteOrder order = null;
        int length = 0;
        for (ByteBuf b: buffers) {
            int bLen = b.readableBytes();
            if (bLen <= 0) {
                continue;
            }
            if (Integer.MAX_VALUE - length < bLen) {
                throw new IllegalArgumentException(
                        "The total length of the specified buffers is too big.");
            }
            length += bLen;
            // All non-empty buffers must agree on byte order; the first one wins.
            if (order != null) {
                if (!order.equals(b.order())) {
                    throw new IllegalArgumentException("inconsistent byte order");
                }
            } else {
                order = b.order();
            }
        }

        if (length == 0) {
            return EMPTY_BUFFER;
        }

        byte[] mergedArray = PlatformDependent.allocateUninitializedArray(length);
        for (int i = 0, j = 0; i < buffers.length; i ++) {
            ByteBuf b = buffers[i];
            int bLen = b.readableBytes();
            b.getBytes(b.readerIndex(), mergedArray, j, bLen);
            j += bLen;
        }

        return wrappedBuffer(mergedArray).order(order);
    }

    /**
     * Creates a new buffer whose content is a merged copy of the specified
     * {@code buffers}' slices.  The new buffer's {@code readerIndex} and
     * {@code writerIndex} are {@code 0} and the sum of all buffers'
     * {@code remaining} respectively.
     *
     * @throws IllegalArgumentException
     *         if the specified buffers' endianness are different from each
     *         other
     */
    public static ByteBuf copiedBuffer(ByteBuffer... buffers) {
        switch (buffers.length) {
        case 0:
            return EMPTY_BUFFER;
        case 1:
            return copiedBuffer(buffers[0]);
        }

        // Merge the specified buffers into one buffer.
        ByteOrder order = null;
        int length = 0;
        for (ByteBuffer b: buffers) {
            int bLen = b.remaining();
            if (bLen <= 0) {
                continue;
            }
            if (Integer.MAX_VALUE - length < bLen) {
                throw new IllegalArgumentException(
                        "The total length of the specified buffers is too big.");
            }
            length += bLen;
            if (order != null) {
                if (!order.equals(b.order())) {
                    throw new IllegalArgumentException("inconsistent byte order");
                }
            } else {
                order = b.order();
            }
        }

        if (length == 0) {
            return EMPTY_BUFFER;
        }

        byte[] mergedArray = PlatformDependent.allocateUninitializedArray(length);
        for (int i = 0, j = 0; i < buffers.length; i ++) {
            // Duplicate the buffer so we not adjust the position during our get operation.
            // See https://github.com/netty/netty/issues/3896
            ByteBuffer b = buffers[i].duplicate();
            int bLen = b.remaining();
            b.get(mergedArray, j, bLen);
            j += bLen;
        }

        return wrappedBuffer(mergedArray).order(order);
    }

    /**
     * Creates a new big-endian buffer whose content is the specified
     * {@code string} encoded in the specified {@code charset}.
     * The new buffer's {@code readerIndex} and {@code writerIndex} are
     * {@code 0} and the length of the encoded string respectively.
     */
    public static ByteBuf copiedBuffer(CharSequence string, Charset charset) {
        if (string == null) {
            throw new NullPointerException("string");
        }

        if (string instanceof CharBuffer) {
            return copiedBuffer((CharBuffer) string, charset);
        }

        return copiedBuffer(CharBuffer.wrap(string), charset);
    }

    /**
     * Creates a new big-endian buffer whose content is a subregion of
     * the specified {@code string} encoded in the specified {@code charset}.
     * The new buffer's {@code readerIndex} and {@code writerIndex} are
     * {@code 0} and the length of the encoded string respectively.
     */
    public static ByteBuf copiedBuffer(
            CharSequence string, int offset, int length, Charset charset) {
        if (string == null) {
            throw new NullPointerException("string");
        }
        if (length == 0) {
            return EMPTY_BUFFER;
        }

        if (string instanceof CharBuffer) {
            CharBuffer buf = (CharBuffer) string;
            if (buf.hasArray()) {
                return copiedBuffer(
                        buf.array(),
                        buf.arrayOffset() + buf.position() + offset,
                        length, charset);
            }

            // No accessible backing array: narrow a slice to the requested window.
            buf = buf.slice();
            buf.limit(length);
            buf.position(offset);
            return copiedBuffer(buf, charset);
        }

        return copiedBuffer(CharBuffer.wrap(string, offset, offset + length), charset);
    }

    /**
     * Creates a new big-endian buffer whose content is the specified
     * {@code array} encoded in the specified {@code charset}.
     * The new buffer's {@code readerIndex} and {@code writerIndex} are
     * {@code 0} and the length of the encoded string respectively.
     */
    public static ByteBuf copiedBuffer(char[] array, Charset charset) {
        if (array == null) {
            throw new NullPointerException("array");
        }
        return copiedBuffer(array, 0, array.length, charset);
    }

    /**
     * Creates a new big-endian buffer whose content is a subregion of
     * the specified {@code array} encoded in the specified {@code charset}.
     * The new buffer's {@code readerIndex} and {@code writerIndex} are
     * {@code 0} and the length of the encoded string respectively.
     */
    public static ByteBuf copiedBuffer(char[] array, int offset, int length, Charset charset) {
        if (array == null) {
            throw new NullPointerException("array");
        }
        if (length == 0) {
            return EMPTY_BUFFER;
        }
        return copiedBuffer(CharBuffer.wrap(array, offset, length), charset);
    }

    // Encodes the CharBuffer's remaining chars into a newly allocated buffer.
    private static ByteBuf copiedBuffer(CharBuffer buffer, Charset charset) {
        return ByteBufUtil.encodeString0(ALLOC, true, buffer, charset, 0);
    }

    /**
     * Creates a read-only buffer which disallows any modification operations
     * on the specified {@code buffer}.  The new buffer has the same
     * {@code readerIndex} and {@code writerIndex} with the specified
     * {@code buffer}.
     *
     * @deprecated Use {@link ByteBuf#asReadOnly()}.
     */
    @Deprecated
    public static ByteBuf unmodifiableBuffer(ByteBuf buffer) {
        ByteOrder endianness = buffer.order();
        if (endianness == BIG_ENDIAN) {
            return new ReadOnlyByteBuf(buffer);
        }

        // ReadOnlyByteBuf wraps a big-endian view; re-apply little-endian afterwards.
        return new ReadOnlyByteBuf(buffer.order(BIG_ENDIAN)).order(LITTLE_ENDIAN);
    }

    /**
     * Creates a new 4-byte big-endian buffer that holds the specified 32-bit integer.
     */
    public static ByteBuf copyInt(int value) {
        ByteBuf buf = buffer(4);
        buf.writeInt(value);
        return buf;
    }

    /**
     * Create a big-endian buffer that holds a sequence of the specified 32-bit integers.
     */
    public static ByteBuf copyInt(int... values) {
        if (values == null || values.length == 0) {
            return EMPTY_BUFFER;
        }
        ByteBuf buffer = buffer(values.length * 4);
        for (int v: values) {
            buffer.writeInt(v);
        }
        return buffer;
    }

    /**
     * Creates a new 2-byte big-endian buffer that holds the specified 16-bit integer.
     */
    public static ByteBuf copyShort(int value) {
        ByteBuf buf = buffer(2);
        buf.writeShort(value);
        return buf;
    }

    /**
     * Create a new big-endian buffer that holds a sequence of the specified 16-bit integers.
     */
    public static ByteBuf copyShort(short... values) {
        if (values == null || values.length == 0) {
            return EMPTY_BUFFER;
        }
        ByteBuf buffer = buffer(values.length * 2);
        for (int v: values) {
            buffer.writeShort(v);
        }
        return buffer;
    }

    /**
     * Create a new big-endian buffer that holds a sequence of the specified 16-bit integers.
     */
    public static ByteBuf copyShort(int... values) {
        if (values == null || values.length == 0) {
            return EMPTY_BUFFER;
        }
        ByteBuf buffer = buffer(values.length * 2);
        for (int v: values) {
            buffer.writeShort(v);
        }
        return buffer;
    }

    /**
     * Creates a new 3-byte big-endian buffer that holds the specified 24-bit integer.
     */
    public static ByteBuf copyMedium(int value) {
        ByteBuf buf = buffer(3);
        buf.writeMedium(value);
        return buf;
    }

    /**
     * Create a new big-endian buffer that holds a sequence of the specified 24-bit integers.
     */
    public static ByteBuf copyMedium(int... values) {
        if (values == null || values.length == 0) {
            return EMPTY_BUFFER;
        }
        ByteBuf buffer = buffer(values.length * 3);
        for (int v: values) {
            buffer.writeMedium(v);
        }
        return buffer;
    }

    /**
     * Creates a new 8-byte big-endian buffer that holds the specified 64-bit integer.
     */
    public static ByteBuf copyLong(long value) {
        ByteBuf buf = buffer(8);
        buf.writeLong(value);
        return buf;
    }

    /**
     * Create a new big-endian buffer that holds a sequence of the specified 64-bit integers.
     */
    public static ByteBuf copyLong(long... values) {
        if (values == null || values.length == 0) {
            return EMPTY_BUFFER;
        }
        ByteBuf buffer = buffer(values.length * 8);
        for (long v: values) {
            buffer.writeLong(v);
        }
        return buffer;
    }

    /**
     * Creates a new single-byte big-endian buffer that holds the specified boolean value.
     */
    public static ByteBuf copyBoolean(boolean value) {
        ByteBuf buf = buffer(1);
        buf.writeBoolean(value);
        return buf;
    }

    /**
     * Create a new big-endian buffer that holds a sequence of the specified boolean values.
     */
    public static ByteBuf copyBoolean(boolean... values) {
        if (values == null || values.length == 0) {
            return EMPTY_BUFFER;
        }
        ByteBuf buffer = buffer(values.length);
        for (boolean v: values) {
            buffer.writeBoolean(v);
        }
        return buffer;
    }

    /**
     * Creates a new 4-byte big-endian buffer that holds the specified 32-bit floating point number.
     */
    public static ByteBuf copyFloat(float value) {
        ByteBuf buf = buffer(4);
        buf.writeFloat(value);
        return buf;
    }

    /**
     * Create a new big-endian buffer that holds a sequence of the specified 32-bit floating point numbers.
     */
    public static ByteBuf copyFloat(float... values) {
        if (values == null || values.length == 0) {
            return EMPTY_BUFFER;
        }
        ByteBuf buffer = buffer(values.length * 4);
        for (float v: values) {
            buffer.writeFloat(v);
        }
        return buffer;
    }

    /**
     * Creates a new 8-byte big-endian buffer that holds the specified 64-bit floating point number.
     */
    public static ByteBuf copyDouble(double value) {
        ByteBuf buf = buffer(8);
        buf.writeDouble(value);
        return buf;
    }

    /**
     * Create a new big-endian buffer that holds a sequence of the specified 64-bit floating point numbers.
     */
    public static ByteBuf copyDouble(double... values) {
        if (values == null || values.length == 0) {
            return EMPTY_BUFFER;
        }
        ByteBuf buffer = buffer(values.length * 8);
        for (double v: values) {
            buffer.writeDouble(v);
        }
        return buffer;
    }

    /**
     * Return a unreleasable view on the given {@link ByteBuf} which will just ignore release and retain calls.
     */
    public static ByteBuf unreleasableBuffer(ByteBuf buf) {
        return new UnreleasableByteBuf(buf);
    }

    /**
     * Wrap the given {@link ByteBuf}s in an unmodifiable {@link ByteBuf}. Be aware the returned {@link ByteBuf} will
     * not try to slice the given {@link ByteBuf}s to reduce GC-Pressure.
     *
     * @deprecated Use {@link #wrappedUnmodifiableBuffer(ByteBuf...)}.
     */
    @Deprecated
    public static ByteBuf unmodifiableBuffer(ByteBuf... buffers) {
        return wrappedUnmodifiableBuffer(true, buffers);
    }

    /**
     * Wrap the given {@link ByteBuf}s in an unmodifiable {@link ByteBuf}. Be aware the returned {@link ByteBuf} will
     * not try to slice the given {@link ByteBuf}s to reduce GC-Pressure.
     *
     * The returned {@link ByteBuf} may wrap the provided array directly, and so should not be subsequently modified.
     */
    public static ByteBuf wrappedUnmodifiableBuffer(ByteBuf... buffers) {
        return wrappedUnmodifiableBuffer(false, buffers);
    }

    // When copy is true the caller's array is defensively copied so later
    // mutation of the argument cannot affect the composite view.
    private static ByteBuf wrappedUnmodifiableBuffer(boolean copy, ByteBuf... buffers) {
        switch (buffers.length) {
        case 0:
            return EMPTY_BUFFER;
        case 1:
            return buffers[0].asReadOnly();
        default:
            if (copy) {
                buffers = Arrays.copyOf(buffers, buffers.length, ByteBuf[].class);
            }
            return new FixedCompositeByteBuf(ALLOC, buffers);
        }
    }

    private Unpooled() {
        // Unused
    }
}
/*
 * Copyright (c) 2016 Aditya Vaidyam
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package org.kihara.util;

import jdk.nashorn.api.scripting.ClassFilter;
import jdk.nashorn.api.scripting.NashornException;
import jdk.nashorn.api.scripting.ScriptObjectMirror;
import jdk.nashorn.internal.codegen.Compiler;
import jdk.nashorn.internal.ir.FunctionNode;
import jdk.nashorn.internal.ir.debug.ASTWriter;
import jdk.nashorn.internal.ir.debug.PrintVisitor;
import jdk.nashorn.internal.objects.Global;
import jdk.nashorn.internal.parser.Parser;
import jdk.nashorn.internal.runtime.*;
import jdk.nashorn.internal.runtime.options.Options;

import javax.script.*;
import java.io.*;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.List;
import java.util.function.Consumer;
import java.util.function.Supplier;

// Internal Nashorn Environment
/**
 * Command line Shell for processing JavaScript files.
 *
 * <p>Drives the Nashorn engine directly (parser, compiler, runtime) to
 * compile scripts, run script files, or host an interactive REPL. A class
 * filter blocks access to a set of security-sensitive JDK classes.
 */
public final class JavascriptEngine {

    // Bootstrap script evaluated in the REPL's global scope; defines the
    // non-enumerable `input` and `evalinput` helpers. This is runtime data —
    // do not reformat.
    private static final String SHELL_INIT_JS =
            "Object.defineProperty(this, \"input\", {\n" +
            "    value: function input(endMarker, prompt) {\n" +
            "        if (!endMarker) {\n" +
            "            endMarker = \"\";\n" +
            "        }\n" +
            "\n" +
            "        if (!prompt) {\n" +
            "            prompt = \" >> \";\n" +
            "        }\n" +
            "\n" +
            "        var imports = new JavaImporter(java.io, java.lang);\n" +
            "        var str = \"\";\n" +
            "        with (imports) {\n" +
            "            var reader = new BufferedReader(new InputStreamReader(System['in']));\n" +
            "            var line;\n" +
            "            while (true) {\n" +
            "                System.out.print(prompt);\n" +
            "                line = reader.readLine();\n" +
            "                if (line == null || line == endMarker) {\n" +
            "                    break;\n" +
            "                }\n" +
            "                str += line + \"\\n\";\n" +
            "            }\n" +
            "        }\n" +
            "\n" +
            "        return str;\n" +
            "    },\n" +
            "    enumerable: false,\n" +
            "    writable: true,\n" +
            "    configurable: true\n" +
            "});\n" +
            "\n" +
            "Object.defineProperty(this, \"evalinput\", {\n" +
            "    value: function evalinput(endMarker, prompt) {\n" +
            "        var code = input(endMarker, prompt);\n" +
            "        // make sure everything is evaluated in global scope!\n" +
            "        return this.eval(code);\n" +
            "    },\n" +
            "    enumerable: false,\n" +
            "    writable: true,\n" +
            "    configurable: true\n" +
            "});\n";

    // CLI Exit Codes
    public static final int SUCCESS = 0;
    public static final int COMMANDLINE_ERROR = 100;
    public static final int COMPILATION_ERROR = 101;
    public static final int RUNTIME_ERROR = 102;
    public static final int IO_ERROR = 103;
    public static final int INTERNAL_ERROR = 104;

    // Hooks the host application can set before starting the shell:
    // `bindings` customizes the engine-scope bindings, `script` supplies
    // extra initialization source evaluated after SHELL_INIT_JS.
    public static Consumer<Bindings> bindings = n -> {};
    public static Supplier<String> script = () -> "";

    private JavascriptEngine() {}

    /**
     * Starting point for executing a {@code Shell}. Starts a shell with the
     * given arguments and streams and lets it run until exit.
     *
     * @param in input stream for Shell
     * @param out output stream for Shell
     * @param err error stream for Shell
     *
     * @return exit code
     *
     * @throws IOException if there's a problem setting up the streams
     */
    public static int shell(final InputStream in, final OutputStream out, final OutputStream err) throws IOException {
        return new JavascriptEngine().run(in, out, err);
    }

    /**
     * Run method logic.
     *
     * @param in input stream for Shell
     * @param out output stream for Shell
     * @param err error stream for Shell
     *
     * @return exit code
     *
     * @throws IOException if there's a problem setting up the streams
     */
    protected final int run(final InputStream in, final OutputStream out, final OutputStream err) throws IOException {
        final Context context = makeContext(in, out, err);
        if (context == null) {
            return COMMANDLINE_ERROR;
        }

        final ScriptObject global = context.createGlobal();
        final ScriptEnvironment env = context.getEnv();
        final List<String> files = env.getFiles();

        // Prepare bindings here.
        SimpleScriptContext scriptContext = new SimpleScriptContext();
        ScriptObjectMirror mirror = (ScriptObjectMirror)ScriptObjectMirror.wrap(global, global);
        scriptContext.setBindings(mirror, ScriptContext.ENGINE_SCOPE);
        bindings.accept(scriptContext.getBindings(ScriptContext.ENGINE_SCOPE));

        // TODO: JDK 1.8u65 method only. Will invoke old call if fails.
        try {
            Global g = ((Global)global);
            ScriptEngine s = new ScriptEngineManager().getEngineByName("nashorn");
            try {
                // 8u65+ signature: initBuiltinObjects(ScriptEngine, ScriptContext)
                Method m = Global.class.getMethod("initBuiltinObjects", ScriptEngine.class, ScriptContext.class);
                m.invoke(g, s, scriptContext);
            } catch (NoSuchMethodException e) {
                try {
                    // Pre-8u65 signature: initBuiltinObjects(ScriptEngine)
                    Method m = Global.class.getMethod("initBuiltinObjects", ScriptEngine.class);
                    m.invoke(g, s);
                    g.setScriptContext(scriptContext);
                } catch (NoSuchMethodException ee) {
                    System.err.println("COULD NOT INIT!");
                    throw new IOException("COULD NOT INIT!");
                }
            }
        } catch (Exception ignored) {
            // Best-effort initialization: an unusable JDK-internal API here
            // should not prevent the shell from running at all.
        }

        if (files.isEmpty()) {
            return readEvalPrint(context, global);
        }

        if (env._compile_only) {
            return compileScripts(context, global, files);
        }

        if (env._fx) {
            return runFXScripts(context, global, files);
        }

        return runScripts(context, global, files);
    }

    // These classes are prohibited by the runtime engine.
    // Cannot be invoked without a proper ACL and SecurityManager context.
    // Provide replacement methods of operation where appropriate.
    private static final String[] prohibitedClasses = {
            "java.lang.System",
            "java.lang.ClassLoader",
            "java.lang.RuntimePermission",
            "java.lang.SecurityManager",
            "java.lang.instrument.ClassDefinition",
            "java.lang.invoke.CallSite",
            "java.lang.invoke.MethodHandle",
            "java.lang.invoke.MethodType",
            "java.lang.reflect.Field",
            "java.lang.reflect.Method",
            "java.lang.reflect.Proxy",
            "java.lang.reflect.Constructor",
            "javax.script.ScriptEngine",
            "javax.script.ScriptEngineFactory",
            "javax.script.ScriptEngineManager"};

    /**
     * Make a new Nashorn Context to compile and/or run JavaScript files.
     *
     * @param i input stream for Shell
     * @param o output stream for Shell
     * @param e error stream for Shell
     *
     * @return null if there are problems with option parsing.
     */
    @SuppressWarnings("resource")
    private static Context makeContext(final InputStream i, final OutputStream o, final OutputStream e) {
        final PrintWriter out = new PrintWriter(o instanceof PrintStream ? (PrintStream)o : new PrintStream(o), true);
        final PrintWriter err = new PrintWriter(e instanceof PrintStream ? (PrintStream)e : new PrintStream(e), true);
        final ClassLoader loader = Thread.currentThread().getContextClassLoader();

        final ErrorManager errors = new ErrorManager(err);
        final Options options = new Options("nashorn", err);
        options.set("scripting", true);
        options.set("language", "es6");
        options.set("optimistic.types", true);
        options.set("lazy.compilation", true);

        // Deny script access to the security-sensitive classes listed above.
        // (Idiom fix: anyMatch replaces filter(...).findAny().isPresent().)
        final ClassFilter filter = (name) -> {
            if (Arrays.stream(prohibitedClasses).parallel().anyMatch(name::equals)) {
                System.err.println("Access Manager: Denied item " + name + ".");
                return false;
            } else return true;
        };

        return new Context(options, errors, out, err, loader, filter);
    }

    /**
     * Compiles the given script files in the command line
     *
     * @param context the nashorn context
     * @param global the global scope
     * @param files the list of script files to compile
     *
     * @return error code
     * @throws IOException when any script file read results in I/O error
     */
    private static int compileScripts(final Context context, final ScriptObject global, final List<String> files) throws IOException {
        final ScriptObject oldGlobal = Context.getGlobal();
        final boolean globalChanged = (oldGlobal != global);
        final ScriptEnvironment env = context.getEnv();
        try {
            if (globalChanged) {
                Context.setGlobal(global);
            }
            final ErrorManager errors = context.getErrorManager();

            // For each file on the command line.
            for (final String fileName : files) {
                final FunctionNode functionNode = new Parser(env, Source.sourceFor(fileName, new File(fileName)), errors, env._strict, 0, context.getLogger(Parser.class)).parse();

                if (errors.getNumberOfErrors() != 0) {
                    return COMPILATION_ERROR;
                }

                new Compiler(
                        context,
                        env,
                        null, //null - pass no code installer - this is compile only
                        functionNode.getSource(),
                        context.getErrorManager(),
                        env._strict | functionNode.isStrict()).
                        compile(functionNode, Compiler.CompilationPhases.COMPILE_ALL_NO_INSTALL);
                //*/

                /* Compiler.forNoInstallerCompilation(
                        context,
                        functionNode.getSource(),
                        env._strict | functionNode.isStrict()).
                        compile(functionNode, Compiler.CompilationPhases.COMPILE_ALL_NO_INSTALL);
                //*/

                if (env._print_ast) {
                    context.getErr().println(new ASTWriter(functionNode));
                }

                if (env._print_parse) {
                    context.getErr().println(new PrintVisitor(functionNode));
                }

                if (errors.getNumberOfErrors() != 0) {
                    return COMPILATION_ERROR;
                }
            }
        } finally {
            env.getOut().flush();
            env.getErr().flush();
            if (globalChanged) {
                Context.setGlobal(oldGlobal);
            }
        }

        return SUCCESS;
    }

    /**
     * Runs the given JavaScript files in the command line
     *
     * @param context the nashorn context
     * @param global the global scope
     * @param files the list of script files to run
     *
     * @return error code
     * @throws IOException when any script file read results in I/O error
     */
    private int runScripts(final Context context, final ScriptObject global, final List<String> files) throws IOException {
        final ScriptObject oldGlobal = Context.getGlobal();
        final boolean globalChanged = (oldGlobal != global);
        try {
            if (globalChanged) {
                Context.setGlobal(global);
            }
            final ErrorManager errors = context.getErrorManager();

            // For each file on the command line.
            for (final String fileName : files) {
                if ("-".equals(fileName)) {
                    // "-" means: drop into the REPL at this point.
                    final int res = readEvalPrint(context, global);
                    if (res != SUCCESS) {
                        return res;
                    }
                    continue;
                }

                final File file = new File(fileName);
                final ScriptFunction script = context.compileScript(Source.sourceFor(fileName, file.toURI().toURL()), global);
                if (script == null || errors.getNumberOfErrors() != 0) {
                    return COMPILATION_ERROR;
                }

                try {
                    apply(script, global);
                } catch (final NashornException e) {
                    errors.error(e.toString());
                    if (context.getEnv()._dump_on_error) {
                        e.printStackTrace(context.getErr());
                    }

                    return RUNTIME_ERROR;
                }
            }
        } finally {
            context.getOut().flush();
            context.getErr().flush();
            if (globalChanged) {
                Context.setGlobal(oldGlobal);
            }
        }

        return SUCCESS;
    }

    /**
     * Runs launches "fx:bootstrap.js" with the given JavaScript files provided
     * as arguments.
     *
     * @param context the nashorn context
     * @param global the global scope
     * @param files the list of script files to provide
     *
     * @return error code
     * @throws IOException when any script file read results in I/O error
     */
    private static int runFXScripts(final Context context, final ScriptObject global, final List<String> files) throws IOException {
        final ScriptObject oldGlobal = Context.getGlobal();
        final boolean globalChanged = (oldGlobal != global);
        try {
            if (globalChanged) {
                Context.setGlobal(global);
            }

            global.addOwnProperty("$GLOBAL", Property.NOT_ENUMERABLE, global);
            global.addOwnProperty("$SCRIPTS", Property.NOT_ENUMERABLE, files);
            context.load(global, "fx:bootstrap.js");
        } catch (final NashornException e) {
            context.getErrorManager().error(e.toString());
            if (context.getEnv()._dump_on_error) {
                e.printStackTrace(context.getErr());
            }

            return RUNTIME_ERROR;
        } finally {
            context.getOut().flush();
            context.getErr().flush();
            if (globalChanged) {
                Context.setGlobal(oldGlobal);
            }
        }

        return SUCCESS;
    }

    /**
     * Hook to ScriptFunction "apply". A performance metering shell may
     * introduce enter/exit timing here.
     *
     * @param target target function for apply
     * @param self self reference for apply
     *
     * @return result of the function apply
     */
    protected Object apply(final ScriptFunction target, final Object self) {
        return ScriptRuntime.apply(target, self);
    }

    /**
     * read-eval-print loop for Nashorn shell.
     *
     * @param context the nashorn context
     * @param global global scope object to use
     * @return return code
     */
    @SuppressWarnings("resource")
    private static int readEvalPrint(final Context context, final ScriptObject global) {
        final String prompt = "> ";//bundle.getString("shell.prompt");
        final BufferedReader in = new BufferedReader(new InputStreamReader(System.in));
        final PrintWriter err = context.getErr();
        final ScriptObject oldGlobal = Context.getGlobal();
        final boolean globalChanged = (oldGlobal != global);
        final ScriptEnvironment env = context.getEnv();
        try {
            if (globalChanged) {
                Context.setGlobal(global);
            }

            // initialize with "shell.js" script
            try {
                final Source source = Source.sourceFor("<shell.js>", JavascriptEngine.SHELL_INIT_JS);
                context.eval(global, source.getString(), global, "<shell.js>", false, false);

                // custom scripts
                context.eval(global, script.get(), global, "<shell.js>", false, false);
            } catch (final Exception e) {
                err.println(e);
                if (env._dump_on_error) {
                    e.printStackTrace(err);
                }

                return INTERNAL_ERROR;
            }

            while (true) {
                err.print(prompt);
                err.flush();

                String source = "";
                try {
                    source = in.readLine();
                } catch (final IOException ioe) {
                    err.println(ioe.toString());
                }

                if (source == null) {
                    break;
                }

                if (source.isEmpty()) {
                    continue;
                }

                Object res;
                try {
                    res = context.eval(global, source, global, "<shell>", env._strict, false);
                } catch (final Exception e) {
                    err.println(e);
                    if (env._dump_on_error) {
                        e.printStackTrace(err);
                    }
                    continue;
                }

                if (res != ScriptRuntime.UNDEFINED) {
                    err.println(JSType.toString(res));
                }
            }
        } finally {
            if (globalChanged) {
                // BUG FIX: previously restored `global` (a no-op), leaking the
                // REPL's global scope. Restore the caller's global like the
                // other entry points (runScripts/compileScripts/runFXScripts) do.
                Context.setGlobal(oldGlobal);
            }
        }

        return SUCCESS;
    }
}
/* Copyright (C) 2014 Parrot SA Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Parrot nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package com.parrot.arsdk.arnetwork; import com.parrot.arsdk.arsal.ARSALPrint; import com.parrot.arsdk.arsal.ARNativeData; import com.parrot.arsdk.arnetworkal.ARNetworkALManager; /** * Network manager allow to send and receive data acknowledged or not. 
 */
public abstract class ARNetworkManager
{
    private static final String TAG = "NetworkManager";

    /* Native (JNI) entry points backing this wrapper. nativeManager is the
     * C-side ARNETWORK_Manager pointer returned by nativeNew. */
    private static native void nativeStaticInit ();

    // NOTE(review): jerror is passed by value, so the native side cannot report
    // an error code back through it; the "Error:" log in the constructor always
    // prints the initial value — TODO confirm against the JNI implementation.
    private native long nativeNew(long jOSSpecificManagerPtr, int numberOfInput, Object[] inputParamArray, int numberOfOutput, Object[] outputParamArray, int timeBetweenPingsMs, int jerror );
    private native int nativeDelete( long nativeManager);
    private native void nativeStop(long nativeManager);
    private native int nativeFlush(long nativeManager);

    private native int nativeSendData( long nativeManager,int inputBufferID, ARNativeData ARData, long dataPtr, int dataSize, Object customData, int doDataCopy);

    private native int nativeReadData( long nativeManager, int outputBufferID, long dataPointer, int capacity, ARNativeData data);
    private native int nativeTryReadData( long nativeManager, int outputBufferID, long dataPointer, int capacity, ARNativeData data);
    private native int nativeReadDataWithTimeout( long nativeManager, int outputBufferID, long dataPointer, int capacity, ARNativeData data, int timeoutMs);

    // C pointer to the native ARNETWORK_Manager; 0 when construction failed or after dispose().
    private long nativeManager;
    // True only when nativeNew returned a non-null manager.
    private boolean m_initOk;
    // The ARNetworkAL manager this network manager was built on; handed back in disconnectCallback().
    private ARNetworkALManager alManager;

    public SendingRunnable m_sendingRunnable;
    public ReceivingRunnable m_receivingRunnable;

    static
    {
        nativeStaticInit();
    }

    /**
     * Constructor
     * @param osSpecificManager The ARNetworkALManager to use. Must be initialized and valid
     * @param inputParamArray array of the parameters of the input buffers
     * @param outputParamArray array of the parameters of the output buffers
     * @param timeBetweenPingsMs Minimum time between two pings. A negative value means "no pings", Zero means "use default value"
     */
    public ARNetworkManager(ARNetworkALManager osSpecificManager, ARNetworkIOBufferParam inputParamArray[], ARNetworkIOBufferParam outputParamArray[], int timeBetweenPingsMs)
    {
        int error = ARNETWORK_ERROR_ENUM.ARNETWORK_OK.getValue();
        m_initOk = false;
        nativeManager = nativeNew(osSpecificManager.getManager(), inputParamArray.length, inputParamArray, outputParamArray.length, outputParamArray, timeBetweenPingsMs, error);

        ARSALPrint.d (TAG, "Error:" + error );

        if( nativeManager != 0 )
        {
            m_initOk = true;
            m_sendingRunnable = new SendingRunnable(nativeManager);
            m_receivingRunnable = new ReceivingRunnable(nativeManager);
            alManager = osSpecificManager;
        }
    }

    /**
     * Dispose
     * Releases the native manager. Safe to call more than once; subsequent
     * calls are no-ops because m_initOk is cleared.
     */
    public void dispose()
    {
        if(m_initOk == true)
        {
            nativeDelete(nativeManager);
            nativeManager = 0;
            m_initOk = false;
        }
    }

    /**
     * Destructor — last-resort cleanup; callers should invoke dispose() explicitly.
     */
    public void finalize () throws Throwable
    {
        try
        {
            dispose ();
        }
        finally
        {
            super.finalize ();
        }
    }

    /**
     * Stop the threads of sending and reception<br>
     * Used to kill the threads calling ARNETWORK_Manager_SendingThreadRun() and ARNETWORK_Manager_ReceivingThreadRun().
     */
    public void stop()
    {
        if(m_initOk == true)
        {
            nativeStop(nativeManager);
        }
    }

    /**
     * Flush all buffers of the network manager
     * @return error ARNETWORK_ERROR_ENUM
     */
    public ARNETWORK_ERROR_ENUM Flush()
    {
        ARNETWORK_ERROR_ENUM error = ARNETWORK_ERROR_ENUM.ARNETWORK_OK;
        if(m_initOk == true)
        {
            int intError = nativeFlush( nativeManager );
            error = ARNETWORK_ERROR_ENUM.getFromValue(intError);
        }
        else
        {
            error = ARNETWORK_ERROR_ENUM.ARNETWORK_ERROR_BAD_PARAMETER;
        }
        return error;
    }

    /**
     * Add data to send
     * @param inputBufferID identifier of the input buffer in which the data must be stored
     * @param arData data to send
     * @param customData custom data forwarded to the completion callback
     * @param doDataCopy indicator to copy the data in the ARNETWORK_Manager
     * @return error ARNETWORK_ERROR_ENUM
     */
    public ARNETWORK_ERROR_ENUM sendData(int inputBufferID, ARNativeData arData, Object customData,boolean doDataCopy)
    {
        ARNETWORK_ERROR_ENUM error = ARNETWORK_ERROR_ENUM.ARNETWORK_OK;
        // Native side takes an int flag, not a boolean.
        int doDataCopyInt = (doDataCopy) ? 1 : 0;

        if(m_initOk == true)
        {
            long dataPtr = arData.getData();
            int dataSize = arData.getDataSize();

            int intError = nativeSendData (nativeManager, inputBufferID, arData, dataPtr, dataSize, customData, doDataCopyInt );
            error = ARNETWORK_ERROR_ENUM.getFromValue(intError);
        }
        else
        {
            error = ARNETWORK_ERROR_ENUM.ARNETWORK_ERROR_BAD_PARAMETER;
        }

        return error;
    }

    /**
     * Read data received<br>
     * Warning: This is a blocking function
     * @param outputBufferID identifier of the output buffer in which the data must be read
     * @param data Data where store the reading
     * @return error ARNETWORK_ERROR_ENUM type
     */
    public ARNETWORK_ERROR_ENUM readData(int outputBufferID, ARNativeData data)
    {
        ARNETWORK_ERROR_ENUM error = ARNETWORK_ERROR_ENUM.ARNETWORK_OK;
        if(m_initOk == true)
        {
            int intError = nativeReadData( nativeManager, outputBufferID, data.getData (), data.getCapacity (), data);
            error = ARNETWORK_ERROR_ENUM.getFromValue(intError);
        }
        else
        {
            error = ARNETWORK_ERROR_ENUM.ARNETWORK_ERROR_BAD_PARAMETER;
        }

        return error;
    }

    /**
     * try read data received (non-blocking function)
     * @param outputBufferID identifier of the output buffer in which the data must be read
     * @param data Data where store the reading
     * @return error ARNETWORK_ERROR_ENUM type
     */
    public ARNETWORK_ERROR_ENUM tryReadData(int outputBufferID, ARNativeData data)
    {
        ARNETWORK_ERROR_ENUM error = ARNETWORK_ERROR_ENUM.ARNETWORK_OK;
        if(m_initOk == true)
        {
            int intError = nativeTryReadData( nativeManager, outputBufferID, data.getData (), data.getCapacity (), data);
            error = ARNETWORK_ERROR_ENUM.getFromValue(intError);
        }
        else
        {
            error = ARNETWORK_ERROR_ENUM.ARNETWORK_ERROR_BAD_PARAMETER;
        }

        return error;
    }

    /**
     * Read data received with timeout
     * @param outputBufferID identifier of the output buffer in which the data must be read
     * @param data Data where store the reading
     * @param timeoutMs maximum time in millisecond to wait if there is no data to read
     * @return error ARNETWORK_ERROR_ENUM type
     */
    public ARNETWORK_ERROR_ENUM readDataWithTimeout(int outputBufferID, ARNativeData data, int timeoutMs)
    {
        ARNETWORK_ERROR_ENUM error = ARNETWORK_ERROR_ENUM.ARNETWORK_OK;
        if(m_initOk == true)
        {
            int intError = nativeReadDataWithTimeout( nativeManager, outputBufferID, data.getData (), data.getCapacity (), data, timeoutMs);
            error = ARNETWORK_ERROR_ENUM.getFromValue(intError);
        }
        else
        {
            error = ARNETWORK_ERROR_ENUM.ARNETWORK_ERROR_BAD_PARAMETER;
        }

        return error;
    }

    /**
     * Get the pointer C on the network manager
     * @return Pointer C on the network manager
     */
    public long getManager ()
    {
        return nativeManager;
    }

    /**
     * Get whether the Manager is correctly initialized and usable
     * @return true if the Manager is usable
     */
    public boolean isCorrectlyInitialized ()
    {
        return m_initOk;
    }

    /**
     * CallBack for the status of the data sent or free
     * @param IoBufferId identifier of the IoBuffer is calling back
     * @param data data sent
     * @param status reason of the callback
     * @param customData custom data
     * @return ARNETWORK_MANAGER_CALLBACK_RETURN_ENUM what to do in timeout case
     */
    public abstract ARNETWORK_MANAGER_CALLBACK_RETURN_ENUM onCallback (int IoBufferId, ARNativeData data, ARNETWORK_MANAGER_CALLBACK_STATUS_ENUM status, Object customData);

    /**
     * CallBack for the status of the data sent or free.
     * NOTE(review): presumably invoked from native code by name/signature
     * (it is private and has no Java callers in view) — verify against the JNI layer
     * before renaming or changing its signature.
     * @param IoBufferId identifier of the IoBuffer is calling back
     * @param data data sent
     * @param status reason of the callback
     * @param customData custom data
     * @return ARNETWORK_MANAGER_CALLBACK_RETURN_ENUM what to do in timeout case
     */
    private int callback (int IoBufferId, ARNativeData data, int status, Object customData)
    {
        ARNETWORK_MANAGER_CALLBACK_STATUS_ENUM jStatus = ARNETWORK_MANAGER_CALLBACK_STATUS_ENUM.getFromValue(status);
        ARNETWORK_MANAGER_CALLBACK_RETURN_ENUM retVal = onCallback (IoBufferId, data, jStatus, customData);
        return retVal.getValue();
    }

    /**
     * @brief function called on disconnect
     * @param alManager The ARNetworkAL manager
     */
    public abstract void onDisconnect (ARNetworkALManager alManager);

    /**
     * @brief function called on disconnect.
     * NOTE(review): presumably invoked from native code by name — verify before renaming.
     */
    private void disconnectCallback ()
    {
        onDisconnect (alManager);
    }
}

/**
 * Sending Runnable — drives the native sending loop; run() blocks until the
 * native manager is stopped.
 */
class SendingRunnable implements Runnable
{
    private static native int nativeSendingThreadRun( long nativeManager);

    long nativeManager;

    /**
     * Constructor
     * @param managerPtr Pointer C on the network manager
     */
    SendingRunnable(long managerPtr)
    {
        nativeManager = managerPtr;
    }

    /**
     * Manage the sending of the data
     */
    public void run()
    {
        nativeSendingThreadRun(nativeManager);
    }
}

/**
 * Reception Runnable — drives the native receiving loop; run() blocks until the
 * native manager is stopped.
 */
class ReceivingRunnable implements Runnable
{
    private static native int nativeReceivingThreadRun(long nativeManager);

    long nativeManager;

    /**
     * Constructor
     * @param managerPtr Pointer C on the network manager
     */
    ReceivingRunnable(long managerPtr)
    {
        nativeManager = managerPtr;
    }

    /**
     * Manage the reception of the data.
     */
    public void run()
    {
        nativeReceivingThreadRun(nativeManager);
    }
}
/* * Copyright 2016 Red Hat, Inc. and/or its affiliates * and other contributors as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.keycloak.storage.ldap.mappers.msad; import org.jboss.logging.Logger; import org.keycloak.component.ComponentModel; import org.keycloak.credential.CredentialInput; import org.keycloak.models.LDAPConstants; import org.keycloak.models.ModelException; import org.keycloak.models.RealmModel; import org.keycloak.models.UserModel; import org.keycloak.models.credential.PasswordUserCredentialModel; import org.keycloak.models.utils.UserModelDelegate; import org.keycloak.storage.UserStorageProvider; import org.keycloak.storage.ldap.LDAPStorageProvider; import org.keycloak.storage.ldap.idm.model.LDAPObject; import org.keycloak.storage.ldap.idm.query.internal.LDAPQuery; import org.keycloak.storage.ldap.mappers.AbstractLDAPStorageMapper; import org.keycloak.storage.ldap.mappers.LDAPOperationDecorator; import org.keycloak.storage.ldap.mappers.PasswordUpdateCallback; import org.keycloak.storage.ldap.mappers.TxAwareLDAPUserModelDelegate; import javax.naming.AuthenticationException; import java.util.HashSet; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * Mapper specific to MSAD. It's able to read the userAccountControl and pwdLastSet attributes and set actions in Keycloak based on that. 
 * It's also able to handle exception code from LDAP user authentication (See http://www-01.ibm.com/support/docview.wss?uid=swg21290631 )
 *
 * @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
 */
public class MSADUserAccountControlStorageMapper extends AbstractLDAPStorageMapper implements PasswordUpdateCallback {

    public static final String LDAP_PASSWORD_POLICY_HINTS_ENABLED = "ldap.password.policy.hints.enabled";

    private static final Logger logger = Logger.getLogger(MSADUserAccountControlStorageMapper.class);

    // Matches MSAD bind failures; group(1) captures the hex sub-error code (e.g. 532, 533, 773, 775).
    private static final Pattern AUTH_EXCEPTION_REGEX = Pattern.compile(".*AcceptSecurityContext error, data ([0-9a-f]*), v.*");

    // Matches WILL_NOT_PERFORM responses to a password modify; applied to the UPPERCASED message.
    private static final Pattern AUTH_INVALID_NEW_PASSWORD = Pattern.compile(".*ERROR CODE ([0-9A-F]+) - ([0-9A-F]+): .*WILL_NOT_PERFORM.*");

    public MSADUserAccountControlStorageMapper(ComponentModel mapperModel, LDAPStorageProvider ldapProvider) {
        super(mapperModel, ldapProvider);
        // Register so beforePasswordUpdate/passwordUpdated/passwordUpdateFailed are invoked.
        ldapProvider.setUpdater(this);
    }

    @Override
    public void beforeLDAPQuery(LDAPQuery query) {
        query.addReturningLdapAttribute(LDAPConstants.PWD_LAST_SET);
        query.addReturningLdapAttribute(LDAPConstants.USER_ACCOUNT_CONTROL);

        // This needs to be read-only and can be set to writable just on demand
        query.addReturningReadOnlyLdapAttribute(LDAPConstants.PWD_LAST_SET);

        if (ldapProvider.getEditMode() != UserStorageProvider.EditMode.WRITABLE) {
            query.addReturningReadOnlyLdapAttribute(LDAPConstants.USER_ACCOUNT_CONTROL);
        }
    }

    @Override
    public LDAPOperationDecorator beforePasswordUpdate(UserModel user, LDAPObject ldapUser, PasswordUserCredentialModel password) {
        // Not apply policies if password is reset by admin (not by user himself)
        if (password.isAdminRequest()) {
            return null;
        }

        boolean applyDecorator = mapperModel.get(LDAP_PASSWORD_POLICY_HINTS_ENABLED, false);
        return applyDecorator ? new LDAPServerPolicyHintsDecorator() : null;
    }

    @Override
    public void passwordUpdated(UserModel user, LDAPObject ldapUser, PasswordUserCredentialModel password) {
        logger.debugf("Going to update userAccountControl for ldap user '%s' after successful password update", ldapUser.getDn().toString());

        // Normally it's read-only
        ldapUser.removeReadOnlyAttributeName(LDAPConstants.PWD_LAST_SET);

        // pwdLastSet = -1 marks the password as "set now" in MSAD.
        ldapUser.setSingleAttribute(LDAPConstants.PWD_LAST_SET, "-1");

        UserAccountControl control = getUserAccountControl(ldapUser);
        control.remove(UserAccountControl.PASSWD_NOTREQD);
        control.remove(UserAccountControl.PASSWORD_EXPIRED);

        if (user.isEnabled()) {
            control.remove(UserAccountControl.ACCOUNTDISABLE);
        }

        updateUserAccountControl(true, ldapUser, control);
    }

    @Override
    public void passwordUpdateFailed(UserModel user, LDAPObject ldapUser, PasswordUserCredentialModel password, ModelException exception) {
        throw processFailedPasswordUpdateException(exception);
    }

    @Override
    public UserModel proxy(LDAPObject ldapUser, UserModel delegate, RealmModel realm) {
        return new MSADUserModelDelegate(delegate, ldapUser);
    }

    @Override
    public void onRegisterUserToLDAP(LDAPObject ldapUser, UserModel localUser, RealmModel realm) {
    }

    @Override
    public void onImportUserFromLDAP(LDAPObject ldapUser, UserModel user, RealmModel realm, boolean isCreate) {
    }

    @Override
    public boolean onAuthenticationFailure(LDAPObject ldapUser, UserModel user, AuthenticationException ldapException, RealmModel realm) {
        String exceptionMessage = ldapException.getMessage();
        Matcher m = AUTH_EXCEPTION_REGEX.matcher(exceptionMessage);
        if (m.matches()) {
            String errorCode = m.group(1);
            return processAuthErrorCode(errorCode, user);
        } else {
            return false;
        }
    }

    /**
     * Maps MSAD AcceptSecurityContext sub-error codes to Keycloak actions.
     * Returns true when the failure was "handled" (login may proceed with a
     * required action, or the account state was synced); false otherwise.
     */
    protected boolean processAuthErrorCode(String errorCode, UserModel user) {
        logger.debugf("MSAD Error code is '%s' after failed LDAP login of user '%s'", errorCode, user.getUsername());

        if (ldapProvider.getEditMode() == UserStorageProvider.EditMode.WRITABLE) {
            // 532 = password expired, 773 = user must reset password
            if (errorCode.equals("532") || errorCode.equals("773")) {
                // User needs to change his MSAD password. Allow him to login, but add UPDATE_PASSWORD required action
                if (!user.getRequiredActions().contains(UserModel.RequiredAction.UPDATE_PASSWORD.name())) {
                    user.addRequiredAction(UserModel.RequiredAction.UPDATE_PASSWORD);
                }
                return true;
            } else if (errorCode.equals("533")) {
                // User is disabled in MSAD. Set him to disabled in KC as well
                if (user.isEnabled()) {
                    user.setEnabled(false);
                }
                return true;
            } else if (errorCode.equals("775")) {
                // Account locked out in MSAD; only log, do not mark handled.
                logger.warnf("Locked user '%s' attempt to login", user.getUsername());
            }
        }

        return false;
    }

    /**
     * Translates an MSAD WILL_NOT_PERFORM password-update failure into a
     * user-friendly ModelException; any other failure is returned unchanged.
     */
    protected ModelException processFailedPasswordUpdateException(ModelException e) {
        if (e.getCause() == null || e.getCause().getMessage() == null) {
            return e;
        }

        String exceptionMessage = e.getCause().getMessage().replace('\n', ' ');
        logger.debugf("Failed to update password in Active Directory. Exception message: %s", exceptionMessage);
        // Uppercase so the message matches AUTH_INVALID_NEW_PASSWORD's [0-9A-F] groups.
        exceptionMessage = exceptionMessage.toUpperCase();
        Matcher m = AUTH_INVALID_NEW_PASSWORD.matcher(exceptionMessage);
        if (m.matches()) {
            String errorCode = m.group(1);
            String errorCode2 = m.group(2);

            // 52D corresponds to ERROR_PASSWORD_RESTRICTION. See https://msdn.microsoft.com/en-us/library/windows/desktop/ms681385(v=vs.85).aspx
            if ((errorCode.equals("53")) && errorCode2.endsWith("52D")) {
                ModelException me = new ModelException("invalidPasswordGenericMessage", e);
                return me;
            }
        }

        return e;
    }

    // Parses the userAccountControl LDAP attribute; a missing attribute is treated as 0 (no flags).
    protected UserAccountControl getUserAccountControl(LDAPObject ldapUser) {
        String userAccountControl = ldapUser.getAttributeAsString(LDAPConstants.USER_ACCOUNT_CONTROL);
        long longValue = userAccountControl == null ? 0 : Long.parseLong(userAccountControl);
        return new UserAccountControl(longValue);
    }

    // Update user in LDAP if "updateInLDAP" is true. Otherwise it is assumed that LDAP update will be called at the end of transaction
    protected void updateUserAccountControl(boolean updateInLDAP, LDAPObject ldapUser, UserAccountControl accountControl) {
        String userAccountControlValue = String.valueOf(accountControl.getValue());
        logger.debugf("Updating userAccountControl of user '%s' to value '%s'", ldapUser.getDn().toString(), userAccountControlValue);

        ldapUser.setSingleAttribute(LDAPConstants.USER_ACCOUNT_CONTROL, userAccountControlValue);

        if (updateInLDAP) {
            ldapProvider.getLdapIdentityStore().update(ldapUser);
        }
    }

    /**
     * UserModel delegate that merges Keycloak state with the MSAD
     * userAccountControl / pwdLastSet attributes of the backing LDAP entry.
     */
    public class MSADUserModelDelegate extends TxAwareLDAPUserModelDelegate {

        private final LDAPObject ldapUser;

        public MSADUserModelDelegate(UserModel delegate, LDAPObject ldapUser) {
            super(delegate, ldapProvider, ldapUser);
            this.ldapUser = ldapUser;
        }

        @Override
        public boolean isEnabled() {
            boolean kcEnabled = super.isEnabled();

            if (getPwdLastSet() > 0) {
                // Merge KC and MSAD
                return kcEnabled && !getUserAccountControl(ldapUser).has(UserAccountControl.ACCOUNTDISABLE);
            } else {
                // If new MSAD user is created and pwdLastSet is still 0, MSAD account is in disabled state. So read just from Keycloak DB. User is not able to login via MSAD anyway
                return kcEnabled;
            }
        }

        @Override
        public void setEnabled(boolean enabled) {
            // Always update DB
            super.setEnabled(enabled);

            if (ldapProvider.getEditMode() == UserStorageProvider.EditMode.WRITABLE && getPwdLastSet() > 0) {
                logger.debugf("Going to propagate enabled=%s for ldapUser '%s' to MSAD", enabled, ldapUser.getDn().toString());

                UserAccountControl control = getUserAccountControl(ldapUser);

                if (enabled) {
                    control.remove(UserAccountControl.ACCOUNTDISABLE);
                } else {
                    control.add(UserAccountControl.ACCOUNTDISABLE);
                }

                ensureTransactionStarted();

                // LDAP write is deferred to the transaction (updateInLDAP=false).
                updateUserAccountControl(false, ldapUser, control);
            }
        }

        @Override
        public void addRequiredAction(RequiredAction action) {
            String actionName = action.name();
            addRequiredAction(actionName);
        }

        @Override
        public void addRequiredAction(String action) {
            // Always update DB
            super.addRequiredAction(action);

            if (ldapProvider.getEditMode() == UserStorageProvider.EditMode.WRITABLE && RequiredAction.UPDATE_PASSWORD.toString().equals(action)) {
                logger.debugf("Going to propagate required action UPDATE_PASSWORD to MSAD for ldap user '%s' ", ldapUser.getDn().toString());

                // Normally it's read-only
                ldapUser.removeReadOnlyAttributeName(LDAPConstants.PWD_LAST_SET);

                // pwdLastSet = 0 forces "must change password at next logon" in MSAD.
                ldapUser.setSingleAttribute(LDAPConstants.PWD_LAST_SET, "0");

                ensureTransactionStarted();
            }
        }

        @Override
        public void removeRequiredAction(RequiredAction action) {
            String actionName = action.name();
            removeRequiredAction(actionName);
        }

        @Override
        public void removeRequiredAction(String action) {
            // Always update DB
            super.removeRequiredAction(action);

            if (ldapProvider.getEditMode() == UserStorageProvider.EditMode.WRITABLE && RequiredAction.UPDATE_PASSWORD.toString().equals(action)) {

                // Don't set pwdLastSet in MSAD when it is new user
                UserAccountControl accountControl = getUserAccountControl(ldapUser);

                if (accountControl.getValue() != 0 && !accountControl.has(UserAccountControl.PASSWD_NOTREQD)) {
                    logger.debugf("Going to remove required action UPDATE_PASSWORD from MSAD for ldap user '%s' ", ldapUser.getDn().toString());

                    // Normally it's read-only
                    ldapUser.removeReadOnlyAttributeName(LDAPConstants.PWD_LAST_SET);

                    ldapUser.setSingleAttribute(LDAPConstants.PWD_LAST_SET, "-1");

                    ensureTransactionStarted();
                }
            }
        }

        @Override
        public Set<String> getRequiredActions() {
            Set<String> requiredActions = super.getRequiredActions();

            if (ldapProvider.getEditMode() == UserStorageProvider.EditMode.WRITABLE) {
                // pwdLastSet == 0 or an expired password in MSAD forces UPDATE_PASSWORD in Keycloak.
                if (getPwdLastSet() == 0 || getUserAccountControl(ldapUser).has(UserAccountControl.PASSWORD_EXPIRED)) {
                    // Copy-on-write: don't mutate the delegate's set.
                    requiredActions = new HashSet<>(requiredActions);
                    requiredActions.add(RequiredAction.UPDATE_PASSWORD.toString());
                    return requiredActions;
                }
            }
            return requiredActions;
        }

        // Missing pwdLastSet attribute is treated as 0 ("password never set").
        protected long getPwdLastSet() {
            String pwdLastSet = ldapUser.getAttributeAsString(LDAPConstants.PWD_LAST_SET);
            return pwdLastSet == null ? 0 : Long.parseLong(pwdLastSet);
        }
    }
}
/** * Copyright 2010-2015. All work is copyrighted to their respective * author(s), unless otherwise stated. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.revelc.code.formatter; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.UnsupportedEncodingException; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import org.apache.maven.plugin.AbstractMojo; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugin.MojoFailureException; import org.apache.maven.plugin.logging.Log; import org.apache.maven.plugins.annotations.Component; import org.apache.maven.plugins.annotations.LifecyclePhase; import org.apache.maven.plugins.annotations.Mojo; import org.apache.maven.plugins.annotations.Parameter; import org.codehaus.plexus.resource.ResourceManager; import org.codehaus.plexus.resource.loader.FileResourceLoader; import org.codehaus.plexus.resource.loader.ResourceNotFoundException; import org.codehaus.plexus.util.DirectoryScanner; import org.codehaus.plexus.util.IOUtil; import org.codehaus.plexus.util.ReaderFactory; import 
org.codehaus.plexus.util.StringUtils;
import org.codehaus.plexus.util.WriterFactory;
import org.eclipse.jdt.core.JavaCore;
import org.eclipse.jdt.core.formatter.CodeFormatter;
import org.eclipse.jface.text.BadLocationException;
import org.eclipse.text.edits.MalformedTreeException;
import org.xml.sax.SAXException;

import com.google.common.hash.Hashing;

import net.revelc.code.formatter.java.JavaFormatter;
import net.revelc.code.formatter.javascript.JavascriptFormatter;
import net.revelc.code.formatter.model.ConfigReadException;
import net.revelc.code.formatter.model.ConfigReader;

/**
 * A Maven plugin mojo to format Java source code using the Eclipse code
 * formatter.
 *
 * Mojo parameters allow customizing formatting by specifying the config XML
 * file, line endings, compiler version, and source code locations. Reformatting
 * source files is avoided using an md5 hash of the content, comparing to the
 * original hash to the hash after formatting and a cached hash.
 *
 * @author jecki
 * @author Matt Blanchette
 * @author marvin.froeder
 */
@Mojo(name = "format", defaultPhase = LifecyclePhase.PROCESS_SOURCES, requiresProject = false)
public class FormatterMojo extends AbstractMojo implements ConfigurationSource {

    /** The Constant CACHE_PROPERTIES_FILENAME. */
    private static final String CACHE_PROPERTIES_FILENAME = "maven-java-formatter-cache.properties";

    /** The Constant DEFAULT_INCLUDES. */
    private static final String[] DEFAULT_INCLUDES = new String[] { "**/*.java", "**/*.js" };

    /**
     * ResourceManager for retrieving the configFile resource.
     */
    @Component(role = ResourceManager.class)
    private ResourceManager resourceManager;

    /**
     * Project's source directory as specified in the POM.
     */
    @Parameter(defaultValue = "${project.build.sourceDirectory}", property = "sourceDirectory", required = true)
    private File sourceDirectory;

    /**
     * Project's test source directory as specified in the POM.
     */
    @Parameter(defaultValue = "${project.build.testSourceDirectory}", property = "testSourceDirectory", required = true)
    private File testSourceDirectory;

    /**
     * Project's target directory as specified in the POM.
     */
    @Parameter(defaultValue = "${project.build.directory}", readonly = true, required = true)
    private File targetDirectory;

    /**
     * Project's base directory.
     */
    @Parameter(defaultValue = ".", property = "project.basedir", readonly = true, required = true)
    private File basedir;

    /**
     * Location of the Java source files to format. Defaults to source main and
     * test directories if not set. Deprecated in version 0.3. Reintroduced in
     * 0.4.
     *
     * @since 0.4
     */
    @Parameter
    private File[] directories;

    /**
     * List of fileset patterns for Java source locations to include in
     * formatting. Patterns are relative to the project source and test source
     * directories. When not specified, the default include is
     * <code>**&#47;*.java</code>
     *
     * @since 0.3
     */
    @Parameter(property = "formatter.includes")
    private String[] includes;

    /**
     * List of fileset patterns for Java source locations to exclude from
     * formatting. Patterns are relative to the project source and test source
     * directories. When not specified, there is no default exclude.
     *
     * @since 0.3
     */
    @Parameter
    private String[] excludes;

    /**
     * Java compiler source version.
     */
    @Parameter(defaultValue = "1.5", property = "maven.compiler.source", required = true)
    private String compilerSource;

    /**
     * Java compiler compliance version.
     * NOTE(review): this reuses property "maven.compiler.source" (same as
     * compilerSource above) — looks like a copy-paste slip; presumably
     * "maven.compiler.compliance" was intended. TODO confirm before changing,
     * since the property name is part of the plugin's configuration surface.
     */
    @Parameter(defaultValue = "1.5", property = "maven.compiler.source", required = true)
    private String compilerCompliance;

    /**
     * Java compiler target version.
     */
    @Parameter(defaultValue = "1.5", property = "maven.compiler.target", required = true)
    private String compilerTargetPlatform;

    /**
     * The file encoding used to read and write source files. When not specified
     * and sourceEncoding also not set, default is platform file encoding.
     *
     * @since 0.3
     */
    @Parameter(property = "project.build.sourceEncoding", required = true)
    private String encoding;

    /**
     * Sets the line-ending of files after formatting. Valid values are:
     * <ul>
     * <li><b>"AUTO"</b> - Use line endings of current system</li>
     * <li><b>"KEEP"</b> - Preserve line endings of files, default to AUTO if
     * ambiguous</li>
     * <li><b>"LF"</b> - Use Unix and Mac style line endings</li>
     * <li><b>"CRLF"</b> - Use DOS and Windows style line endings</li>
     * <li><b>"CR"</b> - Use early Mac style line endings</li>
     * </ul>
     *
     * @since 0.2.0
     */
    @Parameter(defaultValue = "AUTO", property = "lineending", required = true)
    private LineEnding lineEnding;

    /**
     * File or classpath location of an Eclipse code formatter configuration xml
     * file to use in formatting.
     */
    @Parameter(defaultValue = "src/config/eclipse/formatter/java.xml", property = "configfile", required = true)
    private String configFile;

    /**
     * File or classpath location of an Eclipse code formatter configuration xml
     * file to use in formatting.
     */
    @Parameter(defaultValue = "src/config/eclipse/formatter/javascript.xml", property = "configjsfile", required = true)
    private String configJsFile;

    /**
     * Whether the formatting is skipped.
     * NOTE(review): boxed Boolean is auto-unboxed in execute(); relies on the
     * "false" default to avoid an NPE — TODO confirm Maven always injects it.
     *
     * @since 0.5
     */
    @Parameter(defaultValue = "false", alias = "skip", property = "formatter.skip")
    private Boolean skipFormatting;

    private JavaFormatter javaFormatter = new JavaFormatter();

    private JavascriptFormatter jsFormatter = new JavascriptFormatter();

    /**
     * Execute.
     *
     * @throws MojoExecutionException the mojo execution exception
     * @see org.apache.maven.plugin.AbstractMojo#execute()
     */
    @Override
    public void execute() throws MojoExecutionException, MojoFailureException {
        if (this.skipFormatting) {
            getLog().info("Formatting is skipped");
            return;
        }

        long startClock = System.currentTimeMillis();

        if (StringUtils.isEmpty(this.encoding)) {
            this.encoding = ReaderFactory.FILE_ENCODING;
            getLog().warn("File encoding has not been set, using platform encoding (" + this.encoding
                    + ") to format source files, i.e. build is platform dependent!");
        } else {
            try {
                // Cheap round-trip just to validate the configured charset name.
                "Test Encoding".getBytes(this.encoding);
            } catch (UnsupportedEncodingException e) {
                throw new MojoExecutionException("Encoding '" + this.encoding + "' is not supported");
            }
            getLog().info("Using '" + this.encoding + "' encoding to format source files.");
        }

        List<File> files = new ArrayList<>();
        try {
            if (this.directories != null) {
                for (File directory : this.directories) {
                    if (directory.exists() && directory.isDirectory()) {
                        files.addAll(addCollectionFiles(directory));
                    }
                }
            } else {
                // Using defaults of source main and test dirs
                if (this.sourceDirectory != null && this.sourceDirectory.exists() && this.sourceDirectory.isDirectory()) {
                    files.addAll(addCollectionFiles(this.sourceDirectory));
                }
                if (this.testSourceDirectory != null && this.testSourceDirectory.exists() && this.testSourceDirectory.isDirectory()) {
                    files.addAll(addCollectionFiles(this.testSourceDirectory));
                }
            }
        } catch (IOException e) {
            throw new MojoExecutionException("Unable to find files using includes/excludes", e);
        }

        int numberOfFiles = files.size();
        Log log = getLog();
        log.info("Number of files to be formatted: " + numberOfFiles);

        if (numberOfFiles > 0) {
            createCodeFormatter();
            ResultCollector rc = new ResultCollector();
            Properties hashCache = readFileHashCacheFile();

            String basedirPath = getBasedirPath();
            for (int i = 0, n = files.size(); i < n; i++) {
                File file = files.get(i);
                if (file.exists()) {
                    formatFile(file, rc, hashCache, basedirPath);
                }
            }

            storeFileHashCache(hashCache);

            long endClock = System.currentTimeMillis();

            log.info("Successfully formatted: " + rc.successCount + " file(s)");
            log.info("Fail to format : " + rc.failCount + " file(s)");
            log.info("Skipped : " + rc.skippedCount + " file(s)");
            log.info("Approximate time taken: " + ((endClock - startClock) / 1000) + "s");
        }
    }

    /**
     * Scans a base directory for source files matching the configured
     * includes/excludes (falling back to DEFAULT_INCLUDES).
     *
     * @param newBasedir the base directory to scan
     * @return the matching files found under newBasedir
     * @throws IOException Signals that an I/O exception has occurred.
     */
    List<File> addCollectionFiles(File newBasedir) throws IOException {
        final DirectoryScanner ds = new DirectoryScanner();
        ds.setBasedir(newBasedir);
        if (this.includes != null && this.includes.length > 0) {
            ds.setIncludes(this.includes);
        } else {
            ds.setIncludes(DEFAULT_INCLUDES);
        }

        ds.setExcludes(this.excludes);
        ds.addDefaultExcludes();
        ds.setCaseSensitive(false);
        ds.setFollowSymlinks(false);
        ds.scan();

        List<File> foundFiles = new ArrayList<>();
        for (String filename : ds.getIncludedFiles()) {
            foundFiles.add(new File(newBasedir, filename));
        }
        return foundFiles;
    }

    /**
     * Gets the basedir path.
     * Returns the empty string when the canonical path cannot be resolved.
     * @return the basedir path
     */
    private String getBasedirPath() {
        try {
            return this.basedir.getCanonicalPath();
        } catch (Exception e) {
            return "";
        }
    }

    /**
     * Store file hash cache.
     * Failures are logged as warnings only — the cache is an optimization,
     * not required for correctness.
     *
     * @param props the props
     */
    private void storeFileHashCache(Properties props) {
        File cacheFile = new File(this.targetDirectory, CACHE_PROPERTIES_FILENAME);
        try (OutputStream out = new BufferedOutputStream(new FileOutputStream(cacheFile))) {
            props.store(out, null);
        } catch (FileNotFoundException e) {
            getLog().warn("Cannot store file hash cache properties file", e);
        } catch (IOException e) {
            getLog().warn("Cannot store file hash cache properties file", e);
        }
    }

    /**
     * Read file hash cache file.
* * @return the properties */ private Properties readFileHashCacheFile() { Properties props = new Properties(); Log log = getLog(); if (!this.targetDirectory.exists()) { this.targetDirectory.mkdirs(); } else if (!this.targetDirectory.isDirectory()) { log.warn("Something strange here as the " + "supposedly target directory is not a directory."); return props; } File cacheFile = new File(this.targetDirectory, CACHE_PROPERTIES_FILENAME); if (!cacheFile.exists()) { return props; } try (final BufferedInputStream stream = new BufferedInputStream(new FileInputStream(cacheFile))) { props.load(stream); } catch (FileNotFoundException e) { log.warn("Cannot load file hash cache properties file", e); } catch (IOException e) { log.warn("Cannot load file hash cache properties file", e); } return props; } /** * Format file. * * @param file the file * @param rc the rc * @param hashCache the hash cache * @param basedirPath the basedir path * @throws MojoFailureException * @throws MojoExecutionException */ private void formatFile(File file, ResultCollector rc, Properties hashCache, String basedirPath) throws MojoFailureException, MojoExecutionException { try { doFormatFile(file, rc, hashCache, basedirPath, false); } catch (IOException e) { rc.failCount++; getLog().warn(e); } catch (MalformedTreeException e) { rc.failCount++; getLog().warn(e); } catch (BadLocationException e) { rc.failCount++; getLog().warn(e); } } /** * Format individual file. * * @param file the file * @param rc the rc * @param hashCache the hash cache * @param basedirPath the basedir path * @throws IOException Signals that an I/O exception has occurred. 
* @throws BadLocationException the bad location exception
     * @throws MojoFailureException
     * @throws MojoExecutionException
     */
    protected void doFormatFile(File file, ResultCollector rc, Properties hashCache, String basedirPath, boolean dryRun)
            throws IOException, BadLocationException, MojoFailureException, MojoExecutionException {
        Log log = getLog();
        log.debug("Processing file: " + file);
        String code = readFileAsString(file);
        String originalHash = md5hash(code);
        // Cache key is the file path relative to the project basedir.
        String canonicalPath = file.getCanonicalPath();
        String path = canonicalPath.substring(basedirPath.length());
        String cachedHash = hashCache.getProperty(path);
        if (cachedHash != null && cachedHash.equals(originalHash)) {
            rc.skippedCount++;
            log.debug("File is already formatted.");
            return;
        }
        // Dispatch on extension: .java goes to the Java formatter, everything
        // else goes to the JavaScript formatter.
        Result r;
        if (file.getName().endsWith(".java")) {
            r = this.javaFormatter.formatFile(file, this.lineEnding, dryRun);
        } else {
            r = this.jsFormatter.formatFile(file, this.lineEnding, dryRun);
        }
        switch (r) {
        case SKIPPED:
            rc.skippedCount++;
            break;
        case SUCCESS:
            rc.successCount++;
            break;
        case FAIL:
            rc.failCount++;
            break;
        default:
            break;
        }
        // Re-read and re-hash; if formatting changed nothing, skip the write.
        // NOTE(review): when the formatter reports SUCCESS but the content is
        // unchanged, both successCount and skippedCount are incremented here —
        // presumably intentional, but totals can exceed the file count; verify.
        String formattedCode = readFileAsString(file);
        String formattedHash = md5hash(formattedCode);
        hashCache.setProperty(path, formattedHash);
        if (originalHash.equals(formattedHash)) {
            rc.skippedCount++;
            log.debug("Equal hash code. Not writing result to file.");
            return;
        }
        writeStringToFile(formattedCode, file);
    }

    /**
     * Md5hash.
     *
     * @param str the str
     * @return hex MD5 of the string's bytes in the configured encoding
     * @throws UnsupportedEncodingException the unsupported encoding exception
     */
    private String md5hash(String str) throws UnsupportedEncodingException {
        return Hashing.md5().hashBytes(str.getBytes(this.encoding)).toString();
    }

    /**
     * Read the given file and return the content as a string.
     *
     * @param file the file
     * @return the string
     * @throws IOException Signals that an I/O exception has occurred.
*/ private String readFileAsString(File file) throws java.io.IOException { StringBuilder fileData = new StringBuilder(1000); BufferedReader reader = null; try { reader = new BufferedReader(ReaderFactory.newReader(file, this.encoding)); char[] buf = new char[1024]; int numRead = 0; while ((numRead = reader.read(buf)) != -1) { String readData = String.valueOf(buf, 0, numRead); fileData.append(readData); buf = new char[1024]; } } finally { IOUtil.close(reader); } return fileData.toString(); } /** * Write the given string to a file. * * @param str the str * @param file the file * @throws IOException Signals that an I/O exception has occurred. */ private void writeStringToFile(String str, File file) throws IOException { if (!file.exists() && file.isDirectory()) { return; } BufferedWriter bw = null; try { bw = new BufferedWriter(WriterFactory.newWriter(file, this.encoding)); bw.write(str); } finally { IOUtil.close(bw); } } /** * Create a {@link CodeFormatter} instance to be used by this mojo. * * @throws MojoExecutionException the mojo execution exception */ private void createCodeFormatter() throws MojoExecutionException { this.javaFormatter.init(getFormattingOptions(this.configFile), this); this.jsFormatter.init(getFormattingOptions(this.configJsFile), this); } /** * Return the options to be passed when creating {@link CodeFormatter} * instance. * * @return the formatting options * @throws MojoExecutionException the mojo execution exception */ private Map<String, String> getFormattingOptions(String newConfigFile) throws MojoExecutionException { if (newConfigFile != null) { return getOptionsFromConfigFile(newConfigFile); } Map<String, String> options = new HashMap<>(); options.put(JavaCore.COMPILER_SOURCE, this.compilerSource); options.put(JavaCore.COMPILER_COMPLIANCE, this.compilerCompliance); options.put(JavaCore.COMPILER_CODEGEN_TARGET_PLATFORM, this.compilerTargetPlatform); return options; } /** * Read config file and return the config as {@link Map}. 
*
     * @return the options from config file
     * @throws MojoExecutionException the mojo execution exception
     */
    private Map<String, String> getOptionsFromConfigFile(String newConfigFile) throws MojoExecutionException {
        InputStream configInput = null;
        try {
            // Resolve the config file relative to the project basedir first,
            // then fall back to whatever the plexus resource manager can find
            // (e.g. the plugin classpath).
            this.resourceManager.addSearchPath(FileResourceLoader.ID, this.basedir.getAbsolutePath());
            configInput = this.resourceManager.getResourceAsInputStream(newConfigFile);
        } catch (ResourceNotFoundException e) {
            throw new MojoExecutionException("Config file [" + newConfigFile + "] cannot be found", e);
        }
        if (configInput == null) {
            throw new MojoExecutionException("Config file [" + newConfigFile + "] does not exist");
        }
        try {
            ConfigReader configReader = new ConfigReader();
            return configReader.read(configInput);
        } catch (IOException e) {
            throw new MojoExecutionException("Cannot read config file [" + newConfigFile + "]", e);
        } catch (SAXException e) {
            throw new MojoExecutionException("Cannot parse config file [" + newConfigFile + "]", e);
        } catch (ConfigReadException e) {
            throw new MojoExecutionException(e.getMessage(), e);
        } finally {
            // Stream is acquired in the first try block, so it must be closed
            // here rather than via try-with-resources.
            IOUtil.close(configInput);
        }
    }

    // Simple mutable tally of formatting outcomes for a single run.
    class ResultCollector {

        int successCount;

        int failCount;

        int skippedCount;
    }

    @Override
    public String getCompilerSources() {
        return this.compilerSource;
    }

    @Override
    public String getCompilerCompliance() {
        return this.compilerCompliance;
    }

    @Override
    public String getCompilerCodegenTargetPlatform() {
        return this.compilerTargetPlatform;
    }

    @Override
    public File getTargetDirectory() {
        return this.targetDirectory;
    }

    @Override
    public Charset getEncoding() {
        return Charset.forName(this.encoding);
    }
}
/**
 * Copyright 2013 Tim Whittington
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.archie.groktls;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;

import javax.crypto.Cipher;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLParameters;
import javax.net.ssl.SSLServerSocket;
import javax.net.ssl.SSLServerSocketFactory;
import javax.net.ssl.SSLSocket;
import javax.net.ssl.SSLSocketFactory;

import org.archie.groktls.ItemFilter.FilterResult;
import org.archie.groktls.cipher.CipherSuite;
import org.archie.groktls.cipher.CipherSuiteFilters;
import org.archie.groktls.protocol.ProtocolVariant;
import org.archie.groktls.protocol.ProtocolVariantFilters;

/**
 * Interactive tester for cipher suite and protocol variant filter specifications.
* <p>
 * Run without arguments, cipher suite filter specs can be entered, and the results of evaluating the filters against the cipher suites
 * supported by the JVM running the program are displayed.
 * <p>
 * When run with an argument of <code>protocolvariant</code> or <code>pv</code>, the same interactive testing can be done for protocol
 * variants.
 */
public class InteractiveFilterSpecTester {

    // printf layouts for the cipher suite and protocol variant result tables.
    private static final String CS_OUTPUT_FORMAT = "%-40s %-8s %-8s %-10s %-4s %3d (%-3d) %-6s %4d %s%n";
    private static final String PV_OUTPUT_FORMAT = "%-20s %-8s %-5d %-5d %-6s %s%n";

    // true = cipher suite mode, false = protocol variant mode.
    private boolean ciphers = true;

    // true = evaluate filters client-side, false = server-side.
    private boolean client = true;

    private String engine;

    private String provider;

    private SSLContext ctx;

    private final GrokTLS grok = new GrokTLS();

    // true = bare output (names only), false = full table output.
    private boolean bare;

    private InteractiveFilterSpecTester() {
        try {
            this.provider = SSLContext.getDefault().getProvider().getName();
        } catch (NoSuchAlgorithmException e) {
            throw new IllegalStateException("Unable to detect default SSLContext provider", e);
        }
    }

    private void start() throws NoSuchAlgorithmException, KeyManagementException {
        checkUnrestrictedJce();
        init();
    }

    /**
     * Check whether unrestricted JCE policy is installed, which will have an effect on which ciphersuites are provided (at least by the
     * SunJSSE provider).
     */
    private void checkUnrestrictedJce() {
        try {
            // Probes the "RC5" limit; with the unrestricted policy installed
            // the reported maximum is Integer.MAX_VALUE.
            int maxKeyLength = Cipher.getMaxAllowedKeyLength("RC5");
            if (maxKeyLength != Integer.MAX_VALUE) {
                System.out.printf("! Unrestricted JCE policy files are not installed.%n");
                System.out.printf("! Cipher suites may be limited to <= %d bit security (<= %d bit digest/mac).%n%n",
                                  maxKeyLength,
                                  (maxKeyLength * 2));
            }
        } catch (NoSuchAlgorithmException e) {
            System.out.printf("Failed to check unrestricted JCE: %s%n", e.getMessage());
        }
    }

    // (Re)create the SSLContext from the current engine/provider settings.
    // Failures are reported on stderr; NOTE(review): the previous (possibly
    // null) context is left in place in that case — confirm callers tolerate it.
    protected void init() throws KeyManagementException {
        try {
            if ((this.engine == null) || this.engine.trim().isEmpty() || this.engine.equalsIgnoreCase("default")) {
                this.ctx = SSLContext.getDefault();
            } else {
                this.ctx = SSLContext.getInstance(this.engine, this.provider);
                this.ctx.init(null, null, null);
            }
        } catch (NoSuchProviderException e) {
            System.err.printf("Provider %s is not registered%n", this.provider);
        } catch (NoSuchAlgorithmException e) {
            System.err.printf("Engine %s is not registered%n", this.engine);
        }
    }

    // Interactive read-eval loop over stdin; exits on EOF or a blank line.
    private void readAndProcessInput() throws IOException, KeyManagementException, NoSuchAlgorithmException {
        final BufferedReader con = new BufferedReader(new InputStreamReader(System.in));
        printStatus();
        System.out.println("Enter a blank line to exit.");
        System.out.print("> ");
        String input = con.readLine();
        while (input != null) {
            if (input.length() == 0) {
                return;
            }
            processInput(input, true);
            System.out.println();
            System.out.print("> ");
            input = con.readLine();
        }
    }

    // Dispatch a single command. Mode/engine/provider/output toggles are
    // handled directly; anything else is parsed as a filter spec and evaluated
    // against the current SSLContext.
    private void processInput(final String input, final boolean status) throws KeyManagementException, NoSuchAlgorithmException {
        if ("server".equals(input)) {
            this.client = false;
            if (status) {
                printStatus();
            }
        } else if ("client".equals(input)) {
            this.client = true;
            if (status) {
                printStatus();
            }
        } else if (input.startsWith("proto") || "pv".equals(input)) {
            this.ciphers = false;
            if (status) {
                printStatus();
            }
        } else if ("cipher".equals(input) || "cs".equals(input)) {
            this.ciphers = true;
            if (status) {
                printStatus();
            }
        } else if ("consistent".equals(input)) {
            checkConsistency();
        } else if (input.startsWith("engine")) {
            this.engine = input.substring("engine".length()).trim();
            init();
            if (status) {
                printStatus();
            }
        } else if (input.startsWith("provider")) {
            this.provider = input.substring("provider".length()).trim();
            init();
            if (status) {
                printStatus();
            }
        } else if (input.equals("bare")) {
            this.bare = true;
        } else if (input.equals("full")) {
            this.bare = false;
        } else {
            try {
                final ItemFilterSpecParser<?> sp = this.ciphers ? this.grok.createCipherSuiteFilterSpecParser() : this.grok
                        .createProtocolVariantFilterSpecParser();
                final ItemFilter<?> filter = sp.parse(input);
                final FilterResult<?> result = this.client ? filter.filter(this.ctx) : filter.filterServer(this.ctx);
                dump(result);
            } catch (final IllegalArgumentException e) {
                System.out.println(e.getMessage());
            }
        }
    }

    // Print the current provider/protocol/side/mode banner.
    private void printStatus() {
        System.out.printf("Interactive filter spec tester [%s/%s, %s, %s].%n",
                          this.ctx.getProvider().getName(),
                          this.ctx.getProtocol(),
                          this.client ? "client" : "server",
                          this.ciphers ? "cipher suite" : "protocol variant");
    }

    // Args are processed as commands before the interactive loop starts;
    // "-e" exits without entering the loop.
    public static void main(final String[] args) throws NoSuchAlgorithmException, IOException, KeyManagementException {
        InteractiveFilterSpecTester tst = new InteractiveFilterSpecTester();
        tst.start();
        boolean exit = false;
        for (String arg : args) {
            if (arg.equals("-e")) {
                exit = true;
                continue;
            }
            tst.processInput(arg, false);
        }
        if (!exit) {
            tst.readAndProcessInput();
        }
    }

    // Enumerate cipher suites / protocol variants from every reachable JSSE
    // entry point and report how consistent the different views are.
    private static void checkConsistency() {
        try {
            final Set<String> protocols = new HashSet<String>();
            protocols.addAll(Arrays.asList(SSLContext.getDefault().getSupportedSSLParameters().getProtocols()));
            protocols.add("TLS");
            protocols.add("SSL");
            protocols.add("Default");
            final List<Case> protocolCases = new ArrayList<Case>();
            final List<Case> cipherCases = new ArrayList<Case>();
            for (String proto : protocols) {
                try {
                    final SSLContext ctx;
                    if ("Default".equals(proto)) {
                        ctx = SSLContext.getDefault();
                    } else {
                        ctx = SSLContext.getInstance(proto);
                        ctx.init(null, null, null);
                    }
                    ContextCases cases = getContextCases(proto, ctx);
                    cipherCases.addAll(cases.getCipherCases());
                    protocolCases.addAll(cases.getProtoCases());
                } catch (NoSuchAlgorithmException e) {
// Continue } } ContextCases cases = getDefaultCases(); cipherCases.addAll(cases.getCipherCases()); protocolCases.addAll(cases.getProtoCases()); compare("Cipher Suites", cipherCases); System.out.println("================================================================="); System.out.println(); compare("Protocol Variants", protocolCases); } catch (Exception e) { e.printStackTrace(); } } private static class Name implements Comparable<Name> { private final String protocol; private final String name; private Name(final String protocol, final String name) { this.protocol = protocol; this.name = name; } public String getProtocol() { return this.protocol; } public String getName() { return this.name; } @Override public int compareTo(final Name o) { int proto = this.protocol.compareTo(o.protocol); return (proto == 0) ? this.name.compareTo(o.name) : proto; } @Override public int hashCode() { final int prime = 31; int result = 1; result = (prime * result) + ((this.name == null) ? 0 : this.name.hashCode()); result = (prime * result) + ((this.protocol == null) ? 
0 : this.protocol.hashCode()); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } Name other = (Name) obj; if (this.name == null) { if (other.name != null) { return false; } } else if (!this.name.equals(other.name)) { return false; } if (this.protocol == null) { if (other.protocol != null) { return false; } } else if (!this.protocol.equals(other.protocol)) { return false; } return true; } @Override public String toString() { return String.format("%s - %s", this.protocol, this.name); } } private static class ContextCases { private final List<Case> protoCases; private final List<Case> cipherCases; public ContextCases(final List<Case> cipherCases, final List<Case> protoCases) { this.cipherCases = cipherCases; this.protoCases = protoCases; } public List<Case> getCipherCases() { return this.cipherCases; } public List<Case> getProtoCases() { return this.protoCases; } } private static class Case { private final String[] items; private final Name name; private Case(final String protocol, final String name, final String[] items) { this.name = new Name(protocol, name); this.items = items; } public Name getName() { return this.name; } public String[] getItems() { return this.items; } } private static ContextCases getDefaultCases() throws Exception { List<Case> cipherCases = new ArrayList<Case>(); List<Case> protoCases = new ArrayList<Case>(); SSLServerSocketFactory defaultServerSocketFactory = (SSLServerSocketFactory) SSLServerSocketFactory.getDefault(); cipherCases.add(new Case("Default", "Default Ciphers from Default Server Socket Factory", defaultServerSocketFactory .getDefaultCipherSuites())); cipherCases.add(new Case("Default", "Supported Ciphers from Default Server Socket Factory ", defaultServerSocketFactory .getSupportedCipherSuites())); SSLServerSocket serverSocketFromDefault = (SSLServerSocket) 
defaultServerSocketFactory.createServerSocket(); cipherCases .add(new Case("Default", "Enabled Ciphers from Default Server Socket", serverSocketFromDefault.getEnabledCipherSuites())); cipherCases.add(new Case("Default", "Supported Ciphers from Default Server Socket", serverSocketFromDefault .getSupportedCipherSuites())); protoCases.add(new Case("Default", "Enabled Protocols from Default Server Socket", serverSocketFromDefault.getEnabledProtocols())); protoCases.add(new Case("Default", "Supported Protocols from Default Server Socket", serverSocketFromDefault .getSupportedProtocols())); SSLSocketFactory defaultSocketFactory = (SSLSocketFactory) SSLSocketFactory.getDefault(); cipherCases.add(new Case("Default", "Default Ciphers from Default Socket Factory", defaultSocketFactory.getDefaultCipherSuites())); cipherCases.add(new Case("Default", "Supported Ciphers from Default Socket Factory", defaultSocketFactory .getSupportedCipherSuites())); SSLSocket socketFromDefaultFactory = (SSLSocket) defaultSocketFactory.createSocket(); cipherCases.add(new Case("Default", "Enabled Ciphers from Default Socket", socketFromDefaultFactory.getEnabledCipherSuites())); cipherCases.add(new Case("Default", "Supported Ciphers from Default Socket", socketFromDefaultFactory.getSupportedCipherSuites())); cipherCases.add(new Case("Default", "Enabled Ciphers from Default Socket Parameters", socketFromDefaultFactory.getSSLParameters() .getCipherSuites())); protoCases.add(new Case("Default", "Enabled Protocols from Default Socket", socketFromDefaultFactory.getEnabledProtocols())); protoCases.add(new Case("Default", "Supported Protocols from Default Socket", socketFromDefaultFactory.getEnabledProtocols())); protoCases.add(new Case("Default", "Enabled Protocols from Default Socket Parameters", socketFromDefaultFactory .getEnabledProtocols())); return new ContextCases(cipherCases, protoCases); } private static ContextCases getContextCases(final String protocol, final SSLContext ctx) throws Exception 
{
        List<Case> cc = new ArrayList<Case>();
        List<Case> pc = new ArrayList<Case>();
        // Server- and client-mode engines are sampled separately.
        SSLEngine serverEngine = ctx.createSSLEngine();
        serverEngine.setUseClientMode(false);
        SSLEngine clientEngine = ctx.createSSLEngine();
        clientEngine.setUseClientMode(true);
        SSLServerSocketFactory serverSocketFactoryFromContext = ctx.getServerSocketFactory();
        SSLSocketFactory socketFactoryFromContext = ctx.getSocketFactory();
        SSLParameters defaultParamsFromContext = ctx.getDefaultSSLParameters();
        SSLParameters supportedParamsFromContext = ctx.getSupportedSSLParameters();
        cc.add(new Case(protocol, "Enabled Ciphers from Server Engine", serverEngine.getEnabledCipherSuites()));
        cc.add(new Case(protocol, "Supported Ciphers from Server Engine", serverEngine.getSupportedCipherSuites()));
        cc.add(new Case(protocol, "Enabled Ciphers from Server Engine Parameters", serverEngine.getSSLParameters().getCipherSuites()));
        pc.add(new Case(protocol, "Enabled Protocols from Server Engine", serverEngine.getEnabledProtocols()));
        pc.add(new Case(protocol, "Supported Protocols from Server Engine", serverEngine.getSupportedProtocols()));
        pc.add(new Case(protocol, "Enabled Protocols from Server Engine Parameters", serverEngine.getSSLParameters().getProtocols()));
        cc.add(new Case(protocol, "Enabled Ciphers from Client Engine", clientEngine.getEnabledCipherSuites()));
        cc.add(new Case(protocol, "Supported Ciphers from Client Engine", clientEngine.getSupportedCipherSuites()));
        cc.add(new Case(protocol, "Enabled Ciphers from Client Engine Parameters", clientEngine.getSSLParameters().getCipherSuites()));
        pc.add(new Case(protocol, "Enabled Protocols from Client Engine", clientEngine.getEnabledProtocols()));
        pc.add(new Case(protocol, "Supported Protocols from Client Engine", clientEngine.getSupportedProtocols()));
        pc.add(new Case(protocol, "Enabled Protocols from Client Engine Parameters", clientEngine.getSSLParameters().getProtocols()));
        cc.add(new Case(protocol, "Default Ciphers from Context Parameters", defaultParamsFromContext.getCipherSuites()));
        cc.add(new Case(protocol, "Supported Ciphers from Context Parameters", supportedParamsFromContext.getCipherSuites()));
        pc.add(new Case(protocol, "Default Protocols from Context", defaultParamsFromContext.getProtocols()));
        pc.add(new Case(protocol, "Supported Protocols from Context", supportedParamsFromContext.getProtocols()));
        cc.add(new Case(protocol, "Default Ciphers from Context Server Socket Factory", serverSocketFactoryFromContext
                .getDefaultCipherSuites()));
        cc.add(new Case(protocol, "Supported Ciphers from Context Server Socket Factory", serverSocketFactoryFromContext
                .getSupportedCipherSuites()));
        // NOTE(review): the sockets created below are never closed — in this
        // short-lived diagnostic tool that is likely tolerated, but verify.
        SSLServerSocket serverSocketFromContext = (SSLServerSocket) serverSocketFactoryFromContext.createServerSocket();
        cc.add(new Case(protocol, "Enabled Ciphers from Context Server Socket", serverSocketFromContext.getEnabledCipherSuites()));
        cc.add(new Case(protocol, "Supported Ciphers from Context Server Socket", serverSocketFromContext.getSupportedCipherSuites()));
        cc.add(new Case(protocol, "Default Ciphers from Context Socket Factory", socketFactoryFromContext.getDefaultCipherSuites()));
        cc.add(new Case(protocol, "Supported Ciphers from Context Socket Factory", socketFactoryFromContext.getSupportedCipherSuites()));
        pc.add(new Case(protocol, "Enabled Protocols from Context Server Socket", serverSocketFromContext.getEnabledProtocols()));
        pc.add(new Case(protocol, "Supported Protocols from Context Server Socket", serverSocketFromContext.getSupportedProtocols()));
        SSLSocket socketFromContextFactory = (SSLSocket) socketFactoryFromContext.createSocket();
        cc.add(new Case(protocol, "Enabled Ciphers from Context Socket", socketFromContextFactory.getEnabledCipherSuites()));
        cc.add(new Case(protocol, "Supported Ciphers from Context Socket", socketFromContextFactory.getSupportedCipherSuites()));
        cc.add(new Case(protocol, "Enabled Ciphers from Context Socket Parameters", socketFromContextFactory.getSSLParameters()
                .getCipherSuites()));
        pc.add(new Case(protocol, "Enabled Protocols from Context Socket", socketFromContextFactory.getEnabledProtocols()));
        pc.add(new Case(protocol, "Supported Protocols from Context Socket", socketFromContextFactory.getSupportedProtocols()));
        pc.add(new Case(protocol, "Enabled Protocols from Context Socket Parameters", socketFromContextFactory.getSSLParameters()
                .getProtocols()));
        return new ContextCases(cc, pc);
    }

    // Report, for one item type, the common subset, the total overhang, and
    // the groups of access paths that agree exactly with each other.
    private static void compare(final String test, final List<Case> items) {
        final Set<String> superset = new TreeSet<String>();
        TreeSet<Name> uniqueNames = new TreeSet<Name>();
        for (Case item : items) {
            superset.addAll(Arrays.asList(item.getItems()));
            uniqueNames.add(item.getName());
        }
        // commonSet = intersection of every sampled list.
        final Set<String> commonSet = new TreeSet<String>(superset);
        for (Case item : items) {
            commonSet.retainAll(Arrays.asList(item.getItems()));
        }
        if (uniqueNames.size() != items.size()) {
            System.err.println("Duplicate names");
        }
        Map<Set<String>, Set<String>> groupedNames = groupByProtocol(uniqueNames);
        System.out.printf("Comparing %s (%d) Locations:%n", test, sum(groupedNames.values()));
        for (Entry<Set<String>, Set<String>> item : groupedNames.entrySet()) {
            System.out.printf(" %s%n", item.getKey());
            for (String name : item.getValue()) {
                System.out.printf(" %s%n", name);
            }
        }
        System.out.println();
        System.out.printf("Shared subset of items (%d):%n", commonSet.size());
        for (String item : commonSet) {
            System.out.println(" " + item);
        }
        Set<String> totalOverhang = new TreeSet<String>(superset);
        totalOverhang.removeAll(commonSet);
        System.out.println();
        System.out.printf("Total overhang (%d):%n", totalOverhang.size());
        for (String item : totalOverhang) {
            System.out.println(" " + item);
        }
        System.out.println();
        // Bucket cases by their exact item set to find consistent groups.
        final Map<Set<String>, Set<Name>> consistents = new HashMap<Set<String>, Set<Name>>();
        // System.out.println("Individual details:");
        for (Case item : items) {
            Set<String> itemSet = new TreeSet<String>(Arrays.asList(item.getItems()));
            Set<Name> consistentWith =
consistents.get(itemSet);
            if (consistentWith == null) {
                consistentWith = new TreeSet<Name>();
                consistents.put(itemSet, consistentWith);
            }
            consistentWith.add(item.getName());
        }
        System.out.println();
        System.out.printf("Consistent subsets (%d):%n", consistents.size());
        for (Entry<Set<String>, Set<Name>> consistentSubset : consistents.entrySet()) {
            Set<String> itemSet = consistentSubset.getKey();
            Set<Name> itemNames = consistentSubset.getValue();
            Map<Set<String>, Set<String>> namesByProtocol = groupByProtocol(itemNames);
            System.out.printf(" Members (%d):%n", sum(namesByProtocol.values()));
            for (Entry<Set<String>, Set<String>> entry : namesByProtocol.entrySet()) {
                System.out.printf(" %s%n", entry.getKey());
                for (String name : entry.getValue()) {
                    System.out.printf(" %s%n", name);
                }
            }
            System.out.printf(" Items (%d):%n", itemSet.size());
            for (String item : itemSet) {
                System.out.println(" " + item);
            }
            // Underhang: items this group lacks; overhang: extras beyond the
            // shared common subset.
            Set<String> underhang = new TreeSet<String>(superset);
            underhang.removeAll(itemSet);
            Set<String> overhang = new TreeSet<String>(itemSet);
            overhang.removeAll(commonSet);
            System.out.printf(" Underhang (%d):%n", underhang.size());
            for (String item : underhang) {
                System.out.println(" " + item);
            }
            System.out.printf(" Overhang (%d):%n", overhang.size());
            for (String item : overhang) {
                System.out.println(" " + item);
            }
            System.out.println();
        }
    }

    // Total number of strings across all value sets.
    private static int sum(final Collection<Set<String>> values) {
        int count = 0;
        for (Set<String> set : values) {
            count += set.size();
        }
        return count;
    }

    // Invert Name(protocol, name) pairs into "set of protocols -> names seen
    // under exactly that protocol set", keyed deterministically by the
    // protocol set's string form.
    private static Map<Set<String>, Set<String>> groupByProtocol(final Set<Name> itemNames) {
        final Map<String, Set<String>> protosByName = new TreeMap<String, Set<String>>();
        for (Name name : itemNames) {
            Set<String> names = protosByName.get(name.getName());
            if (names == null) {
                names = new TreeSet<String>();
                protosByName.put(name.getName(), names);
            }
            names.add(name.getProtocol());
        }
        Map<Set<String>, Set<String>> namesByProtoGroup = new TreeMap<Set<String>, Set<String>>(new Comparator<Set<String>>() {
            @Override
            public int compare(final Set<String> o1, final Set<String> o2) {
                return o1.toString().compareTo(o2.toString());
            }
        });
        for (Entry<String, Set<String>> entry : protosByName.entrySet()) {
            Set<String> protos = entry.getValue();
            String name = entry.getKey();
            Set<String> namesForProtos = namesByProtoGroup.get(protos);
            if (namesForProtos == null) {
                namesForProtos = new TreeSet<String>();
                namesByProtoGroup.put(protos, namesForProtos);
            }
            namesForProtos.add(name);
        }
        return namesByProtoGroup;
    }

    // Print a filter result in the current mode (ciphers vs protocol
    // variants) and output style (bare vs full table).
    @SuppressWarnings("unchecked")
    private void dump(final FilterResult<?> result) {
        System.out.printf("%d matches, %d excluded, %d blacklisted. %n", result.getIncluded().size(), result.getExcluded().size(), result
                .getBlacklisted().size());
        if (result.getIncluded().isEmpty()) {
            return;
        }
        if (this.ciphers) {
            dumpCipherSuites((FilterResult<CipherSuite>) result, this.bare);
        } else {
            dumpProtocolVariants((FilterResult<ProtocolVariant>) result, this.bare);
        }
    }

    // Bare mode: one name per line plus a trailing comma-joined list;
    // full mode: aligned table with an unsafe marker column.
    private static void dumpProtocolVariants(final FilterResult<ProtocolVariant> result, final boolean bare) {
        if (bare) {
            Iterator<ProtocolVariant> pvs = result.getIncluded().iterator();
            StringBuilder flat = new StringBuilder();
            while (pvs.hasNext()) {
                ProtocolVariant pv = pvs.next();
                System.out.println(pv.getName());
                flat.append(pv.getName());
                if (pvs.hasNext()) {
                    flat.append(",");
                }
            }
            System.out.println(flat);
        } else {
            System.out.printf("%-20s %-8s %-5s %-3s %-6s %s%n", "Variant", "Family", "Major", "Minor", "Pseudo", "Unsafe");
            for (final ProtocolVariant c : result.getIncluded()) {
                System.out.printf(PV_OUTPUT_FORMAT,
                                  c.getName(),
                                  c.getFamily(),
                                  c.getMajorVersion(),
                                  c.getMinorVersion(),
                                  c.getPseudoProtocol() == null ? "" : c.getPseudoProtocol(),
                                  ProtocolVariantFilters.isSafe(c) ? "" : "*");
            }
        }
    }

    // Same two output styles for cipher suites; signalling suites (e.g. SCSV
    // markers) have no cipher details, so only the name is printed.
    private static void dumpCipherSuites(final FilterResult<CipherSuite> result, final boolean bare) {
        if (bare) {
            Iterator<CipherSuite> cs = result.getIncluded().iterator();
            StringBuilder flat = new StringBuilder();
            while (cs.hasNext()) {
                CipherSuite c = cs.next();
                System.out.println(c.getName());
                flat.append(c.getName());
                if (cs.hasNext()) {
                    flat.append(",");
                }
            }
            System.out.println();
            System.out.println(flat);
        } else {
            System.out.printf("%-40s %-8s %-8s %-10s %-4s %3s %-3s %-6s %4s %s%n", "Cipher", "Kx", "Au", "Enc", "Mode", "Key", "Str",
                              "Mac", "Size", "Unsafe");
            for (final CipherSuite c : result.getIncluded()) {
                if (c.isSignalling()) {
                    System.out.println(c.getName());
                } else {
                    System.out.printf(CS_OUTPUT_FORMAT, c.getName(), c.getKeyExchange().getKeyAgreementAlgo(), c.getKeyExchange()
                            .getAuthenticationAlgo(), c.getCipher().getAlgorithm(), c.getCipher().getMode() == null ? "" : c.getCipher()
                            .getMode(), c.getCipher().getKeySize(), c.getCipher().getStrength(), c.getMac().getAlgorithm(), c.getMac()
                            .getSize(), CipherSuiteFilters.isSafe(c) ? "" : "*");
                }
            }
        }
    }
}
/*
 * Copyright 2000-2010 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.roots.ui.configuration;

import com.intellij.openapi.Disposable;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.MessageType;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.IconLoader;
import com.intellij.openapi.wm.impl.content.GraphicsConfig;
import com.intellij.ui.components.JBList;
import com.intellij.ui.components.JBScrollPane;
import com.intellij.ui.components.labels.LinkLabel;
import com.intellij.ui.components.labels.LinkListener;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.messages.MessageBusConnection;
import com.intellij.util.ui.BaseButtonBehavior;
import com.intellij.util.ui.TimedDeadzone;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.event.ListDataEvent;
import javax.swing.event.ListDataListener;
import java.awt.*;
import java.awt.event.ComponentAdapter;
import java.awt.event.ComponentEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.geom.RoundRectangle2D;
import java.util.ArrayList;
import java.util.List;

/**
 * Panel that shows project-configuration errors in one of two interchangeable
 * views: a compact one-line summary ({@link OneLineErrorComponent}) and an
 * expanded list ({@link MultiLineErrorComponent}). The panel listens to its
 * {@link ConfigurationErrorsListModel} and re-renders whenever the model
 * changes.
 *
 * User: spLeaner
 */
public class ConfigurationErrorsComponent extends JPanel implements Disposable, ListDataListener {
  // View-mode flags for ensureCurrentViewIs().
  private static final boolean ONE_LINE = true;
  private static final boolean MULTI_LINE = false;

  private static final Icon EXPAND = IconLoader.getIcon("/actions/expandall.png");
  private static final Icon COLLAPSE = IconLoader.getIcon("/actions/collapseall.png");
  private static final Icon FIX = IconLoader.findIcon("/actions/quickfixBulb.png");
  private static final Icon IGNORE = IconLoader.findIcon("/toolbar/unknown.png");
  private static final Icon NAVIGATE = IconLoader.findIcon("/general/autoscrollToSource.png");

  // Logical button names used to dispatch clicks in processListMouseEvent().
  @NonNls private static final String FIX_ACTION_NAME = "FIX";
  @NonNls private static final String NAVIGATE_ACTION_NAME = "NAVIGATE";

  private ConfigurationErrorsListModel myConfigurationErrorsListModel;
  private ErrorView myCurrentView;                           // the view currently installed in CENTER
  private OneLineErrorComponent myOneLineErrorComponent;     // lazily created
  private MultiLineErrorComponent myMultiLineErrorComponent; // lazily created

  public ConfigurationErrorsComponent(@NotNull final Project project) {
    setLayout(new BorderLayout());
    myConfigurationErrorsListModel = new ConfigurationErrorsListModel(project);
    myConfigurationErrorsListModel.addListDataListener(this);

    addComponentListener(new ComponentAdapter() {
      @Override
      public void componentResized(final ComponentEvent e) {
        revalidate();
        repaint();
      }
    });

    ensureCurrentViewIs(ONE_LINE);
    Disposer.register(this, myConfigurationErrorsListModel);
  }

  @Override
  public void dispose() {
    if (myConfigurationErrorsListModel != null) {
      myConfigurationErrorsListModel.removeListDataListener(this);
      myConfigurationErrorsListModel = null;
    }
  }

  /**
   * Installs the requested view (creating it lazily on first use), removing
   * the previous one from this panel. Each view's onViewChange() callback
   * toggles to the opposite view. No-op when the requested view is already
   * current.
   */
  private void ensureCurrentViewIs(final boolean oneLine) {
    if (oneLine) {
      if (myCurrentView instanceof OneLineErrorComponent) return;
      if (myOneLineErrorComponent == null) {
        myOneLineErrorComponent = new OneLineErrorComponent(myConfigurationErrorsListModel) {
          @Override
          public void onViewChange() {
            ensureCurrentViewIs(MULTI_LINE);
          }
        };
      }
      if (myCurrentView != null) {
        remove(myCurrentView.self());
      }
      myCurrentView = myOneLineErrorComponent;
    }
    else {
      if (myCurrentView instanceof MultiLineErrorComponent) return;
      if (myMultiLineErrorComponent == null) {
        myMultiLineErrorComponent = new MultiLineErrorComponent(myConfigurationErrorsListModel) {
          @Override
          public void onViewChange() {
            ensureCurrentViewIs(ONE_LINE);
          }
        };
      }
      if (myCurrentView != null) {
        remove(myCurrentView.self());
      }
      myCurrentView = myMultiLineErrorComponent;
    }

    add(myCurrentView.self(), BorderLayout.CENTER);
    myCurrentView.updateView();
    revalidate();
    repaint();
  }

  @Override
  public void intervalAdded(final ListDataEvent e) {
    updateCurrentView();
  }

  @Override
  public void intervalRemoved(final ListDataEvent e) {
    updateCurrentView();
  }

  @Override
  public void contentsChanged(final ListDataEvent e) {
    updateCurrentView();
  }

  // Collapses back to the one-line view when the model becomes empty while
  // the multi-line view is showing, then refreshes the current view.
  private void updateCurrentView() {
    if (myCurrentView instanceof MultiLineErrorComponent && myConfigurationErrorsListModel.getSize() == 0) {
      ensureCurrentViewIs(ONE_LINE);
    }
    myCurrentView.updateView();
  }

  /** Common contract for the two interchangeable error views. */
  private interface ErrorView {
    void updateView();
    void onViewChange();
    JComponent self();
  }

  /**
   * Expanded view: a JBList of errors rendered by {@link ErrorListRenderer},
   * with per-row FIX/NAVIGATE/IGNORE buttons dispatched by hit-testing the
   * renderer, and a collapse button on the left.
   */
  private abstract static class MultiLineErrorComponent extends JPanel implements ErrorView {
    private ConfigurationErrorsListModel myModel;
    private JList myList = new JBList();

    protected MultiLineErrorComponent(@NotNull final ConfigurationErrorsListModel model) {
      setLayout(new BorderLayout());
      setBorder(BorderFactory.createEmptyBorder(5, 0, 5, 0));
      myModel = model;

      myList.setModel(model);
      myList.setCellRenderer(new ErrorListRenderer(myList));
      myList.setBackground(UIUtil.getPanelBackground());

      myList.addMouseListener(new MouseAdapter() {
        @Override
        public void mouseClicked(final MouseEvent e) {
          if (!e.isPopupTrigger()) {
            processListMouseEvent(e, true);
          }
        }
      });

      myList.addComponentListener(new ComponentAdapter() {
        @Override
        public void componentResized(ComponentEvent e) {
          myList.setCellRenderer(new ErrorListRenderer(myList)); // request cell renderer size invalidation
          updatePreferredSize();
        }
      });

      add(new JBScrollPane(myList), BorderLayout.CENTER);
      add(buildToolbar(), BorderLayout.WEST);
    }

    /**
     * Maps a mouse event to the renderer button under it. The renderer is a
     * rubber stamp, so it is laid out at the cell bounds and the click point
     * is translated into renderer coordinates before the deepest-component
     * hit test. Buttons are identified by their logical name; anything that
     * is a ToolbarAlikeButton but neither FIX nor NAVIGATE is treated as
     * IGNORE.
     */
    private void processListMouseEvent(final MouseEvent e, final boolean click) {
      final int index = myList.locationToIndex(e.getPoint());
      if (index > -1) {
        final Object value = myList.getModel().getElementAt(index);
        if (value != null && value instanceof ConfigurationError) {
          final ConfigurationError error = (ConfigurationError)value;
          final Component renderer = myList.getCellRenderer().getListCellRendererComponent(myList, value, index, false, false);
          if (renderer instanceof ErrorListRenderer) {
            final Rectangle bounds = myList.getCellBounds(index, index);
            renderer.setBounds(bounds);
            renderer.doLayout();

            final Point point = e.getPoint();
            point.translate(-bounds.x, -bounds.y);

            final Component deepestComponentAt = SwingUtilities.getDeepestComponentAt(renderer, point.x, point.y);
            if (deepestComponentAt instanceof ToolbarAlikeButton) {
              final String name = ((ToolbarAlikeButton)deepestComponentAt).getButtonName();
              if (FIX_ACTION_NAME.equals(name)) {
                onClickFix(error, (JComponent)deepestComponentAt);
              }
              else if (NAVIGATE_ACTION_NAME.equals(name)) {
                error.navigate();
              }
              else {
                onClickIgnore(error);
              }
            }
          }
        }
      }
    }

    // Toggles the error's ignored state and tells the model to repaint it.
    private void onClickIgnore(@NotNull final ConfigurationError error) {
      error.ignore(!error.isIgnored());
      final ListModel model = myList.getModel();
      if (model instanceof ConfigurationErrorsListModel) {
        ((ConfigurationErrorsListModel)model).update(error);
      }
    }

    private void onClickFix(@NotNull final ConfigurationError error, JComponent component) {
      error.fix(component);
    }

    @Override
    public void addNotify() {
      super.addNotify();
      updatePreferredSize();
    }

    // Caps the view's height at a quarter of the ancestor window's height.
    private void updatePreferredSize() {
      final Window window = SwingUtilities.getWindowAncestor(this);
      if (window != null) {
        final Dimension d = window.getSize();
        final Dimension preferredSize = getPreferredSize();
        setPreferredSize(new Dimension(preferredSize.width, d.height / 4));
        setMinimumSize(new Dimension(preferredSize.width, 100));
      }
    }

    // Left-hand toolbar containing only the collapse button.
    private JComponent buildToolbar() {
      final JPanel result = new JPanel();
      result.setBorder(BorderFactory.createEmptyBorder(5, 0, 0, 0));
      result.setLayout(new BorderLayout());
      result.add(new ToolbarAlikeButton(COLLAPSE) {
        {
          setToolTipText("Collapse");
        }

        @Override
        public void onClick() {
          onViewChange();
        }
      }, BorderLayout.NORTH);
      return result;
    }

    @Override
    public void updateView() {
    }

    @Override
    public JComponent self() {
      return this;
    }

    public abstract void onViewChange();
  }

  /**
   * Lightweight icon button with press/hover behavior supplied by
   * BaseButtonBehavior; optionally carries a logical name used for
   * hit-test dispatch in the list renderer.
   */
  private abstract static class ToolbarAlikeButton extends JComponent {
    private BaseButtonBehavior myBehavior;
    private Icon myIcon;
    private String myName;

    private ToolbarAlikeButton(@NotNull final Icon icon, @NotNull final String name) {
      this(icon);
      myName = name;
    }

    private ToolbarAlikeButton(@NotNull final Icon icon) {
      myIcon = icon;

      myBehavior = new BaseButtonBehavior(this, TimedDeadzone.NULL) {
        @Override
        protected void execute(MouseEvent e) {
          onClick();
        }
      };

      setOpaque(false);
    }

    public String getButtonName() {
      return myName;
    }

    public void onClick() {}

    @Override
    public Insets getInsets() {
      return new Insets(2, 2, 2, 2);
    }

    @Override
    public Dimension getPreferredSize() {
      return getMinimumSize();
    }

    @Override
    public Dimension getMinimumSize() {
      final Insets insets = getInsets();
      return new Dimension(myIcon.getIconWidth() + insets.left + insets.right,
                           myIcon.getIconHeight() + insets.top + insets.bottom);
    }

    @Override
    public void paint(final Graphics g) {
      final Insets insets = getInsets();
      final Dimension d = getSize();

      // Center the icon; nudge it 1px down-right while pressed for feedback.
      int x = (d.width - myIcon.getIconWidth() - insets.left - insets.right) / 2;
      int y = (d.height - myIcon.getIconHeight() - insets.top - insets.bottom) / 2;

      if (myBehavior.isHovered()) {
        // todo
      }

      if (myBehavior.isPressedByMouse()) {
        x += 1;
        y += 1;
      }

      myIcon.paintIcon(this, g, x + insets.left, y + insets.top);
    }
  }

  /**
   * Rubber-stamp renderer for error rows: wrapped HTML description on the
   * left, FIX/NAVIGATE/IGNORE buttons on the right, rounded background tinted
   * by the error's ignored state. A hidden "fake" text pane inside a viewport
   * is used to measure the wrapped text height at the list's current width.
   */
  private static class ErrorListRenderer extends JComponent implements ListCellRenderer {
    private boolean mySelected;
    private boolean myHasFocus;
    private JTextPane myText;
    private JTextPane myFakeTextPane;   // measuring twin of myText
    private JViewport myFakeViewport;   // constrains the twin to the list width
    private JList myList;
    private JPanel myButtonsPanel;
    private JPanel myFixGroup;

    private ErrorListRenderer(@NotNull final JList list) {
      setLayout(new BorderLayout());
      setBorder(BorderFactory.createEmptyBorder(2, 2, 2, 2));
      setOpaque(false);

      myList = list;

      myText = new JTextPane() {
        @Override
        public void setCursor(Cursor cursor) {
          super.setCursor(cursor);
          //onCursorChanged(cursor);
        }
      };

      myButtonsPanel = new JPanel(new BorderLayout());
      myButtonsPanel.setBorder(BorderFactory.createEmptyBorder(5, 3, 5, 3));
      myButtonsPanel.setOpaque(false);
      final JPanel buttons = new JPanel();
      buttons.setOpaque(false);
      buttons.setLayout(new BoxLayout(buttons, BoxLayout.X_AXIS));
      myButtonsPanel.add(buttons, BorderLayout.NORTH);
      add(myButtonsPanel, BorderLayout.EAST);

      // The fix button is in its own group so it can be hidden per-row when
      // the error has no quick fix (see getListCellRendererComponent).
      myFixGroup = new JPanel();
      myFixGroup.setOpaque(false);
      myFixGroup.setLayout(new BoxLayout(myFixGroup, BoxLayout.Y_AXIS));
      myFixGroup.add(new ToolbarAlikeButton(FIX, FIX_ACTION_NAME) {});
      myFixGroup.add(Box.createHorizontalStrut(3));
      buttons.add(myFixGroup);

      buttons.add(new ToolbarAlikeButton(NAVIGATE, NAVIGATE_ACTION_NAME) {});
      buttons.add(Box.createHorizontalStrut(3));
      buttons.add(new ToolbarAlikeButton(IGNORE, "IGNORE") {});

      myFakeTextPane = new JTextPane();
      myText.setBorder(BorderFactory.createEmptyBorder(3, 3, 3, 3));
      myFakeTextPane.setBorder(BorderFactory.createEmptyBorder(3, 0, 3, 0));
      myText.setOpaque(false);
      if (UIUtil.isUnderNimbusLookAndFeel()) {
        // Nimbus ignores setOpaque(false); a fully transparent background
        // achieves the same effect.
        myText.setBackground(new Color(0, 0, 0, 0));
      }

      myText.setEditable(false);
      myFakeTextPane.setEditable(false);
      myText.setEditorKit(UIUtil.getHTMLEditorKit());
      myFakeTextPane.setEditorKit(UIUtil.getHTMLEditorKit());

      myFakeViewport = new JViewport();
      myFakeViewport.setView(myFakeTextPane);

      add(myText, BorderLayout.CENTER);
    }

    @Override
    public Dimension getPreferredSize() {
      final Container parent = myList.getParent();
      if (parent != null) {
        // Measure the wrapped text at the list's width via the fake
        // pane/viewport pair, then take the taller of text and buttons.
        myFakeTextPane.setText(myText.getText());
        final Dimension size = parent.getSize();
        myFakeViewport.setSize(size);
        final Dimension preferredSize = myFakeTextPane.getPreferredSize();

        final Dimension buttonsPrefSize = myButtonsPanel.getPreferredSize();
        final int maxHeight = Math.max(buttonsPrefSize.height, preferredSize.height);

        final Insets insets = getInsets();
        return new Dimension(Math.min(size.width - 20, preferredSize.width), maxHeight + insets.top + insets.bottom);
      }
      return super.getPreferredSize();
    }

    @Override
    public Component getListCellRendererComponent(final JList list,
                                                  final Object value,
                                                  final int index,
                                                  final boolean isSelected,
                                                  final boolean cellHasFocus) {
      final ConfigurationError error = (ConfigurationError)value;
      myList = list;
      mySelected = isSelected;
      myHasFocus = cellHasFocus;
      myFixGroup.setVisible(error.canBeFixed());

      myText.setText(error.getDescription());

      // Ignored errors render with the warning tint, active ones with error.
      setBackground(error.isIgnored() ? MessageType.WARNING.getPopupBackground() : MessageType.ERROR.getPopupBackground());
      return this;
    }

    @Override
    protected void paintComponent(Graphics g) {
      final Graphics2D g2d = (Graphics2D)g;
      final Rectangle bounds = getBounds();
      final Insets insets = getInsets();

      final GraphicsConfig cfg = new GraphicsConfig(g);
      cfg.setAntialiasing(true);

      final Shape shape = new RoundRectangle2D.Double(insets.left, insets.top,
                                                      bounds.width - 1 - insets.left - insets.right,
                                                      bounds.height - 1 - insets.top - insets.bottom, 6, 6);

      if (mySelected) {
        g2d.setColor(UIUtil.getListSelectionBackground());
        g2d.fillRect(0, 0, bounds.width, bounds.height);
      }

      g2d.setColor(Color.WHITE);
      g2d.fill(shape);

      Color bgColor = getBackground();
      g2d.setColor(bgColor);
      g2d.fill(shape);

      // Darker outline for focused/selected rows.
      g2d.setColor(myHasFocus || mySelected ? getBackground().darker().darker() : getBackground().darker());
      g2d.draw(shape);

      cfg.restore();

      super.paintComponent(g);
    }
  }

  /**
   * Compact view: for a single error shows its title with inline
   * fix/navigate/ignore buttons; for multiple errors shows an "N errors
   * found" link that expands to the multi-line view. Ignored errors get
   * their own warning-tinted label.
   */
  private abstract static class OneLineErrorComponent extends JComponent implements ErrorView, LinkListener {
    private LinkLabel myErrorsLabel = new LinkLabel(null, null);
    private LinkLabel myIgnoredErrorsLabel = new LinkLabel(null, null);
    private JLabel mySingleErrorLabel = new JLabel();

    private ConfigurationErrorsListModel myModel;

    private OneLineErrorComponent(@NotNull final ConfigurationErrorsListModel model) {
      myModel = model;

      setLayout(new BorderLayout());
      setOpaque(true);

      updateLabel(myErrorsLabel, MessageType.ERROR.getPopupBackground(), this);
      updateLabel(mySingleErrorLabel, MessageType.ERROR.getPopupBackground(), null);
      updateLabel(myIgnoredErrorsLabel, MessageType.WARNING.getPopupBackground(), this);
    }

    private static void updateLabel(@NotNull final JLabel label, @NotNull final Color bgColor, @Nullable final LinkListener listener) {
      label.setBorder(BorderFactory.createEmptyBorder(3, 5, 3, 5));
      label.setOpaque(true);
      label.setBackground(bgColor);
      if (label instanceof LinkLabel) {
        ((LinkLabel)label).setListener(listener, null);
      }
    }

    @Override
    public void updateView() {
      if (myModel.getSize() == 0) {
        setBorder(null);
      }
      else {
        if (getBorder() == null) setBorder(
          BorderFactory.createCompoundBorder(BorderFactory.createMatteBorder(5, 0, 5, 0, UIUtil.getPanelBackground()),
                                             BorderFactory.createLineBorder(UIUtil.getPanelBackground().darker())));
      }

      final List<ConfigurationError> errors = myModel.getErrors();
      if (errors.size() > 0) {
        if (errors.size() == 1) {
          mySingleErrorLabel.setText(myModel.getErrors().get(0).getPlainTextTitle());
        }
        else {
          myErrorsLabel.setText(String.format("%s errors found", errors.size()));
        }
      }

      final List<ConfigurationError> ignoredErrors = myModel.getIgnoredErrors();
      if (ignoredErrors.size() > 0) {
        myIgnoredErrorsLabel.setText(String.format("%s ignored error%s", ignoredErrors.size(), ignoredErrors.size() == 1 ? "" : "s"));
      }

      // Rebuild the component from scratch for the current error counts.
      removeAll();
      if (errors.size() > 0) {
        if (errors.size() == 1) {
          add(wrapLabel(mySingleErrorLabel, errors.get(0)), BorderLayout.CENTER);
          mySingleErrorLabel.setToolTipText(errors.get(0).getDescription());
        }
        else {
          add(myErrorsLabel, BorderLayout.CENTER);
        }
      }

      if (ignoredErrors.size() > 0) {
        add(myIgnoredErrorsLabel, errors.size() > 0 ? BorderLayout.EAST : BorderLayout.CENTER);
      }

      revalidate();
      repaint();
    }

    // Wraps the single-error label with inline fix/navigate/ignore buttons.
    // All button callbacks operate on the model's first element.
    private JComponent wrapLabel(@NotNull final JLabel label, @NotNull final ConfigurationError configurationError) {
      final JPanel result = new JPanel(new BorderLayout());
      result.setBackground(label.getBackground());
      result.add(label, BorderLayout.CENTER);

      final JPanel buttonsPanel = new JPanel();
      buttonsPanel.setOpaque(false);
      buttonsPanel.setLayout(new BoxLayout(buttonsPanel, BoxLayout.X_AXIS));

      if (configurationError.canBeFixed()) {
        buttonsPanel.add(new ToolbarAlikeButton(FIX) {
          {
            setToolTipText("Fix error");
          }

          @Override
          public void onClick() {
            final Object o = myModel.getElementAt(0);
            if (o instanceof ConfigurationError) {
              ((ConfigurationError)o).fix(this);
              updateView();
              final Container ancestor = SwingUtilities.getAncestorOfClass(ConfigurationErrorsComponent.class, this);
              if (ancestor != null && ancestor instanceof JComponent) {
                ((JComponent)ancestor).revalidate();
                ancestor.repaint();
              }
            }
          }
        });

        buttonsPanel.add(Box.createHorizontalStrut(3));
      }

      buttonsPanel.add(new ToolbarAlikeButton(NAVIGATE) {
        {
          setToolTipText("Navigate to error");
        }

        @Override
        public void onClick() {
          final Object o = myModel.getElementAt(0);
          if (o instanceof ConfigurationError) {
            ((ConfigurationError)o).navigate();
          }
        }
      });

      buttonsPanel.add(Box.createHorizontalStrut(3));

      buttonsPanel.add(new ToolbarAlikeButton(IGNORE) {
        {
          setToolTipText("Ignore error");
        }

        @Override
        public void onClick() {
          final Object o = myModel.getElementAt(0);
          if (o instanceof ConfigurationError) {
            ((ConfigurationError)o).ignore(!((ConfigurationError)o).isIgnored());
            updateView();
          }
        }
      });

      buttonsPanel.add(Box.createHorizontalStrut(5));

      result.add(buttonsPanel, BorderLayout.EAST);
      return result;
    }

    @Override
    public JComponent self() {
      return this;
    }

    public abstract void onViewChange();

    @Override
    public void linkSelected(LinkLabel aSource, Object aLinkData) {
      onViewChange();
    }
  }

  /**
   * ListModel backed by the project's message bus: subscribes to the
   * ConfigurationErrors.TOPIC and maintains the error list, keeping ignored
   * errors at the end (addError inserts active errors at index 0).
   */
  private static class ConfigurationErrorsListModel extends AbstractListModel implements ConfigurationErrors, Disposable {
    private MessageBusConnection myConnection;
    private List<ConfigurationError> myErrorsList = new ArrayList<ConfigurationError>();

    private ConfigurationErrorsListModel(@NotNull final Project project) {
      myConnection = project.getMessageBus().connect();
      myConnection.subscribe(TOPIC, this);
    }

    @Override
    public int getSize() {
      return myErrorsList.size();
    }

    @Override
    public Object getElementAt(int index) {
      return myErrorsList.get(index);
    }

    @Override
    public void addError(@NotNull ConfigurationError error) {
      if (!myErrorsList.contains(error)) {
        // Ignored errors are appended; active errors are prepended.
        int ndx = 0;
        if (error.isIgnored()) {
          ndx = myErrorsList.size();
        }
        myErrorsList.add(ndx, error);
        fireIntervalAdded(this, ndx, ndx);
      }
    }

    @Override
    public void removeError(@NotNull ConfigurationError error) {
      if (myErrorsList.contains(error)) {
        final int ndx = myErrorsList.indexOf(error);
        myErrorsList.remove(ndx);
        fireIntervalRemoved(this, ndx, ndx);
      }
    }

    /** Errors that are not ignored. */
    public List<ConfigurationError> getErrors() {
      return ContainerUtil.filter(myErrorsList, new Condition<ConfigurationError>() {
        @Override
        public boolean value(final ConfigurationError error) {
          return !error.isIgnored();
        }
      });
    }

    /** Errors the user chose to ignore. */
    public List<ConfigurationError> getIgnoredErrors() {
      return ContainerUtil.filter(myErrorsList, new Condition<ConfigurationError>() {
        @Override
        public boolean value(final ConfigurationError error) {
          return error.isIgnored();
        }
      });
    }

    @Override
    public void dispose() {
      if (myConnection != null) {
        myConnection.disconnect();
        myConnection = null;
      }
    }

    // Fires a change event for a single error so its row is repainted.
    public void update(final ConfigurationError error) {
      final int ndx = myErrorsList.indexOf(error);
      if (ndx >= 0) {
        fireContentsChanged(this, ndx, ndx);
      }
    }
  }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.connector.jmx; import com.facebook.presto.spi.ColumnHandle; import com.facebook.presto.spi.ColumnMetadata; import com.facebook.presto.spi.ColumnType; import com.facebook.presto.spi.ConnectorMetadata; import com.facebook.presto.spi.ConnectorTableMetadata; import com.facebook.presto.spi.SchemaTableName; import com.facebook.presto.spi.SchemaTablePrefix; import com.facebook.presto.spi.TableHandle; import com.google.common.base.Function; import com.google.common.base.Predicate; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList.Builder; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Maps; import javax.inject.Inject; import javax.management.JMException; import javax.management.MBeanAttributeInfo; import javax.management.MBeanInfo; import javax.management.MBeanServer; import javax.management.ObjectName; import java.util.List; import java.util.Map; import java.util.NoSuchElementException; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static javax.management.ObjectName.WILDCARD; public class JmxMetadata implements ConnectorMetadata { public static final String SCHEMA_NAME = "jmx"; private final String connectorId; private final MBeanServer mbeanServer; @Inject public JmxMetadata(JmxConnectorId 
jmxConnectorId, MBeanServer mbeanServer) { this.connectorId = checkNotNull(jmxConnectorId, "jmxConnectorId is null").toString(); this.mbeanServer = checkNotNull(mbeanServer, "mbeanServer is null"); } @Override public boolean canHandle(TableHandle tableHandle) { return tableHandle instanceof JmxTableHandle && ((JmxTableHandle) tableHandle).getConnectorId().equals(connectorId); } @Override public List<String> listSchemaNames() { return ImmutableList.of(SCHEMA_NAME); } @Override public JmxTableHandle getTableHandle(SchemaTableName tableName) { checkNotNull(tableName, "tableName is null"); if (!tableName.getSchemaName().equals(SCHEMA_NAME)) { return null; } try { ObjectName objectName = Iterables.find(mbeanServer.queryNames(WILDCARD, null), objectNameEqualsIgnoreCase(new ObjectName(tableName.getTableName()))); MBeanInfo mbeanInfo = mbeanServer.getMBeanInfo(objectName); ImmutableList.Builder<JmxColumnHandle> columns = ImmutableList.builder(); int ordinalPosition = 0; columns.add(new JmxColumnHandle(connectorId, "node", ColumnType.STRING, ordinalPosition++)); for (MBeanAttributeInfo attribute : mbeanInfo.getAttributes()) { if (!attribute.isReadable()) { continue; } columns.add(new JmxColumnHandle(connectorId, attribute.getName(), getColumnType(attribute), ordinalPosition++)); } return new JmxTableHandle(connectorId, objectName.toString(), columns.build()); } catch (NoSuchElementException | JMException e) { return null; } } @Override public ConnectorTableMetadata getTableMetadata(TableHandle tableHandle) { checkNotNull(tableHandle, "tableHandle is null"); checkArgument(tableHandle instanceof JmxTableHandle, "tableHandle is not an instance of JmxTableHandle"); JmxTableHandle jmxTableHandle = (JmxTableHandle) tableHandle; return jmxTableHandle.getTableMetadata(); } @Override public List<SchemaTableName> listTables(String schemaNameOrNull) { if (schemaNameOrNull != null && !schemaNameOrNull.equals(SCHEMA_NAME)) { return ImmutableList.of(); } Builder<SchemaTableName> 
tableNames = ImmutableList.builder(); for (ObjectName objectName : mbeanServer.queryNames(WILDCARD, null)) { // todo remove lower case when presto supports mixed case names tableNames.add(new SchemaTableName(SCHEMA_NAME, objectName.toString().toLowerCase())); } return tableNames.build(); } @Override public ColumnHandle getColumnHandle(TableHandle tableHandle, String columnName) { checkNotNull(tableHandle, "tableHandle is null"); checkArgument(tableHandle instanceof JmxTableHandle, "tableHandle is not an instance of JmxTableHandle"); JmxTableHandle jmxTableHandle = (JmxTableHandle) tableHandle; for (JmxColumnHandle jmxColumnHandle : jmxTableHandle.getColumns()) { if (jmxColumnHandle.getColumnName().equalsIgnoreCase(columnName)) { return jmxColumnHandle; } } return null; } @Override public Map<String, ColumnHandle> getColumnHandles(TableHandle tableHandle) { checkNotNull(tableHandle, "tableHandle is null"); checkArgument(tableHandle instanceof JmxTableHandle, "tableHandle is not an instance of JmxTableHandle"); JmxTableHandle jmxTableHandle = (JmxTableHandle) tableHandle; return ImmutableMap.<String, ColumnHandle>copyOf(Maps.uniqueIndex(jmxTableHandle.getColumns(), new Function<JmxColumnHandle, String>() { @Override public String apply(JmxColumnHandle input) { return input.getColumnName().toLowerCase(); } })); } @Override public ColumnMetadata getColumnMetadata(TableHandle tableHandle, ColumnHandle columnHandle) { checkNotNull(tableHandle, "tableHandle is null"); checkArgument(tableHandle instanceof JmxTableHandle, "tableHandle is not an instance of JmxTableHandle"); checkNotNull(columnHandle, "columnHandle is null"); checkArgument(columnHandle instanceof JmxColumnHandle, "columnHandle is not an instance of JmxColumnHandle"); JmxColumnHandle jmxColumnHandle = (JmxColumnHandle) columnHandle; return jmxColumnHandle.getColumnMetadata(); } @Override public Map<SchemaTableName, List<ColumnMetadata>> listTableColumns(SchemaTablePrefix prefix) { checkNotNull(prefix, "prefix 
is null"); if (prefix.getSchemaName() != null && !prefix.getSchemaName().equals(SCHEMA_NAME)) { return ImmutableMap.of(); } ImmutableMap.Builder<SchemaTableName, List<ColumnMetadata>> columns = ImmutableMap.builder(); List<SchemaTableName> tableNames; if (prefix.getTableName() == null) { tableNames = listTables(prefix.getSchemaName()); } else { tableNames = ImmutableList.of(new SchemaTableName(prefix.getSchemaName(), prefix.getTableName())); } for (SchemaTableName tableName : tableNames) { JmxTableHandle tableHandle = getTableHandle(tableName); columns.put(tableName, tableHandle.getTableMetadata().getColumns()); } return columns.build(); } @Override public TableHandle createTable(ConnectorTableMetadata tableMetadata) { throw new UnsupportedOperationException(); } @Override public void dropTable(TableHandle tableHandle) { throw new UnsupportedOperationException(); } private ColumnType getColumnType(MBeanAttributeInfo attribute) { ColumnType columnType; switch (attribute.getType()) { case "boolean": case "java.lang.Boolean": columnType = ColumnType.BOOLEAN; break; case "byte": case "java.lang.Byte": case "short": case "java.lang.Short": case "int": case "java.lang.Integer": case "long": case "java.lang.Long": columnType = ColumnType.LONG; break; case "java.lang.Number": case "float": case "java.lang.Float": case "double": case "java.lang.Double": columnType = ColumnType.DOUBLE; break; default: columnType = ColumnType.STRING; break; } return columnType; } private Predicate<ObjectName> objectNameEqualsIgnoreCase(ObjectName objectName) { final String canonicalObjectName = objectName.getCanonicalName(); return new Predicate<ObjectName>() { @Override public boolean apply(ObjectName input) { return canonicalObjectName.equalsIgnoreCase(input.getCanonicalName()); } }; } }
package org.broadinstitute.hellbender.tools.walkers;

import com.google.common.annotations.VisibleForTesting;
import htsjdk.samtools.util.Locatable;
import htsjdk.variant.variantcontext.*;
import htsjdk.variant.variantcontext.writer.VariantContextWriter;
import htsjdk.variant.vcf.*;
import org.broadinstitute.hellbender.cmdline.argumentcollections.DbsnpArgumentCollection;
import org.broadinstitute.hellbender.engine.FeatureContext;
import org.broadinstitute.hellbender.engine.ReferenceContext;
import org.broadinstitute.hellbender.utils.dragstr.DragstrParams;
import org.broadinstitute.hellbender.tools.walkers.annotator.*;
import org.broadinstitute.hellbender.tools.walkers.annotator.allelespecific.AS_RMSMappingQuality;
import org.broadinstitute.hellbender.tools.walkers.genotyper.*;
import org.broadinstitute.hellbender.utils.SimpleInterval;
import org.broadinstitute.hellbender.utils.Utils;
import org.broadinstitute.hellbender.utils.genotyper.IndexedSampleList;
import org.broadinstitute.hellbender.utils.genotyper.SampleList;
import org.broadinstitute.hellbender.utils.variant.GATKVCFConstants;
import org.broadinstitute.hellbender.utils.variant.GATKVCFHeaderLines;
import org.broadinstitute.hellbender.utils.variant.GATKVariantContextUtils;
import org.broadinstitute.hellbender.utils.variant.VariantContextGetters;

import java.util.*;
import java.util.stream.Collectors;

/**
 * Engine class to allow for other classes to replicate the behavior of GenotypeGVCFs. See {@link GenotypeGVCFs} for details
 *
 * Usage:
 * - Pass the genotype args into the constructor, which will then initialize the engine completely
 * - Get the appropriate writer and write the appropriate header via {@link #setupVCFWriter}
 * - Repeatedly call {@link #callRegion} to call variants in each region, and add them to your writer
 */
public class GenotypeGVCFsEngine {

    // Prefix of the GVCF-block header lines stripped from the output header in setupVCFWriter()
    private static final String GVCF_BLOCK = "GVCFBlock";

    //the annotation engine
    private VariantAnnotatorEngine annotationEngine = null;

    //the genotyping engine
    // forceOutputGenotypingEngine emits all active sites; genotypingEngine emits variants only
    // (see createMinimalArgs(boolean) for the difference)
    private GenotypingEngine<?> forceOutputGenotypingEngine = null;
    private MinimalGenotypingEngine genotypingEngine = null;

    // the INFO field annotation key names to remove
    private final List<String> infoFieldAnnotationKeyNamesToRemove = new ArrayList<>();

    private GenotypeCalculationArgumentCollection genotypeArgs;

    // INFO Header names that require alt alleles (count type "A"); only populated when includeNonVariants is set
    final LinkedHashSet<String> infoHeaderAltAllelesLineNames = new LinkedHashSet<>();

    private boolean includeNonVariants;

    // Output header; assigned in setupVCFWriter() and later read by subsetAlleleSpecificFormatFields()
    // and callSomaticGenotypes() — setupVCFWriter() must be called before callRegion()
    private VCFHeader outputHeader;

    private SampleList samples;

    // NOTE(review): this field is declared but never assigned within this class, so it is always
    // null here and the DRAGstr branch in callRegion() is never taken — confirm whether it should
    // be injected via the constructor.
    private DragstrParams dragStrParams;

    final VCFHeader inputVCFHeader;

    /**
     * Create and initialize a new GenotypeGVCFsEngine given a collection of GenotypeGVCF arguments and a VCF header
     *
     * @param annotationEngine variantAnnotatorEngine with annotations to process already added
     * @param genotypeArgs command-line arguments for the GenotypeGVCFs caller
     * @param includeNonVariants true to save INFO header names that require alt alleles
     * @param inputVCFHeader header for the VCF
     */
    public GenotypeGVCFsEngine(final VariantAnnotatorEngine annotationEngine, final GenotypeCalculationArgumentCollection genotypeArgs, final boolean includeNonVariants, final VCFHeader inputVCFHeader) {
        this.annotationEngine = annotationEngine;
        this.genotypeArgs = genotypeArgs;
        this.includeNonVariants = includeNonVariants;
        this.inputVCFHeader = inputVCFHeader;
        initialize();
    }

    /**
     * One-time setup called from the constructor: builds the sample list, records which INFO
     * annotations must be removed later, constructs both genotyping engines, and (when
     * non-variant sites are requested) records which INFO header lines are alt-allele-dependent.
     */
    private void initialize() {
        samples = new IndexedSampleList(inputVCFHeader.getGenotypeSamples()); //todo should this be getSampleNamesInOrder?

        // Request INFO field annotations inheriting from RankSumTest and RMSAnnotation added to remove list
        for ( final InfoFieldAnnotation annotation : annotationEngine.getInfoAnnotations() ) {
            if ( annotation instanceof RankSumTest || annotation instanceof AS_RMSMappingQuality || annotation instanceof RMSMappingQuality) {
                final List<String> keyNames = annotation.getKeyNames();
                if ( !keyNames.isEmpty() ) {
                    infoFieldAnnotationKeyNamesToRemove.add(keyNames.get(0));
                }
            }
        }

        // We only want the engine to generate the AS_QUAL key if we are using AlleleSpecific annotations.
        genotypingEngine = new MinimalGenotypingEngine(createMinimalArgs(false), samples, annotationEngine.getInfoAnnotations().stream().anyMatch(AnnotationUtils::isAlleleSpecific), this.dragStrParams);
        forceOutputGenotypingEngine = new MinimalGenotypingEngine(createMinimalArgs(true), samples, annotationEngine.getInfoAnnotations().stream().anyMatch(AnnotationUtils::isAlleleSpecific));

        if ( includeNonVariants ) {
            // Save INFO header names that require alt alleles
            for ( final VCFHeaderLine headerLine : inputVCFHeader.getMetaDataInInputOrder() ) {
                if (headerLine instanceof VCFInfoHeaderLine ) {
                    if (((VCFInfoHeaderLine) headerLine).getCountType() == VCFHeaderLineCount.A) {
                        infoHeaderAltAllelesLineNames.add(((VCFInfoHeaderLine) headerLine).getID());
                    }
                }
            }
        }
    }

    /**
     * Call variants in a single region: merge the overlapping gVCF records, then re-genotype
     * (somatic or germline path) and re-annotate the merged context.
     *
     * @param loc the locus to call
     * @param variants gVCF records overlapping the locus
     * @param ref reference context; its window is adjusted here before genotyping
     * @param features feature (e.g. dbSNP) context for annotation
     * @param merger merger used to combine the overlapping records into one VariantContext
     * @param somaticInput true to use the somatic (Mutect2-style) re-genotyping path
     * @param tlodThreshold somatic-only: minimum TLOD for an allele to be called
     * @param afTolerance somatic-only: tolerance used when deciding whether to add the ref allele
     * @param outputNonVariants true to also emit non-variant sites
     * @return the re-genotyped VariantContext, or null if the site should not be emitted
     */
    public VariantContext callRegion(Locatable loc, List<VariantContext> variants, ReferenceContext ref, FeatureContext features, ReferenceConfidenceVariantContextMerger merger, boolean somaticInput, double tlodThreshold, double afTolerance, final boolean outputNonVariants) //do work for apply
    {
        final List<VariantContext> variantsToProcess = getVariantSubsetToProcess(loc, variants);

        if (dragStrParams == null || genotypeArgs.dontUseDragstrPriors) {
            ref.setWindow(10, 10); //TODO this matches the gatk3 behavior but may be unnecessary
        } else {
            ref.setWindow(dragStrParams.maximumLengthInBasePairs(), dragStrParams.maximumLengthInBasePairs());
        }
        genotypingEngine.setReferenceContext(ref);
        final VariantContext mergedVC = merger.merge(variantsToProcess, loc, ref.getBase(), true, false);
        final VariantContext regenotypedVC = somaticInput ? regenotypeSomaticVC(mergedVC, ref, features, outputNonVariants, tlodThreshold, afTolerance) : regenotypeVC(mergedVC, ref, features, outputNonVariants);

        return regenotypedVC;
    }

    /**
     * Re-genotype (and re-annotate) a combined genomic VC
     * @return a new VariantContext or null if the site turned monomorphic and we don't want such sites
     */
    private VariantContext regenotypeVC(final VariantContext originalVC, final ReferenceContext ref, final FeatureContext features, boolean includeNonVariants) {
        Utils.nonNull(originalVC);

        final VariantContext result;

        if ( originalVC.isVariant() && originalVC.getAttributeAsInt(VCFConstants.DEPTH_KEY,0) > 0 ) {
            // only re-genotype polymorphic sites
            final VariantContext regenotypedVC = calculateGenotypes(originalVC, includeNonVariants);
            if (regenotypedVC == null) {
                return null;
            }
            if (GATKVariantContextUtils.isProperlyPolymorphic(regenotypedVC) || includeNonVariants) {
                // Note that reversetrimAlleles must be performed after the annotations are finalized because the reducible annotation data maps
                // were generated and keyed on the un reverseTrimmed alleles from the starting VariantContexts. Thus reversing the order will make
                // it difficult to recover the data mapping due to the keyed alleles no longer being present in the variant context.
                final VariantContext withGenotypingAnnotations = addGenotypingAnnotations(originalVC.getAttributes(), regenotypedVC);
                final VariantContext withAnnotations = annotationEngine.finalizeAnnotations(withGenotypingAnnotations, originalVC);
                // map each surviving allele back to its index in the original allele list
                final int[] relevantIndices = regenotypedVC.getAlleles().stream().mapToInt(a -> originalVC.getAlleles().indexOf(a)).toArray();
                final VariantContext trimmed = GATKVariantContextUtils.reverseTrimAlleles(withAnnotations);
                final GenotypesContext updatedGTs = subsetAlleleSpecificFormatFields(outputHeader, trimmed.getGenotypes(), relevantIndices);
                result = new VariantContextBuilder(trimmed).genotypes(updatedGTs).make();
            } else {
                return null;
            }
        } else {
            result = originalVC;
        }

        // if it turned monomorphic then we either need to ignore or fix such sites
        // Note that the order of these actions matters and is different for polymorphic and monomorphic sites.
        // For polymorphic sites we need to make sure e.g. the SB tag is sent to the annotation engine and then removed later.
        // For monomorphic sites we need to make sure e.g. the hom ref genotypes are created and only then are passed to the annotation engine.
        // We could theoretically make 2 passes to re-create the genotypes, but that gets extremely expensive with large sample sizes.
        if (result.isPolymorphicInSamples()) {
            // For polymorphic sites we need to make sure e.g. the SB tag is sent to the annotation engine and then removed later.
            final VariantContext reannotated = annotationEngine.annotateContext(result, features, ref, null, a -> true);
            return new VariantContextBuilder(reannotated).genotypes(cleanupGenotypeAnnotations(reannotated, false)).make();
        } else if (includeNonVariants) {
            // For monomorphic sites we need to make sure e.g. the hom ref genotypes are created and only then are passed to the annotation engine.
            VariantContext reannotated = new VariantContextBuilder(result).genotypes(cleanupGenotypeAnnotations(result, true)).make();
            reannotated = annotationEngine.annotateContext(reannotated, features, ref, null, GenotypeGVCFsEngine::annotationShouldBeSkippedForHomRefSites);
            return reannotated;
        } else {
            return null;
        }
    }

    // Annotation filter applied when re-annotating monomorphic (hom-ref) sites: skip the
    // rank-sum and mapping-quality annotations, which only make sense with alt alleles.
    private static boolean annotationShouldBeSkippedForHomRefSites(VariantAnnotation annotation) {
        return annotation instanceof RankSumTest || annotation instanceof RMSMappingQuality || annotation instanceof AS_RMSMappingQuality;
    }

    /**
     * Rebuild per-genotype (FORMAT) attributes after alleles were subset: scalar integer fields
     * are copied through unchanged, while per-allele vector fields are re-indexed onto the
     * surviving alleles via {@code relevantIndices}.
     *
     * @param outputHeader header used to look up the FORMAT line (count type) for each key
     * @param originalGs genotypes whose extended attributes are to be remapped
     * @param relevantIndices for each surviving allele, its index in the original allele list
     * @return a new GenotypesContext with remapped attributes
     */
    private GenotypesContext subsetAlleleSpecificFormatFields(final VCFHeader outputHeader, final GenotypesContext originalGs, final int[] relevantIndices) {
        final GenotypesContext newGTs = GenotypesContext.create(originalGs.size());
        for (final Genotype g : originalGs) {
            final GenotypeBuilder gb = new GenotypeBuilder(g);
            final Set<String> keys = g.getExtendedAttributes().keySet();
            for (final String key : keys) {
                // NOTE(review): getFormatHeaderLine(key) can return null for a key missing from
                // the output header, which would NPE below — confirm all genotype keys are headered.
                final VCFFormatHeaderLine headerLine = outputHeader.getFormatHeaderLine(key);
                final Object attribute;
                if (headerLine.getCountType().equals(VCFHeaderLineCount.INTEGER) && headerLine.getCount() == 1) {
                    // single-value field: carry through as-is
                    attribute = g.getAnyAttribute(key);
                } else {
                    // per-allele field: re-index the value vector onto the surviving alleles
                    attribute = ReferenceConfidenceVariantContextMerger.generateAnnotationValueVector(headerLine.getCountType(),
                            VariantContextGetters.attributeToList(g.getAnyAttribute(key)), relevantIndices);
                }
                gb.attribute(key, attribute);
            }
            newGTs.add(gb.make());
        }
        return newGTs;
    }

    /**
     * Add genotyping-based annotations to the new VC
     *
     * @param originalAttributes the non-null annotations from the original VC
     * @param newVC the new non-null VC
     * @return a non-null VC
     */
    private VariantContext addGenotypingAnnotations(final Map<String, Object> originalAttributes, final VariantContext newVC) {
        // we want to carry forward the attributes from the original VC but make sure to add the MLE-based annotations and any other annotations generated by the genotyper.
        final Map<String, Object> attrs = new LinkedHashMap<>(originalAttributes);
        attrs.put(GATKVCFConstants.MLE_ALLELE_COUNT_KEY, newVC.getAttribute(GATKVCFConstants.MLE_ALLELE_COUNT_KEY));
        attrs.put(GATKVCFConstants.MLE_ALLELE_FREQUENCY_KEY, newVC.getAttribute(GATKVCFConstants.MLE_ALLELE_FREQUENCY_KEY));
        if (newVC.hasAttribute(GATKVCFConstants.NUMBER_OF_DISCOVERED_ALLELES_KEY)) {
            attrs.put(GATKVCFConstants.NUMBER_OF_DISCOVERED_ALLELES_KEY, newVC.getAttribute(GATKVCFConstants.NUMBER_OF_DISCOVERED_ALLELES_KEY));
        }
        if (newVC.hasAttribute(GATKVCFConstants.AS_QUAL_KEY)) {
            attrs.put(GATKVCFConstants.AS_QUAL_KEY, newVC.getAttribute(GATKVCFConstants.AS_QUAL_KEY));
        }

        return new VariantContextBuilder(newVC).attributes(attrs).make();
    }

    /**
     * Run the appropriate genotyping engine: the force-output engine when {@code forceOutput}
     * is set (emits all active sites), otherwise the variants-only engine.
     */
    private VariantContext calculateGenotypes(VariantContext vc, final boolean forceOutput) {
        return (forceOutput ? forceOutputGenotypingEngine : genotypingEngine).calculateGenotypes(vc, null, Collections.emptyList());
    }

    /**
     * Re-genotype (and re-annotate) a combined genomic VC
     * @return a new VariantContext or null if the site turned monomorphic and we don't want such sites
     */
    private VariantContext regenotypeSomaticVC(final VariantContext originalVC, final ReferenceContext ref, final FeatureContext features, boolean includeNonVariants, double tlodThreshold, double afTolerance) {
        Utils.nonNull(originalVC);

        final VariantContext result;
        if ( originalVC.isVariant() && originalVC.getAttributeAsInt(VCFConstants.DEPTH_KEY,0) > 0 ) {
            result = callSomaticGenotypes(originalVC, tlodThreshold, afTolerance);
        } else if (includeNonVariants) {
            result = originalVC;
        } else {
            result = null;
        }
        return result;
    }

    /**
     * Drop low quality alleles and call genotypes
     * CombineGVCFs will convert calls to no-call (of varying ploidy, as is the case in somatic)
     *
     * @param vc input VariantContext with no-called genotypes
     * @param tlodThreshold minimum TLOD for an allele to be called
     * @param afTolerance tolerance used when deciding whether the site is non-homoplasmic
     * @return a VC with called genotypes and low quality alleles removed, may be null
     */
    private VariantContext callSomaticGenotypes(final VariantContext vc, double tlodThreshold, double afTolerance) {
        final List<Genotype> newGenotypes = new ArrayList<>();
        final GenotypesContext genotypes = vc.getGenotypes();
        final double[] perAlleleLikelihoodSums = new double[vc.getAlleles().size()];  //needs the ref for the subsetting utils

        for(final Genotype g : genotypes) {
            GenotypeBuilder gb = new GenotypeBuilder(g);
            final double[] tlodArray = VariantContextGetters.getAttributeAsDoubleArray(g, GATKVCFConstants.TUMOR_LOG_10_ODDS_KEY, () -> null, 0.0);
            final double[] variantAFArray = VariantContextGetters.getAttributeAsDoubleArray(g, GATKVCFConstants.ALLELE_FRACTION_KEY, () -> null, 0.0);
            double variantAFtotal = 0;
            final List<Allele> calledAlleles = new ArrayList<>();
            // iterate over the alt alleles only (allele 0 is the reference)
            for(int i = 0; i < vc.getAlleles().size()-1; i++) {
                variantAFtotal += variantAFArray[i];
                if (tlodArray[i] > tlodThreshold) {
                    calledAlleles.add(vc.getAlternateAllele(i));
                    perAlleleLikelihoodSums[i+1] += tlodArray[i];
                }
            }
            //hack for weird Mutect2 ploidy -- if the variant is non-homoplasmic, call the reference allele too
            if(variantAFtotal < 1-afTolerance && (!g.hasAD() || g.getAD()[0] > 0)) {
                calledAlleles.add(0, vc.getReference());
            }
            //"ploidy" gets set according to the size of the alleles List in the Genotype
            gb.alleles(calledAlleles);
            newGenotypes.add(gb.make());
        }

        final VariantContextBuilder builder = new VariantContextBuilder(vc);
        final VariantContext regenotypedVC = builder.genotypes(newGenotypes).make();

        final int maxAltAlleles = genotypingEngine.getConfiguration().genotypeArgs.MAX_ALTERNATE_ALLELES;
        List<Allele> allelesToKeep;

        //we need to make sure all alleles pass the tlodThreshold
        allelesToKeep = new ArrayList<>(perAlleleLikelihoodSums.length-1);
        allelesToKeep.add(vc.getReference());
        for (int i = 1; i < perAlleleLikelihoodSums.length; i++) {
            if (perAlleleLikelihoodSums[i] > tlodThreshold) {
                allelesToKeep.add(vc.getAlternateAllele(i-1));
            }
        }

        if (regenotypedVC.getAlternateAlleles().size() > maxAltAlleles) {
            allelesToKeep = AlleleSubsettingUtils.filterToMaxNumberOfAltAllelesBasedOnScores(maxAltAlleles, allelesToKeep, perAlleleLikelihoodSums);
        }

        // only the reference allele survived -> nothing to emit
        if (allelesToKeep.size() == 1) {
            return null;
        }

        //if we didn't drop alleles then we're done!
        if (allelesToKeep.size() == regenotypedVC.getAlleles().size()) {
            return regenotypedVC;
        }

        final int[] relevantIndices = allelesToKeep.stream().mapToInt(a -> regenotypedVC.getAlleles().indexOf(a)).toArray();

        //do another pass over genotypes to drop the alleles that aren't called
        final GenotypesContext reducedGenotypes = AlleleSubsettingUtils.subsetSomaticAlleles(outputHeader, regenotypedVC.getGenotypes(), allelesToKeep, relevantIndices);
        final VariantContext subsetVC = builder.alleles(allelesToKeep).genotypes(reducedGenotypes).make();
        final VariantContext trimmedVC = GATKVariantContextUtils.trimAlleles(subsetVC, true, true);
        if (GATKVariantContextUtils.isProperlyPolymorphic(trimmedVC)) {
            return trimmedVC;
        }
        else {
            return null;
        }
    }

    // If includeNonVariants is set, we're using group-by-locus traversal. To match GATK3 GenotypeGVCFs,
    // see if there is a variant in the overlapping group that starts exactly at the locus start position, and if so
    // prioritize and process only that variant. Otherwise process all of the overlapping variants.
    private List<VariantContext> getVariantSubsetToProcess(final Locatable loc, List<VariantContext> preProcessedVariants) {
        if (includeNonVariants) {
            final List<VariantContext> matchingStart =
                    preProcessedVariants.stream().filter(vc -> vc.getStart() == loc.getStart()).collect(Collectors.toList());
            if (matchingStart.size() == 0) {
                return preProcessedVariants;
            }
            else if (matchingStart.size() == 1) {
                return matchingStart;
            }
            // since this tool only accepts a single input source, there should never be
            // more than one variant at a given starting locus
            throw new IllegalStateException(
                    String.format(
                            "Variant input contains more than one variant starting at location: %s",
                            new SimpleInterval(matchingStart.get(0))));
        }
        else {
            return preProcessedVariants;
        }
    }

    /**
     * Creates a StandardCallerArgumentCollection with appropriate values filled in from the arguments in this walker
     *
     * @param forceOutput true to emit all active sites, false to emit variants only
     */
    private StandardCallerArgumentCollection createMinimalArgs(final boolean forceOutput) {
        final StandardCallerArgumentCollection args = new StandardCallerArgumentCollection();
        args.genotypeArgs = genotypeArgs.clone();

        //whether to emit non-variant sites is not contained in genotypeArgs and must be passed to args separately
        //Note: GATK3 uses OutputMode.EMIT_ALL_CONFIDENT_SITES when includeNonVariants is requested
        //GATK4 uses EMIT_ALL_ACTIVE_SITES to ensure LowQual sites are emitted.
        args.outputMode = forceOutput ? OutputMode.EMIT_ALL_ACTIVE_SITES : OutputMode.EMIT_VARIANTS_ONLY;
        return args;
    }

    /**
     * Create a VCF header in the writer
     *
     * Also records the resulting header in {@link #outputHeader}, which later calls
     * (e.g. {@link #callRegion}) depend on.
     *
     * @param defaultToolVCFHeaderLines standard tool header lines to merge into the output header
     * @param keepCombined true to retain the combined AS_QUAL / AS_QUALapprox INFO lines
     * @param dbsnp dbSNP argument collection; when a dbSNP file is supplied the DB INFO line is added
     * @param vcfWriter the writer the header is written to
     * @return a VCF writer
     */
    public VariantContextWriter setupVCFWriter(Set<VCFHeaderLine> defaultToolVCFHeaderLines, boolean keepCombined, DbsnpArgumentCollection dbsnp, VariantContextWriter vcfWriter) {
        final Set<VCFHeaderLine> headerLines = new LinkedHashSet<>(inputVCFHeader.getMetaDataInInputOrder());
        headerLines.addAll(defaultToolVCFHeaderLines);

        // Remove GCVFBlocks
        headerLines.removeIf(vcfHeaderLine -> vcfHeaderLine.getKey().startsWith(GVCF_BLOCK));

        headerLines.addAll(annotationEngine.getVCFAnnotationDescriptions(false));
        headerLines.addAll(genotypingEngine.getAppropriateVCFInfoHeaders());

        // add headers for annotations added by this tool
        headerLines.add(GATKVCFHeaderLines.getInfoLine(GATKVCFConstants.MLE_ALLELE_COUNT_KEY));
        headerLines.add(GATKVCFHeaderLines.getInfoLine(GATKVCFConstants.MLE_ALLELE_FREQUENCY_KEY));
        headerLines.add(GATKVCFHeaderLines.getFormatLine(GATKVCFConstants.REFERENCE_GENOTYPE_QUALITY));
        headerLines.add(VCFStandardHeaderLines.getInfoLine(VCFConstants.DEPTH_KEY));   // needed for gVCFs without DP tags
        if (keepCombined) {
            headerLines.add(GATKVCFHeaderLines.getInfoLine(GATKVCFConstants.AS_QUAL_KEY));
            headerLines.add(GATKVCFHeaderLines.getInfoLine(GATKVCFConstants.AS_RAW_QUAL_APPROX_KEY));
        }
        if ( dbsnp.dbsnp != null ) {
            VCFStandardHeaderLines.addStandardInfoLines(headerLines, true, VCFConstants.DBSNP_KEY);
        }

        headerLines.add(GATKVCFHeaderLines.getFilterLine(GATKVCFConstants.LOW_QUAL_FILTER_NAME));

        final Set<String> sampleNameSet = samples.asSetOfSamples();
        outputHeader = new VCFHeader(headerLines, new TreeSet<>(sampleNameSet));
        vcfWriter.writeHeader(outputHeader);
        return vcfWriter;
    }

    /**
     * Cleans up genotype-level annotations that need to be updated.
     * 1. move MIN_DP to DP if present
     * 2. propagate DP to AD if not present
     * 3. remove SB if present
     * 4. change the PGT value from "0|1" to "1|1" for homozygous variant genotypes
     * 5. move GQ to RGQ if the site is monomorphic
     *
     * @param vc the VariantContext with the Genotypes to fix
     * @param createRefGTs if true we will also create proper hom ref genotypes since we assume the site is monomorphic
     * @return a new set of Genotypes
     */
    @VisibleForTesting
    static List<Genotype> cleanupGenotypeAnnotations(final VariantContext vc, final boolean createRefGTs) {
        final GenotypesContext oldGTs = vc.getGenotypes();
        final List<Genotype> recoveredGs = new ArrayList<>(oldGTs.size());
        for ( final Genotype oldGT : oldGTs ) {
            final Map<String, Object> attrs = new HashMap<>(oldGT.getExtendedAttributes());

            final GenotypeBuilder builder = new GenotypeBuilder(oldGT);
            int depth = oldGT.hasDP() ? oldGT.getDP() : 0;

            // move the MIN_DP to DP
            if ( oldGT.hasExtendedAttribute(GATKVCFConstants.MIN_DP_FORMAT_KEY) ) {
                depth = parseInt(oldGT.getAnyAttribute(GATKVCFConstants.MIN_DP_FORMAT_KEY));
                builder.DP(depth);
                attrs.remove(GATKVCFConstants.MIN_DP_FORMAT_KEY);
            }

            // remove SB (per-sample strand bias table) if present
            attrs.remove(GATKVCFConstants.STRAND_BIAS_BY_SAMPLE_KEY);

            // update PGT for hom vars
            if ( oldGT.isHomVar() && oldGT.hasExtendedAttribute(GATKVCFConstants.HAPLOTYPE_CALLER_PHASING_GT_KEY) ) {
                attrs.put(GATKVCFConstants.HAPLOTYPE_CALLER_PHASING_GT_KEY, GenotypeGVCFs.PHASED_HOM_VAR_STRING);
            }

            // create AD if it's not there
            if ( !oldGT.hasAD() && vc.isVariant() ) {
                final int[] AD = new int[vc.getNAlleles()];
                AD[0] = depth;
                builder.AD(AD);
            }

            if ( createRefGTs ) {
                // move the GQ to RGQ
                if (oldGT.hasGQ()) {
                    builder.noGQ();
                    attrs.put(GATKVCFConstants.REFERENCE_GENOTYPE_QUALITY, oldGT.getGQ());
                }

                //keep 0 depth samples and 0 GQ samples as no-call
                if (depth > 0 && oldGT.hasGQ() && oldGT.getGQ() > 0) {
                    final List<Allele> refAlleles = Collections.nCopies(oldGT.getPloidy(), vc.getReference());
                    builder.alleles(refAlleles);
                }

                // also, the PLs are technically no longer usable
                builder.noPL();
            }
            recoveredGs.add(builder.noAttributes().attributes(attrs).make());
        }
        return recoveredGs;
    }

    /**
     * Coerce a genotype attribute value (which htsjdk may hand back as either a String
     * or a Number) to an int.
     *
     * @throws IllegalArgumentException if the value is neither a String nor a Number
     */
    private static int parseInt(Object attribute){
        if( attribute instanceof String) {
            return Integer.parseInt((String)attribute);
        } else if ( attribute instanceof Number){
            return ((Number) attribute).intValue();
        } else {
            throw new IllegalArgumentException("Expected a Number or a String but found something else.");
        }
    }
}
//======================================================================== // //File: $RCSfile: UndoRedoTestGenerics.java,v $ //Version: $Revision: 1.5 $ //Modified: $Date: 2013/05/10 04:30:25 $ // //(c) Copyright 2005-2014 by Mentor Graphics Corp. All rights reserved. // //======================================================================== // Licensed under the Apache License, Version 2.0 (the "License"); you may not // use this file except in compliance with the License. You may obtain a copy // of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, WITHOUT // WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the // License for the specific language governing permissions and limitations under // the License. //======================================================================== package org.xtuml.bp.core.test; import java.awt.geom.Point2D; import java.io.IOException; import org.eclipse.core.resources.IFile; import org.eclipse.core.runtime.CoreException; import org.eclipse.gef.tools.AbstractTool; import org.eclipse.jface.action.Action; import org.eclipse.swt.widgets.Display; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.xtuml.bp.core.Association_c; import org.xtuml.bp.core.Attribute_c; import org.xtuml.bp.core.CorePlugin; import org.xtuml.bp.core.End_c; import org.xtuml.bp.core.ModelClass_c; import org.xtuml.bp.core.Ooaofooa; import org.xtuml.bp.core.Package_c; import org.xtuml.bp.core.common.TransactionManager; import org.xtuml.bp.core.ui.DeleteAction; import org.xtuml.bp.core.ui.Selection; import org.xtuml.bp.core.util.OoaofooaUtil; import org.xtuml.bp.test.common.CanvasEditorUtils; import org.xtuml.bp.test.common.CanvasTestUtils; import org.xtuml.bp.test.common.OrderedRunner; import 
org.xtuml.bp.test.common.UITestingUtilities; import org.xtuml.bp.ui.canvas.FloatingText_c; import org.xtuml.bp.ui.canvas.Graphnode_c; import org.xtuml.bp.ui.canvas.Model_c; import org.xtuml.bp.ui.canvas.Shape_c; import org.xtuml.bp.ui.canvas.test.CanvasTest; import org.xtuml.bp.ui.canvas.test.CanvasTestUtilities; import org.xtuml.bp.ui.canvas.test.util.MouseUtil; import org.xtuml.bp.ui.canvas.util.ConnectorUtil; import org.xtuml.bp.ui.canvas.util.GraphNodeUtil; import org.xtuml.bp.ui.graphics.editor.GraphicalEditor; /** * Contains tests that exercise the functionality for the undo/redo of * model changes. */ @RunWith(OrderedRunner.class) public class UndoRedoTestGenerics extends CanvasTest { /** * Whether the first test of this class is the one that's currently * being run. */ private static boolean initialized = false; /** * That used throughout these tests. */ private static Model_c model; /** * That used throughout these tests. */ private static Selection selection = Selection.getInstance(); /** * That used throughout these tests. */ private static TransactionManager transactionManager; /** * That used throughout these tests. 
*/ private static Package_c subsystem; private static boolean generateResults = false; String test_id = null; public UndoRedoTestGenerics() { super(null, null); } /* (non-Javadoc) * @see junit.framework.TestCase#setUp() */ @Before public void setUp() throws Exception { super.setUp(); // if it's the first test of this class that's being setup if (!initialized) { // turn model persistence off for these tests, as it isn't necessary Ooaofooa.setPersistEnabled(false); CorePlugin.disableParseAllOnResourceChange(); // setup the test project and import our test model from it //ensureAvailableAndLoaded("Models", "odms1", false, true); loadProject("odms"); initialized = true; subsystem = Package_c.PackageInstance(modelRoot, new Package_by_name_c("Odms")); transactionManager = subsystem.getTransactionManager(); // open the odms subsystem diagram GraphicalEditor editor = CanvasTestUtils .openPackageCanvasEditor(subsystem); model = editor.getModel(); } } @After public void tearDown() throws Exception { super.tearDown(); // turn model persistence off for these tests // if a test that turned on persistence fails, make sure it's // turned off Ooaofooa.setPersistEnabled(false); } // @Test // public void testUnfoRedoTest() throws Exception{ // doTestUndoRedoOfDeletionOfMultipleModelElementTypes(); // doTestUndoRedoOfClassMovement(); // doTestClearingOfUndoRedoStacksOnFileChange(); // doTestRevertPersisted(); // doTestUndoDeletionOfSubSystem(); // } /** * Deletes in one action a pair of classes connected by an association, * as well as an association not connected to either class (to ensure * that deletion of multiple model element types is being tested), * then undoes the deletion, checking that the elements are reinstated. * Then, this test redoes the deletion, checking that the elements * are once again gone. 
These steps test the code for reverting model * element creations and deletions (including deletions of multiple * model element types), as well as for reverting relates/unrelates. */ @Test public void testUndoRedoOfDeletionOfMultipleModelElementTypes() { // select the classes we are going to delete in the odms subsystem final String className1 = "Robot"; ModelClass_c clazz1 = OoaofooaUtil.getClass(subsystem, className1); final String className2 = "Disk Transfer"; ModelClass_c clazz2 = OoaofooaUtil.getClass(subsystem, className2); selection.clear(); selection.addToSelection(clazz1); selection.addToSelection(clazz2); // find (but don't select) the R9 association, which connects the two // classes found above Association_c r9 = OoaofooaUtil.getAssociation(modelRoot, 9); // find and select the R5 association, which is connected to neither // of the above classes Association_c r5 = OoaofooaUtil.getAssociation(modelRoot, 5); selection.addToSelection(r5); // remember how many attributes the first class has, which we'll // test against, below int numAttributes = Attribute_c.getManyO_ATTRsOnR102(clazz1).length; // delete the classes and associations new DeleteAction(null).run(); // check that the classes are now deleted assertTrue("Class(es) not deleted", OoaofooaUtil.getClass(subsystem, className1) == null && OoaofooaUtil.getClass(subsystem, className2) == null); // check that the R9 association was also deleted, since it was // connected to the deleted classes assertTrue("R9 not deleted", OoaofooaUtil.getAssociation(modelRoot, 9) == null); // check that the R5 association was deleted assertTrue("R5 not deleted", OoaofooaUtil.getAssociation(modelRoot, 5) == null); // undo the deletion transactionManager.getUndoAction().run(); // check that the classes are now part of the subsystem again assertTrue("Undo of deletion did not bring back class(es)", OoaofooaUtil.getClass(subsystem, className1).equals(clazz1) && OoaofooaUtil.getClass(subsystem, className2).equals( clazz2)); // 
check that the first class has the same number of attributes // as it did before the deletion assertEquals("Number of attributes not the same as before", Attribute_c .getManyO_ATTRsOnR102(clazz1).length, numAttributes); // check that the R9 association is now part of the subsystem again, // and that it applies to the two classes assertTrue("Undo of deletion did not bring back R9", OoaofooaUtil .getAssociation(clazz1, 9).equals(r9) && OoaofooaUtil.getAssociation(clazz2, 9).equals(r9)); // check that the R9 association's connector texts are intact FloatingText_c startText = ConnectorUtil.getText(graphicsModelRoot, r9, End_c.Start), middleText = ConnectorUtil.getText( graphicsModelRoot, r9, End_c.Middle), endText = ConnectorUtil .getText(graphicsModelRoot, r9, End_c.End); assertTrue("R9 connector's texts not intact after undo of deletion", startText != null && middleText != null && endText != null && startText != middleText && middleText != endText && endText != startText); while (Display.getCurrent().readAndDispatch()); // check that the R5 association is now part of the subsystem again assertTrue("Undo of deletion did not bring back R5", OoaofooaUtil .getAssociation(modelRoot, 5).equals(r5)); // redo the deletion transactionManager.getRedoAction().run(); // check that the classes are now gone again assertTrue("Deletion redo did not delete class(es)", OoaofooaUtil .getClass(subsystem, className1) == null && OoaofooaUtil.getClass(subsystem, className2) == null); // check that the R9 and R5 associations are now gone again assertTrue("Deletion redo did not delete association(s)", OoaofooaUtil .getAssociation(modelRoot, 9) == null && OoaofooaUtil.getAssociation(modelRoot, 5) == null); } /** * Moves a class a certain distance across the canvas, then undoes * the movement, checking that the class is moved back to its * original position. Then, this test redoes the movement, * checking that the class finishes at its moved position. 
These * steps test the code for reverting attribute-value changes * that occur to graphics instances. */ @Test public void testUndoRedoOfClassMovement() { // record where the class we are going to move currently is // on the canvas final String className = "Online Location"; ModelClass_c clazz = OoaofooaUtil.getClass(subsystem, className); Graphnode_c node = GraphNodeUtil.getNode(graphicsModelRoot, clazz); Point2D.Float originalPosition = GraphNodeUtil.getPosition(node); UITestingUtilities.revealElementInGraphicalEditor(Shape_c .getOneGD_SHPOnR19(node)); // drag the class a certain distance across the canvas Point2D.Float center = GraphNodeUtil.getCenter(node); MouseUtil.doPress(model, center); Point2D.Float moveTo = new Point2D.Float(center.x + 50, center.y + 50); MouseUtil.doMove(model, moveTo); MouseUtil.doRelease(model, moveTo); // record the class's new position on the canvas Point2D.Float movedPosition = GraphNodeUtil.getPosition(node); // check that an actual movement occurred, to help validate this test assertTrue("Movement had no effect", !movedPosition .equals(originalPosition)); // undo the movement of the class transactionManager.getUndoAction().run(); // check that the class is now back at its original position Point2D.Float undoPosition = GraphNodeUtil.getPosition(node); assertEquals("Undo did not move class back to its original position", undoPosition, originalPosition); // redo the movement of the class transactionManager.getRedoAction().run(); // check that the class is now back at its moved position Point2D.Float redoPosition = GraphNodeUtil.getPosition(node); assertEquals("Redo did not move class back to its moved position", redoPosition, movedPosition); } /** * Tests that the undo and redo stacks are cleared for a model-root * whose persistence file has been changed outside of normal tool use. 
*/ @Test public void testClearingOfUndoRedoStacksOnFileChange() throws CoreException { // undo the movement of the class done during the last test, // to get both the undo and redo actions enabled transactionManager.getUndoAction().run(); // check that both the undo and redo actions are enabled Action undoAction = transactionManager.getUndoAction(); Action redoAction = transactionManager.getRedoAction(); assertTrue("Undo and redo actions aren't both enabled", undoAction .isEnabled() && redoAction.isEnabled()); // touch the model's persistence file, to get Eclipse to think it's // changed somehow ((IFile) modelRoot.getPersistenceFile()).touch(null); // check that both the undo and redo actions are disabled assertTrue("Undo and redo actions aren't both disabled", !undoAction .isEnabled() && !redoAction.isEnabled()); } /** * Test that reverts are persisted */ @Test public void testRevertPersisted() throws CoreException { // persistence is needed for this test Ooaofooa.setPersistEnabled(true); AbstractTool tool = UITestingUtilities.getTool("Classes", "Class"); UITestingUtilities.activateTool(tool); CanvasTestUtilities.doMouseMove(100, 100); CanvasTestUtilities.doMousePress(100, 100); CanvasTestUtilities.doMouseMove(200, 200); CanvasTestUtilities.doMouseRelease(200, 200); ModelClass_c modelClass = ModelClass_c.ModelClassInstance(modelRoot, new ModelClass_by_name_c("Unnamed Class")); assertNotNull(modelClass); IFile classFile = modelClass.getFile(); // make sure the class was persisted TigerNatureTestGenerics tnt = new TigerNatureTestGenerics(); try { assertTrue("create was not persisted.", tnt.checkIfPersisted( project, modelClass, tnt.getClassString(modelClass))); } catch (IOException e) { fail("Unable to open class file to check persistence"); } Selection.getInstance().clear(); Selection.getInstance().addToSelection(modelClass); DeleteAction da = (DeleteAction) CorePlugin.getDeleteAction(); da.run(); // make sure the class deletion was persisted assertFalse("Delete 
was not persisted.", classFile.exists()); // undo the class deletion transactionManager.getUndoAction().run(); try { assertTrue("Undo revert was not persisted.", tnt.checkIfPersisted( project, modelClass, tnt.getClassString(modelClass))); } catch (IOException e) { fail("Unable to open class file to check persistence " + e.getMessage()); } // redo the class deletion transactionManager.getRedoAction().run(); assertFalse("Delete was not persisted.", classFile.exists()); // Persistence isn't needed for other tests Ooaofooa.setPersistEnabled(false); } /** Issue 2443 test * Delete Subsystem, then undo the deletion, checking that the elements are * reinstated. Then, this test redo the deletion, checking that the elements * are once again gone. These steps test the code for reverting model * element creation and deletion (including deletion of graphical elements) */ @Test public void testUndoDeletionOfSubSystem() { test_id = "1"; // select the Subsystem we are going to delete in the odms1 Domain Package_c dom = Package_c.PackageInstance(modelRoot, new Package_by_name_c("odms")); Package_c ss = Package_c.PackageInstance(dom.getModelRoot(), new Package_by_name_c("Odms")); GraphicalEditor baseEditor = CanvasEditorUtils .openEditorWithShapeOf(ss); resultNamePostFix = "before_delete-1"; validateOrGenerateResults(baseEditor, generateResults); selection.clear(); selection.addToSelection(ss); // delete the SubSystem CorePlugin.getDeleteAction().run(); // check that the SubSystem is now deleted assertTrue("Subsystem not deleted", Package_c.PackageInstance(dom .getModelRoot(), new Package_by_name_c("Odms")) == null); resultNamePostFix = "after_delete"; validateOrGenerateResults(baseEditor, generateResults); // undo the deletion transactionManager.getUndoAction().run(); // check that the SubSystemis is restored assertTrue("Undo of deletion did not bring back subsystem", Package_c .PackageInstance(dom.getModelRoot(), new Package_by_name_c("Odms")).equals(ss)); resultNamePostFix = 
"before_delete-2"; validateOrGenerateResults(baseEditor, generateResults); //Redo Subsystem delete transactionManager.getRedoAction().run(); assertTrue("Subsystem not deleted", Package_c.PackageInstance(dom .getModelRoot(), new Package_by_name_c("Odms")) == null); resultNamePostFix = "after_delete2"; validateOrGenerateResults(baseEditor, generateResults); resultNamePostFix = ""; } protected String getResultName() { return "UndoRedo" + "_" + test_id; } }
package net.seesharpsoft.intellij.plugins.csv; import com.intellij.lang.*; import com.intellij.lexer.Lexer; import com.intellij.openapi.fileTypes.FileType; import com.intellij.openapi.fileTypes.FileTypeRegistry; import com.intellij.openapi.project.Project; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiFile; import com.intellij.psi.PsiManager; import com.intellij.psi.TokenType; import com.intellij.psi.impl.source.DummyHolder; import com.intellij.psi.impl.source.DummyHolderFactory; import com.intellij.psi.impl.source.tree.FileElement; import com.intellij.psi.tree.IElementType; import com.intellij.psi.util.PsiTreeUtil; import net.seesharpsoft.intellij.lang.FileParserDefinition; import net.seesharpsoft.intellij.plugins.csv.components.CsvFileAttributes; import net.seesharpsoft.intellij.plugins.csv.psi.CsvField; import net.seesharpsoft.intellij.plugins.csv.psi.CsvFile; import net.seesharpsoft.intellij.plugins.csv.psi.CsvRecord; import net.seesharpsoft.intellij.plugins.csv.psi.CsvTypes; import net.seesharpsoft.intellij.plugins.csv.settings.CsvEditorSettings; import net.seesharpsoft.intellij.plugins.psv.PsvFileType; import net.seesharpsoft.intellij.plugins.tsv.TsvFileType; import org.jetbrains.annotations.NotNull; import java.util.HashMap; import java.util.Map; import java.util.function.Function; public final class CsvHelper { // replaces PsiElementFactory.SERVICE.getInstance(element.getProject()).createDummyHolder("<undefined>", CsvTypes.FIELD, null); // https://github.com/SeeSharpSoft/intellij-csv-validator/issues/4 public static PsiElement createEmptyCsvField(PsiFile psiFile) { final Project project = psiFile.getProject(); final String text = "<undefined>"; final IElementType type = CsvTypes.FIELD; final PsiManager psiManager = PsiManager.getInstance(project); final DummyHolder dummyHolder = DummyHolderFactory.createHolder(psiManager, null); final FileElement fileElement = dummyHolder.getTreeElement(); final 
FileParserDefinition parserDefinition = (FileParserDefinition) LanguageParserDefinitions.INSTANCE.forLanguage(CsvLanguage.INSTANCE); final Lexer lexer = parserDefinition.createLexer(psiFile); final PsiBuilder psiBuilder = PsiBuilderFactory.getInstance().createBuilder(project, fileElement, lexer, CsvLanguage.INSTANCE, text); final ASTNode node = parserDefinition.createParser(project).parse(type, psiBuilder); fileElement.rawAddChildren((com.intellij.psi.impl.source.tree.TreeElement) node); return node.getPsi(); } public static boolean isCsvFile(String extension) { if (extension == null) { return false; } // simple check to always in include the defaults even if association was removed switch(extension.toLowerCase()) { case "csv": case "tsv": case "tab": case "psv": return true; default: // but also consider other extensions that are associated manually FileType fileType = FileTypeRegistry.getInstance().getFileTypeByExtension(extension); return fileType == CsvFileType.INSTANCE || fileType == TsvFileType.INSTANCE || fileType == PsvFileType.INSTANCE; } } public static boolean isCsvFile(Project project, VirtualFile file) { if (project == null || file == null) { return false; } final Language language = LanguageUtil.getLanguageForPsi(project, file); return language != null && language.isKindOf(CsvLanguage.INSTANCE); } public static boolean isCsvFile(PsiFile file) { if (file == null) { return false; } return isCsvFile(file.getProject(), file.getOriginalFile().getVirtualFile()); } public static IElementType getElementType(PsiElement element) { return element == null || element.getNode() == null ? 
null : element.getNode().getElementType(); } public static PsiElement getParentFieldElement(final PsiElement element) { PsiElement currentElement = element; IElementType elementType = CsvHelper.getElementType(currentElement); if (elementType == CsvTypes.COMMA || elementType == CsvTypes.CRLF) { currentElement = currentElement.getPrevSibling(); elementType = CsvHelper.getElementType(currentElement); } if (elementType == CsvTypes.RECORD) { currentElement = currentElement.getLastChild(); elementType = CsvHelper.getElementType(currentElement); } if (elementType == TokenType.WHITE_SPACE) { if (CsvHelper.getElementType(currentElement.getParent()) == CsvTypes.FIELD) { currentElement = currentElement.getParent(); } else if (CsvHelper.getElementType(currentElement.getPrevSibling()) == CsvTypes.FIELD) { currentElement = currentElement.getPrevSibling(); } else if (CsvHelper.getElementType(currentElement.getNextSibling()) == CsvTypes.FIELD) { currentElement = currentElement.getNextSibling(); } else { currentElement = null; } } else { while (currentElement != null && elementType != CsvTypes.FIELD) { currentElement = currentElement.getParent(); elementType = CsvHelper.getElementType(currentElement); } } return currentElement; } public static PsiElement getPreviousCRLF(final PsiElement element) { PsiElement currentElement = element; while (currentElement != null) { if (CsvHelper.getElementType(currentElement) == CsvTypes.CRLF) { break; } currentElement = currentElement.getPrevSibling(); } return currentElement; } public static PsiElement getNextCRLF(final PsiElement element) { PsiElement currentElement = element; while (currentElement != null) { if (CsvHelper.getElementType(currentElement) == CsvTypes.CRLF) { break; } currentElement = currentElement.getNextSibling(); } return currentElement; } public static PsiElement getPreviousSeparator(PsiElement fieldElement) { PsiElement current = fieldElement; while (current != null) { if (CsvHelper.getElementType(current) == CsvTypes.COMMA) 
{ break; } current = current.getPrevSibling(); } return current; } public static PsiElement getNextSeparator(PsiElement fieldElement) { PsiElement current = fieldElement; while (current != null) { if (CsvHelper.getElementType(current) == CsvTypes.COMMA) { break; } current = current.getNextSibling(); } return current; } public static int getFieldStartOffset(PsiElement field) { PsiElement separator = CsvHelper.getPreviousSeparator(field); if (separator == null) { separator = getPreviousCRLF(field.getParent()); } return separator == null ? 0 : separator.getTextOffset() + separator.getTextLength(); } public static int getFieldEndOffset(PsiElement field) { PsiElement separator = CsvHelper.getNextSeparator(field); if (separator == null) { separator = getNextCRLF(field.getParent()); } return separator == null ? field.getContainingFile().getTextLength() : separator.getTextOffset(); } public static VirtualFile getVirtualFile(PsiFile psiFile) { return psiFile == null ? null : psiFile.getOriginalFile().getVirtualFile(); } public static Project getProject(PsiFile psiFile) { return psiFile == null ? null : psiFile.getProject(); } public static @NotNull CsvValueSeparator getValueSeparator(CsvFile csvFile) { return getValueSeparator(csvFile.getContainingFile()); } public static @NotNull CsvValueSeparator getValueSeparator(PsiFile psiFile) { return getValueSeparator(getProject(psiFile), getVirtualFile(psiFile)); } public static @NotNull CsvValueSeparator getValueSeparator(Project project, VirtualFile virtualFile) { return project == null ? 
CsvEditorSettings.getInstance().getDefaultValueSeparator() : CsvFileAttributes.getInstance(project).getValueSeparator(project, virtualFile); } public static boolean hasValueSeparatorAttribute(@NotNull PsiFile psiFile) { return CsvFileAttributes.getInstance(getProject(psiFile)).hasValueSeparatorAttribute(getProject(psiFile), getVirtualFile(psiFile)); } public static @NotNull CsvEscapeCharacter getEscapeCharacter(CsvFile csvFile) { return getEscapeCharacter(csvFile.getContainingFile()); } public static @NotNull CsvEscapeCharacter getEscapeCharacter(PsiFile psiFile) { return getEscapeCharacter(getProject(psiFile), getVirtualFile(psiFile)); } public static @NotNull CsvEscapeCharacter getEscapeCharacter(Project project, VirtualFile virtualFile) { return project == null ? CsvEditorSettings.getInstance().getDefaultEscapeCharacter() : CsvFileAttributes.getInstance(project).getEscapeCharacter(project, virtualFile); } public static boolean hasEscapeCharacterAttribute(@NotNull PsiFile psiFile) { return CsvFileAttributes.getInstance(getProject(psiFile)).hasEscapeCharacterAttribute(getProject(psiFile), getVirtualFile(psiFile)); } public static CsvColumnInfoMap<PsiElement> createColumnInfoMap(CsvFile csvFile) { CsvEscapeCharacter escapeCharacter = getEscapeCharacter(csvFile); Map<Integer, CsvColumnInfo<PsiElement>> columnInfoMap = new HashMap<>(); CsvRecord[] records = PsiTreeUtil.getChildrenOfType(csvFile, CsvRecord.class); int row = 0; boolean hasComments = false; for (CsvRecord record : records) { // skip comment records if (record.getComment() != null) { hasComments = true; continue; } int column = 0; for (CsvField field : record.getFieldList()) { Integer length = CsvHelper.getMaxTextLineLength(unquoteCsvValue(field.getText(), escapeCharacter)); if (!columnInfoMap.containsKey(column)) { columnInfoMap.put(column, new CsvColumnInfo(column, length, row)); } else if (columnInfoMap.get(column).getMaxLength() < length) { columnInfoMap.get(column).setMaxLength(length, row); } 
columnInfoMap.get(column).addElement(field, row, getFieldStartOffset(field), getFieldEndOffset(field)); ++column; } ++row; } return new CsvColumnInfoMap(columnInfoMap, PsiTreeUtil.hasErrorElements(csvFile), hasComments); } public static String unquoteCsvValue(String content, CsvEscapeCharacter escapeCharacter) { if (content == null) { return ""; } String result = content.trim(); if (result.length() > 1 && result.startsWith("\"") && result.endsWith("\"")) { result = result.substring(1, result.length() - 1); } if (escapeCharacter != CsvEscapeCharacter.QUOTE) { result = result.replaceAll("(?:" + escapeCharacter.getRegexPattern() + ")" + escapeCharacter.getRegexPattern(), escapeCharacter.getRegexPattern()); } result = result.replaceAll("(?:" + escapeCharacter.getRegexPattern() + ")\"", "\""); return result; } private static boolean isQuotingRequired(String content, CsvEscapeCharacter escapeCharacter, CsvValueSeparator valueSeparator) { return content != null && (content.contains(valueSeparator.getCharacter()) || content.contains("\"") || content.contains("\n") || content.contains(escapeCharacter.getCharacter()) || content.startsWith(" ") || content.endsWith(" ")); } public static String quoteCsvField(String content, CsvEscapeCharacter escapeCharacter, CsvValueSeparator valueSeparator, boolean quotingEnforced) { if (content == null) { return ""; } if (quotingEnforced || isQuotingRequired(content, escapeCharacter, valueSeparator)) { String result = content; if (escapeCharacter != CsvEscapeCharacter.QUOTE) { result = result.replaceAll(escapeCharacter.getRegexPattern(), escapeCharacter.getRegexPattern() + escapeCharacter.getRegexPattern()); } result = result.replaceAll("\"", escapeCharacter.getRegexPattern() + "\""); return "\"" + result + "\""; } return content; } public static <T> T[][] deepCopy(T[][] matrix) { return java.util.Arrays.stream(matrix).map(el -> el.clone()).toArray($ -> matrix.clone()); } public static int getMaxTextLineLength(String text, @NotNull 
Function<String, Integer> calcCallback) { if (text == null) { return 0; } int maxLength = -1; for (String line : text.split("(\\r?\\n|\\r)+")) { int length = calcCallback.apply(line); if (length > maxLength) { maxLength = length; } } return maxLength; } public static int getMaxTextLineLength(String text) { return getMaxTextLineLength(text, input -> input == null ? 0 : input.length()); } private CsvHelper() { // static utility class } }
/****************************************************************************** * $URL: https://source.sakaiproject.org/svn/master/trunk/header.java $ * $Id: header.java 307632 2014-03-31 15:29:37Z azeckoski@unicon.net $ ****************************************************************************** * * Copyright (c) 2003-2014 The Apereo Foundation * * Licensed under the Educational Community License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://opensource.org/licenses/ecl2 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * *****************************************************************************/ package org.sakaiproject.memory.impl; import net.sf.ehcache.CacheException; import net.sf.ehcache.CacheManager; import net.sf.ehcache.config.*; import org.apache.commons.lang.ArrayUtils; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.sakaiproject.component.api.ServerConfigurationService; import org.springframework.beans.factory.DisposableBean; import org.springframework.beans.factory.FactoryBean; import org.springframework.beans.factory.InitializingBean; import org.springframework.core.io.Resource; import org.springframework.util.ClassUtils; import org.springframework.util.ReflectionUtils; import java.io.IOException; import java.io.InputStream; import java.lang.reflect.Method; /** * NOTE: This file was modeled after org/springframework/cache/ehcache/EhCacheManagerFactoryBean.java from URL * 
http://grepcode.com/file_/repo1.maven.org/maven2/org.springframework/spring-context-support/3.2.3.RELEASE/org/springframework/cache/ehcache/EhCacheManagerFactoryBean.java/?v=source * * @author rlong Bob Long rlong@unicon.net * @author azeckoski Aaron Zeckoski azeckoski@unicon.net * * ORIGINAL JAVADOC BELOW: * {@link FactoryBean} that exposes an EhCache {@link net.sf.ehcache.CacheManager} * instance (independent or shared), configured from a specified config location. * * <p>If no config location is specified, a CacheManager will be configured from * "ehcache.xml" in the root of the class path (that is, default EhCache initialization * - as defined in the EhCache docs - will apply). * * <p>Setting up a separate EhCacheManagerFactoryBean is also advisable when using * EhCacheFactoryBean, as it provides a (by default) independent CacheManager instance * and cares for proper shutdown of the CacheManager. EhCacheManagerFactoryBean is * also necessary for loading EhCache configuration from a non-default config location. * * <p>Note: As of Spring 3.0, Spring's EhCache support requires EhCache 1.3 or higher. * As of Spring 3.2, we recommend using EhCache 2.1 or higher. * * @author Dmitriy Kopylenko * @author Juergen Hoeller */ public class SakaiCacheManagerFactoryBean implements FactoryBean<CacheManager>, InitializingBean, DisposableBean { // Check whether EhCache 2.1+ CacheManager.create(Configuration) method is available... 
private static final Method createWithConfiguration = ClassUtils.getMethodIfAvailable(CacheManager.class, "create", Configuration.class); /** cache defaults **/ private final static String DEFAULT_CACHE_SERVER_URL = "localhost:9510"; private final static int DEFAULT_CACHE_TIMEOUT = 600; // 10 mins private final static int DEFAULT_CACHE_MAX_OBJECTS = 10000; protected final Log logger = LogFactory.getLog(getClass()); protected ServerConfigurationService serverConfigurationService; private Resource configLocation; private boolean shared = false; private String cacheManagerName = "Sakai"; private CacheManager cacheManager; private Boolean cacheEnabled; public SakaiCacheManagerFactoryBean() { } public SakaiCacheManagerFactoryBean(ServerConfigurationService serverConfigurationService) { this.serverConfigurationService = serverConfigurationService; this.cacheManagerName = "SakaiTest"; try { this.afterPropertiesSet(); } catch (Exception e) { throw new RuntimeException(e); } } /** * Set the location of the EhCache config file. A typical value is "/WEB-INF/ehcache.xml". * <p>Default is "ehcache.xml" in the root of the class path, or if not found, * "ehcache-failsafe.xml" in the EhCache jar (default EhCache initialization). * @see net.sf.ehcache.CacheManager#create(java.io.InputStream) * @see net.sf.ehcache.CacheManager#CacheManager(java.io.InputStream) */ public void setConfigLocation(Resource configLocation) { this.configLocation = configLocation; } /** * Set whether the EhCache CacheManager should be shared (as a singleton at the VM level) * or independent (typically local within the application). Default is "false", creating * an independent instance. * @see net.sf.ehcache.CacheManager#create() * @see net.sf.ehcache.CacheManager#CacheManager() */ public void setShared(boolean shared) { this.shared = shared; } /** * Set the name of the EhCache CacheManager (if a specific name is desired). 
* @see net.sf.ehcache.CacheManager#setName(String) */ public void setCacheManagerName(String cacheManagerName) { this.cacheManagerName = cacheManagerName; } /** * Creates a CacheConfiguration based on the cache name. * Any Cache properties below that are not set will use the default values * Valid properties include: maxSize, timeToIdle, timeToLive, eternal * Defaults: maxSize=10000, timeToIdle=600, timeToLive=600, eternal=false * Configure cluster caches using: memory.cluster.{cacheName}.{property)={value} * * @param clusterCacheName the full name of the cache (e.g. org.sakaiproject.event.impl.ClusterEventTracking.eventsCache) * @return Terracotta cluster cache configuration */ private CacheConfiguration createClusterCacheConfiguration(String clusterCacheName) { String clusterConfigName = "memory.cluster."+clusterCacheName; CacheConfiguration clusterCache = new CacheConfiguration( clusterCacheName, serverConfigurationService.getInt(clusterConfigName + ".maxSize", DEFAULT_CACHE_MAX_OBJECTS)); boolean isEternal = serverConfigurationService.getBoolean(clusterConfigName + ".eternal", false); if (isEternal) { clusterCache.eternal(true).timeToIdleSeconds(0).timeToLiveSeconds(0); } else { clusterCache.eternal(false) .timeToIdleSeconds(serverConfigurationService.getInt(clusterConfigName + ".timeToIdle", DEFAULT_CACHE_TIMEOUT)) .timeToLiveSeconds(serverConfigurationService.getInt(clusterConfigName + ".timeToLive", DEFAULT_CACHE_TIMEOUT)); } clusterCache.terracotta(new TerracottaConfiguration() .nonstop(new NonstopConfiguration() .timeoutBehavior(new TimeoutBehaviorConfiguration() .type(TimeoutBehaviorConfiguration.LOCAL_READS_TYPE_NAME)) .enabled(true))); // Make sure we don't go to local disk clusterCache.overflowToOffHeap(false); // Required to control the L2 cache size in terracotta itself, default should be adequate clusterCache.maxElementsOnDisk(10000); return clusterCache; } /** * This is the init method * If using Terracotta, enable caching via sakai.properties and 
ensure the Terracotta server is reachable * Use '-Dcom.tc.tc.config.total.timeout=10000' to specify how long we should try to connect to the TC server */ public void afterPropertiesSet() throws IOException { logger.info("Initializing EhCache CacheManager"); InputStream is = (this.configLocation != null ? this.configLocation.getInputStream() : null); if (this.cacheEnabled == null) { this.cacheEnabled = serverConfigurationService.getBoolean("memory.cluster.enabled", false); } try { Configuration configuration = (is != null) ? ConfigurationFactory.parseConfiguration(is) : ConfigurationFactory.parseConfiguration(); configuration.setName(this.cacheManagerName); // force the sizeof calculations to not generate lots of warnings OR degrade server performance configuration.getSizeOfPolicyConfiguration().maxDepthExceededBehavior(SizeOfPolicyConfiguration.MaxDepthExceededBehavior.ABORT); configuration.getSizeOfPolicyConfiguration().maxDepth(100); // Setup the Terracotta cluster config TerracottaClientConfiguration terracottaConfig = new TerracottaClientConfiguration(); // use Terracotta server if running and available if (this.cacheEnabled) { logger.info("Attempting to load cluster caching using Terracotta at: "+ serverConfigurationService.getString("memory.cluster.server.urls", DEFAULT_CACHE_SERVER_URL)+"."); // set the URL to the server String[] serverUrls = serverConfigurationService.getStrings("memory.cluster.server.urls"); // create comma-separated string of URLs String serverUrlsString = StringUtils.join(serverUrls, ","); terracottaConfig.setUrl(serverUrlsString); terracottaConfig.setRejoin(true); configuration.addTerracottaConfig(terracottaConfig); // retrieve the names of all caches that will be managed by Terracotta and create cache configurations for them String[] caches = serverConfigurationService.getStrings("memory.cluster.names"); if (ArrayUtils.isNotEmpty(caches)) { for (String cacheName : caches) { CacheConfiguration cacheConfiguration = 
this.createClusterCacheConfiguration(cacheName); if (cacheConfiguration != null) { configuration.addCache(cacheConfiguration); } } } // create new cache manager with the above configuration if (this.shared) { this.cacheManager = (CacheManager) ReflectionUtils.invokeMethod(createWithConfiguration, null, configuration); } else { this.cacheManager = new CacheManager(configuration); } } else { // This block contains the original code from org/springframework/cache/ehcache/EhCacheManagerFactoryBean.java // A bit convoluted for EhCache 1.x/2.0 compatibility. // To be much simpler once we require EhCache 2.1+ logger.info("Attempting to load default cluster caching."); configuration.addTerracottaConfig(terracottaConfig); if (this.cacheManagerName != null) { if (this.shared && createWithConfiguration == null) { // No CacheManager.create(Configuration) method available before EhCache 2.1; // can only set CacheManager name after creation. this.cacheManager = (is != null ? CacheManager.create(is) : CacheManager.create()); this.cacheManager.setName(this.cacheManagerName); } else { configuration.setName(this.cacheManagerName); if (this.shared) { this.cacheManager = (CacheManager) ReflectionUtils.invokeMethod(createWithConfiguration, null, configuration); } else { this.cacheManager = new CacheManager(configuration); } } } else if (this.shared) { // For strict backwards compatibility: use simplest possible constructors... this.cacheManager = (is != null ? CacheManager.create(is) : CacheManager.create()); } else { this.cacheManager = (is != null ? new CacheManager(is) : new CacheManager()); } } } catch (CacheException ce) { // this is thrown if we can't connect to the Terracotta server on initialization if (this.cacheEnabled && this.cacheManager == null) { logger.error("You have cluster caching enabled in sakai.properties, but do not have a Terracotta server running at "+ serverConfigurationService.getString("memory.cluster.server.urls", DEFAULT_CACHE_SERVER_URL)+ ". 
Please ensure the server is running and available.", ce); // use the default cache instead this.cacheEnabled = false; afterPropertiesSet(); } else { logger.error("An error occurred while creating the cache manager: ", ce); } } finally { if (is != null) { is.close(); } } } public CacheManager getObject() { return this.cacheManager; } public Class<? extends CacheManager> getObjectType() { return (this.cacheManager != null ? this.cacheManager.getClass() : CacheManager.class); } public boolean isSingleton() { return true; } public void destroy() { logger.info("Shutting down EhCache CacheManager"); this.cacheManager.shutdown(); } public void setServerConfigurationService(ServerConfigurationService serverConfigurationService) { this.serverConfigurationService = serverConfigurationService; } }
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.util.xml.tree.actions; import com.intellij.openapi.actionSystem.AnAction; import com.intellij.openapi.actionSystem.AnActionEvent; import com.intellij.openapi.actionSystem.DefaultActionGroup; import com.intellij.openapi.application.ApplicationBundle; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.ui.popup.ListPopup; import com.intellij.psi.xml.XmlFile; import com.intellij.ui.treeStructure.SimpleNode; import com.intellij.util.ReflectionUtil; import com.intellij.util.xml.DomElement; import com.intellij.util.xml.MergedObject; import com.intellij.util.xml.TypeNameManager; import com.intellij.util.xml.DomUtil; import com.intellij.util.xml.reflect.DomCollectionChildDescription; import com.intellij.util.xml.tree.BaseDomElementNode; import com.intellij.util.xml.tree.DomElementsGroupNode; import com.intellij.util.xml.tree.DomModelTreeView; import com.intellij.util.xml.ui.actions.AddDomElementAction; import com.intellij.util.xml.ui.actions.DefaultAddAction; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import java.lang.reflect.Type; import java.util.List; /** * User: Sergey.Vasiliev */ public class AddElementInCollectionAction extends AddDomElementAction { private DomModelTreeView myTreeView; public AddElementInCollectionAction() { } public AddElementInCollectionAction(final 
DomModelTreeView treeView) { myTreeView = treeView; } protected DomModelTreeView getTreeView(AnActionEvent e) { if (myTreeView != null) return myTreeView; return (DomModelTreeView)e.getDataContext().getData(DomModelTreeView.DOM_MODEL_TREE_VIEW_KEY); } protected boolean isEnabled(final AnActionEvent e) { final DomModelTreeView treeView = getTreeView(e); final boolean enabled = treeView != null; e.getPresentation().setEnabled(enabled); return enabled; } protected void showPopup(final ListPopup groupPopup, final AnActionEvent e) { if (myTreeView == null) { if (e.getPlace().equals(DomModelTreeView.DOM_MODEL_TREE_VIEW_POPUP)) { groupPopup.showInCenterOf(getTreeView(e).getTree()); } else { groupPopup.showInBestPositionFor(e.getDataContext()); } } else { super.showPopup(groupPopup, e); } } @NotNull protected DomCollectionChildDescription[] getDomCollectionChildDescriptions(final AnActionEvent e) { final DomModelTreeView view = getTreeView(e); SimpleNode node = view.getTree().getSelectedNode(); if (node instanceof BaseDomElementNode) { List<DomCollectionChildDescription> consolidated = ((BaseDomElementNode)node).getConsolidatedChildrenDescriptions(); if (consolidated.size() > 0) { return consolidated.toArray(DomCollectionChildDescription.EMPTY_ARRAY); } } final DomElementsGroupNode groupNode = getDomElementsGroupNode(view); return groupNode == null ? DomCollectionChildDescription.EMPTY_ARRAY : new DomCollectionChildDescription[]{groupNode.getChildDescription()}; } protected DomElement getParentDomElement(final AnActionEvent e) { final DomModelTreeView view = getTreeView(e); SimpleNode node = view.getTree().getSelectedNode(); if (node instanceof BaseDomElementNode) { if (((BaseDomElementNode)node).getConsolidatedChildrenDescriptions().size() > 0) { return ((BaseDomElementNode)node).getDomElement(); } } final DomElementsGroupNode groupNode = getDomElementsGroupNode(view); return groupNode == null ? 
null : groupNode.getDomElement(); } protected JComponent getComponent(AnActionEvent e) { return getTreeView(e); } protected boolean showAsPopup() { return true; } protected String getActionText(final AnActionEvent e) { String text = ApplicationBundle.message("action.add"); if (e.getPresentation().isEnabled()) { final DomElementsGroupNode selectedNode = getDomElementsGroupNode(getTreeView(e)); if (selectedNode != null) { final Type type = selectedNode.getChildDescription().getType(); text += " " + TypeNameManager.getTypeName(ReflectionUtil.getRawType(type)); } } return text; } @Nullable private static DomElementsGroupNode getDomElementsGroupNode(final DomModelTreeView treeView) { SimpleNode simpleNode = treeView.getTree().getSelectedNode(); while (simpleNode != null) { if (simpleNode instanceof DomElementsGroupNode) return (DomElementsGroupNode)simpleNode; simpleNode = simpleNode.getParent(); } return null; } protected AnAction createAddingAction(final AnActionEvent e, final String name, final Icon icon, final Type type, final DomCollectionChildDescription description) { final DomElement parentDomElement = getParentDomElement(e); if (parentDomElement instanceof MergedObject) { final List<DomElement> implementations = (List<DomElement>)((MergedObject)parentDomElement).getImplementations(); final DefaultActionGroup actionGroup = new DefaultActionGroup(name, true); for (DomElement implementation : implementations) { final XmlFile xmlFile = DomUtil.getFile(implementation); actionGroup.add(new MyDefaultAddAction(implementation, xmlFile.getName(), xmlFile.getIcon(0), e, type, description)); } return actionGroup; } return new MyDefaultAddAction(parentDomElement, name, icon, e, type, description); } private class MyDefaultAddAction extends DefaultAddAction { // we need this properties, don't remove it (shared dataContext assertion) private final DomElement myParent; private final DomModelTreeView myView; private final Type myType; private final DomCollectionChildDescription 
myDescription; public MyDefaultAddAction(final DomElement parent, final String name, final Icon icon, final AnActionEvent e, final Type type, final DomCollectionChildDescription description) { super(name, name, icon); myType = type; myDescription = description; myParent = parent; myView = getTreeView(e); } protected Type getElementType() { return myType; } protected DomCollectionChildDescription getDomCollectionChildDescription() { return myDescription; } protected DomElement getParentDomElement() { return myParent; } protected void afterAddition(final DomElement newElement) { final DomElement copy = newElement.createStableCopy(); ApplicationManager.getApplication().invokeLater(new Runnable() { public void run() { myView.setSelectedDomElement(copy); } }); } } }
/** * Copyright (C) 2016 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.strata.market.surface; import java.io.Serializable; import java.util.Map; import java.util.NoSuchElementException; import java.util.Set; import org.joda.beans.Bean; import org.joda.beans.BeanBuilder; import org.joda.beans.BeanDefinition; import org.joda.beans.ImmutableBean; import org.joda.beans.JodaBeanUtils; import org.joda.beans.MetaProperty; import org.joda.beans.Property; import org.joda.beans.PropertyDefinition; import org.joda.beans.impl.direct.DirectFieldsBeanBuilder; import org.joda.beans.impl.direct.DirectMetaBean; import org.joda.beans.impl.direct.DirectMetaProperty; import org.joda.beans.impl.direct.DirectMetaPropertyMap; import com.opengamma.strata.market.ValueType; import com.opengamma.strata.market.param.ParameterMetadata; /** * Simple parameter metadata containing the x and y values and type. */ @BeanDefinition(builderScope = "private") public final class SimpleSurfaceParameterMetadata implements ParameterMetadata, ImmutableBean, Serializable { /** * The type of the x-value. */ @PropertyDefinition(validate = "notNull") private final ValueType xValueType; /** * The x-value. */ @PropertyDefinition private final double xValue; /** * The type of the y-value. */ @PropertyDefinition(validate = "notNull") private final ValueType yValueType; /** * The y-value. */ @PropertyDefinition private final double yValue; //------------------------------------------------------------------------- /** * Obtains an instance specifying information about the x-value. 
* * @param xValueType the x-value type * @param xValue the x-value * @param yValueType the y-value type * @param yValue the y-value * @return the parameter metadata */ public static SimpleSurfaceParameterMetadata of( ValueType xValueType, double xValue, ValueType yValueType, double yValue) { return new SimpleSurfaceParameterMetadata(xValueType, xValue, yValueType, yValue); } //------------------------------------------------------------------------- @Override public String getLabel() { return xValueType + "=" + xValue + ", " + yValueType + "=" + yValue; } @Override public String getIdentifier() { return getLabel(); } //------------------------- AUTOGENERATED START ------------------------- ///CLOVER:OFF /** * The meta-bean for {@code SimpleSurfaceParameterMetadata}. * @return the meta-bean, not null */ public static SimpleSurfaceParameterMetadata.Meta meta() { return SimpleSurfaceParameterMetadata.Meta.INSTANCE; } static { JodaBeanUtils.registerMetaBean(SimpleSurfaceParameterMetadata.Meta.INSTANCE); } /** * The serialization version id. */ private static final long serialVersionUID = 1L; private SimpleSurfaceParameterMetadata( ValueType xValueType, double xValue, ValueType yValueType, double yValue) { JodaBeanUtils.notNull(xValueType, "xValueType"); JodaBeanUtils.notNull(yValueType, "yValueType"); this.xValueType = xValueType; this.xValue = xValue; this.yValueType = yValueType; this.yValue = yValue; } @Override public SimpleSurfaceParameterMetadata.Meta metaBean() { return SimpleSurfaceParameterMetadata.Meta.INSTANCE; } @Override public <R> Property<R> property(String propertyName) { return metaBean().<R>metaProperty(propertyName).createProperty(this); } @Override public Set<String> propertyNames() { return metaBean().metaPropertyMap().keySet(); } //----------------------------------------------------------------------- /** * Gets the type of the x-value.
* @return the value of the property, not null */ public ValueType getXValueType() { return xValueType; } //----------------------------------------------------------------------- /** * Gets the x-value. * @return the value of the property */ public double getXValue() { return xValue; } //----------------------------------------------------------------------- /** * Gets the type of the y-value. * @return the value of the property, not null */ public ValueType getYValueType() { return yValueType; } //----------------------------------------------------------------------- /** * Gets the y-value. * @return the value of the property */ public double getYValue() { return yValue; } //----------------------------------------------------------------------- @Override public boolean equals(Object obj) { if (obj == this) { return true; } if (obj != null && obj.getClass() == this.getClass()) { SimpleSurfaceParameterMetadata other = (SimpleSurfaceParameterMetadata) obj; return JodaBeanUtils.equal(xValueType, other.xValueType) && JodaBeanUtils.equal(xValue, other.xValue) && JodaBeanUtils.equal(yValueType, other.yValueType) && JodaBeanUtils.equal(yValue, other.yValue); } return false; } @Override public int hashCode() { int hash = getClass().hashCode(); hash = hash * 31 + JodaBeanUtils.hashCode(xValueType); hash = hash * 31 + JodaBeanUtils.hashCode(xValue); hash = hash * 31 + JodaBeanUtils.hashCode(yValueType); hash = hash * 31 + JodaBeanUtils.hashCode(yValue); return hash; } @Override public String toString() { StringBuilder buf = new StringBuilder(160); buf.append("SimpleSurfaceParameterMetadata{"); buf.append("xValueType").append('=').append(xValueType).append(',').append(' '); buf.append("xValue").append('=').append(xValue).append(',').append(' '); buf.append("yValueType").append('=').append(yValueType).append(',').append(' '); buf.append("yValue").append('=').append(JodaBeanUtils.toString(yValue)); buf.append('}'); return buf.toString(); } 
//----------------------------------------------------------------------- /** * The meta-bean for {@code SimpleSurfaceParameterMetadata}. */ public static final class Meta extends DirectMetaBean { /** * The singleton instance of the meta-bean. */ static final Meta INSTANCE = new Meta(); /** * The meta-property for the {@code xValueType} property. */ private final MetaProperty<ValueType> xValueType = DirectMetaProperty.ofImmutable( this, "xValueType", SimpleSurfaceParameterMetadata.class, ValueType.class); /** * The meta-property for the {@code xValue} property. */ private final MetaProperty<Double> xValue = DirectMetaProperty.ofImmutable( this, "xValue", SimpleSurfaceParameterMetadata.class, Double.TYPE); /** * The meta-property for the {@code yValueType} property. */ private final MetaProperty<ValueType> yValueType = DirectMetaProperty.ofImmutable( this, "yValueType", SimpleSurfaceParameterMetadata.class, ValueType.class); /** * The meta-property for the {@code yValue} property. */ private final MetaProperty<Double> yValue = DirectMetaProperty.ofImmutable( this, "yValue", SimpleSurfaceParameterMetadata.class, Double.TYPE); /** * The meta-properties. */ private final Map<String, MetaProperty<?>> metaPropertyMap$ = new DirectMetaPropertyMap( this, null, "xValueType", "xValue", "yValueType", "yValue"); /** * Restricted constructor. */ private Meta() { } @Override protected MetaProperty<?> metaPropertyGet(String propertyName) { switch (propertyName.hashCode()) { case -868509005: // xValueType return xValueType; case -777049127: // xValue return xValue; case -1065022510: // yValueType return yValueType; case -748419976: // yValue return yValue; } return super.metaPropertyGet(propertyName); } @Override public BeanBuilder<? extends SimpleSurfaceParameterMetadata> builder() { return new SimpleSurfaceParameterMetadata.Builder(); } @Override public Class<? 
extends SimpleSurfaceParameterMetadata> beanType() { return SimpleSurfaceParameterMetadata.class; } @Override public Map<String, MetaProperty<?>> metaPropertyMap() { return metaPropertyMap$; } //----------------------------------------------------------------------- /** * The meta-property for the {@code xValueType} property. * @return the meta-property, not null */ public MetaProperty<ValueType> xValueType() { return xValueType; } /** * The meta-property for the {@code xValue} property. * @return the meta-property, not null */ public MetaProperty<Double> xValue() { return xValue; } /** * The meta-property for the {@code yValueType} property. * @return the meta-property, not null */ public MetaProperty<ValueType> yValueType() { return yValueType; } /** * The meta-property for the {@code yValue} property. * @return the meta-property, not null */ public MetaProperty<Double> yValue() { return yValue; } //----------------------------------------------------------------------- @Override protected Object propertyGet(Bean bean, String propertyName, boolean quiet) { switch (propertyName.hashCode()) { case -868509005: // xValueType return ((SimpleSurfaceParameterMetadata) bean).getXValueType(); case -777049127: // xValue return ((SimpleSurfaceParameterMetadata) bean).getXValue(); case -1065022510: // yValueType return ((SimpleSurfaceParameterMetadata) bean).getYValueType(); case -748419976: // yValue return ((SimpleSurfaceParameterMetadata) bean).getYValue(); } return super.propertyGet(bean, propertyName, quiet); } @Override protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) { metaProperty(propertyName); if (quiet) { return; } throw new UnsupportedOperationException("Property cannot be written: " + propertyName); } } //----------------------------------------------------------------------- /** * The bean-builder for {@code SimpleSurfaceParameterMetadata}. 
*/ private static final class Builder extends DirectFieldsBeanBuilder<SimpleSurfaceParameterMetadata> { private ValueType xValueType; private double xValue; private ValueType yValueType; private double yValue; /** * Restricted constructor. */ private Builder() { } //----------------------------------------------------------------------- @Override public Object get(String propertyName) { switch (propertyName.hashCode()) { case -868509005: // xValueType return xValueType; case -777049127: // xValue return xValue; case -1065022510: // yValueType return yValueType; case -748419976: // yValue return yValue; default: throw new NoSuchElementException("Unknown property: " + propertyName); } } @Override public Builder set(String propertyName, Object newValue) { switch (propertyName.hashCode()) { case -868509005: // xValueType this.xValueType = (ValueType) newValue; break; case -777049127: // xValue this.xValue = (Double) newValue; break; case -1065022510: // yValueType this.yValueType = (ValueType) newValue; break; case -748419976: // yValue this.yValue = (Double) newValue; break; default: throw new NoSuchElementException("Unknown property: " + propertyName); } return this; } @Override public Builder set(MetaProperty<?> property, Object value) { super.set(property, value); return this; } @Override public Builder setString(String propertyName, String value) { setString(meta().metaProperty(propertyName), value); return this; } @Override public Builder setString(MetaProperty<?> property, String value) { super.setString(property, value); return this; } @Override public Builder setAll(Map<String, ? 
extends Object> propertyValueMap) { super.setAll(propertyValueMap); return this; } @Override public SimpleSurfaceParameterMetadata build() { return new SimpleSurfaceParameterMetadata( xValueType, xValue, yValueType, yValue); } //----------------------------------------------------------------------- @Override public String toString() { StringBuilder buf = new StringBuilder(160); buf.append("SimpleSurfaceParameterMetadata.Builder{"); buf.append("xValueType").append('=').append(JodaBeanUtils.toString(xValueType)).append(',').append(' '); buf.append("xValue").append('=').append(JodaBeanUtils.toString(xValue)).append(',').append(' '); buf.append("yValueType").append('=').append(JodaBeanUtils.toString(yValueType)).append(',').append(' '); buf.append("yValue").append('=').append(JodaBeanUtils.toString(yValue)); buf.append('}'); return buf.toString(); } } ///CLOVER:ON //-------------------------- AUTOGENERATED END -------------------------- }
/*================================================================================ Copyright (c) 2012 Steve Jin. All Rights Reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of VMware, Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL VMWARE, INC. OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
================================================================================*/ package com.vmware.vim25; /** * @author Steve Jin (http://www.doublecloud.org) * @version 5.1 */ @SuppressWarnings("all") public class VirtualMachineConfigSummary extends DynamicData { public String name; public boolean template; public String vmPathName; public Integer memorySizeMB; public Integer cpuReservation; public Integer memoryReservation; public Integer numCpu; public Integer numEthernetCards; public Integer numVirtualDisks; public String uuid; public String instanceUuid; public String guestId; public String guestFullName; public String annotation; public VAppProductInfo product; public Boolean installBootRequired; public FaultToleranceConfigInfo ftInfo; public ManagedByInfo managedBy; public String getName() { return this.name; } public boolean isTemplate() { return this.template; } public String getVmPathName() { return this.vmPathName; } public Integer getMemorySizeMB() { return this.memorySizeMB; } public Integer getCpuReservation() { return this.cpuReservation; } public Integer getMemoryReservation() { return this.memoryReservation; } public Integer getNumCpu() { return this.numCpu; } public Integer getNumEthernetCards() { return this.numEthernetCards; } public Integer getNumVirtualDisks() { return this.numVirtualDisks; } public String getUuid() { return this.uuid; } public String getInstanceUuid() { return this.instanceUuid; } public String getGuestId() { return this.guestId; } public String getGuestFullName() { return this.guestFullName; } public String getAnnotation() { return this.annotation; } public VAppProductInfo getProduct() { return this.product; } public Boolean getInstallBootRequired() { return this.installBootRequired; } public FaultToleranceConfigInfo getFtInfo() { return this.ftInfo; } public ManagedByInfo getManagedBy() { return this.managedBy; } public void setName(String name) { this.name=name; } public void setTemplate(boolean template) { 
this.template=template; } public void setVmPathName(String vmPathName) { this.vmPathName=vmPathName; } public void setMemorySizeMB(Integer memorySizeMB) { this.memorySizeMB=memorySizeMB; } public void setCpuReservation(Integer cpuReservation) { this.cpuReservation=cpuReservation; } public void setMemoryReservation(Integer memoryReservation) { this.memoryReservation=memoryReservation; } public void setNumCpu(Integer numCpu) { this.numCpu=numCpu; } public void setNumEthernetCards(Integer numEthernetCards) { this.numEthernetCards=numEthernetCards; } public void setNumVirtualDisks(Integer numVirtualDisks) { this.numVirtualDisks=numVirtualDisks; } public void setUuid(String uuid) { this.uuid=uuid; } public void setInstanceUuid(String instanceUuid) { this.instanceUuid=instanceUuid; } public void setGuestId(String guestId) { this.guestId=guestId; } public void setGuestFullName(String guestFullName) { this.guestFullName=guestFullName; } public void setAnnotation(String annotation) { this.annotation=annotation; } public void setProduct(VAppProductInfo product) { this.product=product; } public void setInstallBootRequired(Boolean installBootRequired) { this.installBootRequired=installBootRequired; } public void setFtInfo(FaultToleranceConfigInfo ftInfo) { this.ftInfo=ftInfo; } public void setManagedBy(ManagedByInfo managedBy) { this.managedBy=managedBy; } }
package net.dirtyfilthy.bouncycastle.jce.provider; import net.dirtyfilthy.bouncycastle.crypto.CipherParameters; import net.dirtyfilthy.bouncycastle.crypto.InvalidCipherTextException; import net.dirtyfilthy.bouncycastle.crypto.agreement.DHBasicAgreement; import net.dirtyfilthy.bouncycastle.crypto.agreement.ECDHBasicAgreement; import net.dirtyfilthy.bouncycastle.crypto.digests.SHA1Digest; import net.dirtyfilthy.bouncycastle.crypto.engines.IESEngine; import net.dirtyfilthy.bouncycastle.crypto.generators.KDF2BytesGenerator; import net.dirtyfilthy.bouncycastle.crypto.macs.HMac; import net.dirtyfilthy.bouncycastle.crypto.params.IESParameters; import net.dirtyfilthy.bouncycastle.jce.interfaces.ECPrivateKey; import net.dirtyfilthy.bouncycastle.jce.interfaces.ECPublicKey; import net.dirtyfilthy.bouncycastle.jce.interfaces.IESKey; import net.dirtyfilthy.bouncycastle.jce.provider.asymmetric.ec.ECUtil; import net.dirtyfilthy.bouncycastle.jce.spec.IESParameterSpec; import javax.crypto.BadPaddingException; import javax.crypto.Cipher; import javax.crypto.IllegalBlockSizeException; import javax.crypto.NoSuchPaddingException; import javax.crypto.interfaces.DHPrivateKey; import java.io.ByteArrayOutputStream; import java.security.AlgorithmParameters; import java.security.InvalidAlgorithmParameterException; import java.security.InvalidKeyException; import java.security.Key; import java.security.SecureRandom; import java.security.spec.AlgorithmParameterSpec; public class JCEIESCipher extends WrapCipherSpi { private IESEngine cipher; private int state = -1; private ByteArrayOutputStream buffer = new ByteArrayOutputStream(); private AlgorithmParameters engineParam = null; private IESParameterSpec engineParams = null; // // specs we can handle. 
// private Class[] availableSpecs = { IESParameterSpec.class }; public JCEIESCipher( IESEngine engine) { cipher = engine; } protected int engineGetBlockSize() { return 0; } protected byte[] engineGetIV() { return null; } protected int engineGetKeySize( Key key) { if (!(key instanceof IESKey)) { throw new IllegalArgumentException("must be passed IE key"); } IESKey ieKey = (IESKey)key; if (ieKey.getPrivate() instanceof DHPrivateKey) { DHPrivateKey k = (DHPrivateKey)ieKey.getPrivate(); return k.getX().bitLength(); } else if (ieKey.getPrivate() instanceof ECPrivateKey) { ECPrivateKey k = (ECPrivateKey)ieKey.getPrivate(); return k.getD().bitLength(); } throw new IllegalArgumentException("not an IE key!"); } protected int engineGetOutputSize( int inputLen) { if (state == Cipher.ENCRYPT_MODE || state == Cipher.WRAP_MODE) { return buffer.size() + inputLen + 20; /* SHA1 MAC size */ } else if (state == Cipher.DECRYPT_MODE || state == Cipher.UNWRAP_MODE) { return buffer.size() + inputLen - 20; } else { throw new IllegalStateException("cipher not initialised"); } } protected AlgorithmParameters engineGetParameters() { if (engineParam == null) { if (engineParams != null) { String name = "IES"; try { engineParam = AlgorithmParameters.getInstance(name, "DFBC"); engineParam.init(engineParams); } catch (Exception e) { throw new RuntimeException(e.toString()); } } } return engineParam; } protected void engineSetMode( String mode) { throw new IllegalArgumentException("can't support mode " + mode); } protected void engineSetPadding( String padding) throws NoSuchPaddingException { throw new NoSuchPaddingException(padding + " unavailable with RSA."); } protected void engineInit( int opmode, Key key, AlgorithmParameterSpec params, SecureRandom random) throws InvalidKeyException, InvalidAlgorithmParameterException { if (!(key instanceof IESKey)) { throw new InvalidKeyException("must be passed IES key"); } if (params == null && (opmode == Cipher.ENCRYPT_MODE || opmode == Cipher.WRAP_MODE)) 
{ // // if nothing is specified we set up for a 128 bit mac, with // 128 bit derivation vectors. // byte[] d = new byte[16]; byte[] e = new byte[16]; if (random == null) { random = new SecureRandom(); } random.nextBytes(d); random.nextBytes(e); params = new IESParameterSpec(d, e, 128); } else if (!(params instanceof IESParameterSpec)) { throw new InvalidAlgorithmParameterException("must be passed IES parameters"); } IESKey ieKey = (IESKey)key; CipherParameters pubKey; CipherParameters privKey; if (ieKey.getPublic() instanceof ECPublicKey) { pubKey = ECUtil.generatePublicKeyParameter(ieKey.getPublic()); privKey = ECUtil.generatePrivateKeyParameter(ieKey.getPrivate()); } else { pubKey = DHUtil.generatePublicKeyParameter(ieKey.getPublic()); privKey = DHUtil.generatePrivateKeyParameter(ieKey.getPrivate()); } this.engineParams = (IESParameterSpec)params; IESParameters p = new IESParameters(engineParams.getDerivationV(), engineParams.getEncodingV(), engineParams.getMacKeySize()); this.state = opmode; buffer.reset(); switch (opmode) { case Cipher.ENCRYPT_MODE: case Cipher.WRAP_MODE: cipher.init(true, privKey, pubKey, p); break; case Cipher.DECRYPT_MODE: case Cipher.UNWRAP_MODE: cipher.init(false, privKey, pubKey, p); break; default: System.out.println("eeek!"); } } protected void engineInit( int opmode, Key key, AlgorithmParameters params, SecureRandom random) throws InvalidKeyException, InvalidAlgorithmParameterException { AlgorithmParameterSpec paramSpec = null; if (params != null) { for (int i = 0; i != availableSpecs.length; i++) { try { paramSpec = params.getParameterSpec(availableSpecs[i]); break; } catch (Exception e) { continue; } } if (paramSpec == null) { throw new InvalidAlgorithmParameterException("can't handle parameter " + params.toString()); } } engineParam = params; engineInit(opmode, key, paramSpec, random); } protected void engineInit( int opmode, Key key, SecureRandom random) throws InvalidKeyException { if (opmode == Cipher.ENCRYPT_MODE || opmode == 
Cipher.WRAP_MODE) { try { engineInit(opmode, key, (AlgorithmParameterSpec)null, random); return; } catch (InvalidAlgorithmParameterException e) { // fall through... } } throw new IllegalArgumentException("can't handle null parameter spec in IES"); } protected byte[] engineUpdate( byte[] input, int inputOffset, int inputLen) { buffer.write(input, inputOffset, inputLen); return null; } protected int engineUpdate( byte[] input, int inputOffset, int inputLen, byte[] output, int outputOffset) { buffer.write(input, inputOffset, inputLen); return 0; } protected byte[] engineDoFinal( byte[] input, int inputOffset, int inputLen) throws IllegalBlockSizeException, BadPaddingException { if (inputLen != 0) { buffer.write(input, inputOffset, inputLen); } try { byte[] buf = buffer.toByteArray(); buffer.reset(); return cipher.processBlock(buf, 0, buf.length); } catch (InvalidCipherTextException e) { throw new BadPaddingException(e.getMessage()); } } protected int engineDoFinal( byte[] input, int inputOffset, int inputLen, byte[] output, int outputOffset) throws IllegalBlockSizeException, BadPaddingException { if (inputLen != 0) { buffer.write(input, inputOffset, inputLen); } try { byte[] buf = buffer.toByteArray(); buffer.reset(); buf = cipher.processBlock(buf, 0, buf.length); System.arraycopy(buf, 0, output, outputOffset, buf.length); return buf.length; } catch (InvalidCipherTextException e) { throw new BadPaddingException(e.getMessage()); } } /** * classes that inherit from us. 
*/ static public class BrokenECIES extends JCEIESCipher { public BrokenECIES() { super(new IESEngine( new ECDHBasicAgreement(), new BrokenKDF2BytesGenerator(new SHA1Digest()), new HMac(new SHA1Digest()))); } } static public class BrokenIES extends JCEIESCipher { public BrokenIES() { super(new IESEngine( new DHBasicAgreement(), new BrokenKDF2BytesGenerator(new SHA1Digest()), new HMac(new SHA1Digest()))); } } static public class ECIES extends JCEIESCipher { public ECIES() { super(new IESEngine( new ECDHBasicAgreement(), new KDF2BytesGenerator(new SHA1Digest()), new HMac(new SHA1Digest()))); } } static public class IES extends JCEIESCipher { public IES() { super(new IESEngine( new DHBasicAgreement(), new KDF2BytesGenerator(new SHA1Digest()), new HMac(new SHA1Digest()))); } } }
package eu.steakholders.bingo.classroombingo; import android.content.Context; import android.content.Intent; import android.os.Bundle; import android.os.Handler; import android.support.design.widget.Snackbar; import android.support.v4.app.Fragment; import android.support.v7.app.AppCompatActivity; import android.support.v7.widget.Toolbar; import android.util.Log; import android.view.View; import android.view.Menu; import android.view.MenuItem; import android.widget.AdapterView; import android.widget.ArrayAdapter; import android.widget.ListView; import android.widget.RelativeLayout; import android.widget.Spinner; import com.android.volley.Response; import com.android.volley.VolleyError; import java.util.HashMap; import java.util.List; //https://steakholders.eu/api/v1/?format=api //https://steakholders.eu/api-docs/v1/api-docs import java.util.ArrayList; import java.util.Map; import eu.steakholders.bingo.api.Game; import eu.steakholders.bingo.api.GameType; import eu.steakholders.bingo.api.Place; import eu.steakholders.bingo.api.PrimaryCategory; import eu.steakholders.bingo.api.SecondaryCategory; public class MainActivity extends AppCompatActivity { //Debug TAG for log private static final String TAG = "DEBUG"; //Fragments private JoinGameFragment joinGameFragment; private CreateGameFragment createGameFragment; public static final String GAME_OBJECT = "gameobject"; public static final String NICKNAME = "nickname"; //Layout variable private RelativeLayout mainPage; //Game object selected private Game gameObject; private String nickname; //Elements in layout private Spinner gameTypeSpinner; private Spinner placeSpinner; private Spinner primaryCatSpinner; private Spinner secondaryCatSpinner; private ListView existingGamesListView; //ArrayAdapter for spinners private ArrayAdapter<String> gameTypeAdapter; private ArrayAdapter<String> placeAdapter; private ArrayAdapter<String> primaryAdapter; private ArrayAdapter<String> secondaryAdapter; private ArrayAdapter<String> 
gameListAdapter; //Selected game to join private String gameName; //Lists private List<Object> gameTypesList; private List<Object> placesList; private List<Object> primaryList; private List<Object> secondaryList; private List<Object> existingGameList; //Spinner arrays private List<String> placesNames; private List<String> gameNames; private List<String> primaryNames; private List<String> secondaryNames; private List<Game> existingGames; private List<String> filteredGameNames; //Mapping from name to id private Map<String, Integer> gameTypeMap; private Map<String, Integer> placesMap; private Map<String, Integer> primaryMap; private Map<String, Integer> secondaryMap; private Map<String, Game> existingGamesMap; //Selected items in spinners private String selectedGameType; private String selectedPlace; private String selectedPrimary; private String selectedSecondary; /** * * @param savedInstanceState */ @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.fragment_splash); final Handler handler = new Handler(); handler.postDelayed(new Runnable() { @Override public void run() { init(); } }, 3000); } /** * Initializes all dropdowns, lists, maps and gets info from the server */ private void init() { setContentView(R.layout.activity_main); Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar); setSupportActionBar(toolbar); //Init layout variable mainPage = (RelativeLayout) findViewById(R.id.main_frame); //Init lists placesNames = new ArrayList<>(); gameNames = new ArrayList<>(); primaryNames = new ArrayList<>(); secondaryNames = new ArrayList<>(); existingGames = new ArrayList<>(); //filteredGameNames = new ArrayList<>(); //Init maps gameTypeMap = new HashMap<>(); placesMap = new HashMap<>(); primaryMap = new HashMap<>(); secondaryMap = new HashMap<>(); existingGamesMap = new HashMap<>(); //Init spinner variables gameTypeSpinner = (Spinner) findViewById(R.id.spinner_gt); placeSpinner = (Spinner) 
findViewById(R.id.spinner_place); primaryCatSpinner = (Spinner) findViewById(R.id.spinner_pc); secondaryCatSpinner = (Spinner) findViewById(R.id.spinner_sc); gameTypeSpinner.setOnItemSelectedListener(new UpdateGameFilterListener()); placeSpinner.setOnItemSelectedListener(new UpdateGameFilterListener()); primaryCatSpinner.setOnItemSelectedListener(new UpdateGameFilterListener()); secondaryCatSpinner.setOnItemSelectedListener(new UpdateGameFilterListener()); existingGamesListView = (ListView) findViewById(R.id.existingGameList); existingGamesListView.setOnItemClickListener(new ExistingGameListListener()); //Adding stuff to spinners and lists addGameTypes(this); addPlaces(this); addPrimary(this); addSecondary(this); getGameList(this); } /** * Private listener class for game listView */ private class ExistingGameListListener implements AdapterView.OnItemClickListener { @Override public void onItemClick(AdapterView<?> parent, View view, int position, long id) { setSelectedGameInfo((String) existingGamesListView.getItemAtPosition(position)); } } /** * Private listener class for spinners to update gameList */ private class UpdateGameFilterListener implements AdapterView.OnItemSelectedListener { @Override public void onItemSelected(AdapterView<?> parent, View view, int position, long id) { if(gameTypeSpinner != null && gameTypeSpinner.getSelectedItem() != null && placeSpinner != null && placeSpinner.getSelectedItem() != null && primaryCatSpinner != null && primaryCatSpinner.getSelectedItem() != null && secondaryCatSpinner != null && secondaryCatSpinner.getSelectedItem() != null){ filterGames(); } } @Override public void onNothingSelected(AdapterView<?> parent) { filterGames(); } } /** * Inflates menu * @param menu * @return */ @Override public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu; this adds items to the action bar if it is present. getMenuInflater().inflate(R.menu.menu_main, menu); return true; } /** * Handle action bar item clicks here. 
The action bar will automatically handle clicks on the Home/Up button, * so long as you specify a parent activity in AndroidManifest.xml. * * @param item * @return */ @Override public boolean onOptionsItemSelected(MenuItem item) { // int id = item.getItemId(); //noinspection SimplifiableIfStatement if (id == R.id.action_settings) { return true; } return super.onOptionsItemSelected(item); } /** * Initialize joinGameFragment and hides main * @param view */ public void goToJoinGame(View view){ if(checkFields("join")){ removeFragment(view); // Check that the activity is using the layout version with // the fragment_container FrameLayout if (findViewById(R.id.fragment_container_main) != null) { hideMain(); // Create a new Fragment to be placed in the activity layout joinGameFragment = new JoinGameFragment(); // In case this activity was started with special instructions from an // Intent, pass the Intent's extras to the fragment as arguments joinGameFragment.setArguments(getIntent().getExtras()); joinGameFragment.setGameName(gameName); // Add the fragment to the 'fragment_container' FrameLayout getSupportFragmentManager().beginTransaction() .add(R.id.fragment_container_main, joinGameFragment).addToBackStack("join").commit(); } } } /** * Handles click for joinGameFragment JOIN GAME button and moves to GameActivity * @param view */ public void joinGame(View view){ joinGameFragment.setNickName(); if(joinGameFragment.getFlag()){ nickname = joinGameFragment.getNickName(); Intent intent = new Intent(this, GameActivity.class); intent.putExtra(MainActivity.GAME_OBJECT, gameObject); intent.putExtra(MainActivity.NICKNAME, nickname); startActivity(intent); }else{ Snackbar snackbar = Snackbar .make(mainPage, "Did you add a nickname?", Snackbar.LENGTH_LONG); snackbar.show(); } } /** * Initialize createGameFragment and hides main * @param view */ public void goToCreateGame(View view){ if(checkFields("create")){ removeFragment(view); // Check that the activity is using the layout 
version with // the fragment_container FrameLayout if (findViewById(R.id.fragment_container_main) != null) { hideMain(); // Create a new Fragment to be placed in the activity layout createGameFragment = new CreateGameFragment(); //Transfer data createGameFragment.setFlag(); createGameFragment.setGameNames(gameNames); createGameFragment.setPlacesNames(placesNames); createGameFragment.setPrimaryNames(primaryNames); createGameFragment.setSecondaryNames(secondaryNames); createGameFragment.setGameTypeMap(gameTypeMap); createGameFragment.setPlacesMap(placesMap); createGameFragment.setPrimaryMap(primaryMap); createGameFragment.setSecondaryMap(secondaryMap); createGameFragment.setSelectedGameTypePos(gameTypeAdapter.getPosition(gameTypeSpinner.getSelectedItem().toString())); createGameFragment.setSelectedPlacePos(placeAdapter.getPosition(placeSpinner.getSelectedItem().toString())); createGameFragment.setSelectedPrimaryPos(primaryAdapter.getPosition(primaryCatSpinner.getSelectedItem().toString())); createGameFragment.setSelectedSecondaryPos(secondaryAdapter.getPosition(secondaryCatSpinner.getSelectedItem().toString())); // In case this activity was started with special instructions from an // Intent, pass the Intent's extras to the fragment as arguments createGameFragment.setArguments(getIntent().getExtras()); // Add the fragment to the 'fragment_container' FrameLayout getSupportFragmentManager().beginTransaction() .add(R.id.fragment_container_main, createGameFragment).addToBackStack("create").commit(); } } } /** * Creates new game and sends to server, then runs joinGame() * @param view */ public void newGame(final View view){ createGameFragment.makeGame(); if(createGameFragment.getFlag()){ gameObject = createGameFragment.getGame(); gameName = gameObject.getName(); gameObject.save(this, new Response.Listener<Object>() { @Override public void onResponse(Object object) { gameObject = (Game) object; Log.d(TAG, object.toString()); goToJoinGame(view); } }, new 
Response.ErrorListener() { @Override public void onErrorResponse(VolleyError error) { Log.e(TAG, error.toString()); Snackbar snackbar = Snackbar .make(mainPage, "Please fill in all the fields correctly!", Snackbar.LENGTH_LONG); snackbar.show(); } }); getGameList(this); } } /** * Removes a fragment and shows main * @param view */ public void removeFragment(View view){ Fragment fragment = getSupportFragmentManager().findFragmentById(R.id.fragment_container_main); if (fragment != null) { getSupportFragmentManager().beginTransaction().remove(fragment).commit(); showMain(); } } /** * Method to hide main */ public void hideMain(){ if (mainPage.getVisibility() == View.VISIBLE){ mainPage.setVisibility(View.INVISIBLE); } } /** * Method to show main */ public void showMain(){ if(mainPage.getVisibility() == View.INVISIBLE){ mainPage.setVisibility(View.VISIBLE); } } /** * Get and Add gameTypes to dropdown * @param context */ @SuppressWarnings("unchecked") public void addGameTypes(final Context context){ GameType.getAll(this, new Response.Listener<Object>() { @Override public void onResponse(Object object) { gameTypesList = (ArrayList<Object>) object; for(Object o: gameTypesList){ GameType temp = (GameType) o; gameNames.add(temp.getName()); gameTypeMap.put(temp.getName(), temp.getId()); gameTypeAdapter = new ArrayAdapter<>(context, android.R.layout.simple_spinner_item, gameNames); gameTypeAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item); gameTypeSpinner.setAdapter(gameTypeAdapter); //Log.d(TAG, gameTypeMap.toString()); } } }, new Response.ErrorListener() { @Override public void onErrorResponse(VolleyError error) { Log.e(TAG, error.toString()); } }); } /** * Get and Add places to dropdown * @param context */ @SuppressWarnings("unchecked") public void addPlaces(final Context context){ Place.getAll(this, new Response.Listener<Object>() { @Override public void onResponse(Object object) { placesList = (ArrayList<Object>) object; for(Object o: 
placesList){ Place temp = (Place) o; placesNames.add(temp.getName()); placesMap.put(temp.getName(), temp.getId()); placeAdapter = new ArrayAdapter<>(context, android.R.layout.simple_spinner_item, placesNames); placeAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item); placeSpinner.setAdapter(placeAdapter); //Log.d(TAG, placesMap.toString()); } } }, new Response.ErrorListener() { @Override public void onErrorResponse(VolleyError error) { Log.e(TAG, error.toString()); } }); } /** * Get and Add primary category to dropdown * @param context */ @SuppressWarnings("unchecked") public void addPrimary(final Context context){ PrimaryCategory.getAll(this, new Response.Listener<Object>() { @Override public void onResponse(Object object) { primaryList = (ArrayList<Object>) object; for(Object o: primaryList){ PrimaryCategory temp = (PrimaryCategory) o; primaryNames.add(temp.getName()); primaryMap.put(temp.getName(), temp.getId()); primaryAdapter = new ArrayAdapter<>(context, android.R.layout.simple_spinner_item, primaryNames); primaryAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item); primaryCatSpinner.setAdapter(primaryAdapter); //Log.d(TAG, primaryMap.toString()); } } }, new Response.ErrorListener() { @Override public void onErrorResponse(VolleyError error) { Log.e(TAG, error.toString()); } }); } /** * Get and Add secondary category to dropdown * @param context */ @SuppressWarnings("unchecked") public void addSecondary(final Context context){ SecondaryCategory.getAll(this, new Response.Listener<Object>() { @Override public void onResponse(Object object) { secondaryList = (ArrayList<Object>) object; for(Object o: secondaryList){ SecondaryCategory temp = (SecondaryCategory) o; secondaryNames.add(temp.getName()); secondaryMap.put(temp.getName(), temp.getId()); secondaryAdapter = new ArrayAdapter<>(context, android.R.layout.simple_spinner_item, secondaryNames); 
secondaryAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item); secondaryCatSpinner.setAdapter(secondaryAdapter); //Log.d(TAG, secondaryMap.toString()); } } }, new Response.ErrorListener() { @Override public void onErrorResponse(VolleyError error) { Log.e(TAG, error.toString()); } }); } /** * Get and Add games to list * @param context */ @SuppressWarnings("unchecked") public void getGameList(final Context context){ existingGameList = new ArrayList<>(); existingGames = new ArrayList<>(); existingGamesMap = new HashMap<>(); Game.getAll(this, new Response.Listener<Object>() { @Override public void onResponse(Object object) { existingGameList = (ArrayList<Object>) object; for(Object o: existingGameList){ Game temp = (Game) o; existingGames.add(temp); existingGamesMap.put(temp.getName(), temp); //Log.d(TAG, existingGames.toString()); if(gameTypeSpinner != null && gameTypeSpinner.getSelectedItem() != null && placeSpinner != null && placeSpinner.getSelectedItem() != null && primaryCatSpinner != null && primaryCatSpinner.getSelectedItem() != null && secondaryCatSpinner != null && secondaryCatSpinner.getSelectedItem() != null){ filterGames(); } } } }, new Response.ErrorListener() { @Override public void onErrorResponse(VolleyError error) { Log.e(TAG, error.toString()); } }); } /** *Gets all values from spinners and updates game list based on those */ public void filterGames(){ filteredGameNames = new ArrayList<>(); selectedGameType = gameTypeSpinner.getSelectedItem().toString(); selectedPlace = placeSpinner.getSelectedItem().toString(); selectedPrimary = primaryCatSpinner.getSelectedItem().toString(); selectedSecondary = secondaryCatSpinner.getSelectedItem().toString(); int gameTypeID = gameTypeMap.get(selectedGameType); int placeID = placesMap.get(selectedPlace); int primaryID = primaryMap.get(selectedPrimary); int secondaryID = secondaryMap.get(selectedSecondary); if (secondaryID == 1){ for(Game g: existingGames){ if(g.getPlaceId() == placeID && 
g.getGameTypeId() == gameTypeID && g.getPrimaryCategoryId() == primaryID){ filteredGameNames.add(g.getName()); } } }else{ for(Game g: existingGames){ if(g.getPlaceId() == placeID && g.getGameTypeId() == gameTypeID && g.getPrimaryCategoryId() == primaryID && g.getSecondaryCategoryId() == secondaryID ){ filteredGameNames.add(g.getName()); } } } gameListAdapter = new ArrayAdapter<>(this, android.R.layout.simple_list_item_1, filteredGameNames); existingGamesListView.setAdapter(gameListAdapter); } /** * Get game info from selected game in game list * @param gameName */ public void setSelectedGameInfo(String gameName){ this.gameName = gameName; this.gameObject = existingGamesMap.get(gameName); } /** * Check if any info is missing * @param button = string on buttons * @return */ public boolean checkFields(String button){ switch (button) { case "create": return true; case "join": if (gameName != null) { return true; } break; default: System.out.println("Something went wrong"); break; } Snackbar snackbar = Snackbar .make(mainPage, "Did you select a game to join? If none exists consider creating your own!", Snackbar.LENGTH_LONG); snackbar.show(); return false; } /** * Override back button to work with fragments */ @Override public void onBackPressed(){ //Override back button for now if(getSupportFragmentManager().getBackStackEntryCount() == 0){ return; } showMain(); super.onBackPressed(); } }
package org.semanticweb.elk.reasoner.indexing.classes; /*- * #%L * ELK Reasoner Core * $Id:$ * $HeadURL:$ * %% * Copyright (C) 2011 - 2017 Department of Computer Science, University of Oxford * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import org.semanticweb.elk.owl.interfaces.ElkClassAssertionAxiom; import org.semanticweb.elk.owl.interfaces.ElkDeclarationAxiom; import org.semanticweb.elk.owl.interfaces.ElkDifferentIndividualsAxiom; import org.semanticweb.elk.owl.interfaces.ElkDisjointClassesAxiom; import org.semanticweb.elk.owl.interfaces.ElkDisjointUnionAxiom; import org.semanticweb.elk.owl.interfaces.ElkEquivalentClassesAxiom; import org.semanticweb.elk.owl.interfaces.ElkEquivalentObjectPropertiesAxiom; import org.semanticweb.elk.owl.interfaces.ElkObjectPropertyAssertionAxiom; import org.semanticweb.elk.owl.interfaces.ElkObjectPropertyDomainAxiom; import org.semanticweb.elk.owl.interfaces.ElkObjectPropertyRangeAxiom; import org.semanticweb.elk.owl.interfaces.ElkReflexiveObjectPropertyAxiom; import org.semanticweb.elk.owl.interfaces.ElkSameIndividualAxiom; import org.semanticweb.elk.owl.interfaces.ElkSubClassOfAxiom; import org.semanticweb.elk.owl.interfaces.ElkSubObjectPropertyOfAxiom; import org.semanticweb.elk.owl.interfaces.ElkTransitiveObjectPropertyAxiom; import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkClassAssertionAxiomConversion; import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkDeclarationAxiomConversion; import 
org.semanticweb.elk.reasoner.indexing.model.ModifiableElkDifferentIndividualsAxiomBinaryConversion; import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkDifferentIndividualsAxiomNaryConversion; import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkDisjointClassesAxiomBinaryConversion; import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkDisjointClassesAxiomNaryConversion; import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkDisjointUnionAxiomBinaryConversion; import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkDisjointUnionAxiomEquivalenceConversion; import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkDisjointUnionAxiomNaryConversion; import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkDisjointUnionAxiomOwlNothingConversion; import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkDisjointUnionAxiomSubClassConversion; import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkEquivalentClassesAxiomEquivalenceConversion; import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkEquivalentClassesAxiomSubClassConversion; import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkEquivalentObjectPropertiesAxiomConversion; import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkObjectPropertyAssertionAxiomConversion; import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkObjectPropertyDomainAxiomConversion; import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkObjectPropertyRangeAxiomConversion; import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkReflexiveObjectPropertyAxiomConversion; import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkSameIndividualAxiomConversion; import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkSubClassOfAxiomConversion; import org.semanticweb.elk.reasoner.indexing.model.ModifiableElkSubObjectPropertyOfAxiomConversion; import 
org.semanticweb.elk.reasoner.indexing.model.ModifiableElkTransitiveObjectPropertyAxiomConversion; import org.semanticweb.elk.reasoner.indexing.model.ModifiableIndexedAxiomInference; import org.semanticweb.elk.reasoner.indexing.model.ModifiableIndexedClass; import org.semanticweb.elk.reasoner.indexing.model.ModifiableIndexedClassExpression; import org.semanticweb.elk.reasoner.indexing.model.ModifiableIndexedClassExpressionList; import org.semanticweb.elk.reasoner.indexing.model.ModifiableIndexedEntity; import org.semanticweb.elk.reasoner.indexing.model.ModifiableIndexedIndividual; import org.semanticweb.elk.reasoner.indexing.model.ModifiableIndexedObjectIntersectionOf; import org.semanticweb.elk.reasoner.indexing.model.ModifiableIndexedObjectProperty; import org.semanticweb.elk.reasoner.indexing.model.ModifiableIndexedPropertyChain; /** * A {@link ModifiableIndexedAxiomInference.Factory} that delegates the methods * to a given factory and optionally filters the result; * * @author Yevgeny Kazakov * */ public class ModifiableIndexedAxiomInferenceDelegatingFactory implements ModifiableIndexedAxiomInference.Factory { private final ModifiableIndexedAxiomInference.Factory delegate_; public ModifiableIndexedAxiomInferenceDelegatingFactory( ModifiableIndexedAxiomInference.Factory delegate) { this.delegate_ = delegate; } @SuppressWarnings("static-method") protected <T extends ModifiableIndexedAxiomInference> T filter(T input) { return input; } @Override public ModifiableElkSubClassOfAxiomConversion getElkSubClassOfAxiomConversion( ElkSubClassOfAxiom originalAxiom, ModifiableIndexedClassExpression subClass, ModifiableIndexedClassExpression superClass) { return filter(delegate_.getElkSubClassOfAxiomConversion(originalAxiom, subClass, superClass)); } @Override public ModifiableElkDeclarationAxiomConversion getElkDeclarationAxiomConversion( ElkDeclarationAxiom originalAxiom, ModifiableIndexedEntity entity) { return 
filter(delegate_.getElkDeclarationAxiomConversion(originalAxiom, entity)); } @Override public ModifiableElkSameIndividualAxiomConversion getElkSameIndividualAxiomConversion( ElkSameIndividualAxiom originalAxiom, int subIndividualPosition, int superIndividualPosition, ModifiableIndexedIndividual subIndividual, ModifiableIndexedIndividual superIndividual) { return filter(delegate_.getElkSameIndividualAxiomConversion( originalAxiom, subIndividualPosition, superIndividualPosition, subIndividual, superIndividual)); } @Override public ModifiableElkClassAssertionAxiomConversion getElkClassAssertionAxiomConversion( ElkClassAssertionAxiom originalAxiom, ModifiableIndexedIndividual instance, ModifiableIndexedClassExpression type) { return filter(delegate_.getElkClassAssertionAxiomConversion( originalAxiom, instance, type)); } @Override public ModifiableElkDisjointUnionAxiomNaryConversion getElkDisjointUnionAxiomNaryConversion( ElkDisjointUnionAxiom originalAxiom, ModifiableIndexedClassExpressionList disjointClasses) { return filter(delegate_.getElkDisjointUnionAxiomNaryConversion( originalAxiom, disjointClasses)); } @Override public ModifiableElkDisjointUnionAxiomBinaryConversion getElkDisjointUnionAxiomBinaryConversion( ElkDisjointUnionAxiom originalAxiom, int firstDisjunctPosition, int secondDisjunctPosition, ModifiableIndexedObjectIntersectionOf conjunction, ModifiableIndexedClass bottom) { return filter(delegate_.getElkDisjointUnionAxiomBinaryConversion( originalAxiom, firstDisjunctPosition, secondDisjunctPosition, conjunction, bottom)); } @Override public ModifiableElkDisjointUnionAxiomSubClassConversion getElkDisjointUnionAxiomSubClassConversion( ElkDisjointUnionAxiom originalAxiom, int disjunctPosition, ModifiableIndexedClassExpression disjunct, ModifiableIndexedClass definedClass) { return filter(delegate_.getElkDisjointUnionAxiomSubClassConversion( originalAxiom, disjunctPosition, disjunct, definedClass)); } @Override public 
ModifiableElkDisjointClassesAxiomNaryConversion getElkDisjointClassesAxiomNaryConversion( ElkDisjointClassesAxiom originalAxiom, ModifiableIndexedClassExpressionList disjointClasses) { return filter(delegate_.getElkDisjointClassesAxiomNaryConversion( originalAxiom, disjointClasses)); } @Override public ModifiableElkDisjointClassesAxiomBinaryConversion getElkDisjointClassesAxiomBinaryConversion( ElkDisjointClassesAxiom originalAxiom, int firstClassPosition, int secondClassPosition, ModifiableIndexedObjectIntersectionOf conjunction, ModifiableIndexedClass bottom) { return filter(delegate_.getElkDisjointClassesAxiomBinaryConversion( originalAxiom, firstClassPosition, secondClassPosition, conjunction, bottom)); } @Override public ModifiableElkDisjointUnionAxiomOwlNothingConversion getElkDisjointUnionAxiomOwlNothingConversion( ElkDisjointUnionAxiom originalAxiom, ModifiableIndexedClass definedClass, ModifiableIndexedClass bottom) { return filter(delegate_.getElkDisjointUnionAxiomOwlNothingConversion( originalAxiom, definedClass, bottom)); } @Override public ModifiableElkObjectPropertyDomainAxiomConversion getElkObjectPropertyDomainAxiomConversion( ElkObjectPropertyDomainAxiom originalAxiom, ModifiableIndexedClassExpression subClass, ModifiableIndexedClassExpression superClass) { return filter(delegate_.getElkObjectPropertyDomainAxiomConversion( originalAxiom, subClass, superClass)); } @Override public ModifiableElkObjectPropertyRangeAxiomConversion getElkObjectPropertyRangeAxiomConversion( ElkObjectPropertyRangeAxiom originalAxiom, ModifiableIndexedObjectProperty property, ModifiableIndexedClassExpression range) { return filter(delegate_.getElkObjectPropertyRangeAxiomConversion( originalAxiom, property, range)); } @Override public ModifiableElkSubObjectPropertyOfAxiomConversion getElkSubObjectPropertyOfAxiomConversion( ElkSubObjectPropertyOfAxiom originalAxiom, ModifiableIndexedPropertyChain subPropertyChain, ModifiableIndexedObjectProperty superProperty) { return 
filter(delegate_.getElkSubObjectPropertyOfAxiomConversion( originalAxiom, subPropertyChain, superProperty)); } @Override public ModifiableElkDisjointUnionAxiomEquivalenceConversion getElkDisjointUnionAxiomEquivalenceConversion( ElkDisjointUnionAxiom originalAxiom, ModifiableIndexedClass definedClass, ModifiableIndexedClassExpression definition) { return filter(delegate_.getElkDisjointUnionAxiomEquivalenceConversion( originalAxiom, definedClass, definition)); } @Override public ModifiableElkObjectPropertyAssertionAxiomConversion getElkObjectPropertyAssertionAxiomConversion( ElkObjectPropertyAssertionAxiom originalAxiom, ModifiableIndexedClassExpression subClass, ModifiableIndexedClassExpression superClass) { return filter(delegate_.getElkObjectPropertyAssertionAxiomConversion( originalAxiom, subClass, superClass)); } @Override public ModifiableElkReflexiveObjectPropertyAxiomConversion getElkReflexiveObjectPropertyAxiomConversion( ElkReflexiveObjectPropertyAxiom originalAxiom, ModifiableIndexedClassExpression subClass, ModifiableIndexedClassExpression superClass) { return filter(delegate_.getElkReflexiveObjectPropertyAxiomConversion( originalAxiom, subClass, superClass)); } @Override public ModifiableElkEquivalentClassesAxiomSubClassConversion getElkEquivalentClassesAxiomSubClassConversion( ElkEquivalentClassesAxiom originalAxiom, int subClassPosition, int superClassPosition, ModifiableIndexedClassExpression subClass, ModifiableIndexedClassExpression superClass) { return filter(delegate_.getElkEquivalentClassesAxiomSubClassConversion( originalAxiom, subClassPosition, superClassPosition, subClass, superClass)); } @Override public ModifiableElkDifferentIndividualsAxiomNaryConversion getElkDifferentIndividualsAxiomNaryConversion( ElkDifferentIndividualsAxiom originalAxiom, ModifiableIndexedClassExpressionList differentIndividuals) { return filter(delegate_.getElkDifferentIndividualsAxiomNaryConversion( originalAxiom, differentIndividuals)); } @Override public 
ModifiableElkDifferentIndividualsAxiomBinaryConversion getElkDifferentIndividualsAxiomBinaryConversion( ElkDifferentIndividualsAxiom originalAxiom, int firstIndividualPosition, int secondIndividualPosition, ModifiableIndexedObjectIntersectionOf conjunction, ModifiableIndexedClass bottom) { return filter(delegate_.getElkDifferentIndividualsAxiomBinaryConversion( originalAxiom, firstIndividualPosition, secondIndividualPosition, conjunction, bottom)); } @Override public ModifiableElkEquivalentClassesAxiomEquivalenceConversion getElkEquivalentClassesAxiomEquivalenceConversion( ElkEquivalentClassesAxiom originalAxiom, int firstMemberPosition, int secondMemberPosition, ModifiableIndexedClassExpression firstMember, ModifiableIndexedClassExpression secondMember) { return filter( delegate_.getElkEquivalentClassesAxiomEquivalenceConversion( originalAxiom, firstMemberPosition, secondMemberPosition, firstMember, secondMember)); } @Override public ModifiableElkTransitiveObjectPropertyAxiomConversion getElkTransitiveObjectPropertyAxiomConversion( ElkTransitiveObjectPropertyAxiom originalAxiom, ModifiableIndexedPropertyChain subPropertyChain, ModifiableIndexedObjectProperty superProperty) { return filter(delegate_.getElkTransitiveObjectPropertyAxiomConversion( originalAxiom, subPropertyChain, superProperty)); } @Override public ModifiableElkEquivalentObjectPropertiesAxiomConversion getElkEquivalentObjectPropertiesAxiomConversion( ElkEquivalentObjectPropertiesAxiom originalAxiom, int subPropertyPosition, int superPropertyPosition, ModifiableIndexedObjectProperty subProperty, ModifiableIndexedObjectProperty superProperty) { return filter(delegate_.getElkEquivalentObjectPropertiesAxiomConversion( originalAxiom, subPropertyPosition, superPropertyPosition, subProperty, superProperty)); } }
package com.android.hotspot2.est; import android.net.Network; import android.util.Base64; import android.util.Log; import com.android.hotspot2.OMADMAdapter; import com.android.hotspot2.asn1.Asn1Class; import com.android.hotspot2.asn1.Asn1Constructed; import com.android.hotspot2.asn1.Asn1Decoder; import com.android.hotspot2.asn1.Asn1ID; import com.android.hotspot2.asn1.Asn1Integer; import com.android.hotspot2.asn1.Asn1Object; import com.android.hotspot2.asn1.Asn1Oid; import com.android.hotspot2.asn1.OidMappings; import com.android.hotspot2.osu.HTTPHandler; import com.android.hotspot2.osu.OSUSocketFactory; import com.android.hotspot2.osu.commands.GetCertData; import com.android.hotspot2.pps.HomeSP; import com.android.hotspot2.utils.HTTPMessage; import com.android.hotspot2.utils.HTTPResponse; import com.android.org.bouncycastle.asn1.ASN1Encodable; import com.android.org.bouncycastle.asn1.ASN1EncodableVector; import com.android.org.bouncycastle.asn1.ASN1Set; import com.android.org.bouncycastle.asn1.DERBitString; import com.android.org.bouncycastle.asn1.DEREncodableVector; import com.android.org.bouncycastle.asn1.DERIA5String; import com.android.org.bouncycastle.asn1.DERObjectIdentifier; import com.android.org.bouncycastle.asn1.DERPrintableString; import com.android.org.bouncycastle.asn1.DERSet; import com.android.org.bouncycastle.asn1.x509.Attribute; import com.android.org.bouncycastle.jce.PKCS10CertificationRequest; import com.android.org.bouncycastle.jce.spec.ECNamedCurveGenParameterSpec; import java.io.ByteArrayInputStream; import java.io.IOException; import java.net.URL; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.security.AlgorithmParameters; import java.security.GeneralSecurityException; import java.security.KeyPair; import java.security.KeyPairGenerator; import java.security.KeyStore; import java.security.PrivateKey; import java.security.cert.CertificateFactory; import java.security.cert.X509Certificate; import 
java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import javax.net.ssl.KeyManager; import javax.security.auth.x500.X500Principal; //import com.android.org.bouncycastle.jce.provider.BouncyCastleProvider; public class ESTHandler implements AutoCloseable { private static final String TAG = "HS2EST"; private static final int MinRSAKeySize = 2048; private static final String CACERT_PATH = "/cacerts"; private static final String CSR_PATH = "/csrattrs"; private static final String SIMPLE_ENROLL_PATH = "/simpleenroll"; private static final String SIMPLE_REENROLL_PATH = "/simplereenroll"; private final URL mURL; private final String mUser; private final byte[] mPassword; private final OSUSocketFactory mSocketFactory; private final OMADMAdapter mOMADMAdapter; private final List<X509Certificate> mCACerts = new ArrayList<>(); private final List<X509Certificate> mClientCerts = new ArrayList<>(); private PrivateKey mClientKey; public ESTHandler(GetCertData certData, Network network, OMADMAdapter omadmAdapter, KeyManager km, KeyStore ks, HomeSP homeSP, int flowType) throws IOException, GeneralSecurityException { mURL = new URL(certData.getServer()); mUser = certData.getUserName(); mPassword = certData.getPassword(); mSocketFactory = OSUSocketFactory.getSocketFactory(ks, homeSP, flowType, network, mURL, km, true); mOMADMAdapter = omadmAdapter; } @Override public void close() throws IOException { } public List<X509Certificate> getCACerts() { return mCACerts; } public List<X509Certificate> getClientCerts() { return mClientCerts; } public PrivateKey getClientKey() { return mClientKey; } private static String indent(int amount) { char[] indent = new char[amount * 2]; Arrays.fill(indent, ' '); return new String(indent); } public void execute(boolean reenroll) throws IOException, GeneralSecurityException { URL caURL = 
new URL(mURL.getProtocol(), mURL.getHost(), mURL.getPort(), mURL.getFile() + CACERT_PATH); HTTPResponse response; try (HTTPHandler httpHandler = new HTTPHandler(StandardCharsets.ISO_8859_1, mSocketFactory, mUser, mPassword)) { response = httpHandler.doGetHTTP(caURL); if (!"application/pkcs7-mime".equals(response.getHeaders(). get(HTTPMessage.ContentTypeHeader))) { throw new IOException("Unexpected Content-Type: " + response.getHeaders().get(HTTPMessage.ContentTypeHeader)); } ByteBuffer octetBuffer = response.getBinaryPayload(); Collection<Asn1Object> pkcs7Content1 = Asn1Decoder.decode(octetBuffer); for (Asn1Object asn1Object : pkcs7Content1) { Log.d(TAG, "---"); Log.d(TAG, asn1Object.toString()); } Log.d(TAG, CACERT_PATH); mCACerts.addAll(unpackPkcs7(octetBuffer)); for (X509Certificate certificate : mCACerts) { Log.d(TAG, "CA-Cert: " + certificate.getSubjectX500Principal()); } /* byte[] octets = new byte[octetBuffer.remaining()]; octetBuffer.duplicate().get(octets); for (byte b : octets) { System.out.printf("%02x ", b & 0xff); } Log.d(TAG, ); */ /* + BC try { byte[] octets = new byte[octetBuffer.remaining()]; octetBuffer.duplicate().get(octets); ASN1InputStream asnin = new ASN1InputStream(octets); for (int n = 0; n < 100; n++) { ASN1Primitive object = asnin.readObject(); if (object == null) { break; } parseObject(object, 0); } } catch (Throwable t) { t.printStackTrace(); } Collection<Asn1Object> pkcs7Content = Asn1Decoder.decode(octetBuffer); for (Asn1Object asn1Object : pkcs7Content) { Log.d(TAG, asn1Object); } if (pkcs7Content.size() != 1) { throw new IOException("Unexpected pkcs 7 container: " + pkcs7Content.size()); } Asn1Constructed pkcs7Root = (Asn1Constructed) pkcs7Content.iterator().next(); Iterator<Asn1ID> certPath = Arrays.asList(Pkcs7CertPath).iterator(); Asn1Object certObject = pkcs7Root.findObject(certPath); if (certObject == null || certPath.hasNext()) { throw new IOException("Failed to find cert; returned object " + certObject + ", path " + 
(certPath.hasNext() ? "short" : "exhausted")); } ByteBuffer certOctets = certObject.getPayload(); if (certOctets == null) { throw new IOException("No cert payload in: " + certObject); } byte[] certBytes = new byte[certOctets.remaining()]; certOctets.get(certBytes); CertificateFactory certFactory = CertificateFactory.getInstance("X.509"); Certificate cert = certFactory.generateCertificate(new ByteArrayInputStream(certBytes)); Log.d(TAG, "EST Cert: " + cert); */ URL csrURL = new URL(mURL.getProtocol(), mURL.getHost(), mURL.getPort(), mURL.getFile() + CSR_PATH); response = httpHandler.doGetHTTP(csrURL); octetBuffer = response.getBinaryPayload(); byte[] csrData = buildCSR(octetBuffer, mOMADMAdapter, httpHandler); /**/ Collection<Asn1Object> o = Asn1Decoder.decode(ByteBuffer.wrap(csrData)); Log.d(TAG, "CSR:"); Log.d(TAG, o.iterator().next().toString()); Log.d(TAG, "End CSR."); /**/ URL enrollURL = new URL(mURL.getProtocol(), mURL.getHost(), mURL.getPort(), mURL.getFile() + (reenroll ? SIMPLE_REENROLL_PATH : SIMPLE_ENROLL_PATH)); String data = Base64.encodeToString(csrData, Base64.DEFAULT); octetBuffer = httpHandler.exchangeBinary(enrollURL, data, "application/pkcs10"); Collection<Asn1Object> pkcs7Content2 = Asn1Decoder.decode(octetBuffer); for (Asn1Object asn1Object : pkcs7Content2) { Log.d(TAG, "---"); Log.d(TAG, asn1Object.toString()); } mClientCerts.addAll(unpackPkcs7(octetBuffer)); for (X509Certificate cert : mClientCerts) { Log.d(TAG, cert.toString()); } } } private static final Asn1ID sSEQUENCE = new Asn1ID(Asn1Decoder.TAG_SEQ, Asn1Class.Universal); private static final Asn1ID sCTXT0 = new Asn1ID(0, Asn1Class.Context); private static final int PKCS7DataVersion = 1; private static final int PKCS7SignedDataVersion = 3; private static List<X509Certificate> unpackPkcs7(ByteBuffer pkcs7) throws IOException, GeneralSecurityException { Collection<Asn1Object> pkcs7Content = Asn1Decoder.decode(pkcs7); if (pkcs7Content.size() != 1) { throw new IOException("Unexpected pkcs 7 
container: " + pkcs7Content.size()); } Asn1Object data = pkcs7Content.iterator().next(); if (!data.isConstructed() || !data.matches(sSEQUENCE)) { throw new IOException("Expected SEQ OF, got " + data.toSimpleString()); } else if (data.getChildren().size() != 2) { throw new IOException("Expected content info to have two children, got " + data.getChildren().size()); } Iterator<Asn1Object> children = data.getChildren().iterator(); Asn1Object contentType = children.next(); if (!contentType.equals(Asn1Oid.PKCS7SignedData)) { throw new IOException("Content not PKCS7 signed data"); } Asn1Object content = children.next(); if (!content.isConstructed() || !content.matches(sCTXT0)) { throw new IOException("Expected [CONTEXT 0] with one child, got " + content.toSimpleString() + ", " + content.getChildren().size()); } Asn1Object signedData = content.getChildren().iterator().next(); Map<Integer, Asn1Object> itemMap = new HashMap<>(); for (Asn1Object item : signedData.getChildren()) { if (itemMap.put(item.getTag(), item) != null && item.getTag() != Asn1Decoder.TAG_SET) { throw new IOException("Duplicate item in SignedData: " + item.toSimpleString()); } } Asn1Object versionObject = itemMap.get(Asn1Decoder.TAG_INTEGER); if (versionObject == null || !(versionObject instanceof Asn1Integer)) { throw new IOException("Bad or missing PKCS7 version: " + versionObject); } int pkcs7version = (int) ((Asn1Integer) versionObject).getValue(); Asn1Object innerContentInfo = itemMap.get(Asn1Decoder.TAG_SEQ); if (innerContentInfo == null || !innerContentInfo.isConstructed() || !innerContentInfo.matches(sSEQUENCE) || innerContentInfo.getChildren().size() != 1) { throw new IOException("Bad or missing PKCS7 contentInfo"); } Asn1Object contentID = innerContentInfo.getChildren().iterator().next(); if (pkcs7version == PKCS7DataVersion && !contentID.equals(Asn1Oid.PKCS7Data) || pkcs7version == PKCS7SignedDataVersion && !contentID.equals(Asn1Oid.PKCS7SignedData)) { throw new IOException("Inner PKCS7 content 
(" + contentID + ") not expected for version " + pkcs7version); } Asn1Object certWrapper = itemMap.get(0); if (certWrapper == null || !certWrapper.isConstructed() || !certWrapper.matches(sCTXT0)) { throw new IOException("Expected [CONTEXT 0], got: " + certWrapper); } List<X509Certificate> certList = new ArrayList<>(certWrapper.getChildren().size()); CertificateFactory certFactory = CertificateFactory.getInstance("X.509"); for (Asn1Object certObject : certWrapper.getChildren()) { ByteBuffer certOctets = ((Asn1Constructed) certObject).getEncoding(); if (certOctets == null) { throw new IOException("No cert payload in: " + certObject); } byte[] certBytes = new byte[certOctets.remaining()]; certOctets.get(certBytes); certList.add((X509Certificate) certFactory. generateCertificate(new ByteArrayInputStream(certBytes))); } return certList; } private byte[] buildCSR(ByteBuffer octetBuffer, OMADMAdapter omadmAdapter, HTTPHandler httpHandler) throws IOException, GeneralSecurityException { //Security.addProvider(new BouncyCastleProvider()); Log.d(TAG, "/csrattrs:"); /* byte[] octets = new byte[octetBuffer.remaining()]; octetBuffer.duplicate().get(octets); for (byte b : octets) { System.out.printf("%02x ", b & 0xff); } */ Collection<Asn1Object> csrs = Asn1Decoder.decode(octetBuffer); for (Asn1Object asn1Object : csrs) { Log.d(TAG, asn1Object.toString()); } if (csrs.size() != 1) { throw new IOException("Unexpected object count in CSR attributes response: " + csrs.size()); } Asn1Object sequence = csrs.iterator().next(); if (sequence.getClass() != Asn1Constructed.class) { throw new IOException("Unexpected CSR attribute container: " + sequence); } String keyAlgo = null; Asn1Oid keyAlgoOID = null; String sigAlgo = null; String curveName = null; Asn1Oid pubCrypto = null; int keySize = -1; Map<Asn1Oid, ASN1Encodable> idAttributes = new HashMap<>(); for (Asn1Object child : sequence.getChildren()) { if (child.getTag() == Asn1Decoder.TAG_OID) { Asn1Oid oid = (Asn1Oid) child; 
OidMappings.SigEntry sigEntry = OidMappings.getSigEntry(oid); if (sigEntry != null) { sigAlgo = sigEntry.getSigAlgo(); keyAlgoOID = sigEntry.getKeyAlgo(); keyAlgo = OidMappings.getJCEName(keyAlgoOID); } else if (oid.equals(OidMappings.sPkcs9AtChallengePassword)) { byte[] tlsUnique = httpHandler.getTLSUnique(); if (tlsUnique != null) { idAttributes.put(oid, new DERPrintableString( Base64.encodeToString(tlsUnique, Base64.DEFAULT))); } else { Log.w(TAG, "Cannot retrieve TLS unique channel binding"); } } } else if (child.getTag() == Asn1Decoder.TAG_SEQ) { Asn1Oid oid = null; Set<Asn1Oid> oidValues = new HashSet<>(); List<Asn1Object> values = new ArrayList<>(); for (Asn1Object attributeSeq : child.getChildren()) { if (attributeSeq.getTag() == Asn1Decoder.TAG_OID) { oid = (Asn1Oid) attributeSeq; } else if (attributeSeq.getTag() == Asn1Decoder.TAG_SET) { for (Asn1Object value : attributeSeq.getChildren()) { if (value.getTag() == Asn1Decoder.TAG_OID) { oidValues.add((Asn1Oid) value); } else { values.add(value); } } } } if (oid == null) { throw new IOException("Invalid attribute, no OID"); } if (oid.equals(OidMappings.sExtensionRequest)) { for (Asn1Oid subOid : oidValues) { if (OidMappings.isIDAttribute(subOid)) { if (subOid.equals(OidMappings.sMAC)) { idAttributes.put(subOid, new DERIA5String(omadmAdapter.getMAC())); } else if (subOid.equals(OidMappings.sIMEI)) { idAttributes.put(subOid, new DERIA5String(omadmAdapter.getImei())); } else if (subOid.equals(OidMappings.sMEID)) { idAttributes.put(subOid, new DERBitString(omadmAdapter.getMeid())); } else if (subOid.equals(OidMappings.sDevID)) { idAttributes.put(subOid, new DERPrintableString(omadmAdapter.getDevID())); } } } } else if (OidMappings.getCryptoID(oid) != null) { pubCrypto = oid; if (!values.isEmpty()) { for (Asn1Object value : values) { if (value.getTag() == Asn1Decoder.TAG_INTEGER) { keySize = (int) ((Asn1Integer) value).getValue(); } } } if (oid.equals(OidMappings.sAlgo_EC)) { if (oidValues.isEmpty()) { throw new 
IOException("No ECC curve name provided"); } for (Asn1Oid value : oidValues) { curveName = OidMappings.getJCEName(value); if (curveName != null) { break; } } if (curveName == null) { throw new IOException("Found no ECC curve for " + oidValues); } } } } } if (keyAlgoOID == null) { throw new IOException("No public key algorithm specified"); } if (pubCrypto != null && !pubCrypto.equals(keyAlgoOID)) { throw new IOException("Mismatching key algorithms"); } if (keyAlgoOID.equals(OidMappings.sAlgo_RSA)) { if (keySize < MinRSAKeySize) { if (keySize >= 0) { Log.i(TAG, "Upgrading suggested RSA key size from " + keySize + " to " + MinRSAKeySize); } keySize = MinRSAKeySize; } } Log.d(TAG, String.format("pub key '%s', signature '%s', ECC curve '%s', id-atts %s", keyAlgo, sigAlgo, curveName, idAttributes)); /* Ruckus: SEQUENCE: OID=1.2.840.113549.1.1.11 (algo_id_sha256WithRSAEncryption) RFC-7030: SEQUENCE: OID=1.2.840.113549.1.9.7 (challengePassword) SEQUENCE: OID=1.2.840.10045.2.1 (algo_id_ecPublicKey) SET: OID=1.3.132.0.34 (secp384r1) SEQUENCE: OID=1.2.840.113549.1.9.14 (extensionRequest) SET: OID=1.3.6.1.1.1.1.22 (mac-address) OID=1.2.840.10045.4.3.3 (eccdaWithSHA384) 1L, 3L, 6L, 1L, 1L, 1L, 1L, 22 */ // ECC Does not appear to be supported currently KeyPairGenerator kpg = KeyPairGenerator.getInstance(keyAlgo); if (curveName != null) { AlgorithmParameters algorithmParameters = AlgorithmParameters.getInstance(keyAlgo); algorithmParameters.init(new ECNamedCurveGenParameterSpec(curveName)); kpg.initialize(algorithmParameters .getParameterSpec(ECNamedCurveGenParameterSpec.class)); } else { kpg.initialize(keySize); } KeyPair kp = kpg.generateKeyPair(); X500Principal subject = new X500Principal("CN=Android, O=Google, C=US"); mClientKey = kp.getPrivate(); // !!! Map the idAttributes into an ASN1Set of values to pass to // the PKCS10CertificationRequest - this code is using outdated BC classes and // has *not* been tested. 
ASN1Set attributes; if (!idAttributes.isEmpty()) { ASN1EncodableVector payload = new DEREncodableVector(); for (Map.Entry<Asn1Oid, ASN1Encodable> entry : idAttributes.entrySet()) { DERObjectIdentifier type = new DERObjectIdentifier(entry.getKey().toOIDString()); ASN1Set values = new DERSet(entry.getValue()); Attribute attribute = new Attribute(type, values); payload.add(attribute); } attributes = new DERSet(payload); } else { attributes = null; } return new PKCS10CertificationRequest(sigAlgo, subject, kp.getPublic(), attributes, mClientKey).getEncoded(); } }
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.jetbrains.jsonSchema.widget;

import com.intellij.icons.AllIcons;
import com.intellij.json.JsonBundle;
import com.intellij.openapi.options.ShowSettingsUtil;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.ListPopupStepEx;
import com.intellij.openapi.ui.popup.ListSeparator;
import com.intellij.openapi.ui.popup.PopupStep;
import com.intellij.openapi.ui.popup.util.BaseListPopupStep;
import com.intellij.openapi.util.NlsContexts.PopupTitle;
import com.intellij.openapi.util.text.HtmlBuilder;
import com.intellij.openapi.util.text.HtmlChunk;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.ui.scale.JBUIScale;
import com.intellij.util.ui.EmptyIcon;
import com.intellij.util.ui.StatusText;
import com.jetbrains.jsonSchema.JsonSchemaMappingsProjectConfiguration;
import com.jetbrains.jsonSchema.UserDefinedJsonSchemaConfiguration;
import com.jetbrains.jsonSchema.extension.JsonSchemaInfo;
import com.jetbrains.jsonSchema.ide.JsonSchemaService;
import com.jetbrains.jsonSchema.settings.mappings.JsonSchemaMappingsConfigurable;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.util.Collections;
import java.util.List;
import java.util.Objects;

import static com.intellij.openapi.util.NlsContexts.Tooltip;
import static com.jetbrains.jsonSchema.widget.JsonSchemaStatusPopup.*;

/**
 * Popup step shown by the JSON schema status-bar widget: lists the schemas applicable to the
 * current file plus the special action entries ({@code ADD_MAPPING}, {@code EDIT_MAPPINGS},
 * {@code LOAD_REMOTE} — statically imported from {@link JsonSchemaStatusPopup}).
 * Choosing a regular entry rewrites the project's schema mapping for the file; choosing an
 * action entry opens the mappings settings dialog or triggers a remote-schema refresh.
 */
public class JsonSchemaInfoPopupStep extends BaseListPopupStep<JsonSchemaInfo> implements ListPopupStepEx<JsonSchemaInfo> {
  private final Project myProject;
  // Null when the step is created without a target file; in that case subclasses must
  // override runSchemaEditorForCurrentFile()/setMapping() (see the asserts below).
  @Nullable private final VirtualFile myVirtualFile;
  @NotNull private final JsonSchemaService myService;
  // Blank icon the same size as the "Add" icon, so plain schema entries stay aligned
  // with the action entries that do have icons.
  private static final Icon EMPTY_ICON = JBUIScale.scaleIcon(EmptyIcon.create(AllIcons.General.Add.getIconWidth()));

  public JsonSchemaInfoPopupStep(@NotNull List<JsonSchemaInfo> allSchemas,
                                 @NotNull Project project,
                                 @Nullable VirtualFile virtualFile,
                                 @NotNull JsonSchemaService service,
                                 @Nullable @PopupTitle String title) {
    super(title, allSchemas);
    myProject = project;
    myVirtualFile = virtualFile;
    myService = service;
  }

  @NotNull
  @Override
  public String getTextFor(JsonSchemaInfo value) {
    return value == null ? "" : value.getDescription();
  }

  @Override
  public Icon getIconFor(JsonSchemaInfo value) {
    // Action entries get their dedicated icons; schema entries get the size-matched blank.
    if (value == ADD_MAPPING) {
      return AllIcons.General.Add;
    }
    if (value == EDIT_MAPPINGS) {
      return AllIcons.Actions.Edit;
    }
    if (value == LOAD_REMOTE) {
      return AllIcons.Actions.Refresh;
    }
    return EMPTY_ICON;
  }

  @Nullable
  @Override
  public ListSeparator getSeparatorAbove(JsonSchemaInfo value) {
    // A separator is drawn (a) between the action entries and the first registered schema and
    // (b) at the transition from provider-backed schemas to schema-store entries (provider == null).
    List<JsonSchemaInfo> values = getValues();
    int index = values.indexOf(value);
    if (index - 1 >= 0) {
      JsonSchemaInfo info = values.get(index - 1);
      if (info == EDIT_MAPPINGS || info == ADD_MAPPING) {
        return new ListSeparator(JsonBundle.message("schema.widget.registered.schemas"));
      }
      if (value.getProvider() == null && info.getProvider() != null) {
        return new ListSeparator(JsonBundle.message("schema.widget.store.schemas"));
      }
    }
    return null;
  }

  @Override
  public PopupStep onChosen(JsonSchemaInfo selectedValue, boolean finalChoice) {
    if (finalChoice) {
      if (selectedValue == EDIT_MAPPINGS || selectedValue == ADD_MAPPING) {
        return doFinalStep(() -> runSchemaEditorForCurrentFile());
      }
      else if (selectedValue == LOAD_REMOTE) {
        return doFinalStep(() -> myService.triggerUpdateRemote());
      }
      else {
        // A concrete schema was picked: persist the mapping, then reset the service so the
        // new schema takes effect for open editors.
        setMapping(selectedValue, myVirtualFile, myProject);
        return doFinalStep(() -> myService.reset());
      }
    }
    return PopupStep.FINAL_CHOICE;
  }

  /**
   * Opens the JSON schema mappings settings dialog, pre-selecting (creating if necessary)
   * the mapping for {@code myVirtualFile}.
   */
  protected void runSchemaEditorForCurrentFile() {
    assert myVirtualFile != null: "override this method to do without a virtual file!";
    ShowSettingsUtil.getInstance().showSettingsDialog(myProject, JsonSchemaMappingsConfigurable.class, (configurable) -> {
      // For some reason, JsonSchemaMappingsConfigurable.reset is called right after this callback, leading to resetting the customization.
      // Workaround: move this logic inside JsonSchemaMappingsConfigurable.reset.
      configurable.setInitializer(() -> {
        JsonSchemaMappingsProjectConfiguration mappings = JsonSchemaMappingsProjectConfiguration.getInstance(myProject);
        UserDefinedJsonSchemaConfiguration configuration = mappings.findMappingForFile(myVirtualFile);
        if (configuration == null) {
          // No mapping yet — create one whose pattern is the file itself (project-relative
          // when possible, otherwise the full URL).
          configuration = configurable.addProjectSchema();
          String relativePath = VfsUtilCore.getRelativePath(myVirtualFile, myProject.getBaseDir());
          configuration.patterns.add(new UserDefinedJsonSchemaConfiguration.Item(
            relativePath == null ? myVirtualFile.getUrl() : relativePath, false, false));
        }
        configurable.selectInTree(configuration);
      });
    });
  }

  @Override
  public boolean isSpeedSearchEnabled() {
    return true;
  }

  @Nullable
  @Override
  public @Tooltip String getTooltipTextFor(JsonSchemaInfo value) {
    return getDoc(value);
  }

  /**
   * Builds the tooltip: "<b>name</b><br>documentation" when both parts exist,
   * otherwise whichever single part is available, or null.
   */
  @Nullable
  private static @Tooltip String getDoc(JsonSchemaInfo schema) {
    if (schema == null) return null;
    if (schema.getName() == null) return schema.getDocumentation();
    if (schema.getDocumentation() == null) return schema.getName();
    return new HtmlBuilder()
      .append(HtmlChunk.tag("b").addText(schema.getName()))
      .append(HtmlChunk.br())
      .appendRaw(schema.getDocumentation()).toString();
  }

  @Override
  public void setEmptyText(@NotNull StatusText emptyText) {
  }

  @Override
  public PopupStep onChosen(JsonSchemaInfo selectedValue, boolean finalChoice, int eventModifiers) {
    // Modifiers are irrelevant here; delegate to the two-argument overload.
    return onChosen(selectedValue, finalChoice);
  }

  /**
   * Replaces the schema mapping for {@code virtualFile}: removes any pattern that currently
   * matches the file, then (when {@code selectedValue} is non-null) attaches the file to the
   * chosen schema's configuration, creating that configuration if it does not exist yet.
   */
  protected void setMapping(@Nullable JsonSchemaInfo selectedValue, @Nullable VirtualFile virtualFile, @NotNull Project project) {
    assert virtualFile != null: "override this method to do without a virtual file!";
    JsonSchemaMappingsProjectConfiguration configuration = JsonSchemaMappingsProjectConfiguration.getInstance(project);

    VirtualFile projectBaseDir = project.getBaseDir();
    UserDefinedJsonSchemaConfiguration mappingForFile = configuration.findMappingForFile(virtualFile);
    if (mappingForFile != null) {
      for (UserDefinedJsonSchemaConfiguration.Item pattern : mappingForFile.patterns) {
        // A pattern matches when it resolves to this exact file, either as a
        // project-relative path or as a neutralized URL.
        if (Objects.equals(VfsUtil.findRelativeFile(projectBaseDir, pattern.getPathParts()), virtualFile)
            || virtualFile.getUrl().equals(UserDefinedJsonSchemaConfiguration.Item.neutralizePath(pattern.getPath()))) {
          // Removing inside the for-each is safe only because we break immediately after.
          mappingForFile.patterns.remove(pattern);
          if (mappingForFile.patterns.size() == 0 && mappingForFile.isApplicationDefined()) {
            configuration.removeConfiguration(mappingForFile);
          }
          else {
            mappingForFile.refreshPatterns();
          }
          break;
        }
      }
    }
    if (selectedValue == null) return;

    // Prefer a project-relative pattern; fall back to the full URL.
    String path = projectBaseDir == null ? null : VfsUtilCore.getRelativePath(virtualFile, projectBaseDir);
    if (path == null) {
      path = virtualFile.getUrl();
    }
    UserDefinedJsonSchemaConfiguration existing = configuration.findMappingBySchemaInfo(selectedValue);
    UserDefinedJsonSchemaConfiguration.Item item = new UserDefinedJsonSchemaConfiguration.Item(path, false, false);
    if (existing != null) {
      if (!existing.patterns.contains(item)) {
        existing.patterns.add(item);
        existing.refreshPatterns();
      }
    }
    else {
      configuration.addConfiguration(new UserDefinedJsonSchemaConfiguration(selectedValue.getDescription(),
                                                                            selectedValue.getSchemaVersion(),
                                                                            selectedValue.getUrl(project),
                                                                            true,
                                                                            Collections.singletonList(item)));
    }
  }
}
/* =========================================================================== Copyright (c) 2013 3PillarGlobal Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sub-license, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
=========================================================================== */ package org.brickred.socialauth.plugin.linkedin; import java.io.Serializable; import java.util.logging.Logger; import org.brickred.socialauth.Career; import org.brickred.socialauth.Education; import org.brickred.socialauth.Position; import org.brickred.socialauth.Recommendation; import org.brickred.socialauth.exception.ServerDataException; import org.brickred.socialauth.exception.SocialAuthException; import org.brickred.socialauth.plugin.CareerPlugin; import org.brickred.socialauth.util.DateComponents; import org.brickred.socialauth.util.ProviderSupport; import org.brickred.socialauth.util.Response; import org.brickred.socialauth.util.XMLParseUtil; import org.w3c.dom.Element; import org.w3c.dom.NodeList; /** * Career plugin implementation for linkedin * * @author tarun.nagpal * */ public class CareerPluginImpl implements CareerPlugin, Serializable { private static final long serialVersionUID = -1733773634998485298L; private ProviderSupport providerSupport; private final Logger LOG = Logger.getLogger(this.getClass().getName()); private static final String PROFILE_URL = "http://api.linkedin.com/v1/people/~:(id,phone-numbers,headline,educations,positions,recommendations-received)"; public CareerPluginImpl(final ProviderSupport providerSupport) { this.providerSupport = providerSupport; } @Override public Career getCareerDetails() throws Exception { LOG.info("Fetching career details from " + PROFILE_URL); Response serviceResponse = null; try { serviceResponse = providerSupport.api(PROFILE_URL); } catch (Exception ie) { throw new SocialAuthException( "Failed to retrieve the career details from " + PROFILE_URL, ie); } Element root; try { root = XMLParseUtil.loadXmlResource(serviceResponse .getInputStream()); } catch (Exception e) { throw new ServerDataException( "Failed to parse the career details from response." 
+ PROFILE_URL, e); } Career career = null; if (root != null) { career = new Career(); Education[] educationsArr = null; Position[] positionsArr = null; Recommendation[] recommendationsArr = null; String headline = XMLParseUtil.getElementData(root, "headline"); career.setHeadline(headline); String id = XMLParseUtil.getElementData(root, "id"); career.setId(id); // get educations NodeList educations = root.getElementsByTagName("education"); if (educations != null && educations.getLength() > 0) { LOG.fine("Educations count " + educations.getLength()); educationsArr = new Education[educations.getLength()]; for (int i = 0; i < educations.getLength(); i++) { Education educationObj = new Education(); Element educationEl = (Element) educations.item(i); String schoolName = XMLParseUtil.getElementData( educationEl, "school-name"); if (schoolName != null) { educationObj.setSchoolName(schoolName); } String degree = XMLParseUtil.getElementData(educationEl, "degree"); if (degree != null) { educationObj.setDegree(degree); } String fieldOfStudy = XMLParseUtil.getElementData( educationEl, "field-of-study"); if (fieldOfStudy != null) { educationObj.setFieldOfStudy(fieldOfStudy); } NodeList sd = educationEl .getElementsByTagName("start-date"); if (sd != null && sd.getLength() > 0) { String year = XMLParseUtil.getElementData( (Element) sd.item(0), "year"); if (year != null) { DateComponents comp = new DateComponents(); comp.setYear(Integer.parseInt(year)); educationObj.setStartDate(comp); } } NodeList ed = educationEl.getElementsByTagName("end-date"); if (ed != null && ed.getLength() > 0) { String year = XMLParseUtil.getElementData( (Element) ed.item(0), "year"); if (year != null) { DateComponents comp = new DateComponents(); comp.setYear(Integer.parseInt(year)); educationObj.setEndDate(comp); } } educationsArr[i] = educationObj; } } // get positions NodeList positions = root.getElementsByTagName("position"); if (positions != null && positions.getLength() > 0) { LOG.fine("Positions 
count " + positions.getLength()); positionsArr = new Position[positions.getLength()]; for (int i = 0; i < positions.getLength(); i++) { Position positionnObj = new Position(); Element positionEl = (Element) positions.item(i); String pid = XMLParseUtil.getElementData(positionEl, "id"); if (pid != null) { positionnObj.setPositionId(pid); } String title = XMLParseUtil.getElementData(positionEl, "title"); if (title != null) { positionnObj.setTitle(title); } String isCurrent = XMLParseUtil.getElementData(positionEl, "is-current"); if (isCurrent != null) { positionnObj.setCurrentCompany(Boolean .valueOf(isCurrent)); } NodeList sd = positionEl.getElementsByTagName("start-date"); if (sd != null && sd.getLength() > 0) { String year = XMLParseUtil.getElementData( (Element) sd.item(0), "year"); if (year != null) { DateComponents comp = new DateComponents(); comp.setYear(Integer.parseInt(year)); positionnObj.setStartDate(comp); } } NodeList ed = positionEl.getElementsByTagName("end-date"); if (ed != null && ed.getLength() > 0) { String year = XMLParseUtil.getElementData( (Element) ed.item(0), "year"); if (year != null) { DateComponents comp = new DateComponents(); comp.setYear(Integer.parseInt(year)); positionnObj.setEndDate(comp); } } NodeList companyNodes = positionEl .getElementsByTagName("company"); if (companyNodes != null && companyNodes.getLength() > 0) { Element company = (Element) companyNodes.item(0); String compid = XMLParseUtil.getElementData(company, "id"); if (compid != null) { positionnObj.setCompanyId(compid); } String compName = XMLParseUtil.getElementData(company, "name"); if (compName != null) { positionnObj.setCompanyName(compName); } String industry = XMLParseUtil.getElementData(company, "industry"); if (industry != null) { positionnObj.setIndustry(industry); } String type = XMLParseUtil.getElementData(company, "type"); if (type != null) { positionnObj.setCompanyType(type); } } positionsArr[i] = positionnObj; } } // getRecommendation NodeList 
recommendations = root .getElementsByTagName("recommendation"); if (recommendations != null && recommendations.getLength() > 0) { LOG.fine("Recommendations count " + recommendations.getLength()); recommendationsArr = new Recommendation[recommendations .getLength()]; for (int i = 0; i < recommendations.getLength(); i++) { Recommendation recommendationObj = new Recommendation(); Element recommendationEl = (Element) recommendations .item(i); String rid = XMLParseUtil.getElementData(recommendationEl, "id"); if (rid != null) { recommendationObj.setRecommendationId(rid); } String text = XMLParseUtil.getElementData(recommendationEl, "recommendation-text"); if (text != null) { recommendationObj.setRecommendationText(text); } String code = XMLParseUtil.getElementData(recommendationEl, "code"); if (code != null) { recommendationObj.setRecommendationType(code); } NodeList recommenderNodes = recommendationEl .getElementsByTagName("recommender"); if (recommenderNodes != null && recommenderNodes.getLength() > 0) { Element recommenderEl = (Element) recommenderNodes .item(0); String recommenderId = XMLParseUtil.getElementData( recommenderEl, "id"); if (recommenderId != null) { recommendationObj.setRecommenderId(recommenderId); } String fname = XMLParseUtil.getElementData( recommenderEl, "first-name"); if (fname != null) { recommendationObj.setRecommenderFirstName(fname); } String lname = XMLParseUtil.getElementData( recommenderEl, "last-name"); if (lname != null) { recommendationObj.setRecommenderLastName(lname); } } recommendationsArr[i] = recommendationObj; } } if (educationsArr != null) { career.setEducations(educationsArr); } if (positionsArr != null) { career.setPositions(positionsArr); } if (recommendationsArr != null) { career.setRecommendations(recommendationsArr); } } return career; } @Override public ProviderSupport getProviderSupport() { return providerSupport; } @Override public void setProviderSupport(final ProviderSupport providerSupport) { this.providerSupport = 
providerSupport; } }
package MastodonTypes; import com.google.gson.annotations.Expose; import com.google.gson.annotations.SerializedName; import io.realm.RealmList; import io.realm.RealmObject; import io.realm.annotations.PrimaryKey; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.ToStringBuilder; import java.io.Serializable; @SuppressWarnings("unused") public class Boost extends RealmObject implements Serializable { private final static long serialVersionUID = 4983372382391510544L; @PrimaryKey @SerializedName("id") @Expose private long id; @SerializedName("created_at") @Expose private String createdAt; @SerializedName("in_reply_to_id") @Expose private String inReplyToId; @SerializedName("in_reply_to_account_id") @Expose private String inReplyToAccountId; @SerializedName("sensitive") @Expose private Boolean sensitive; @SerializedName("spoiler_text") @Expose private String spoilerText; @SerializedName("visibility") @Expose private String visibility; @SerializedName("application") @Expose private Application application; @SerializedName("account") @Expose private Account account; @SerializedName("media_attachments") @Expose private RealmList<MediaAttachment> mediaAttachments = null; @SerializedName("mentions") @Expose private RealmList<Mention> mentions = null; @SerializedName("tags") @Expose private RealmList<Tag> tags = null; @SerializedName("uri") @Expose private String uri; @SerializedName("content") @Expose private String content; @SerializedName("url") @Expose private String url; @SerializedName("reblogs_count") @Expose private Integer reblogsCount; @SerializedName("favourites_count") @Expose private Integer favouritesCount; @SerializedName("favourited") @Expose private Boolean favourited; @SerializedName("reblogged") @Expose private Boolean reblogged; public long getId() { return id; } public void setId(long id) { this.id = id; } public String getCreatedAt() { return createdAt; 
} public void setCreatedAt(String createdAt) { this.createdAt = createdAt; } public Object getInReplyToId() { return inReplyToId; } public void setInReplyToId(String inReplyToId) { this.inReplyToId = inReplyToId; } public String getInReplyToAccountId() { return inReplyToAccountId; } public void setInReplyToAccountId(String inReplyToAccountId) { this.inReplyToAccountId = inReplyToAccountId; } public Boolean getSensitive() { if (sensitive == null) { return false; } return sensitive; } public void setSensitive(Boolean sensitive) { this.sensitive = sensitive; } public String getSpoilerText() { return spoilerText; } public void setSpoilerText(String spoilerText) { this.spoilerText = spoilerText; } public String getVisibility() { return visibility; } public void setVisibility(String visibility) { this.visibility = visibility; } public Application getApplication() { return application; } public void setApplication(Application application) { this.application = application; } public Account getAccount() { return account; } public void setAccount(Account account) { this.account = account; } public RealmList<MediaAttachment> getMediaAttachments() { return mediaAttachments; } public void setMediaAttachments(RealmList<MediaAttachment> mediaAttachments) { this.mediaAttachments = mediaAttachments; } public RealmList<Mention> getMentions() { return mentions; } public void setMentions(RealmList<Mention> mentions) { this.mentions = mentions; } public RealmList<Tag> getTags() { return tags; } public void setTags(RealmList<Tag> tags) { this.tags = tags; } public String getUri() { return uri; } public void setUri(String uri) { this.uri = uri; } public String getContent() { return content; } public void setContent(String content) { this.content = content; } public String getUrl() { return url; } public void setUrl(String url) { this.url = url; } public Integer getReblogsCount() { return reblogsCount; } public void setReblogsCount(Integer reblogsCount) { this.reblogsCount = reblogsCount; 
} public Integer getFavouritesCount() { return favouritesCount; } public void setFavouritesCount(Integer favouritesCount) { this.favouritesCount = favouritesCount; } public Boolean getFavourited() { if (favourited == null) { return false; } return favourited; } public void setFavourited(Boolean favourited) { this.favourited = favourited; } public Boolean getReblogged() { if (reblogged == null) { return false; } return reblogged; } public void setReblogged(Boolean reblogged) { this.reblogged = reblogged; } @Override public String toString() { return ToStringBuilder.reflectionToString(this); } @Override public int hashCode() { return new HashCodeBuilder().append(id).append(createdAt).append(inReplyToId).append(inReplyToAccountId).append(sensitive).append(spoilerText).append(visibility).append(application).append(account).append(mediaAttachments).append(mentions).append(tags).append(uri).append(content).append(url).append(reblogsCount).append(favouritesCount).append(favourited).append(reblogged).toHashCode(); } @Override public boolean equals(Object other) { if (other == this) { return true; } if (!(other instanceof Boost)) { return false; } Boost rhs = ((Boost) other); return new EqualsBuilder().append(id, rhs.id).append(createdAt, rhs.createdAt).append(inReplyToId, rhs.inReplyToId).append(inReplyToAccountId, rhs.inReplyToAccountId).append(sensitive, rhs.sensitive).append(spoilerText, rhs.spoilerText).append(visibility, rhs.visibility).append(application, rhs.application).append(account, rhs.account).append(mediaAttachments, rhs.mediaAttachments).append(mentions, rhs.mentions).append(tags, rhs.tags).append(uri, rhs.uri).append(content, rhs.content).append(url, rhs.url).append(reblogsCount, rhs.reblogsCount).append(favouritesCount, rhs.favouritesCount).append(favourited, rhs.favourited).append(reblogged, rhs.reblogged).isEquals(); } }
/*
 * Copyright (C) 2011 The Guava Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.common.net;

import static com.google.common.base.CharMatcher.ASCII;
import static com.google.common.base.CharMatcher.JAVA_ISO_CONTROL;
import static com.google.common.base.Charsets.UTF_8;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;

import com.google.common.annotations.Beta;
import com.google.common.annotations.GwtCompatible;
import com.google.common.base.Ascii;
import com.google.common.base.CharMatcher;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.Joiner.MapJoiner;
import com.google.common.base.Objects;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ImmutableMultiset;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
import com.google.common.collect.Multimaps;

import java.nio.charset.Charset;
import java.nio.charset.IllegalCharsetNameException;
import java.nio.charset.UnsupportedCharsetException;
import java.util.Collection;
import java.util.Map;
import java.util.Map.Entry;

/**
 * Represents an <a href="http://en.wikipedia.org/wiki/Internet_media_type">Internet Media Type</a>
 * (also known as a MIME Type or Content Type). This class also supports the concept of media ranges
 * <a href="http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.1">defined by HTTP/1.1</a>.
 * As such, the {@code *} character is treated as a wildcard and is used to represent any acceptable
 * type or subtype value. A media type may not have wildcard type with a declared subtype. The
 * {@code *} character has no special meaning as part of a parameter. All values for type, subtype,
 * parameter attributes or parameter values must be valid according to RFCs
 * <a href="http://www.ietf.org/rfc/rfc2045.txt">2045</a> and
 * <a href="http://www.ietf.org/rfc/rfc2046.txt">2046</a>.
 *
 * <p>All portions of the media type that are case-insensitive (type, subtype, parameter attributes)
 * are normalized to lowercase. The value of the {@code charset} parameter is normalized to
 * lowercase, but all others are left as-is.
 *
 * <p>Note that this specifically does <strong>not</strong> represent the value of the MIME
 * {@code Content-Type} header and as such has no support for header-specific considerations such as
 * line folding and comments.
 *
 * <p>For media types that take a charset the predefined constants default to UTF-8 and have a
 * "_UTF_8" suffix. To get a version without a character set, use {@link #withoutParameters}.
 *
 * @since 12.0
 *
 * @author Gregory Kick
 */
@Beta
@GwtCompatible
public final class MediaType {
  private static final String CHARSET_ATTRIBUTE = "charset";
  private static final ImmutableListMultimap<String, String> UTF_8_CONSTANT_PARAMETERS =
      ImmutableListMultimap.of(CHARSET_ATTRIBUTE, Ascii.toLowerCase(UTF_8.name()));

  /** Matcher for type, subtype and attributes. */
  private static final CharMatcher TOKEN_MATCHER = ASCII.and(JAVA_ISO_CONTROL.negate())
      .and(CharMatcher.isNot(' '))
      .and(CharMatcher.noneOf("()<>@,;:\\\"/[]?="));
  private static final CharMatcher QUOTED_TEXT_MATCHER = ASCII
      .and(CharMatcher.noneOf("\"\\\r"));
  /*
   * This matches the same characters as linear-white-space from RFC 822, but we make no effort to
   * enforce any particular rules with regards to line folding as stated in the class docs.
   */
  private static final CharMatcher LINEAR_WHITE_SPACE = CharMatcher.anyOf(" \t\r\n");

  // TODO(gak): make these public?
  private static final String APPLICATION_TYPE = "application";
  private static final String AUDIO_TYPE = "audio";
  private static final String IMAGE_TYPE = "image";
  private static final String TEXT_TYPE = "text";
  private static final String VIDEO_TYPE = "video";

  private static final String WILDCARD = "*";

  // Interned well-known instances; populated only during class initialization, so the plain
  // HashMap is safe to read afterwards without synchronization.
  private static final Map<MediaType, MediaType> KNOWN_TYPES = Maps.newHashMap();

  private static MediaType createConstant(String type, String subtype) {
    return addKnownType(new MediaType(type, subtype, ImmutableListMultimap.<String, String>of()));
  }

  private static MediaType createConstantUtf8(String type, String subtype) {
    return addKnownType(new MediaType(type, subtype, UTF_8_CONSTANT_PARAMETERS));
  }

  private static MediaType addKnownType(MediaType mediaType) {
    KNOWN_TYPES.put(mediaType, mediaType);
    return mediaType;
  }

  /*
   * The following constants are grouped by their type and ordered alphabetically by the constant
   * name within that type. The constant name should be a sensible identifier that is closest to the
   * "common name" of the media. This is often, but not necessarily the same as the subtype.
   *
   * Be sure to declare all constants with the type and subtype in all lowercase. For types that
   * take a charset (e.g. all text/* types), default to UTF-8 and suffix the constant name with
   * "_UTF_8".
   */

  public static final MediaType ANY_TYPE = createConstant(WILDCARD, WILDCARD);
  public static final MediaType ANY_TEXT_TYPE = createConstant(TEXT_TYPE, WILDCARD);
  public static final MediaType ANY_IMAGE_TYPE = createConstant(IMAGE_TYPE, WILDCARD);
  public static final MediaType ANY_AUDIO_TYPE = createConstant(AUDIO_TYPE, WILDCARD);
  public static final MediaType ANY_VIDEO_TYPE = createConstant(VIDEO_TYPE, WILDCARD);
  public static final MediaType ANY_APPLICATION_TYPE = createConstant(APPLICATION_TYPE, WILDCARD);

  /* text types */
  public static final MediaType CACHE_MANIFEST_UTF_8 =
      createConstantUtf8(TEXT_TYPE, "cache-manifest");
  public static final MediaType CSS_UTF_8 = createConstantUtf8(TEXT_TYPE, "css");
  public static final MediaType CSV_UTF_8 = createConstantUtf8(TEXT_TYPE, "csv");
  public static final MediaType HTML_UTF_8 = createConstantUtf8(TEXT_TYPE, "html");
  public static final MediaType I_CALENDAR_UTF_8 = createConstantUtf8(TEXT_TYPE, "calendar");
  public static final MediaType PLAIN_TEXT_UTF_8 = createConstantUtf8(TEXT_TYPE, "plain");
  /**
   * <a href="http://www.rfc-editor.org/rfc/rfc4329.txt">RFC 4329</a> declares
   * {@link #JAVASCRIPT_UTF_8 application/javascript} to be the correct media type for JavaScript,
   * but this may be necessary in certain situations for compatibility.
   */
  public static final MediaType TEXT_JAVASCRIPT_UTF_8 =
      createConstantUtf8(TEXT_TYPE, "javascript");
  /**
   * <a href="http://www.iana.org/assignments/media-types/text/tab-separated-values">
   * Tab separated values</a>.
   *
   * @since 15.0
   */
  public static final MediaType TSV_UTF_8 = createConstantUtf8(TEXT_TYPE, "tab-separated-values");
  public static final MediaType VCARD_UTF_8 = createConstantUtf8(TEXT_TYPE, "vcard");
  public static final MediaType WML_UTF_8 = createConstantUtf8(TEXT_TYPE, "vnd.wap.wml");
  /**
   * As described in <a href="http://www.ietf.org/rfc/rfc3023.txt">RFC 3023</a>, this constant
   * ({@code text/xml}) is used for XML documents that are "readable by casual users."
   * {@link #APPLICATION_XML_UTF_8} is provided for documents that are intended for applications.
   */
  public static final MediaType XML_UTF_8 = createConstantUtf8(TEXT_TYPE, "xml");

  /* image types */
  public static final MediaType BMP = createConstant(IMAGE_TYPE, "bmp");
  /**
   * The media type for the <a href="http://en.wikipedia.org/wiki/Camera_Image_File_Format">Canon
   * Image File Format</a> ({@code crw} files), a widely-used "raw image" format for cameras. It is
   * found in {@code /etc/mime.types}, e.g. in <a href=
   * "http://anonscm.debian.org/gitweb/?p=collab-maint/mime-support.git;a=blob;f=mime.types;hb=HEAD"
   * >Debian 3.48-1</a>.
   *
   * @since 15.0
   */
  public static final MediaType CRW = createConstant(IMAGE_TYPE, "x-canon-crw");
  public static final MediaType GIF = createConstant(IMAGE_TYPE, "gif");
  public static final MediaType ICO = createConstant(IMAGE_TYPE, "vnd.microsoft.icon");
  public static final MediaType JPEG = createConstant(IMAGE_TYPE, "jpeg");
  public static final MediaType PNG = createConstant(IMAGE_TYPE, "png");
  /**
   * The media type for the Photoshop File Format ({@code psd} files) as defined by <a href=
   * "http://www.iana.org/assignments/media-types/image/vnd.adobe.photoshop">IANA</a>, and found in
   * {@code /etc/mime.types}, e.g. the <a href=
   * "http://svn.apache.org/repos/asf/httpd/httpd/branches/1.3.x/conf/mime.types">mime.types</a> of
   * the Apache <a href="http://httpd.apache.org/">HTTPD project</a>; for the specification, see
   * <a href="http://www.adobe.com/devnet-apps/photoshop/fileformatashtml/PhotoshopFileFormats.htm">
   * Adobe Photoshop Document Format</a> and <a href=
   * "http://en.wikipedia.org/wiki/Adobe_Photoshop#File_format">Wikipedia</a>; this is the regular
   * output/input of Photoshop (which can also export to various image formats; note that files with
   * extension "PSB" are in a distinct but related format).
   *
   * <p>This is a more recent replacement for the older, experimental type
   * {@code x-photoshop}: <a href="http://tools.ietf.org/html/rfc2046#section-6">RFC-2046.6</a>.
   *
   * @since 15.0
   */
  public static final MediaType PSD = createConstant(IMAGE_TYPE, "vnd.adobe.photoshop");
  public static final MediaType SVG_UTF_8 = createConstantUtf8(IMAGE_TYPE, "svg+xml");
  public static final MediaType TIFF = createConstant(IMAGE_TYPE, "tiff");
  public static final MediaType WEBP = createConstant(IMAGE_TYPE, "webp");

  /* audio types */
  public static final MediaType MP4_AUDIO = createConstant(AUDIO_TYPE, "mp4");
  public static final MediaType MPEG_AUDIO = createConstant(AUDIO_TYPE, "mpeg");
  public static final MediaType OGG_AUDIO = createConstant(AUDIO_TYPE, "ogg");
  public static final MediaType WEBM_AUDIO = createConstant(AUDIO_TYPE, "webm");

  /* video types */
  public static final MediaType MP4_VIDEO = createConstant(VIDEO_TYPE, "mp4");
  public static final MediaType MPEG_VIDEO = createConstant(VIDEO_TYPE, "mpeg");
  public static final MediaType OGG_VIDEO = createConstant(VIDEO_TYPE, "ogg");
  public static final MediaType QUICKTIME = createConstant(VIDEO_TYPE, "quicktime");
  public static final MediaType WEBM_VIDEO = createConstant(VIDEO_TYPE, "webm");
  public static final MediaType WMV = createConstant(VIDEO_TYPE, "x-ms-wmv");

  /* application types */
  /**
   * As described in <a href="http://www.ietf.org/rfc/rfc3023.txt">RFC 3023</a>, this constant
   * ({@code application/xml}) is used for XML documents that are "unreadable by casual users."
   * {@link #XML_UTF_8} is provided for documents that may be read by users.
   */
  public static final MediaType APPLICATION_XML_UTF_8 =
      createConstantUtf8(APPLICATION_TYPE, "xml");
  public static final MediaType ATOM_UTF_8 = createConstantUtf8(APPLICATION_TYPE, "atom+xml");
  public static final MediaType BZIP2 = createConstant(APPLICATION_TYPE, "x-bzip2");
  /**
   * Media type for <a href="http://en.wikipedia.org/wiki/Embedded_OpenType">Embedded OpenType</a>
   * fonts. This is
   * <a href="http://www.iana.org/assignments/media-types/application/vnd.ms-fontobject">registered
   * </a> with the IANA.
   *
   * @since 17.0
   */
  public static final MediaType EOT = createConstant(APPLICATION_TYPE, "vnd.ms-fontobject");
  /**
   * As described in the <a href="http://idpf.org/epub">International Digital Publishing Forum</a>
   * EPUB is the distribution and interchange format standard for digital publications and
   * documents. This media type is defined in the
   * <a href="http://www.idpf.org/epub/30/spec/epub30-ocf.html">EPUB Open Container Format</a>
   * specification.
   *
   * @since 15.0
   */
  public static final MediaType EPUB = createConstant(APPLICATION_TYPE, "epub+zip");
  public static final MediaType FORM_DATA =
      createConstant(APPLICATION_TYPE, "x-www-form-urlencoded");
  /**
   * As described in <a href="https://www.rsa.com/rsalabs/node.asp?id=2138">PKCS #12: Personal
   * Information Exchange Syntax Standard</a>, PKCS #12 defines an archive file format for storing
   * many cryptography objects as a single file.
   *
   * @since 15.0
   */
  public static final MediaType KEY_ARCHIVE = createConstant(APPLICATION_TYPE, "pkcs12");
  /**
   * This is a non-standard media type, but is commonly used in serving hosted binary files as it is
   * <a href="http://code.google.com/p/browsersec/wiki/Part2#Survey_of_content_sniffing_behaviors">
   * known not to trigger content sniffing in current browsers</a>. It <i>should not</i> be used in
   * other situations as it is not specified by any RFC and does not appear in the <a href=
   * "http://www.iana.org/assignments/media-types">IANA MIME Media Types</a> list. Consider
   * {@link #OCTET_STREAM} for binary data that is not being served to a browser.
   *
   * @since 14.0
   */
  public static final MediaType APPLICATION_BINARY = createConstant(APPLICATION_TYPE, "binary");
  public static final MediaType GZIP = createConstant(APPLICATION_TYPE, "x-gzip");
  /**
   * <a href="http://www.rfc-editor.org/rfc/rfc4329.txt">RFC 4329</a> declares this to be the
   * correct media type for JavaScript, but {@link #TEXT_JAVASCRIPT_UTF_8 text/javascript} may be
   * necessary in certain situations for compatibility.
   */
  public static final MediaType JAVASCRIPT_UTF_8 =
      createConstantUtf8(APPLICATION_TYPE, "javascript");
  public static final MediaType JSON_UTF_8 = createConstantUtf8(APPLICATION_TYPE, "json");
  public static final MediaType KML = createConstant(APPLICATION_TYPE, "vnd.google-earth.kml+xml");
  public static final MediaType KMZ = createConstant(APPLICATION_TYPE, "vnd.google-earth.kmz");
  public static final MediaType MBOX = createConstant(APPLICATION_TYPE, "mbox");
  public static final MediaType MICROSOFT_EXCEL = createConstant(APPLICATION_TYPE, "vnd.ms-excel");
  public static final MediaType MICROSOFT_POWERPOINT =
      createConstant(APPLICATION_TYPE, "vnd.ms-powerpoint");
  public static final MediaType MICROSOFT_WORD = createConstant(APPLICATION_TYPE, "msword");
  public static final MediaType OCTET_STREAM = createConstant(APPLICATION_TYPE, "octet-stream");
  public static final MediaType OGG_CONTAINER = createConstant(APPLICATION_TYPE, "ogg");
  public static final MediaType OOXML_DOCUMENT = createConstant(APPLICATION_TYPE,
      "vnd.openxmlformats-officedocument.wordprocessingml.document");
  public static final MediaType OOXML_PRESENTATION = createConstant(APPLICATION_TYPE,
      "vnd.openxmlformats-officedocument.presentationml.presentation");
  public static final MediaType OOXML_SHEET = createConstant(APPLICATION_TYPE,
      "vnd.openxmlformats-officedocument.spreadsheetml.sheet");
  public static final MediaType OPENDOCUMENT_GRAPHICS =
      createConstant(APPLICATION_TYPE, "vnd.oasis.opendocument.graphics");
  public static final MediaType OPENDOCUMENT_PRESENTATION =
      createConstant(APPLICATION_TYPE, "vnd.oasis.opendocument.presentation");
  public static final MediaType OPENDOCUMENT_SPREADSHEET =
      createConstant(APPLICATION_TYPE, "vnd.oasis.opendocument.spreadsheet");
  public static final MediaType OPENDOCUMENT_TEXT =
      createConstant(APPLICATION_TYPE, "vnd.oasis.opendocument.text");
  public static final MediaType PDF = createConstant(APPLICATION_TYPE, "pdf");
  public static final MediaType POSTSCRIPT = createConstant(APPLICATION_TYPE, "postscript");
  /**
   * <a href="http://tools.ietf.org/html/draft-rfernando-protocol-buffers-00">Protocol buffers</a>
   *
   * @since 15.0
   */
  public static final MediaType PROTOBUF = createConstant(APPLICATION_TYPE, "protobuf");
  public static final MediaType RDF_XML_UTF_8 = createConstantUtf8(APPLICATION_TYPE, "rdf+xml");
  public static final MediaType RTF_UTF_8 = createConstantUtf8(APPLICATION_TYPE, "rtf");
  /**
   * Media type for SFNT fonts (which includes
   * <a href="http://en.wikipedia.org/wiki/TrueType/">TrueType</a> and
   * <a href="http://en.wikipedia.org/wiki/OpenType/">OpenType</a> fonts). This is
   * <a href="http://www.iana.org/assignments/media-types/application/font-sfnt">registered</a>
   * with the IANA.
   *
   * @since 17.0
   */
  public static final MediaType SFNT = createConstant(APPLICATION_TYPE, "font-sfnt");
  public static final MediaType SHOCKWAVE_FLASH =
      createConstant(APPLICATION_TYPE, "x-shockwave-flash");
  public static final MediaType SKETCHUP = createConstant(APPLICATION_TYPE, "vnd.sketchup.skp");
  public static final MediaType TAR = createConstant(APPLICATION_TYPE, "x-tar");
  /**
   * Media type for the
   * <a href="http://en.wikipedia.org/wiki/Web_Open_Font_Format">Web Open Font Format</a> (WOFF)
   * <a href="http://www.w3.org/TR/WOFF/">defined</a> by the W3C. This is
   * <a href="http://www.iana.org/assignments/media-types/application/font-woff">registered</a>
   * with the IANA.
   *
   * @since 17.0
   */
  public static final MediaType WOFF = createConstant(APPLICATION_TYPE, "font-woff");
  public static final MediaType XHTML_UTF_8 = createConstantUtf8(APPLICATION_TYPE, "xhtml+xml");
  /**
   * Media type for Extensible Resource Descriptors. This is not yet registered with the IANA, but
   * it is specified by OASIS in the
   * <a href="http://docs.oasis-open.org/xri/xrd/v1.0/cd02/xrd-1.0-cd02.html"> XRD definition</a>
   * and implemented in projects such as
   * <a href="http://code.google.com/p/webfinger/">WebFinger</a>.
   */
  public static final MediaType XRD_UTF_8 = createConstantUtf8(APPLICATION_TYPE, "xrd+xml");
  public static final MediaType ZIP = createConstant(APPLICATION_TYPE, "zip");

  private final String type;
  private final String subtype;
  private final ImmutableListMultimap<String, String> parameters;

  private MediaType(String type, String subtype,
      ImmutableListMultimap<String, String> parameters) {
    this.type = type;
    this.subtype = subtype;
    this.parameters = parameters;
  }

  /** Returns the top-level media type. For example, {@code "text"} in {@code "text/plain"}. */
  public String type() {
    return type;
  }

  /** Returns the media subtype. For example, {@code "plain"} in {@code "text/plain"}. */
  public String subtype() {
    return subtype;
  }

  /** Returns a multimap containing the parameters of this media type. */
  public ImmutableListMultimap<String, String> parameters() {
    return parameters;
  }

  // Order-insensitive view of the parameters, used by equals/hashCode so that parameter order
  // does not affect equality.
  private Map<String, ImmutableMultiset<String>> parametersAsMap() {
    return Maps.transformValues(parameters.asMap(),
        new Function<Collection<String>, ImmutableMultiset<String>>() {
          @Override public ImmutableMultiset<String> apply(Collection<String> input) {
            return ImmutableMultiset.copyOf(input);
          }
        });
  }

  /**
   * Returns an optional charset for the value of the charset parameter if it is specified.
   *
   * @throws IllegalStateException if multiple charset values have been set for this media type
   * @throws IllegalCharsetNameException if a charset value is present, but illegal
   * @throws UnsupportedCharsetException if a charset value is present, but no support is available
   *     in this instance of the Java virtual machine
   */
  public Optional<Charset> charset() {
    ImmutableSet<String> charsetValues = ImmutableSet.copyOf(parameters.get(CHARSET_ATTRIBUTE));
    switch (charsetValues.size()) {
      case 0:
        return Optional.absent();
      case 1:
        return Optional.of(Charset.forName(Iterables.getOnlyElement(charsetValues)));
      default:
        throw new IllegalStateException("Multiple charset values defined: " + charsetValues);
    }
  }

  /**
   * Returns a new instance with the same type and subtype as this instance, but without any
   * parameters.
   */
  public MediaType withoutParameters() {
    return parameters.isEmpty() ? this : create(type, subtype);
  }

  /**
   * <em>Replaces</em> all parameters with the given parameters.
   *
   * @throws IllegalArgumentException if any parameter or value is invalid
   */
  public MediaType withParameters(Multimap<String, String> parameters) {
    return create(type, subtype, parameters);
  }

  /**
   * <em>Replaces</em> all parameters with the given attribute with a single parameter with the
   * given value. If multiple parameters with the same attributes are necessary use
   * {@link #withParameters}. Prefer {@link #withCharset} for setting the {@code charset} parameter
   * when using a {@link Charset} object.
   *
   * @throws IllegalArgumentException if either {@code attribute} or {@code value} is invalid
   */
  public MediaType withParameter(String attribute, String value) {
    checkNotNull(attribute);
    checkNotNull(value);
    String normalizedAttribute = normalizeToken(attribute);
    ImmutableListMultimap.Builder<String, String> builder = ImmutableListMultimap.builder();
    // Copy over every parameter except existing occurrences of the attribute being replaced.
    for (Entry<String, String> entry : parameters.entries()) {
      String key = entry.getKey();
      if (!normalizedAttribute.equals(key)) {
        builder.put(key, entry.getValue());
      }
    }
    builder.put(normalizedAttribute, normalizeParameterValue(normalizedAttribute, value));
    MediaType mediaType = new MediaType(type, subtype, builder.build());
    // Return one of the constants if the media type is a known type.
    return Objects.firstNonNull(KNOWN_TYPES.get(mediaType), mediaType);
  }

  /**
   * Returns a new instance with the same type and subtype as this instance, with the
   * {@code charset} parameter set to the {@link Charset#name name} of the given charset. Only one
   * {@code charset} parameter will be present on the new instance regardless of the number set on
   * this one.
   *
   * <p>If a charset must be specified that is not supported on this JVM (and thus is not
   * representable as a {@link Charset} instance), use {@link #withParameter}.
   */
  public MediaType withCharset(Charset charset) {
    checkNotNull(charset);
    return withParameter(CHARSET_ATTRIBUTE, charset.name());
  }

  /** Returns true if either the type or subtype is the wildcard. */
  public boolean hasWildcard() {
    return WILDCARD.equals(type) || WILDCARD.equals(subtype);
  }

  /**
   * Returns {@code true} if this instance falls within the range (as defined by
   * <a href="http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html">the HTTP Accept header</a>)
   * given by the argument according to three criteria:
   *
   * <ol>
   * <li>The type of the argument is the wildcard or equal to the type of this instance.
   * <li>The subtype of the argument is the wildcard or equal to the subtype of this instance.
   * <li>All of the parameters present in the argument are present in this instance.
   * </ol>
   *
   * <p>For example: <pre>   {@code
   *   PLAIN_TEXT_UTF_8.is(PLAIN_TEXT_UTF_8) // true
   *   PLAIN_TEXT_UTF_8.is(HTML_UTF_8) // false
   *   PLAIN_TEXT_UTF_8.is(ANY_TYPE) // true
   *   PLAIN_TEXT_UTF_8.is(ANY_TEXT_TYPE) // true
   *   PLAIN_TEXT_UTF_8.is(ANY_IMAGE_TYPE) // false
   *   PLAIN_TEXT_UTF_8.is(ANY_TEXT_TYPE.withCharset(UTF_8)) // true
   *   PLAIN_TEXT_UTF_8.withoutParameters().is(ANY_TEXT_TYPE.withCharset(UTF_8)) // false
   *   PLAIN_TEXT_UTF_8.is(ANY_TEXT_TYPE.withCharset(UTF_16)) // false}</pre>
   *
   * <p>Note that while it is possible to have the same parameter declared multiple times within a
   * media type this method does not consider the number of occurrences of a parameter. For
   * example, {@code "text/plain; charset=UTF-8"} satisfies
   * {@code "text/plain; charset=UTF-8; charset=UTF-8"}.
   */
  public boolean is(MediaType mediaTypeRange) {
    return (mediaTypeRange.type.equals(WILDCARD) || mediaTypeRange.type.equals(this.type))
        && (mediaTypeRange.subtype.equals(WILDCARD) || mediaTypeRange.subtype.equals(this.subtype))
        && this.parameters.entries().containsAll(mediaTypeRange.parameters.entries());
  }

  /**
   * Creates a new media type with the given type and subtype.
   *
   * @throws IllegalArgumentException if type or subtype is invalid or if a wildcard is used for the
   *     type, but not the subtype.
   */
  public static MediaType create(String type, String subtype) {
    return create(type, subtype, ImmutableListMultimap.<String, String>of());
  }

  /**
   * Creates a media type with the "application" type and the given subtype.
   *
   * @throws IllegalArgumentException if subtype is invalid
   */
  static MediaType createApplicationType(String subtype) {
    return create(APPLICATION_TYPE, subtype);
  }

  /**
   * Creates a media type with the "audio" type and the given subtype.
   *
   * @throws IllegalArgumentException if subtype is invalid
   */
  static MediaType createAudioType(String subtype) {
    return create(AUDIO_TYPE, subtype);
  }

  /**
   * Creates a media type with the "image" type and the given subtype.
   *
   * @throws IllegalArgumentException if subtype is invalid
   */
  static MediaType createImageType(String subtype) {
    return create(IMAGE_TYPE, subtype);
  }

  /**
   * Creates a media type with the "text" type and the given subtype.
   *
   * @throws IllegalArgumentException if subtype is invalid
   */
  static MediaType createTextType(String subtype) {
    return create(TEXT_TYPE, subtype);
  }

  /**
   * Creates a media type with the "video" type and the given subtype.
   *
   * @throws IllegalArgumentException if subtype is invalid
   */
  static MediaType createVideoType(String subtype) {
    return create(VIDEO_TYPE, subtype);
  }

  private static MediaType create(String type, String subtype,
      Multimap<String, String> parameters) {
    checkNotNull(type);
    checkNotNull(subtype);
    checkNotNull(parameters);
    String normalizedType = normalizeToken(type);
    String normalizedSubtype = normalizeToken(subtype);
    checkArgument(!WILDCARD.equals(normalizedType) || WILDCARD.equals(normalizedSubtype),
        "A wildcard type cannot be used with a non-wildcard subtype");
    ImmutableListMultimap.Builder<String, String> builder = ImmutableListMultimap.builder();
    for (Entry<String, String> entry : parameters.entries()) {
      String attribute = normalizeToken(entry.getKey());
      builder.put(attribute, normalizeParameterValue(attribute, entry.getValue()));
    }
    MediaType mediaType = new MediaType(normalizedType, normalizedSubtype, builder.build());
    // Return one of the constants if the media type is a known type.
    return Objects.firstNonNull(KNOWN_TYPES.get(mediaType), mediaType);
  }

  private static String normalizeToken(String token) {
    checkArgument(TOKEN_MATCHER.matchesAllOf(token));
    return Ascii.toLowerCase(token);
  }

  // Only the charset parameter value is case-insensitive per the class contract; all other
  // parameter values are preserved as-is.
  private static String normalizeParameterValue(String attribute, String value) {
    return CHARSET_ATTRIBUTE.equals(attribute) ? Ascii.toLowerCase(value) : value;
  }

  /**
   * Parses a media type from its string representation.
   *
   * @throws IllegalArgumentException if the input is not parsable
   */
  public static MediaType parse(String input) {
    checkNotNull(input);
    Tokenizer tokenizer = new Tokenizer(input);
    try {
      String type = tokenizer.consumeToken(TOKEN_MATCHER);
      tokenizer.consumeCharacter('/');
      String subtype = tokenizer.consumeToken(TOKEN_MATCHER);
      ImmutableListMultimap.Builder<String, String> parameters = ImmutableListMultimap.builder();
      while (tokenizer.hasMore()) {
        tokenizer.consumeCharacter(';');
        tokenizer.consumeTokenIfPresent(LINEAR_WHITE_SPACE);
        String attribute = tokenizer.consumeToken(TOKEN_MATCHER);
        tokenizer.consumeCharacter('=');
        final String value;
        if ('"' == tokenizer.previewChar()) {
          // Quoted-string value: read until the closing quote, honoring backslash escapes.
          tokenizer.consumeCharacter('"');
          StringBuilder valueBuilder = new StringBuilder();
          while ('"' != tokenizer.previewChar()) {
            if ('\\' == tokenizer.previewChar()) {
              tokenizer.consumeCharacter('\\');
              valueBuilder.append(tokenizer.consumeCharacter(ASCII));
            } else {
              valueBuilder.append(tokenizer.consumeToken(QUOTED_TEXT_MATCHER));
            }
          }
          value = valueBuilder.toString();
          tokenizer.consumeCharacter('"');
        } else {
          value = tokenizer.consumeToken(TOKEN_MATCHER);
        }
        parameters.put(attribute, value);
      }
      return create(type, subtype, parameters.build());
    } catch (IllegalStateException e) {
      // Tokenizer failures surface as IllegalStateException; rewrap with the offending input.
      throw new IllegalArgumentException("Could not parse '" + input + "'", e);
    }
  }

  /** Simple cursor over the input string used by {@link #parse}. */
  private static final class Tokenizer {
    final String input;
    int position = 0;

    Tokenizer(String input) {
      this.input = input;
    }

    String consumeTokenIfPresent(CharMatcher matcher) {
      checkState(hasMore());
      int startPosition = position;
      // Advance to the first char NOT matched; indexIn returns -1 when the rest all match,
      // which hasMore() treats as end-of-input.
      position = matcher.negate().indexIn(input, startPosition);
      return hasMore() ? input.substring(startPosition, position)
          : input.substring(startPosition);
    }

    String consumeToken(CharMatcher matcher) {
      int startPosition = position;
      String token = consumeTokenIfPresent(matcher);
      checkState(position != startPosition);
      return token;
    }

    char consumeCharacter(CharMatcher matcher) {
      checkState(hasMore());
      char c = previewChar();
      checkState(matcher.matches(c));
      position++;
      return c;
    }

    char consumeCharacter(char c) {
      checkState(hasMore());
      checkState(previewChar() == c);
      position++;
      return c;
    }

    char previewChar() {
      checkState(hasMore());
      return input.charAt(position);
    }

    boolean hasMore() {
      return (position >= 0) && (position < input.length());
    }
  }

  @Override public boolean equals(Object obj) {
    if (obj == this) {
      return true;
    } else if (obj instanceof MediaType) {
      MediaType that = (MediaType) obj;
      return this.type.equals(that.type)
          && this.subtype.equals(that.subtype)
          // compare parameters regardless of order
          && this.parametersAsMap().equals(that.parametersAsMap());
    } else {
      return false;
    }
  }

  @Override public int hashCode() {
    return Objects.hashCode(type, subtype, parametersAsMap());
  }

  private static final MapJoiner PARAMETER_JOINER = Joiner.on("; ").withKeyValueSeparator("=");

  /**
   * Returns the string representation of this media type in the format described in <a
   * href="http://www.ietf.org/rfc/rfc2045.txt">RFC 2045</a>.
   */
  @Override public String toString() {
    StringBuilder builder = new StringBuilder().append(type).append('/').append(subtype);
    if (!parameters.isEmpty()) {
      builder.append("; ");
      Multimap<String, String> quotedParameters = Multimaps.transformValues(parameters,
          new Function<String, String>() {
            @Override public String apply(String value) {
              return TOKEN_MATCHER.matchesAllOf(value) ? value : escapeAndQuote(value);
            }
          });
      PARAMETER_JOINER.appendTo(builder, quotedParameters.entries());
    }
    return builder.toString();
  }

  // Wraps the value in double quotes, backslash-escaping CR, backslash and double-quote per the
  // quoted-string production of RFC 2045.
  private static String escapeAndQuote(String value) {
    StringBuilder escaped = new StringBuilder(value.length() + 16).append('"');
    for (char ch : value.toCharArray()) {
      if (ch == '\r' || ch == '\\' || ch == '"') {
        escaped.append('\\');
      }
      escaped.append(ch);
    }
    return escaped.append('"').toString();
  }
}
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.jetbrains.jsonSchema.impl; import com.google.gson.Gson; import com.google.gson.JsonObject; import com.google.gson.JsonParseException; import com.intellij.json.psi.JsonContainer; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.progress.ProcessCanceledException; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.openapi.vfs.impl.http.HttpVirtualFile; import com.intellij.psi.PsiFile; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.ContainerUtilRt; import com.jetbrains.jsonSchema.JsonSchemaVfsListener; import com.jetbrains.jsonSchema.ide.JsonSchemaService; import com.jetbrains.jsonSchema.remote.JsonFileResolver; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicBoolean; import java.util.regex.Pattern; import java.util.regex.PatternSyntaxException; import java.util.stream.Collectors; import java.util.stream.Stream; import static com.jetbrains.jsonSchema.JsonPointerUtil.*; /** * @author Irina.Chernushina on 8/28/2015. 
*/
/**
 * In-memory model of a single JSON Schema node: its constraints (type, bounds, patterns,
 * required/enum/allOf/anyOf/oneOf/not/if-then-else, etc.), its child property schemas, and
 * machinery for merging two schema nodes and for resolving {@code $ref} pointers.
 * <p>
 * Backed by an optional {@link JsonContainer} PSI element ({@code null} only for the internal
 * {@link #NULL_OBJ} sentinel used to cache negative ref lookups).
 */
public class JsonSchemaObject {
  private static final Logger LOG = Logger.getInstance(JsonSchemaObject.class);

  // Well-known JSON Schema keyword names used when walking "#/..." JSON pointers.
  @NonNls public static final String DEFINITIONS = "definitions";
  @NonNls public static final String PROPERTIES = "properties";
  @NonNls public static final String ITEMS = "items";
  @NonNls public static final String ADDITIONAL_ITEMS = "additionalItems";
  @NonNls public static final String X_INTELLIJ_HTML_DESCRIPTION = "x-intellij-html-description";

  // PSI element this schema node was read from; null only for the NULL_OBJ sentinel.
  @Nullable private final JsonContainer myJsonObject;
  @Nullable private Map<String, JsonSchemaObject> myDefinitionsMap;
  // Sentinel stored in myComputedRefs for refs that resolved to nothing (ConcurrentMap forbids null values).
  @NotNull private static final JsonSchemaObject NULL_OBJ = new JsonSchemaObject();
  // Cache of resolved $ref targets, invalidated wholesale when schema dependencies change.
  @NotNull private final ConcurrentMap<String, JsonSchemaObject> myComputedRefs = new ConcurrentHashMap<>();
  // Ensures the JSON_DEPS_CHANGED message-bus subscription (which clears the ref cache) is made only once.
  @NotNull private final AtomicBoolean mySubscribed = new AtomicBoolean(false);

  @NotNull private Map<String, JsonSchemaObject> myProperties;

  // --- schema keyword values; null means "keyword absent" throughout ---
  @Nullable private PatternProperties myPatternProperties;
  @Nullable private PropertyNamePattern myPattern;
  @Nullable private String myId;
  @Nullable private String mySchema;
  @Nullable private String myTitle;
  @Nullable private String myDescription;
  @Nullable private String myHtmlDescription;
  @Nullable private JsonSchemaType myType;
  @Nullable private Object myDefault;
  @Nullable private String myRef;
  @Nullable private String myFormat;
  @Nullable private Set<JsonSchemaType> myTypeVariants;
  @Nullable private Number myMultipleOf;
  @Nullable private Number myMaximum;
  private boolean myExclusiveMaximum;        // draft-4 style boolean "exclusiveMaximum"
  @Nullable private Number myExclusiveMaximumNumber; // draft-6+ numeric "exclusiveMaximum"
  @Nullable private Number myMinimum;
  private boolean myExclusiveMinimum;
  @Nullable private Number myExclusiveMinimumNumber;
  @Nullable private Integer myMaxLength;
  @Nullable private Integer myMinLength;
  @Nullable private Boolean myAdditionalPropertiesAllowed;
  @Nullable private JsonSchemaObject myAdditionalPropertiesSchema;
  @Nullable private JsonSchemaObject myPropertyNamesSchema;
  @Nullable private Boolean myAdditionalItemsAllowed;
  @Nullable private JsonSchemaObject myAdditionalItemsSchema;
  @Nullable private JsonSchemaObject myItemsSchema;
  @Nullable private JsonSchemaObject myContainsSchema;
  @Nullable private List<JsonSchemaObject> myItemsSchemaList; // tuple form of "items"
  @Nullable private Integer myMaxItems;
  @Nullable private Integer myMinItems;
  @Nullable private Boolean myUniqueItems;
  @Nullable private Integer myMaxProperties;
  @Nullable private Integer myMinProperties;
  @Nullable private Set<String> myRequired;
  @Nullable private Map<String, List<String>> myPropertyDependencies;
  @Nullable private Map<String, JsonSchemaObject> mySchemaDependencies;
  @Nullable private List<Object> myEnum;
  @Nullable private List<JsonSchemaObject> myAllOf;
  @Nullable private List<JsonSchemaObject> myAnyOf;
  @Nullable private List<JsonSchemaObject> myOneOf;
  @Nullable private JsonSchemaObject myNot;
  @Nullable private JsonSchemaObject myIf;
  @Nullable private JsonSchemaObject myThen;
  @Nullable private JsonSchemaObject myElse;
  private boolean myShouldValidateAgainstJSType;

  /** False when a merge produced mutually exclusive type constraints (see {@link #mergeTypes}). */
  public boolean isValidByExclusion() { return myIsValidByExclusion; }
  private boolean myIsValidByExclusion = true;

  public JsonSchemaObject(@NotNull JsonContainer object) {
    myJsonObject = object;
    myProperties = new HashMap<>();
  }

  // Sentinel-only constructor (NULL_OBJ); leaves myJsonObject null, so most accessors assert against it.
  private JsonSchemaObject() {
    myJsonObject = null;
    myProperties = new HashMap<>();
  }

  /**
   * Intersection of two schema types, or {@code null} when they are incompatible.
   * {@code _any} is the identity; {@code _string_number} acts as a wildcard over
   * string/number/integer; otherwise types only intersect with themselves
   * (with integer being the narrower of number/integer).
   */
  @Nullable
  private static JsonSchemaType getSubtypeOfBoth(@NotNull JsonSchemaType selfType,
                                                 @NotNull JsonSchemaType otherType) {
    if (otherType == JsonSchemaType._any) return selfType;
    if (selfType == JsonSchemaType._any) return otherType;
    switch (selfType) {
      case _string:
        return otherType == JsonSchemaType._string || otherType == JsonSchemaType._string_number
               ? JsonSchemaType._string : null;
      case _number:
        if (otherType == JsonSchemaType._integer) return JsonSchemaType._integer;
        return otherType == JsonSchemaType._number || otherType == JsonSchemaType._string_number
               ? JsonSchemaType._number : null;
      case _integer:
        return otherType == JsonSchemaType._number
               || otherType == JsonSchemaType._string_number
               || otherType == JsonSchemaType._integer ? JsonSchemaType._integer : null;
      case _object:
        return otherType == JsonSchemaType._object ? JsonSchemaType._object : null;
      case _array:
        return otherType == JsonSchemaType._array ? JsonSchemaType._array : null;
      case _boolean:
        return otherType == JsonSchemaType._boolean ? JsonSchemaType._boolean : null;
      case _null:
        return otherType == JsonSchemaType._null ? JsonSchemaType._null : null;
      case _string_number:
        return otherType == JsonSchemaType._integer
               || otherType == JsonSchemaType._number
               || otherType == JsonSchemaType._string
               || otherType == JsonSchemaType._string_number ? otherType : null;
    }
    return otherType;
  }

  /**
   * Merges {@code selfType} with another node's type (or its type variants when the other
   * node has no single type). When the intersection is empty, flags this object as
   * invalid-by-exclusion and keeps one of the operands so validation can still proceed.
   * Returns {@code null} when several variants survive — the caller then relies on
   * {@link #myTypeVariants} instead.
   */
  @Nullable
  private JsonSchemaType mergeTypes(@Nullable JsonSchemaType selfType,
                                    @Nullable JsonSchemaType otherType,
                                    @Nullable Set<JsonSchemaType> otherTypeVariants) {
    if (selfType == null) return otherType;
    if (otherType == null) {
      if (otherTypeVariants != null && !otherTypeVariants.isEmpty()) {
        Set<JsonSchemaType> filteredVariants = ContainerUtil.newHashSet(otherTypeVariants.size());
        for (JsonSchemaType variant : otherTypeVariants) {
          JsonSchemaType subtype = getSubtypeOfBoth(selfType, variant);
          if (subtype != null) filteredVariants.add(subtype);
        }
        if (filteredVariants.size() == 0) {
          myIsValidByExclusion = false;
          return selfType;
        }
        if (filteredVariants.size() == 1) {
          return filteredVariants.iterator().next();
        }
        return null; // will be handled by variants
      }
      return selfType;
    }

    JsonSchemaType subtypeOfBoth = getSubtypeOfBoth(selfType, otherType);
    if (subtypeOfBoth == null){
      myIsValidByExclusion = false;
      return otherType;
    }
    return subtypeOfBoth;
  }

  /** Pairwise intersection of two type-variant sets; empty intersection marks invalid-by-exclusion. */
  private Set<JsonSchemaType> mergeTypeVariantSets(@Nullable Set<JsonSchemaType> self,
                                                   @Nullable Set<JsonSchemaType> other) {
    if (self == null) return other;
    if (other == null) return self;
    Set<JsonSchemaType> resultSet = ContainerUtil.newHashSet(self.size());
    for (JsonSchemaType type : self) {
      JsonSchemaType merged = mergeTypes(type, null, other);
      if (merged != null) resultSet.add(merged);
    }
    if (resultSet.isEmpty()) {
      myIsValidByExclusion = false;
      return other;
    }
    return resultSet;
  }

  // peer pointer is not merged!
  /**
   * Folds {@code other}'s constraints into this node. Collection-valued keywords are
   * concatenated/united; scalar keywords are overwritten by {@code other} when present
   * ("other wins"). {@code id} and {@code $schema} are deliberately not copied.
   */
  public void mergeValues(@NotNull JsonSchemaObject other) {
    // we do not copy id, schema
    mergeProperties(this, other);
    myDefinitionsMap = copyMap(myDefinitionsMap, other.myDefinitionsMap);
    final Map<String, JsonSchemaObject> map = copyMap(myPatternProperties == null ? null : myPatternProperties.mySchemasMap,
                                                      other.myPatternProperties == null ? null : other.myPatternProperties.mySchemasMap);
    myPatternProperties = map == null ? null : new PatternProperties(map);

    if (!StringUtil.isEmptyOrSpaces(other.myTitle)) {
      myTitle = other.myTitle;
    }
    if (!StringUtil.isEmptyOrSpaces(other.myDescription)) {
      myDescription = other.myDescription;
    }
    if (!StringUtil.isEmptyOrSpaces(other.myHtmlDescription)) {
      myHtmlDescription = other.myHtmlDescription;
    }
    myType = mergeTypes(myType, other.myType, other.myTypeVariants);
    if (other.myDefault != null) myDefault = other.myDefault;
    if (other.myRef != null) myRef = other.myRef;
    if (other.myFormat != null) myFormat = other.myFormat;
    myTypeVariants = mergeTypeVariantSets(myTypeVariants, other.myTypeVariants);
    if (other.myMultipleOf != null) myMultipleOf = other.myMultipleOf;
    if (other.myMaximum != null) myMaximum = other.myMaximum;
    if (other.myExclusiveMaximumNumber != null) myExclusiveMaximumNumber = other.myExclusiveMaximumNumber;
    myExclusiveMaximum |= other.myExclusiveMaximum;
    if (other.myMinimum != null) myMinimum = other.myMinimum;
    if (other.myExclusiveMinimumNumber != null) myExclusiveMinimumNumber = other.myExclusiveMinimumNumber;
    myExclusiveMinimum |= other.myExclusiveMinimum;
    if (other.myMaxLength != null) myMaxLength = other.myMaxLength;
    if (other.myMinLength != null) myMinLength = other.myMinLength;
    if (other.myPattern != null) myPattern = other.myPattern;
    if (other.myAdditionalPropertiesAllowed != null) myAdditionalPropertiesAllowed = other.myAdditionalPropertiesAllowed;
    if (other.myAdditionalPropertiesSchema != null) myAdditionalPropertiesSchema = other.myAdditionalPropertiesSchema;
    if (other.myPropertyNamesSchema != null) myPropertyNamesSchema = other.myPropertyNamesSchema;
    if (other.myAdditionalItemsAllowed != null) myAdditionalItemsAllowed = other.myAdditionalItemsAllowed;
    if (other.myAdditionalItemsSchema != null) myAdditionalItemsSchema = other.myAdditionalItemsSchema;
    if (other.myItemsSchema != null) myItemsSchema = other.myItemsSchema;
    if (other.myContainsSchema != null) myContainsSchema = other.myContainsSchema;
    myItemsSchemaList = copyList(myItemsSchemaList, other.myItemsSchemaList);
    if (other.myMaxItems != null) myMaxItems = other.myMaxItems;
    if (other.myMinItems != null) myMinItems = other.myMinItems;
    if (other.myUniqueItems != null) myUniqueItems = other.myUniqueItems;
    if (other.myMaxProperties != null) myMaxProperties = other.myMaxProperties;
    if (other.myMinProperties != null) myMinProperties = other.myMinProperties;
    if (myRequired != null && other.myRequired != null) {
      myRequired.addAll(other.myRequired);
    }
    else if (other.myRequired != null) {
      myRequired = other.myRequired;
    }
    myPropertyDependencies = copyMap(myPropertyDependencies, other.myPropertyDependencies);
    mySchemaDependencies = copyMap(mySchemaDependencies, other.mySchemaDependencies);
    if (other.myEnum != null) myEnum = other.myEnum;
    myAllOf = copyList(myAllOf, other.myAllOf);
    myAnyOf = copyList(myAnyOf, other.myAnyOf);
    myOneOf = copyList(myOneOf, other.myOneOf);
    if (other.myNot != null) myNot = other.myNot;
    if (other.myIf != null) myIf = other.myIf;
    if (other.myThen != null) myThen = other.myThen;
    if (other.myElse != null) myElse = other.myElse;
    myShouldValidateAgainstJSType |= other.myShouldValidateAgainstJSType;
  }

  /**
   * Copies {@code otherObject}'s properties into {@code thisObject}; on a name clash the
   * two property schemas are combined via JsonSchemaVariantsTreeBuilder.merge
   * (NOTE(review): exact merge semantics defined outside this file — confirm there).
   */
  private static void mergeProperties(@NotNull JsonSchemaObject thisObject,
                                      @NotNull JsonSchemaObject otherObject) {
    for (Map.Entry<String, JsonSchemaObject> prop: otherObject.myProperties.entrySet()) {
      String key = prop.getKey();
      JsonSchemaObject otherProp = prop.getValue();
      if (!thisObject.myProperties.containsKey(key)) {
        thisObject.myProperties.put(key, otherProp);
      }
      else {
        JsonSchemaObject existingProp = thisObject.myProperties.get(key);
        thisObject.myProperties.put(key, JsonSchemaVariantsTreeBuilder.merge(existingProp, otherProp, otherProp));
      }
    }
  }

  public void shouldValidateAgainstJSType() { myShouldValidateAgainstJSType = true; }

  public boolean isShouldValidateAgainstJSType() { return myShouldValidateAgainstJSType; }

  // Appends source to target, allocating target lazily; returns target unchanged when source is empty.
  @Nullable
  private static <T> List<T> copyList(@Nullable List<T> target, @Nullable List<T> source) {
    if (source == null || source.isEmpty()) return target;
    if (target == null) target = ContainerUtil.newArrayListWithCapacity(source.size());
    target.addAll(source);
    return target;
  }

  // Map analogue of copyList: entries from source overwrite same-key entries in target.
  @Nullable
  private static <K, V> Map<K, V> copyMap(@Nullable Map<K, V> target, @Nullable Map<K, V> source) {
    if (source == null || source.isEmpty()) return target;
    if (target == null) target = ContainerUtilRt.newHashMap(source.size());
    target.putAll(source);
    return target;
  }

  /** File this schema node was parsed from. Must not be called on the NULL_OBJ sentinel. */
  @NotNull
  public VirtualFile getSchemaFile() {
    assert myJsonObject != null;
    return myJsonObject.getContainingFile().getViewProvider().getVirtualFile();
  }

  /** Backing PSI element. Must not be called on the NULL_OBJ sentinel. */
  @NotNull
  public JsonContainer getJsonObject() {
    assert myJsonObject != null;
    return myJsonObject;
  }

  // --- plain accessors for the schema keyword fields above ---

  @Nullable
  public Map<String, JsonSchemaObject> getDefinitionsMap() { return myDefinitionsMap; }

  public void setDefinitionsMap(@NotNull Map<String, JsonSchemaObject> definitionsMap) { myDefinitionsMap = definitionsMap; }

  @NotNull
  public Map<String, JsonSchemaObject> getProperties() { return myProperties; }

  public void setProperties(@NotNull Map<String, JsonSchemaObject> properties) { myProperties = properties; }

  public boolean hasPatternProperties() { return myPatternProperties != null; }

  public void setPatternProperties(@NotNull Map<String, JsonSchemaObject> patternProperties) {
    myPatternProperties = new PatternProperties(patternProperties);
  }

  @Nullable
  public JsonSchemaType getType() { return myType; }

  public void setType(@Nullable JsonSchemaType type) { myType = type; }

  @Nullable
  public Number getMultipleOf() { return myMultipleOf; }

  public void setMultipleOf(@Nullable Number multipleOf) { myMultipleOf = multipleOf; }

  @Nullable
  public Number getMaximum() { return myMaximum; }

  public void setMaximum(@Nullable Number maximum) { myMaximum = maximum; }

  public boolean isExclusiveMaximum() { return myExclusiveMaximum; }

  @Nullable
  public Number getExclusiveMaximumNumber() { return myExclusiveMaximumNumber; }

  public void setExclusiveMaximumNumber(@Nullable Number exclusiveMaximumNumber) { myExclusiveMaximumNumber = exclusiveMaximumNumber; }

  @Nullable
  public Number getExclusiveMinimumNumber() { return myExclusiveMinimumNumber; }

  public void setExclusiveMinimumNumber(@Nullable Number exclusiveMinimumNumber) { myExclusiveMinimumNumber = exclusiveMinimumNumber; }

  public void setExclusiveMaximum(boolean exclusiveMaximum) { myExclusiveMaximum = exclusiveMaximum; }

  @Nullable
  public Number getMinimum() { return myMinimum; }

  public void setMinimum(@Nullable Number minimum) { myMinimum = minimum; }

  public boolean isExclusiveMinimum() { return myExclusiveMinimum; }

  public void setExclusiveMinimum(boolean exclusiveMinimum) { myExclusiveMinimum = exclusiveMinimum; }

  @Nullable
  public Integer getMaxLength() { return myMaxLength; }

  public void setMaxLength(@Nullable Integer maxLength) { myMaxLength = maxLength; }

  @Nullable
  public Integer getMinLength() { return myMinLength; }

  public void setMinLength(@Nullable Integer minLength) { myMinLength = minLength; }

  @Nullable
  public String getPattern() { return myPattern == null ? null : myPattern.getPattern(); }

  public void setPattern(@Nullable String pattern) {
    myPattern = pattern == null ? null : new PropertyNamePattern(pattern);
  }

  // NOTE(review): annotated @Nullable but the expression can only yield true/false,
  // never null — absent keyword defaults to "allowed". Same for getAdditionalItemsAllowed.
  @Nullable
  public Boolean getAdditionalPropertiesAllowed() {
    return myAdditionalPropertiesAllowed == null || myAdditionalPropertiesAllowed;
  }

  public void setAdditionalPropertiesAllowed(@Nullable Boolean additionalPropertiesAllowed) { myAdditionalPropertiesAllowed = additionalPropertiesAllowed; }

  @Nullable
  public JsonSchemaObject getPropertyNamesSchema() { return myPropertyNamesSchema; }

  public void setPropertyNamesSchema(@Nullable JsonSchemaObject propertyNamesSchema) { myPropertyNamesSchema = propertyNamesSchema; }

  @Nullable
  public JsonSchemaObject getAdditionalPropertiesSchema() { return myAdditionalPropertiesSchema; }

  public void setAdditionalPropertiesSchema(@Nullable JsonSchemaObject additionalPropertiesSchema) { myAdditionalPropertiesSchema = additionalPropertiesSchema; }

  @Nullable
  public Boolean getAdditionalItemsAllowed() {
    return myAdditionalItemsAllowed == null || myAdditionalItemsAllowed;
  }

  public void setAdditionalItemsAllowed(@Nullable Boolean additionalItemsAllowed) { myAdditionalItemsAllowed = additionalItemsAllowed; }

  @Nullable
  public JsonSchemaObject getAdditionalItemsSchema() { return myAdditionalItemsSchema; }

  public void setAdditionalItemsSchema(@Nullable JsonSchemaObject additionalItemsSchema) { myAdditionalItemsSchema = additionalItemsSchema; }

  @Nullable
  public JsonSchemaObject getItemsSchema() { return myItemsSchema; }

  public void setItemsSchema(@Nullable JsonSchemaObject itemsSchema) { myItemsSchema = itemsSchema; }

  @Nullable
  public JsonSchemaObject getContainsSchema() { return myContainsSchema; }

  public void setContainsSchema(@Nullable JsonSchemaObject containsSchema) { myContainsSchema = containsSchema; }

  @Nullable
  public List<JsonSchemaObject> getItemsSchemaList() { return myItemsSchemaList; }

  public void setItemsSchemaList(@Nullable List<JsonSchemaObject> itemsSchemaList) { myItemsSchemaList = itemsSchemaList; }

  @Nullable
  public Integer getMaxItems() { return myMaxItems; }

  public void setMaxItems(@Nullable Integer maxItems) { myMaxItems = maxItems; }

  @Nullable
  public Integer getMinItems() { return myMinItems; }

  public void setMinItems(@Nullable Integer minItems) { myMinItems = minItems; }

  public boolean isUniqueItems() { return Boolean.TRUE.equals(myUniqueItems); }

  public void setUniqueItems(boolean uniqueItems) { myUniqueItems = uniqueItems; }

  @Nullable
  public Integer getMaxProperties() { return myMaxProperties; }

  public void setMaxProperties(@Nullable Integer maxProperties) { myMaxProperties = maxProperties; }

  @Nullable
  public Integer getMinProperties() { return myMinProperties; }

  public void setMinProperties(@Nullable Integer minProperties) { myMinProperties = minProperties; }

  @Nullable
  public Set<String> getRequired() { return myRequired; }

  public void setRequired(@Nullable Set<String> required) { myRequired = required; }

  @Nullable
  public Map<String, List<String>> getPropertyDependencies() { return myPropertyDependencies; }

  public void setPropertyDependencies(@Nullable Map<String, List<String>> propertyDependencies) { myPropertyDependencies = propertyDependencies; }

  @Nullable
  public Map<String, JsonSchemaObject> getSchemaDependencies() { return mySchemaDependencies; }

  public void setSchemaDependencies(@Nullable Map<String, JsonSchemaObject> schemaDependencies) { mySchemaDependencies = schemaDependencies; }

  @Nullable
  public List<Object> getEnum() { return myEnum; }

  public void setEnum(@Nullable List<Object> anEnum) { myEnum = anEnum; }

  @Nullable
  public List<JsonSchemaObject> getAllOf() { return myAllOf; }

  public void setAllOf(@Nullable List<JsonSchemaObject> allOf) { myAllOf = allOf; }

  @Nullable
  public List<JsonSchemaObject> getAnyOf() { return myAnyOf; }

  public void setAnyOf(@Nullable List<JsonSchemaObject> anyOf) { myAnyOf = anyOf; }

  @Nullable
  public List<JsonSchemaObject> getOneOf() { return myOneOf; }

  public void setOneOf(@Nullable List<JsonSchemaObject> oneOf) { myOneOf = oneOf; }

  @Nullable
  public JsonSchemaObject getNot() { return myNot; }

  public void setNot(@Nullable JsonSchemaObject not) { myNot = not; }

  @Nullable
  public JsonSchemaObject getIf() { return myIf; }

  public void setIf(@Nullable JsonSchemaObject anIf) { myIf = anIf; }

  @Nullable
  public JsonSchemaObject getThen() { return myThen; }

  public void setThen(@Nullable JsonSchemaObject then) { myThen = then; }

  @Nullable
  public JsonSchemaObject getElse() { return myElse; }

  public void setElse(@Nullable JsonSchemaObject anElse) { myElse = anElse; }

  @Nullable
  public Set<JsonSchemaType> getTypeVariants() { return myTypeVariants; }

  public void setTypeVariants(@Nullable Set<JsonSchemaType> typeVariants) { myTypeVariants = typeVariants; }

  @Nullable
  public String getRef() { return myRef; }

  public void setRef(@Nullable String ref) { myRef = ref; }

  /** Default value; narrowed to int when the declared type is integer and the stored default is numeric. */
  @Nullable
  public Object getDefault() {
    if (JsonSchemaType._integer.equals(myType)) return myDefault instanceof Number ? ((Number)myDefault).intValue() : myDefault;
    return myDefault;
  }

  public void setDefault(@Nullable Object aDefault) { myDefault = aDefault; }

  @Nullable
  public String getFormat() { return myFormat; }

  public void setFormat(@Nullable String format) { myFormat = format; }

  @Nullable
  public String getId() { return myId; }

  public void setId(@Nullable String id) { myId = id; }

  @Nullable
  public String getSchema() { return mySchema; }

  public void setSchema(@Nullable String schema) { mySchema = schema; }

  @Nullable
  public String getDescription() { return myDescription; }

  // Title/description/htmlDescription are run through unescapeJsonString on set so JSON
  // escapes (\n, \", \uXXXX ...) are presented literally.
  public void setDescription(@NotNull String description) { myDescription = unescapeJsonString(description); }

  @Nullable
  public String getHtmlDescription() { return myHtmlDescription; }

  public void setHtmlDescription(@NotNull String htmlDescription) { myHtmlDescription = unescapeJsonString(htmlDescription); }

  @Nullable
  public String getTitle() { return myTitle; }

  public void setTitle(@NotNull String title) { myTitle = unescapeJsonString(title); }

  /**
   * Decodes JSON string escapes by round-tripping the text through a Gson parse of a
   * one-property wrapper object; falls back to the raw text when parsing fails
   * (e.g. text containing an unescaped quote).
   */
  private static String unescapeJsonString(@NotNull final String text) {
    try {
      final String object = String.format("{\"prop\": \"%s\"}", text);
      return new Gson().fromJson(object, JsonObject.class).get("prop").getAsString();
    }
    catch (JsonParseException e) {
      return text;
    }
  }

  @Nullable
  public JsonSchemaObject getMatchingPatternPropertySchema(@NotNull String name) {
    if (myPatternProperties == null) return null;
    return myPatternProperties.getPatternPropertySchema(name);
  }

  public boolean checkByPattern(@NotNull String value) {
    return myPattern != null && myPattern.checkByPattern(value);
  }

  @Nullable
  public String getPatternError() {
    return myPattern == null ? null : myPattern.getPatternError();
  }

  /** Maps each syntactically invalid patternProperties regex back to its PSI element with the error text. */
  @Nullable
  public Map<JsonContainer, String> getInvalidPatternProperties() {
    if (myPatternProperties != null) {
      final Map<String, String> patterns = myPatternProperties.getInvalidPatterns();
      return patterns.entrySet().stream().map(entry -> {
        final JsonSchemaObject object = myPatternProperties.getSchemaForPattern(entry.getKey());
        assert object != null;
        return Pair.create(object.getJsonObject(), entry.getValue());
      }).collect(Collectors.toMap(o -> o.getFirst(), o -> o.getSecond()));
    }
    return null;
  }

  /**
   * Walks a same-document JSON pointer ("#/a/b/c") from this node, understanding the
   * definitions/properties/items/additionalItems keywords; "#"/self-references return
   * this node. Returns {@code null} when any step cannot be resolved.
   */
  @Nullable
  public JsonSchemaObject findRelativeDefinition(@NotNull String ref) {
    if (isSelfReference(ref)) {
      return this;
    }
    if (!ref.startsWith("#/")) {
      return null;
    }
    ref = ref.substring(2);
    final List<String> parts = split(ref);
    JsonSchemaObject current = this;
    for (int i = 0; i < parts.size(); i++) {
      if (current == null) return null;
      final String part = parts.get(i);
      if (DEFINITIONS.equals(part)) {
        if (i == (parts.size() - 1)) return null; // keyword with no following key is ill-formed
        //noinspection AssignmentToForLoopParameter
        final String nextPart = parts.get(++i);
        current = current.getDefinitionsMap() == null ? null : current.getDefinitionsMap().get(unescapeJsonPointerPart(nextPart));
        continue;
      }
      if (PROPERTIES.equals(part)) {
        if (i == (parts.size() - 1)) return null;
        //noinspection AssignmentToForLoopParameter
        current = current.getProperties().get(unescapeJsonPointerPart(parts.get(++i)));
        continue;
      }
      if (ITEMS.equals(part)) {
        if (i == (parts.size() - 1)) {
          current = current.getItemsSchema();
        }
        else {
          // numeric index into the tuple ("items": [...]) form
          //noinspection AssignmentToForLoopParameter
          Integer next = tryParseInt(parts.get(++i));
          List<JsonSchemaObject> itemsSchemaList = current.getItemsSchemaList();
          if (itemsSchemaList != null && next != null && next < itemsSchemaList.size()) {
            current = itemsSchemaList.get(next);
          }
        }
        continue;
      }
      if (ADDITIONAL_ITEMS.equals(part)) {
        if (i == (parts.size() - 1)) {
          current = current.getAdditionalItemsSchema();
        }
        continue;
      }
      // any other segment falls back to a definitions lookup
      current = current.getDefinitionsMap() == null ? null : current.getDefinitionsMap().get(part);
    }
    return current;
  }

  // Lenient int parse: any failure (non-numeric, overflow) yields null.
  @Nullable
  private static Integer tryParseInt(String s) {
    try {
      return Integer.parseInt(s);
    }
    catch (Exception __) {
      return null;
    }
  }

  // Identity is the backing PSI element; asserts exclude the NULL_OBJ sentinel.
  @Override
  public boolean equals(@Nullable Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    JsonSchemaObject object = (JsonSchemaObject)o;
    assert myJsonObject != null;
    return myJsonObject.equals(object.myJsonObject);
  }

  @Override
  public int hashCode() {
    assert myJsonObject != null;
    return myJsonObject.hashCode();
  }

  /**
   * Loosens a schema regex for substring-style matching: pads unanchored ends with ".*"
   * and collapses double backslashes.
   */
  @NotNull
  private static String adaptSchemaPattern(String pattern) {
    pattern = pattern.startsWith("^") || pattern.startsWith("*") || pattern.startsWith(".") ? pattern : (".*" + pattern);
    pattern = pattern.endsWith("+") || pattern.endsWith("*") || pattern.endsWith("$") ? pattern : (pattern + ".*");
    pattern = pattern.replace("\\\\", "\\");
    return pattern;
  }

  // Returns (compiled pattern, null) on success or (null, error message) on a bad regex.
  private static Pair<Pattern, String> compilePattern(@NotNull final String pattern) {
    try {
      return Pair.create(Pattern.compile(adaptSchemaPattern(pattern)), null);
    } catch (PatternSyntaxException e) {
      return Pair.create(null, e.getMessage());
    }
  }

  /**
   * Matches with a 300ms "bombed" char sequence so catastrophic backtracking is cut short;
   * any failure is logged and treated as a non-match.
   */
  public static boolean matchPattern(@NotNull final Pattern pattern, @NotNull final String s) {
    try {
      return pattern.matcher(StringUtil.newBombedCharSequence(s, 300)).matches();
    } catch (ProcessCanceledException e) {
      // something wrong with the pattern, infinite cycle?
      Logger.getInstance(JsonSchemaObject.class).info("Pattern matching canceled");
      return false;
    } catch (Exception e) {
      // catch exceptions around to prevent things like:
      // https://bugs.openjdk.java.net/browse/JDK-6984178
      Logger.getInstance(JsonSchemaObject.class).info(e);
      return false;
    }
  }

  /**
   * Human-readable type description for UI: explicit type, then type variants, then the
   * enum values (or just "enum" in short mode), then a heuristic guess; null if nothing applies.
   */
  @Nullable
  public String getTypeDescription(boolean shortDesc) {
    JsonSchemaType type = getType();
    if (type != null) return type.getDescription();

    Set<JsonSchemaType> possibleTypes = getTypeVariants();

    String description = getTypesDescription(shortDesc, possibleTypes);
    if (description != null) return description;

    List<Object> anEnum = getEnum();
    if (anEnum != null) {
      return shortDesc ? "enum" : anEnum.stream().map(o -> o.toString()).collect(Collectors.joining(" | "));
    }

    JsonSchemaType guessedType = guessType();
    if (guessedType != null) {
      return guessedType.getDescription();
    }

    return null;
  }

  /**
   * Infers this node's type: explicit type wins, then a single type variant, then a
   * heuristic based on which constraint family (object/numeric/string/array) is the only
   * one in use. Ambiguous cases return null.
   */
  @Nullable
  public JsonSchemaType guessType() {
    // if we have an explicit type, here we are
    JsonSchemaType type = getType();
    if (type != null) return type;

    // process type variants before heuristic type detection
    final Set<JsonSchemaType> typeVariants = getTypeVariants();
    if (typeVariants != null) {
      final int size = typeVariants.size();
      if (size == 1) {
        return typeVariants.iterator().next();
      }
      else if (size >= 2) {
        return null;
      }
    }

    // heuristic type detection based on the set of applied constraints
    boolean hasObjectChecks = hasObjectChecks();
    boolean hasNumericChecks = hasNumericChecks();
    boolean hasStringChecks = hasStringChecks();
    boolean hasArrayChecks = hasArrayChecks();

    if (hasObjectChecks && !hasNumericChecks && !hasStringChecks && !hasArrayChecks) {
      return JsonSchemaType._object;
    }
    if (!hasObjectChecks && hasNumericChecks && !hasStringChecks && !hasArrayChecks) {
      return JsonSchemaType._number;
    }
    if (!hasObjectChecks && !hasNumericChecks && hasStringChecks && !hasArrayChecks) {
      return JsonSchemaType._string;
    }
    if (!hasObjectChecks && !hasNumericChecks && !hasStringChecks && hasArrayChecks) {
      return JsonSchemaType._array;
    }

    return null;
  }

  public boolean hasNumericChecks() {
    return getMultipleOf() != null
           || getExclusiveMinimumNumber() != null
           || getExclusiveMaximumNumber() != null
           || getMaximum() != null
           || getMinimum() != null;
  }

  public boolean hasStringChecks() {
    return getPattern() != null || getFormat() != null;
  }

  public boolean hasArrayChecks() {
    return isUniqueItems()
           || getContainsSchema() != null
           || getItemsSchema() != null
           || getItemsSchemaList() != null
           || getMinItems() != null
           || getMaxItems() != null;
  }

  public boolean hasObjectChecks() {
    return !getProperties().isEmpty()
           || getPropertyNamesSchema() != null
           || getPropertyDependencies() != null
           || hasPatternProperties()
           || getRequired() != null
           || getMinProperties() != null
           || getMaxProperties() != null;
  }

  /** Joins distinct sorted type names with " | "; in short mode caps at three and appends "| ...". */
  @Nullable
  static String getTypesDescription(boolean shortDesc, @Nullable Collection<JsonSchemaType> possibleTypes) {
    if (possibleTypes == null || possibleTypes.size() == 0) return null;
    if (possibleTypes.size() == 1) return possibleTypes.iterator().next().getDescription();
    if (possibleTypes.contains(JsonSchemaType._any)) return JsonSchemaType._any.getDescription();

    Stream<String> typeDescriptions = possibleTypes.stream().map(t -> t.getDescription()).distinct().sorted();
    boolean isShort = false;
    if (shortDesc) {
      typeDescriptions = typeDescriptions.limit(3);
      if (possibleTypes.size() > 3) isShort = true;
    }
    return typeDescriptions.collect(Collectors.joining(" | ", "", isShort ? "| ..." : ""));
  }

  /**
   * Resolves this node's {@code $ref} (must be non-blank) via {@code service}, caching the
   * result (NULL_OBJ stands in for "not found"). The cache is cleared when the
   * JSON_DEPS_CHANGED message-bus event fires; the subscription is made lazily, at most once.
   * Non-HTTP refs (and HTTP refs that alias local files) are registered with the service.
   * NOTE(review): the containsKey/put sequence is not atomic, so concurrent callers may
   * resolve the same ref twice — presumably acceptable since resolution is idempotent; confirm.
   */
  @Nullable
  public JsonSchemaObject resolveRefSchema(@NotNull JsonSchemaService service) {
    final String ref = getRef();
    assert !StringUtil.isEmptyOrSpaces(ref);
    if (!myComputedRefs.containsKey(ref)){
      JsonSchemaObject value = fetchSchemaFromRefDefinition(ref, this, service);
      if (!mySubscribed.get()) {
        getJsonObject().getProject().getMessageBus().connect().subscribe(JsonSchemaVfsListener.JSON_DEPS_CHANGED, () -> myComputedRefs.clear());
        mySubscribed.set(true);
      }
      if (!JsonFileResolver.isHttpPath(ref)) {
        service.registerReference(ref);
      }
      else if (value != null) {
        // our aliases - if http ref actually refers to a local file with specific ID
        PsiFile file = value.getJsonObject().getContainingFile();
        if (file != null) {
          VirtualFile virtualFile = file.getVirtualFile();
          if (virtualFile != null && !(virtualFile instanceof HttpVirtualFile)) {
            service.registerReference(virtualFile.getName());
          }
        }
      }
      myComputedRefs.put(ref, value == null ? NULL_OBJ : value);
    }
    JsonSchemaObject object = myComputedRefs.getOrDefault(ref, null);
    return object == NULL_OBJ ? null : object;
  }

  /**
   * Splits {@code ref} into (external schema id, relative path); resolves the external part
   * through the service when present, then walks the relative path from the resolved
   * (or current file's) root schema.
   */
  @Nullable
  private static JsonSchemaObject fetchSchemaFromRefDefinition(@NotNull String ref,
                                                               @NotNull final JsonSchemaObject schema,
                                                               @NotNull JsonSchemaService service) {
    final VirtualFile schemaFile = schema.getSchemaFile();
    final JsonSchemaVariantsTreeBuilder.SchemaUrlSplitter splitter = new JsonSchemaVariantsTreeBuilder.SchemaUrlSplitter(ref);
    String schemaId = splitter.getSchemaId();
    if (schemaId != null) {
      final JsonSchemaObject refSchema = resolveSchemaByReference(service, schemaFile, schemaId);
      if (refSchema == null) return null;
      return findRelativeDefinition(refSchema, splitter);
    }
    final JsonSchemaObject rootSchema = service.getSchemaObjectForSchemaFile(schemaFile);
    if (rootSchema == null) {
      LOG.debug(String.format("Schema object not found for %s", schemaFile.getPath()));
      return null;
    }
    return findRelativeDefinition(rootSchema, splitter);
  }

  // Looks up a cross-file schema by id relative to schemaFile; logs and returns null on either miss.
  @Nullable
  private static JsonSchemaObject resolveSchemaByReference(@NotNull JsonSchemaService service,
                                                           @NotNull VirtualFile schemaFile,
                                                           @NotNull String schemaId) {
    final VirtualFile refFile = service.findSchemaFileByReference(schemaId, schemaFile);
    if (refFile == null) {
      LOG.debug(String.format("Schema file not found by reference: '%s' from %s", schemaId, schemaFile.getPath()));
      return null;
    }
    final JsonSchemaObject refSchema = service.getSchemaObjectForSchemaFile(refFile);
    if (refSchema == null) {
      LOG.debug(String.format("Schema object not found by reference: '%s' from %s", schemaId, schemaFile.getPath()));
      return null;
    }
    return refSchema;
  }

  /**
   * Applies the relative-path half of a split ref. An empty path means the ref targeted
   * the file/id itself; "#id" anchors are resolved through JsonCachedValues.resolveId and
   * re-dispatched (guarding against an id that resolves to itself).
   */
  private static JsonSchemaObject findRelativeDefinition(@NotNull final JsonSchemaObject schema,
                                                         @NotNull final JsonSchemaVariantsTreeBuilder.SchemaUrlSplitter splitter) {
    final String path = splitter.getRelativePath();
    if (StringUtil.isEmptyOrSpaces(path)) {
      final String id = splitter.getSchemaId();
      if (isSelfReference(id)) {
        return schema;
      }
      if (id != null && id.startsWith("#")) {
        final String resolvedId = JsonCachedValues.resolveId(schema.getJsonObject().getContainingFile(), id);
        if (resolvedId == null || id.equals("#" + resolvedId)) return null;
        return findRelativeDefinition(schema, new JsonSchemaVariantsTreeBuilder.SchemaUrlSplitter("#" + resolvedId));
      }
      return schema;
    }
    final JsonSchemaObject definition = schema.findRelativeDefinition(path);
    if (definition == null) {
      LOG.debug(String.format("Definition not found by reference: '%s' in file %s", path, schema.getSchemaFile().getPath()));
    }
    return definition;
  }

  /**
   * A "pattern" keyword: the raw pattern text plus its compiled form (or the compile error),
   * with a per-value match cache.
   */
  private static class PropertyNamePattern {
    @NotNull private final String myPattern;
    @Nullable private final Pattern myCompiledPattern;
    @Nullable private final String myPatternError; // non-null when the regex failed to compile
    @NotNull private final Map<String, Boolean> myValuePatternCache;

    PropertyNamePattern(@NotNull String pattern) {
      myPattern = StringUtil.unescapeBackSlashes(pattern);
      final Pair<Pattern, String> pair = compilePattern(pattern);
      myPatternError = pair.getSecond();
      myCompiledPattern = pair.getFirst();
      myValuePatternCache = ContainerUtil.createConcurrentWeakKeyWeakValueMap();
    }

    @Nullable
    public String getPatternError() { return myPatternError; }

    // A broken pattern is treated as matching everything (lenient validation).
    boolean checkByPattern(@NotNull final String name) {
      if (myPatternError != null) return true;
      if (Boolean.TRUE.equals(myValuePatternCache.get(name))) return true;
      assert myCompiledPattern != null;
      boolean matches = matchPattern(myCompiledPattern, name);
      myValuePatternCache.put(name, matches);
      return matches;
    }

    @NotNull
    public String getPattern() { return myPattern; }
  }

  /**
   * The "patternProperties" keyword: per-pattern schemas keyed by unescaped pattern text,
   * with compiled patterns, compile-error records, and a name→pattern lookup cache.
   */
  private static class PatternProperties {
    @NotNull private final Map<String, JsonSchemaObject> mySchemasMap;
    @NotNull private final Map<String, Pattern> myCachedPatterns;      // only successfully compiled patterns
    @NotNull private final Map<String, String> myCachedPatternProperties; // property name -> first matching pattern
    @NotNull private final Map<String, String> myInvalidPatterns;      // pattern -> compile error message

    PatternProperties(@NotNull final Map<String, JsonSchemaObject> schemasMap) {
      mySchemasMap = new HashMap<>();
      schemasMap.keySet().forEach(key -> mySchemasMap.put(StringUtil.unescapeBackSlashes(key), schemasMap.get(key)));
      myCachedPatterns = new HashMap<>();
      myCachedPatternProperties = ContainerUtil.createConcurrentWeakKeyWeakValueMap();
      myInvalidPatterns = new HashMap<>();
      mySchemasMap.keySet().forEach(key -> {
        final Pair<Pattern, String> pair = compilePattern(key);
        if (pair.getSecond() != null) {
          myInvalidPatterns.put(key, pair.getSecond());
        } else {
          assert pair.getFirst() != null;
          myCachedPatterns.put(key, pair.getFirst());
        }
      });
    }

    // Returns the schema of the first pattern matching the name, caching the pattern choice.
    @Nullable
    public JsonSchemaObject getPatternPropertySchema(@NotNull final String name) {
      String value = myCachedPatternProperties.get(name);
      if (value != null) {
        assert mySchemasMap.containsKey(value);
        return mySchemasMap.get(value);
      }

      value = myCachedPatterns.keySet().stream()
        .filter(key -> matchPattern(myCachedPatterns.get(key), name))
        .findFirst()
        .orElse(null);
      if (value != null) {
        myCachedPatternProperties.put(name, value);
        assert mySchemasMap.containsKey(value);
        return mySchemasMap.get(value);
      }
      return null;
    }

    @NotNull
    public Map<String, String> getInvalidPatterns() { return myInvalidPatterns; }

    public JsonSchemaObject getSchemaForPattern(@NotNull String key) { return mySchemasMap.get(key); }
  }
}
/* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package edu.ndsu.eci.tapestry5cayenne.integration;

import edu.ndsu.eci.tapestry5cayenne.TestUtils;
import edu.ndsu.eci.tapestry5cayenne.model.Artist;
import edu.ndsu.eci.tapestry5cayenne.model.Painting;
import edu.ndsu.eci.tapestry5cayenne.services.ObjectContextProvider;

import org.apache.cayenne.Persistent;
import org.apache.cayenne.query.Ordering;
import org.apache.cayenne.query.SortOrder;
import org.apache.tapestry5.ValueEncoder;
import org.apache.tapestry5.dom.Document;
import org.apache.tapestry5.dom.Element;
import org.apache.tapestry5.dom.Node;
import org.apache.tapestry5.ioc.IOCUtilities;
import org.apache.tapestry5.ioc.Registry;
import org.apache.tapestry5.test.PageTester;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.util.*;

/**
 * Integration tests for the t5cayenne block contributions: the to-one
 * editor/viewer, the to-many viewer, and the CayenneSelect component.
 * Pages are rendered in-process via Tapestry's {@link PageTester} against
 * the test app in src/test/app0, backed by the fixture data created by
 * {@link TestUtils#basicData}.
 */
@Test(groups = "all", singleThreaded = true)
public class TestBlockContributions extends Assert {

  private Registry _registry;
  private PageTester _tester;
  private ObjectContextProvider _provider;
  private ValueEncoder<Persistent> _encoder;
  // Fixture artists, sorted ascending by name in setup().
  private List<Artist> _data;

  /**
   * Boots the test database and the Tapestry test harness, loads the fixture
   * artists, and looks up the t5cayenne entity encoder service.
   */
  @SuppressWarnings("unchecked")
  @BeforeClass
  void setup() throws Exception {
    TestUtils.setupdb();
    _tester = new PageTester("edu.ndsu.eci.tapestry5cayenne.integration.app0", "app", "src/test/app0");
    _registry = _tester.getRegistry();
    _provider = _registry.getService(ObjectContextProvider.class);
    _data = TestUtils.basicData(_provider.currentContext());
    new Ordering(Artist.NAME.getName(), SortOrder.ASCENDING).orderList(_data);
    _encoder = _registry.getService("CayenneEntityEncoder", ValueEncoder.class);
  }

  /** Shuts down the Tapestry test harness, if it was started. */
  @AfterClass
  void shutdown() {
    if (_tester != null) {
      _tester.shutdown();
    }
  }

  /**
   * Ensure that the toOneEditor is properly rendered: label text, option
   * count, and option ordering by the entity's @Label.
   */
  public void testToOneEditor() {
    Document doc = _tester.renderPage("TestToOneControl");

    // Verify the label.
    Element el = doc.getRootElement().getElementByAttributeValue("for", "toOneList");
    assertEquals(el.getChildMarkup(), "Artist");

    // Verify the select list. The relationship is required, so the blank
    // option isn't present and children size should equal the data size.
    el = doc.getElementById("toOneList");
    assertEquals(el.getChildren().size(), _data.size());

    // We expect the list of items to be sorted by the @Label.
    Collections.sort(_data, new Comparator<Artist>() {
      public int compare(Artist o1, Artist o2) {
        return o1.getName().compareTo(o2.getName());
      }
    });
    Iterator<Node> children = el.getChildren().iterator();
    for (Artist a : _data) {
      Element option = (Element) children.next();
      String val = option.getAttribute("value");
      Persistent obj = _encoder.toValue(val);
      assertEquals(obj, a, "Incorrect order of persistent objects!");
    }
  }

  /**
   * Tests the "to_one" viewer; also tests editor submission.
   * Renders the page, selects an artist, submits the form, then checks both
   * the re-rendered select state and the viewer output.
   */
  public void testToOneViewer_and_toOneEditorSubmission() {
    Document doc = _tester.renderPage("TestToOneControl");
    List<Element> els = TestUtils.DOMFindAll(doc.getRootElement(), "body/form");
    assertFalse(els.isEmpty());
    Element form = els.get(0);

    Map<String, String> params = new HashMap<String, String>();
    params.put("price", "100.0");
    params.put("title", "dud");
    params.put("toOneList", _encoder.toClient(_data.get(1)));
    doc = _tester.submitForm(form, params);

    // Make sure that the select is correctly selected.
    els = TestUtils.DOMFindAll(doc.getRootElement(), "body/form/select/option");
    assertFalse(els.isEmpty());
    // Find the option corresponding to _data.get(1).
    // Remember, painting.artist is required, so no blank option, now that
    // t5cayenne automagically picks up those validations.
    assertEquals(els.get(1).getAttribute("selected"), "selected");
    assertEquals(els.get(1).getChildMarkup(), "Picasso");

    // Make sure the viewer output is correct.
    String markup = TestUtils.DOMFindAll(doc.getRootElement(), "body/dl/dt").get(2).getChildMarkup();
    assertEquals(markup, "Artist");
    markup = TestUtils.DOMFindAll(doc.getRootElement(), "body/dl/dd").get(2).getChildMarkup();
    assertEquals(markup, "Picasso");
  }

  /**
   * Renders TestToManyControl and ensures that the ToManyViewer.css link is
   * properly rendered in the head.
   *
   * @return the rendered document, for further assertions by callers
   */
  private Document assertToManyHead() {
    Document doc = _tester.renderPage("TestToManyControl");
    // Make sure the stylesheet shows up. The page renders 7 head links in
    // total: Tapestry's own stylesheets plus t5cayenne's ToManyViewer.css,
    // which is expected to be the last one.
    List<Element> els = TestUtils.DOMFindAll(doc.getRootElement(), "head/link");
    assertEquals(els.size(), 7);
    assertTrue(els.get(6).getAttribute("href").contains("ToManyViewer.css"));
    return doc;
  }

  /**
   * Tests the behavior of the tomany view block & component with few elements
   * (should result in a listing of each element).
   */
  public void testToManyViewer_fewElements() {
    assertEquals(_data.get(0).getName(), "Dali");
    Document doc = assertToManyHead();
    List<Element> els = TestUtils.DOMFindAll(doc.getRootElement(), "body/dl/dd/ul");
    // One for the paintingList property, and one for the paintings as a map
    // property.
    assertEquals(els.size(), 2);
    assertEquals(els.get(0).getChildren().size(), _data.get(0).getPaintingList().size());
    Iterator<Painting> it = _data.get(0).getPaintingList().iterator();
    for (Node n : els.get(0).getChildren()) {
      // Should be a li...
      Element el = (Element) n;
      assertEquals(el.getName(), "li");
      assertEquals(el.getChildMarkup().trim(), doc.getMarkupModel().encode(it.next().toString()));
    }
    // Now test the map...
    it = _data.get(0).getPaintingsByTitle().values().iterator();
    for (Node n : els.get(1).getChildren()) {
      Element el = (Element) n;
      assertEquals(el.getName(), "li");
      assertEquals(el.getChildMarkup().trim(), doc.getMarkupModel().encode(it.next().toString()));
    }
  }

  /**
   * Test what happens with lots of paintings. Currently, it should "kick over"
   * to generic descriptive text at 20 paintings.
   */
  @Test(dependsOnMethods = "testToManyViewer_fewElements")
  public void testToManyViewer_manyElements() {
    assertEquals(_data.get(0).getName(), "Dali");
    // Add 18 paintings, because "TestUtils.basicData" (called from setup)
    // creates two paintings for each artist.
    List<Painting> paintings = TestUtils.addPaintings(_data.get(0), 18, _provider.currentContext());
    for (Painting p : paintings) {
      _data.get(0).addToPaintingList(p);
    }
    _data.get(0).getObjectContext().commitChanges();
    Document doc = assertToManyHead();
    List<Element> els = TestUtils.DOMFindAll(doc.getRootElement(), "body/dl/dd");
    assertEquals(els.get(0).getChildMarkup().trim(), "20 associated items");
    assertEquals(els.get(1).getChildMarkup().trim(), "20 associated items");
  }

  /**
   * Test that CayenneSelect renders properly: option count (plus one blank
   * option) and option ordering by the entity's @Label.
   */
  public void testCayenneSelect() {
    Document doc = _tester.renderPage("TestSelect");

    // Verify the select list. Children include a leading blank option, hence
    // the "- 1".
    Element el = doc.getElementById("select_0");
    assertEquals(el.getChildren().size() - 1, _data.size());

    // We expect the list of items to be sorted by the @Label.
    Collections.sort(_data, new Comparator<Artist>() {
      public int compare(Artist o1, Artist o2) {
        return o1.getName().compareTo(o2.getName());
      }
    });
    Iterator<Node> children = el.getChildren().iterator();
    // Skip the first node: it's blank.
    children.next();
    for (Artist a : _data) {
      Element option = (Element) children.next();
      String val = option.getAttribute("value");
      Persistent obj = _encoder.toValue(val);
      assertEquals(obj, a, "Incorrect order of persistent objects!");
    }
  }
}