gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Zed Attack Proxy (ZAP) and its related class files.
*
* ZAP is an HTTP/HTTPS proxy for assessing web application security.
*
* Copyright 2012 The ZAP Development Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.zaproxy.zap.extension.httppanel.view.syntaxhighlight.components.split.request;
import java.awt.Component;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.parosproxy.paros.network.HttpMessage;
import org.parosproxy.paros.view.View;
import org.zaproxy.zap.extension.httppanel.Message;
import org.zaproxy.zap.extension.httppanel.view.impl.models.http.request.RequestHeaderStringHttpPanelViewModel;
import org.zaproxy.zap.extension.httppanel.view.syntaxhighlight.HttpPanelSyntaxHighlightTextArea;
import org.zaproxy.zap.extension.httppanel.view.syntaxhighlight.HttpPanelSyntaxHighlightTextView;
import org.zaproxy.zap.extension.httppanel.view.util.CaretVisibilityEnforcerOnFocusGain;
import org.zaproxy.zap.extension.httppanel.view.util.HttpTextViewUtils;
import org.zaproxy.zap.extension.search.SearchMatch;
import org.zaproxy.zap.model.DefaultTextHttpMessageLocation;
import org.zaproxy.zap.model.HttpMessageLocation;
import org.zaproxy.zap.model.MessageLocation;
import org.zaproxy.zap.model.TextHttpMessageLocation;
import org.zaproxy.zap.utils.DisplayUtils;
import org.zaproxy.zap.view.messagecontainer.http.SelectableContentHttpMessageContainer;
import org.zaproxy.zap.view.messagelocation.MessageLocationHighlight;
import org.zaproxy.zap.view.messagelocation.MessageLocationHighlightsManager;
import org.zaproxy.zap.view.messagelocation.MessageLocationProducerFocusListener;
import org.zaproxy.zap.view.messagelocation.MessageLocationProducerFocusListenerAdapter;
import org.zaproxy.zap.view.messagelocation.TextMessageLocationHighlight;
import org.zaproxy.zap.view.messagelocation.TextMessageLocationHighlightsManager;
public class HttpRequestHeaderPanelSyntaxHighlightTextView extends HttpPanelSyntaxHighlightTextView
implements SelectableContentHttpMessageContainer {
public static final String NAME = "HttpRequestHeaderSyntaxTextView";
private MessageLocationProducerFocusListenerAdapter focusListenerAdapter;
    /**
     * Creates the view for the request header, backed by the given model.
     *
     * <p>Installs a custom popup menu on the text area that first moves focus to the text area (so
     * selection-dependent menu items act on this view) and then shows ZAP's shared popup menu with
     * this view as the message-container invoker.
     *
     * @param model the model that provides the request header string shown in this view
     */
    public HttpRequestHeaderPanelSyntaxHighlightTextView(
            RequestHeaderStringHttpPanelViewModel model) {
        super(model);

        getHttpPanelTextArea()
                .setComponentPopupMenu(
                        new CustomPopupMenu() {

                            private static final long serialVersionUID = -426000345249750052L;

                            @Override
                            public void show(Component invoker, int x, int y) {
                                // Make sure the text area owns the focus before the menu is shown,
                                // otherwise menu items would query a stale selection.
                                if (!getHttpPanelTextArea().isFocusOwner()) {
                                    getHttpPanelTextArea().requestFocusInWindow();
                                }

                                // Show ZAP's global popup menu, using the enclosing view (a
                                // message container) as the invoker.
                                View.getSingleton()
                                        .getPopupMenu()
                                        .show(
                                                HttpRequestHeaderPanelSyntaxHighlightTextView.this,
                                                x,
                                                y);
                            }
                        });
    }
    /** Creates the concrete request-header text area used by this view. */
    @Override
    protected HttpPanelSyntaxHighlightTextArea createHttpPanelTextArea() {
        return new HttpRequestHeaderPanelSyntaxHighlightTextArea();
    }
    /** Covariant accessor: the text area created by this view is always the request-header one. */
    @Override
    protected HttpRequestHeaderPanelSyntaxHighlightTextArea getHttpPanelTextArea() {
        return (HttpRequestHeaderPanelSyntaxHighlightTextArea) super.getHttpPanelTextArea();
    }
    /**
     * Text area that displays the request header and implements selection, search, and highlight
     * behavior in header coordinates.
     *
     * <p>View offsets (positions in the displayed text) and header offsets (positions in the
     * {@code HttpRequestHeader} string) differ, so {@link HttpTextViewUtils} is used to convert
     * between them in both directions.
     */
    private static class HttpRequestHeaderPanelSyntaxHighlightTextArea
            extends HttpPanelSyntaxHighlightTextArea {

        private static final long serialVersionUID = -4532294585338584747L;

        // private static final String HTTP_REQUEST_HEADER = "HTTP Request Header";
        // private static final String SYNTAX_STYLE_HTTP_REQUEST_HEADER =
        // "text/http-request-header";

        // Shared by all instances; lazily created in getTokenMakerFactory().
        private static RequestHeaderTokenMakerFactory tokenMakerFactory = null;

        // Keeps the caret visible when the text area gains focus, but only while a message is set.
        private CaretVisibilityEnforcerOnFocusGain caretVisibilityEnforcer;

        public HttpRequestHeaderPanelSyntaxHighlightTextArea() {
            // addSyntaxStyle(HTTP_REQUEST_HEADER, SYNTAX_STYLE_HTTP_REQUEST_HEADER);
            // setSyntaxEditingStyle(SYNTAX_STYLE_HTTP_REQUEST_HEADER);

            caretVisibilityEnforcer = new CaretVisibilityEnforcerOnFocusGain(this);
        }

        @Override
        public String getName() {
            return NAME;
        }

        @Override
        public HttpMessage getMessage() {
            return (HttpMessage) super.getMessage();
        }

        @Override
        public void setMessage(Message aMessage) {
            super.setMessage(aMessage);

            // Only enforce caret visibility while there is an actual message to show.
            caretVisibilityEnforcer.setEnforceVisibilityOnFocusGain(aMessage != null);
        }

        /**
         * Returns the current selection as a REQUEST_HEADER text location.
         *
         * <p>If the view-to-header conversion fails, a zero-length location at offset 0 is
         * returned; if the selection is empty, a zero-length location at the caret position is
         * returned; otherwise the location carries the selected header substring.
         */
        protected MessageLocation getSelection() {
            int[] position =
                    HttpTextViewUtils.getViewToHeaderPosition(
                            this, getSelectionStart(), getSelectionEnd());
            if (position.length == 0) {
                return new DefaultTextHttpMessageLocation(
                        HttpMessageLocation.Location.REQUEST_HEADER, 0);
            }

            int start = position[0];
            int end = position[1];
            if (start == end) {
                return new DefaultTextHttpMessageLocation(
                        HttpMessageLocation.Location.REQUEST_HEADER, start);
            }
            return new DefaultTextHttpMessageLocation(
                    HttpMessageLocation.Location.REQUEST_HEADER,
                    start,
                    end,
                    getMessage().getRequestHeader().toString().substring(start, end));
        }

        /** Creates the manager used to track the text highlights of this area. */
        protected MessageLocationHighlightsManager create() {
            return new TextMessageLocationHighlightsManager();
        }

        /**
         * Applies the given highlight to the given header location, converting header offsets to
         * view offsets first.
         *
         * @return the highlight with its reference set, or {@code null} if there is no message or
         *     the position could not be converted
         */
        protected MessageLocationHighlight highlightImpl(
                TextHttpMessageLocation textLocation, TextMessageLocationHighlight textHighlight) {
            if (getMessage() == null) {
                return null;
            }

            int[] pos =
                    HttpTextViewUtils.getHeaderToViewPosition(
                            this,
                            getMessage().getRequestHeader().toString(),
                            textLocation.getStart(),
                            textLocation.getEnd());
            if (pos.length == 0) {
                return null;
            }
            textHighlight.setHighlightReference(highlight(pos[0], pos[1], textHighlight));
            return textHighlight;
        }

        /**
         * Collects all matches of the given pattern as header-positioned search matches.
         *
         * <p>Stops early if a match cannot be converted to header coordinates.
         */
        @Override
        public void search(Pattern p, List<SearchMatch> matches) {
            Matcher m = p.matcher(getText());
            while (m.find()) {
                int[] position =
                        HttpTextViewUtils.getViewToHeaderPosition(this, m.start(), m.end());
                if (position.length == 0) {
                    return;
                }
                matches.add(
                        new SearchMatch(
                                SearchMatch.Location.REQUEST_HEAD, position[0], position[1]));
            }
        }

        /** Highlights the given search match, ignoring matches that are not in the request header. */
        @Override
        public void highlight(SearchMatch sm) {
            if (!SearchMatch.Location.REQUEST_HEAD.equals(sm.getLocation())) {
                return;
            }

            int[] pos =
                    HttpTextViewUtils.getHeaderToViewPosition(
                            this,
                            sm.getMessage().getRequestHeader().toString(),
                            sm.getStart(),
                            sm.getEnd());
            if (pos.length == 0) {
                return;
            }
            highlight(pos[0], pos[1]);
        }

        // NOTE(review): the instance-level synchronized guards a static field; two instances use
        // different locks, so this relies on all instances being created on the EDT — confirm.
        @Override
        protected synchronized CustomTokenMakerFactory getTokenMakerFactory() {
            if (tokenMakerFactory == null) {
                tokenMakerFactory = new RequestHeaderTokenMakerFactory();
            }
            return tokenMakerFactory;
        }

        /** Token maker factory placeholder; the request-header lexer mapping is disabled. */
        private static class RequestHeaderTokenMakerFactory extends CustomTokenMakerFactory {

            public RequestHeaderTokenMakerFactory() {
                // String pkg = "org.zaproxy.zap.extension.httppanel.view.text.lexers.";
                // putMapping(SYNTAX_STYLE_HTTP_REQUEST_HEADER, pkg +
                // "HttpRequestHeaderTokenMaker");
            }
        }
    }
    @Override
    public String getName() {
        return NAME;
    }

    /** The type of messages this container holds. */
    @Override
    public Class<HttpMessage> getMessageClass() {
        return HttpMessage.class;
    }

    /** The type of message locations this container produces. */
    @Override
    public Class<? extends MessageLocation> getMessageLocationClass() {
        return TextHttpMessageLocation.class;
    }

    /** Returns the current text selection as a request-header message location. */
    @Override
    public MessageLocation getSelection() {
        return getHttpPanelTextArea().getSelection();
    }

    /** Creates a highlights manager for this container's text area. */
    @Override
    public MessageLocationHighlightsManager create() {
        return getHttpPanelTextArea().create();
    }
    /**
     * Highlights the given location with the default highlight colour.
     *
     * @return the applied highlight, or {@code null} if the location is not supported
     */
    @Override
    public MessageLocationHighlight highlight(MessageLocation location) {
        if (!supports(location)) {
            return null;
        }
        TextHttpMessageLocation textLocation = (TextHttpMessageLocation) location;

        return getHttpPanelTextArea()
                .highlightImpl(
                        textLocation,
                        new TextMessageLocationHighlight(DisplayUtils.getHighlightColor()));
    }

    /**
     * Highlights the given location with the given highlight.
     *
     * @return the applied highlight, or {@code null} if the location or highlight type is not
     *     supported
     */
    @Override
    public MessageLocationHighlight highlight(
            MessageLocation location, MessageLocationHighlight highlight) {
        if (!supports(location) || !(highlight instanceof TextMessageLocationHighlight)) {
            return null;
        }
        TextHttpMessageLocation textLocation = (TextHttpMessageLocation) location;
        TextMessageLocationHighlight textHighlight = (TextMessageLocationHighlight) highlight;

        return getHttpPanelTextArea().highlightImpl(textLocation, textHighlight);
    }

    /** Removes a previously applied text highlight; other highlight types are ignored. */
    @Override
    public void removeHighlight(
            MessageLocation location, MessageLocationHighlight highlightReference) {
        if (!(highlightReference instanceof TextMessageLocationHighlight)) {
            return;
        }
        getHttpPanelTextArea()
                .removeHighlight(
                        ((TextMessageLocationHighlight) highlightReference)
                                .getHighlightReference());
    }
@Override
public boolean supports(MessageLocation location) {
if (!(location instanceof TextHttpMessageLocation)) {
return false;
}
return ((TextHttpMessageLocation) location).getLocation()
== TextHttpMessageLocation.Location.REQUEST_HEADER;
}
@Override
public boolean supports(Class<? extends MessageLocation> classLocation) {
return (TextHttpMessageLocation.class.isAssignableFrom(classLocation));
}
    @Override
    public void addFocusListener(MessageLocationProducerFocusListener focusListener) {
        getFocusListenerAdapter().addFocusListener(focusListener);
    }

    /**
     * Removes the given focus listener; when the last listener is removed the adapter is detached
     * from the text area and discarded.
     */
    @Override
    public void removeFocusListener(MessageLocationProducerFocusListener focusListener) {
        getFocusListenerAdapter().removeFocusListener(focusListener);

        if (!getFocusListenerAdapter().hasFocusListeners()) {
            getHttpPanelTextArea().removeFocusListener(focusListenerAdapter);
            focusListenerAdapter = null;
        }
    }

    @Override
    public HttpMessage getMessage() {
        return getHttpPanelTextArea().getMessage();
    }

    @Override
    public Component getComponent() {
        return getHttpPanelTextArea();
    }

    /** The container is empty when no message is set. */
    @Override
    public boolean isEmpty() {
        return getHttpPanelTextArea().getMessage() == null;
    }

    /** Lazily creates the focus listener adapter and attaches it to the text area. */
    private MessageLocationProducerFocusListenerAdapter getFocusListenerAdapter() {
        if (focusListenerAdapter == null) {
            focusListenerAdapter = new MessageLocationProducerFocusListenerAdapter(this);
            getHttpPanelTextArea().addFocusListener(focusListenerAdapter);
        }
        return focusListenerAdapter;
    }
}
| |
/*
* Copyright 2017 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.cdep.cdep.utils;
import io.cdep.annotations.NotNull;
import io.cdep.annotations.Nullable;
import io.cdep.cdep.Coordinate;
import io.cdep.cdep.yml.cdepmanifest.*;
import io.cdep.cdep.yml.cdepmanifest.v3.V3Reader;
import org.yaml.snakeyaml.Yaml;
import org.yaml.snakeyaml.constructor.Constructor;
import org.yaml.snakeyaml.error.YAMLException;
import org.yaml.snakeyaml.nodes.Node;
import java.io.ByteArrayInputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.*;
import static io.cdep.cdep.Coordinate.EMPTY_COORDINATE;
import static io.cdep.cdep.utils.Invariant.fail;
import static io.cdep.cdep.utils.Invariant.require;
public class CDepManifestYmlUtils {
  /** Serializes the manifest back to its canonical YAML string form. */
  @NotNull
  public static String convertManifestToString(@NotNull CDepManifestYml manifest) {
    return CreateCDepManifestYmlString.serialize(manifest);
  }
@NotNull
public static CDepManifestYml convertStringToManifest(@NotNull String url, @NotNull String content) {
Invariant.registerYamlFile(url);
Yaml yaml = new Yaml(new Constructor(CDepManifestYml.class));
CDepManifestYml manifest;
byte[] bytes = content.getBytes(StandardCharsets.UTF_8);
try {
manifest = (CDepManifestYml) yaml.load(new ByteArrayInputStream(bytes));
// Try to read current version
if (manifest != null) {
manifest.sourceVersion = CDepManifestYmlVersion.vlatest;
}
} catch (YAMLException e) {
try {
manifest = V3Reader.convertStringToManifest(content);
} catch (YAMLException e2) {
if (!tryCreateSensibleParseError(e, 0)) {
// If older readers also couldn't read it then report the original exception.
require(false, e.toString());
}
return new CDepManifestYml(EMPTY_COORDINATE);
}
}
require(manifest != null, "Manifest was empty");
assert manifest != null;
manifest = new ConvertNullToDefaultRewriter().visitCDepManifestYml(manifest);
Node nodes = yaml.compose(new InputStreamReader(new ByteArrayInputStream(bytes)));
SnakeYmlUtils.mapAndRegisterNodes(url, manifest, nodes);
return manifest;
}
/*
* Attempt to give a better error message for common failures.
* YAMLException doesn't expose anything but a cause and a message so this function uses those and
* does the best job it can.
*/
private static boolean tryCreateSensibleParseError(YAMLException e, int depth) {
if (depth > 20) {
return false;
}
if (e != null) {
if (e.getCause() == null) {
if (e.getMessage().contains("Unable to find property 'lib'")) {
require(false, "Could not parse manifest. " +
"The field 'lib' could not be created. Should it be 'libs'?");
return true;
}
} else {
return tryCreateSensibleParseError((YAMLException) e.getCause(), depth + 1);
}
}
return false;
}
  /** Validates the manifest, reporting problems through Invariant (see {@link Checker}). */
  public static void checkManifestSanity(@NotNull CDepManifestYml cdepManifestYml) {
    new Checker().visit(cdepManifestYml, CDepManifestYml.class);
  }
@NotNull
public static List<HardNameDependency> getTransitiveDependencies(@NotNull CDepManifestYml cdepManifestYml) {
List<HardNameDependency> dependencies = new ArrayList<>();
Collections.addAll(dependencies, cdepManifestYml.dependencies);
return dependencies;
}
  /**
   * Read-only visitor that sanity-checks a parsed manifest, reporting problems through
   * {@code Invariant.require}/{@code fail} so messages carry the registered source file.
   */
  public static class Checker extends CDepManifestYmlReadonlyVisitor {
    // Archive file names already seen (lower-cased), used to detect duplicate archive references.
    @NotNull
    private final Set<String> filesSeen = new LinkedHashSet<>();
    // Key "abi-platform-runtime-" -> archive; used to detect Android archives a build could not
    // tell apart from the information in the manifest alone.
    @NotNull
    private final Map<String, AndroidArchive> distinguishableAndroidArchives = new LinkedHashMap<>();
    // Coordinate of the manifest being checked; set in visitCDepManifestYml, used in messages.
    @Nullable
    private Coordinate coordinate = null;
    // Schema version the manifest was read with; set in visitCDepManifestYml.
    @Nullable
    private CDepManifestYmlVersion sourceVersion = null;

    /** Maps null to the wildcard "*" so distinguishability keys are always well-formed. */
    @Nullable
    private static String nullToStar(@Nullable String string) {
      if (string == null) {
        return "*";
      }
      return string;
    }
    /**
     * Checks string-valued fields: "file" entries must be unique (for v2+ manifests) and
     * "sha256" values must be non-empty.
     */
    @Override
    public void visitString(@Nullable String name, @NotNull String node) {
      if (name == null) {
        return;
      }
      if (name.equals("file")) {
        // visitCDepManifestYml runs before any field visit, so sourceVersion is set by now.
        assert sourceVersion != null;
        // The duplicate-file check only applies to v2+ manifests.
        if (sourceVersion.ordinal() > CDepManifestYmlVersion.v1.ordinal()) {
          require(!filesSeen.contains(node.toLowerCase()),
              "Package '%s' contains multiple references to the same archive file '%s'",
              coordinate,
              node);
        }
        filesSeen.add(node.toLowerCase());
      } else if (name.equals("sha256")) {
        if (node.length() == 0) {
          require(false,
              "Package '%s' contains null or empty sha256",
              coordinate);
        }
      }
    }
    /** Each dependency must declare a 'compile' coordinate and a sha256. */
    @Override
    public void visitHardNameDependency(@Nullable String name, @NotNull HardNameDependency value) {
      require(!value.compile.isEmpty(), "Package '%s' contains dependency with no 'compile' constant", coordinate);
      require(!value.sha256.isEmpty(), "Package '%s' contains dependency '%s' with no sha256 constant",
          coordinate,
          value.compile);
      super.visitHardNameDependency(name, value);
    }

    /**
     * Entry point of the check: records coordinate/version for use in later messages, visits the
     * rest of the manifest, then verifies that at least one file was declared.
     */
    @Override
    public void visitCDepManifestYml(@Nullable String name, @NotNull CDepManifestYml value) {
      coordinate = value.coordinate;
      sourceVersion = value.sourceVersion;
      require(!coordinate.equals(EMPTY_COORDINATE), "Manifest was missing coordinate");
      super.visitCDepManifestYml(name, value);
      require(!filesSeen.isEmpty(), "Package '%s' does not contain any files", coordinate);
    }
    /** A top-level archive (if present) must have file, sha256, a non-zero size, and an include path. */
    @Override
    public void visitArchive(@Nullable String name, @Nullable Archive value) {
      if (value == null) {
        return;
      }
      require(value.file.length() != 0, "Archive %s is missing file", coordinate);
      require(value.sha256.length() != 0, "Archive %s is missing sha256", coordinate);
      require(value.size != 0, "Archive %s is missing size or it is zero", coordinate);
      require(value.include.length() != 0, "Archive %s is missing include", coordinate);
      super.visitArchive(name, value);
    }
    /**
     * Android archives must have file/sha256/size, and each (abi, platform, runtime) combination
     * may appear only once — otherwise a build could not choose between the archives.
     */
    @Override
    public void visitAndroidArchive(@Nullable String name, @NotNull AndroidArchive value) {
      require(!value.file.isEmpty(), "Android archive %s is missing file", coordinate);
      require(!value.sha256.isEmpty(), "Android archive %s is missing sha256", coordinate);
      require(value.size != 0, "Android archive %s is missing size or it is zero", coordinate);
      // Have we seen another Archive that is indistinguishable from this one?
      String key = nullToStar(value.abi.name) + "-";
      key += nullToStar(value.platform) + "-";
      key += nullToStar(value.runtime) + "-";
      AndroidArchive other = distinguishableAndroidArchives.get(key);
      if (other != null) {
        require(false,
            "Android archive %s file %s is indistinguishable at build time from %s given the information in the manifest",
            coordinate,
            archiveName(value),
            archiveName(other));
      }
      distinguishableAndroidArchives.put(key, value);
      super.visitAndroidArchive(name, value);
    }
    /** The ABI must be one of the known Android ABIs (or the explicit empty ABI). */
    @Override
    public void visitAndroidABI(@Nullable String name, @NotNull AndroidABI value) {
      require(value.equals(AndroidABI.EMPTY_ABI)
          || AndroidABI.values().contains(value), "Unknown Android ABI '%s'", value);
    }
@NotNull
private String archiveName(@NotNull AndroidArchive value) {
return value.file.isEmpty() ? "<unknown>" : value.file;
}
    /** iOS archives must have file, sha256, and a non-zero size. */
    @Override
    public void visitiOSArchive(@Nullable String name, @NotNull iOSArchive value) {
      require(value.file.length() != 0, "iOS archive %s is missing file", coordinate);
      require(value.sha256.length() != 0, "iOS archive %s is missing sha256", coordinate);
      require(value.size != 0, "iOS archive %s is missing size or it is zero", coordinate);
      super.visitiOSArchive(name, value);
    }

    /** Linux archives must have file, sha256, and a non-zero size. */
    @Override
    public void visitLinuxArchive(@Nullable String name, @NotNull LinuxArchive value) {
      require(value.file.length() != 0, "Linux archive %s is missing file", coordinate);
      require(value.sha256.length() != 0, "Linux archive %s is missing sha256", coordinate);
      require(value.size != 0, "Linux archive %s is missing size or it is zero", coordinate);
      super.visitLinuxArchive(name, value);
    }
    /**
     * Validates the iOS section: each archive needs file/sha256/size/sdk/platform and every
     * library must be a static archive (.a).
     */
    @Override
    public void visitiOS(@Nullable String name, @NotNull iOS value) {
      if (value.archives != null) {
        for (iOSArchive archive : value.archives) {
          require(!archive.file.isEmpty(), "Package '%s' has missing ios.archive.file", coordinate);
          require(!archive.sha256.isEmpty(), "Package '%s' has missing ios.archive.sha256 for '%s'", coordinate, archive.file);
          require(archive.size != 0L, "Package '%s' has missing ios.archive.size for '%s'", coordinate, archive.file);
          require(!archive.sdk.isEmpty(), "Package '%s' has missing ios.archive.sdk for '%s'", coordinate, archive.file);
          require(archive.platform != null, "Package '%s' has missing ios.archive.platform for '%s'", coordinate, archive.file);
          for (String lib : archive.libs) {
            require(lib.endsWith(".a"),
                "Package '%s' has non-static iOS library file name '%s'. Should end in '.a'",
                coordinate, lib);
          }
        }
      }
      super.visitiOS(name, value);
    }
@Override
public void visitLinux(@Nullable String name, @NotNull Linux linux) {
require(linux.archives.length <= 1, "Package '%s' has multiple linux archives. Only one is allowed.", coordinate);
for (LinuxArchive archive : linux.archives) {
for (String lib : archive.libs) {
require(lib.endsWith(".a"),
"Package '%s' has non-static android library file name '%s'. Should end in '.a.'",
coordinate, lib);
}
}
super.visitLinux(name, linux);
}
    /**
     * Validates the Android section: each archive needs file/sha256/size, every library must be a
     * static archive (.a), and a declared runtime must be one of c++, stlport, gnustl.
     */
    @Override
    public void visitAndroid(@Nullable String name, @NotNull Android value) {
      if (value.archives != null) {
        for (AndroidArchive archive : value.archives) {
          require(!archive.file.isEmpty(), "Package '%s' has missing android.archive.file", coordinate);
          require(!archive.sha256.isEmpty(),
              "Package '%s' has missing android.archive.sha256 for '%s'",
              coordinate,
              archive.file);
          require(archive.size != 0, "Package '%s' has missing or zero android.archive.size for '%s'", coordinate, archive.file);
          for (String lib : archive.libs) {
            require(lib.endsWith(".a"),
                "Package '%s' has non-static android library file name '%s'. Should end in '.a'",
                coordinate, lib);
            // NOTE(review): the runtime check is nested inside the libs loop, so it runs once
            // per lib and is skipped entirely for archives with no libs — confirm whether that
            // is intended or whether it should run once per archive.
            if (!archive.runtime.isEmpty()) {
              switch (archive.runtime) {
                case "c++":
                case "stlport":
                case "gnustl":
                  break;
                default:
                  fail("Package '%s' has unexpected android runtime '%s'. Allowed: c++, stlport, gnustl",
                      coordinate,
                      archive.runtime);
              }
            }
          }
        }
      }
      super.visitAndroid(name, value);
    }
    /** The coordinate must have groupId, artifactId, and a well-formed version. */
    @Override
    public void visitCoordinate(@Nullable String name, @NotNull Coordinate value) {
      require(value.groupId.length() > 0, "Manifest was missing coordinate.groupId");
      require(value.artifactId.length() > 0, "Manifest was missing coordinate.artifactId");
      require(value.version.value.length() > 0, "Manifest was missing coordinate.version");
      // null diagnosis means the version is valid; continue visiting.
      String versionDiagnosis = VersionUtils.checkVersion(value.version);
      if (versionDiagnosis == null) {
        super.visitCoordinate(name, value);
        return;
      }
      fail("Package '%s' has malformed version, %s", coordinate, versionDiagnosis);
    }
}
}
| |
/**********************************************************************************
* $URL:$
* $Id:$
***********************************************************************************
*
* Copyright (c) 2008 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.calendar.impl;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.codec.binary.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.sakaiproject.authz.api.SecurityService;
import org.sakaiproject.calendar.api.Calendar;
import org.sakaiproject.calendar.api.*;
import org.sakaiproject.calendar.api.CalendarEvent.EventAccess;
import org.sakaiproject.component.api.ServerConfigurationService;
import org.sakaiproject.entity.api.EntityManager;
import org.sakaiproject.entity.api.Reference;
import org.sakaiproject.entity.api.ResourceProperties;
import org.sakaiproject.entity.api.ResourcePropertiesEdit;
import org.sakaiproject.exception.*;
import org.sakaiproject.id.api.IdManager;
import org.sakaiproject.javax.Filter;
import org.sakaiproject.memory.api.MemoryService;
import org.sakaiproject.memory.api.SimpleConfiguration;
import org.sakaiproject.site.api.Site;
import org.sakaiproject.site.api.SiteService;
import org.sakaiproject.site.api.ToolConfiguration;
import org.sakaiproject.time.api.Time;
import org.sakaiproject.time.api.TimeRange;
import org.sakaiproject.time.api.TimeService;
import org.sakaiproject.tool.api.Placement;
import org.sakaiproject.tool.api.SessionManager;
import org.sakaiproject.tool.api.ToolManager;
import org.sakaiproject.util.BaseResourcePropertiesEdit;
import org.sakaiproject.util.FormattedText;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.*;
public class BaseExternalCalendarSubscriptionService implements
ExternalCalendarSubscriptionService
{
/** Logging */
private static Logger m_log = LoggerFactory.getLogger(BaseExternalCalendarSubscriptionService.class);
/** Schedule tool ID */
private final static String SCHEDULE_TOOL_ID = "sakai.schedule";
/** Default context for institutional subscriptions */
private final static String INSTITUTIONAL_CONTEXT = "!worksite";
/** Default context for user-provided subscriptions */
private final static String USER_CONTEXT = "!user";
/** Default connect timeout when retrieving external subscriptions */
private final static int TIMEOUT = 30000;
/** iCal external subscription enable flag */
private boolean enabled = false;
/** merge iCal external subscriptions from other sites into My Workspace? */
private boolean mergeIntoMyworkspace = true;
/** Column map for iCal processing */
private Map columnMap = null;
/** Cache map of Institutional Calendars: <String url, Calendar cal> */
private SubscriptionCache institutionalSubscriptionCache = null;
/** Cache map of user Calendars: <String url, Calendar cal> */
private SubscriptionCache usersSubscriptionCache = null;
// ######################################################
// Spring services
// ######################################################
/** Dependency: CalendarService. */
// We depend on the BaseCalendarService so we can call methods outside the calendar service API.
protected BaseCalendarService m_calendarService = null;
/** Dependency: SecurityService */
protected SecurityService m_securityService = null;
/** Dependency: SessionManager */
protected SessionManager m_sessionManager = null;
/** Dependency: TimeService */
protected TimeService m_timeService = null;
/** Dependency: ToolManager */
protected ToolManager m_toolManager = null;
/** Dependency: IdManager. */
protected IdManager m_idManager;
/** Dependency: CalendarImporterService. */
protected CalendarImporterService m_importerService = null;
/** Dependency: ServerConfigurationService. */
protected ServerConfigurationService m_configurationService = null;
/** Dependency: EntityManager. */
protected EntityManager m_entityManager = null;
/** Dependency: SiteService. */
protected SiteService m_siteService = null;
protected MemoryService m_memoryService = null;
	/** Dependency: MemoryService (creates the subscription caches). */
	public void setMemoryService(MemoryService memoryService) {
		this.m_memoryService = memoryService;
	}

	/** Dependency: the BaseCalendarService implementation (non-API methods are needed). */
	public void setCalendarService(BaseCalendarService service)
	{
		this.m_calendarService = service;
	}

	/** Dependency: ServerConfigurationService. */
	public void setServerConfigurationService(ServerConfigurationService service)
	{
		this.m_configurationService = service;
	}

	/** Dependency: CalendarImporterService (parses iCal data). */
	public void setCalendarImporterService(CalendarImporterService service)
	{
		this.m_importerService = service;
	}

	/** Dependency: EntityManager (parses entity references). */
	public void setEntityManager(EntityManager service)
	{
		this.m_entityManager = service;
	}

	/** Dependency: SiteService. */
	public void setSiteService(SiteService service)
	{
		this.m_siteService = service;
	}

	/**
	 * Dependency: SecurityService.
	 *
	 * @param securityService
	 *        The SecurityService.
	 */
	public void setSecurityService(SecurityService securityService)
	{
		m_securityService = securityService;
	}

	/**
	 * Dependency: SessionManager.
	 * @param sessionManager
	 *        The SessionManager.
	 */
	public void setSessionManager(SessionManager sessionManager)
	{
		this.m_sessionManager = sessionManager;
	}

	/**
	 * Dependency: TimeService.
	 * @param timeService
	 *        The TimeService.
	 */
	public void setTimeService(TimeService timeService)
	{
		this.m_timeService = timeService;
	}

	/**
	 * Dependency: ToolManager.
	 * @param toolManager
	 *        The ToolManager.
	 */
	public void setToolManager(ToolManager toolManager)
	{
		this.m_toolManager = toolManager;
	}

	/**
	 * Dependency: IdManager.
	 * @param idManager
	 *        The IdManager.
	 */
	public void setIdManager(IdManager idManager)
	{
		this.m_idManager = idManager;
	}

	/** Timer used to refresh institutional subscriptions in the background. */
	protected Timer m_timer = null;
public void init()
{
// external calendar subscriptions: enable?
enabled = m_configurationService.getBoolean(SAK_PROP_EXTSUBSCRIPTIONS_ENABLED, false);
mergeIntoMyworkspace = m_configurationService.getBoolean(SAK_PROP_EXTSUBSCRIPTIONS_MERGEINTOMYWORKSPACE, true);
m_log.info("init(): enabled: " + enabled + ", merge from other sites into My Workspace? "+mergeIntoMyworkspace);
if (enabled)
{
// INIT the caches
long cacheRefreshRate = 43200; // 12 hours
SimpleConfiguration cacheConfig = new SimpleConfiguration(1000, cacheRefreshRate, 0); // 12 hours
cacheConfig.setStatisticsEnabled(true);
institutionalSubscriptionCache = new SubscriptionCache(
m_memoryService.createCache("org.sakaiproject.calendar.impl.BaseExternalCacheSubscriptionService.institutionalCache", cacheConfig));
usersSubscriptionCache = new SubscriptionCache(
m_memoryService.createCache("org.sakaiproject.calendar.impl.BaseExternalCacheSubscriptionService.userCache", cacheConfig));
// TODO replace this with a real solution for when the caches are distributed by disabling the timer and using jobscheduler
if (institutionalSubscriptionCache.getCache().isDistributed()) {
m_log.error(institutionalSubscriptionCache.getCache().getName()+" is distributed but calendar subscription caches have a local timer refresh which means they will cause cache replication storms once every "+cacheRefreshRate+" seconds, do NOT distribute this cache");
}
if (usersSubscriptionCache.getCache().isDistributed()) {
m_log.error(usersSubscriptionCache.getCache().getName()+" is distributed but calendar subscription caches have a local timer refresh which means they will cause cache replication storms once every "+cacheRefreshRate+" seconds, do NOT distribute this cache");
}
m_timer = new Timer(); // init timer
// iCal column map
try
{
columnMap = m_importerService
.getDefaultColumnMap(CalendarImporterService.ICALENDAR_IMPORT);
}
catch (ImportException e1)
{
m_log
.error("Unable to get column map for ICal import. External subscriptions will be disabled.");
enabled = false;
return;
}
// load institutional calendar subscriptions as timer tasks, this is so that
// we don't slow up the loading of sakai.
for (final InsitutionalSubscription sub: getInstitutionalSubscriptions()) {
m_timer.schedule(new TimerTask() {
@Override
public void run() {
String reference = calendarSubscriptionReference(INSTITUTIONAL_CONTEXT, getIdFromSubscriptionUrl(sub.url));
getCalendarSubscription(reference);
}
}, 0, cacheRefreshRate);
}
}
}
	/** Component shutdown hook. */
	public void destroy()
	{
		// Nothing to clean up for now.
	}

	/** Whether external calendar subscriptions are enabled (from configuration, or overridden). */
	public boolean isEnabled()
	{
		return enabled;
	}

	public void setEnabled(boolean enabled)
	{
		this.enabled = enabled;
	}
// ######################################################
// PUBLIC methods
// ######################################################
/*
* (non-Javadoc)
*
* @see org.sakaiproject.calendar.api.ExternalCalendarSubscriptionService#calendarSubscriptionReference(java.lang.String,
* java.lang.String)
*/
	/** Builds a calendar subscription reference for the given context and subscription id. */
	public String calendarSubscriptionReference(String context, String id)
	{
		return BaseExternalSubscription.calendarSubscriptionReference(context, id);
	}
/*
* (non-Javadoc)
*
* @see org.sakaiproject.calendar.impl.ExternalCalendarSubscriptionService#getCalendarSubscription(java.lang.String)
*/
public Calendar getCalendarSubscription(String reference)
{
if (!isEnabled() || reference == null) return null;
// Get Reference and Subscription URL
Reference _ref = m_entityManager.newReference(reference);
String subscriptionUrl = getSubscriptionUrlFromId(_ref.getId());
if (subscriptionUrl == null || subscriptionUrl.equals("null")) return null;
m_log.debug("ExternalCalendarSubscriptionService.getCalendarSubscription("
+ reference + ")");
m_log.debug(" |-> subscriptionUrl: " + subscriptionUrl);
ExternalSubscription subscription = getExternalSubscription(subscriptionUrl,
_ref.getContext());
m_log.debug(" |-> Subscription is " + subscription);
if (subscription != null)
{
m_log.debug(" |-> Calendar is " + subscription.getCalendar());
return subscription.getCalendar();
}
else
{
m_log.debug(" |-> Calendar is NULL");
return null;
}
}
	/**
	 * Gets the subscription for the given URL, loading and caching it on a cache miss.
	 *
	 * @param subscriptionUrl the external calendar URL
	 * @param context the context (site id) the subscription is used in
	 */
	private ExternalSubscription getExternalSubscription(String subscriptionUrl, String context) {
		// Decide which cache to use: institutional URLs go into the institutional cache.
		SubscriptionCache cache = (getInstitutionalSubscription(subscriptionUrl) != null)? institutionalSubscriptionCache : usersSubscriptionCache;
		ExternalSubscription subscription = cache.get(subscriptionUrl);
		// Did we get it?
		if (subscription == null)
		{
			subscription = loadCalendarSubscriptionFromUrl(subscriptionUrl, context);
			// NOTE(review): loadCalendarSubscriptionFromUrl may return null on failure — confirm
			// that SubscriptionCache.put tolerates a null subscription.
			cache.put(subscription);
		}
		return subscription;
	}
/**
 * Collects the subscription channel references configured on a set of channels,
 * de-duplicated by subscription id.
 * <p>
 * On a "My Workspace" site (unless merging is enabled and the user is not a
 * super user) only the user's own primary calendar is consulted, not the
 * supplied merged channels.
 *
 * @param primaryCalendarReference the user's primary calendar reference.
 * @param channels the candidate channel references (may be null → treated as empty).
 * @return the set of distinct subscription channel references.
 */
public Set<String> getCalendarSubscriptionChannelsForChannels(
		String primaryCalendarReference,
		Collection<Object> channels)
{
	Set<String> subscriptionChannels = new HashSet<String>();
	Set<String> subscriptionUrlsAdded = new HashSet<String>();
	// Work on a local collection instead of reassigning the parameter,
	// and defend against a null collection from the caller.
	Collection<Object> channelsToScan;
	if (isMyWorkspace(primaryCalendarReference)
			&& (!mergeIntoMyworkspace || m_securityService.isSuperUser()))
	{
		channelsToScan = new ArrayList<Object>();
		channelsToScan.add(primaryCalendarReference);
	}
	else
	{
		channelsToScan = (channels != null) ? channels : new ArrayList<Object>();
	}
	for (Object channel : channelsToScan)
	{
		Set<String> channelSubscriptions = getCalendarSubscriptionChannelsForChannel((String) channel);
		for (String channelSub : channelSubscriptions)
		{
			// De-duplicate by subscription id so the same feed is added only once.
			Reference ref = m_entityManager.newReference(channelSub);
			if (!subscriptionUrlsAdded.contains(ref.getId()))
			{
				subscriptionChannels.add(channelSub);
				subscriptionUrlsAdded.add(ref.getId());
			}
		}
	}
	return subscriptionChannels;
}
/*
* (non-Javadoc)
*
* @see org.sakaiproject.calendar.impl.ExternalCalendarSubscriptionService#getCalendarSubscriptionChannelsForSite()
*/
/**
 * Reads the subscription channel references configured for a single channel.
 * Subscriptions are stored as a delimited list in the Schedule tool placement
 * configuration of the channel's site.
 *
 * @param reference the channel reference.
 * @return the set of subscription references (empty when disabled, reference is
 *         null, or the site cannot be found).
 */
public Set<String> getCalendarSubscriptionChannelsForChannel(String reference)
{
	Set<String> channels = new HashSet<String>();
	if (!isEnabled() || reference == null) return channels;

	// Resolve the site that owns this channel.
	Reference ref = m_entityManager.newReference(reference);
	Site site;
	try
	{
		site = m_siteService.getSite(ref.getContext());
	}
	catch (IdUnusedException e)
	{
		m_log
				.error("ExternalCalendarSubscriptionService.getCalendarSubscriptionChannelsForChannel(): IdUnusedException for context in reference: "
						+ reference);
		return channels;
	}

	ToolConfiguration tc = site.getToolForCommonId(SCHEDULE_TOOL_ID);
	Properties config = (tc == null) ? null : tc.getConfig();
	if (tc != null && config != null)
	{
		String prop = config.getProperty(TC_PROP_SUBCRIPTIONS);
		if (prop != null)
		{
			// Each entry is "<reference>[<name-delim><name>]"; only the reference matters here.
			for (String entry : prop.split(SUBS_REF_DELIMITER))
			{
				channels.add(entry.split(SUBS_NAME_DELIMITER)[0]);
			}
		}
	}
	return channels;
}
/**
 * Returns all configured institutional subscriptions, contextualised to the
 * site of the given calendar reference.
 *
 * @param reference a calendar reference used only to derive the site context.
 * @return the set of institutional subscriptions (empty when the service is
 *         disabled or the reference is null).
 */
public Set<ExternalSubscription> getAvailableInstitutionalSubscriptionsForChannel(
		String reference)
{
	Set<ExternalSubscription> subscriptions = new HashSet<ExternalSubscription>();
	if (!isEnabled() || reference == null) return subscriptions;
	Reference ref = m_entityManager.newReference(reference);
	// If the cache has been flushed then we may need to reload it.
	for (InsitutionalSubscription sub : getInstitutionalSubscriptions()) {
		// Need to have way to load these.
		ExternalSubscription subscription = getExternalSubscription(sub.url, ref.getContext());
		if (subscription != null) {
			// NOTE(review): setContext(...) and setCalendar(null) mutate the very
			// instance that getExternalSubscription may have stored in a shared
			// cache — confirm callers never rely on the cached calendar afterwards.
			subscription.setContext(ref.getContext());
			subscriptions.add(subscription);
			subscription.setCalendar(null);
		}
	}
	return subscriptions;
}
/**
 * Builds the subscriptions configured on a channel's Schedule tool placement.
 *
 * @param reference the channel reference.
 * @param loadCalendar when true, each subscription also loads its Calendar
 *        (potentially triggering a remote fetch).
 * @return the set of configured subscriptions (empty when disabled, reference
 *         is null, or the site cannot be found).
 */
public Set<ExternalSubscription> getSubscriptionsForChannel(String reference,
		boolean loadCalendar)
{
	Set<ExternalSubscription> subscriptions = new HashSet<ExternalSubscription>();
	if (!isEnabled() || reference == null) return subscriptions;
	// get externally subscribed urls from tool config
	Reference ref = m_entityManager.newReference(reference);
	Site site = null;
	try
	{
		site = m_siteService.getSite(ref.getContext());
	}
	catch (IdUnusedException e)
	{
		m_log
				.error("ExternalCalendarSubscriptionService.getSubscriptionsForChannel(): IdUnusedException for context in reference: "
						+ reference);
		return subscriptions;
	}
	ToolConfiguration tc = site.getToolForCommonId(SCHEDULE_TOOL_ID);
	Properties config = (tc == null) ? null : tc.getConfig();
	if (tc != null && config != null)
	{
		String prop = config.getProperty(TC_PROP_SUBCRIPTIONS);
		if (prop != null)
		{
			// Stored format: ref[NAME_DELIM name] entries joined by REF_DELIM.
			for (String entry : prop.split(SUBS_REF_DELIMITER))
			{
				String[] pair = entry.split(SUBS_NAME_DELIMITER);
				String r = pair[0];
				Reference r1 = m_entityManager.newReference(r);
				String url = getSubscriptionUrlFromId(r1.getId());
				String name;
				if (pair.length == 2)
				{
					name = pair[1];
				}
				else
				{
					// No stored name: fall back to the cached institutional
					// subscription's name, or the URL itself when not cached.
					// (Explicit null check replaces the old broad catch(Exception)
					// that silently swallowed the NPE.)
					ExternalSubscription cached = institutionalSubscriptionCache.get(url);
					name = (cached != null) ? cached.getSubscriptionName() : url;
				}
				ExternalSubscription subscription = new BaseExternalSubscription(
						name, url, ref.getContext(),
						loadCalendar ? getCalendarSubscription(r) : null,
						isInstitutionalCalendar(r));
				subscriptions.add(subscription);
			}
		}
	}
	return subscriptions;
}
/*
* (non-Javadoc)
*
* @see org.sakaiproject.calendar.impl.ExternalCalendarSubscriptionService#setSubscriptionsForChannel(String,
* Collection<ExternalSubscription>)
*/
/**
 * Persists the given subscriptions into the channel's Schedule tool placement
 * configuration, serialised as a delimited string.
 *
 * @param reference the channel reference.
 * @param subscriptions subscriptions to store (null is treated as empty).
 */
public void setSubscriptionsForChannel(String reference,
		Collection<ExternalSubscription> subscriptions)
{
	if (!isEnabled() || reference == null) return;
	// set externally subscriptions in tool config
	Reference ref = m_entityManager.newReference(reference);
	Site site = null;
	try
	{
		site = m_siteService.getSite(ref.getContext());
	}
	catch (IdUnusedException e)
	{
		m_log
				.error("ExternalCalendarSubscriptionService.setSubscriptionsForChannel(): IdUnusedException for context in reference: "
						+ reference);
		return;
	}
	ToolConfiguration tc = site.getToolForCommonId(SCHEDULE_TOOL_ID);
	if (tc != null)
	{
		// Serialise as: ref[NAME_DELIM name]? (REF_DELIM ref[NAME_DELIM name]?)*
		// StringBuilder replaces StringBuffer: no shared-state synchronisation needed here.
		StringBuilder tmpStr = new StringBuilder();
		boolean first = true;
		if (subscriptions != null)
		{
			for (ExternalSubscription subscription : subscriptions)
			{
				if (!first) tmpStr.append(SUBS_REF_DELIMITER);
				first = false;
				tmpStr.append(subscription.getReference());
				// Only non-institutional entries carry a user-supplied name.
				if (!subscription.isInstitutional())
					tmpStr.append(SUBS_NAME_DELIMITER).append(subscription.getSubscriptionName());
			}
		}
		Properties config = tc.getConfig();
		config.setProperty(TC_PROP_SUBCRIPTIONS, tmpStr.toString());
		tc.save();
	}
}
/**
 * Decides whether a calendar reference points at one of the institutionally
 * configured subscription URLs.
 *
 * @param reference the calendar subscription reference.
 * @return true when the reference's URL is in the configured institutional list.
 */
public boolean isInstitutionalCalendar(String reference)
{
	// Resolve the subscription URL behind this reference.
	Reference _ref = m_entityManager.newReference(reference);
	String subscriptionUrl = getSubscriptionUrlFromId(_ref.getId());
	if (subscriptionUrl == null || subscriptionUrl.equals("null")) return false;
	// Institutional when the URL appears in the configured institutional URL list.
	String[] subscriptionURLs = m_configurationService
			.getStrings(SAK_PROP_EXTSUBSCRIPTIONS_URL);
	if (subscriptionURLs == null) return false;
	for (String url : subscriptionURLs)
	{
		if (subscriptionUrl.equals(url)) return true;
	}
	return false;
}
/**
 * Encodes a subscription URL into the id form used in entity references.
 * Pure delegation to {@code BaseExternalSubscription}.
 *
 * @param url the subscription URL.
 * @return the encoded id.
 */
public String getIdFromSubscriptionUrl(String url)
{
	return BaseExternalSubscription.getIdFromSubscriptionUrl(url);
}
/**
 * Decodes a subscription id (from an entity reference) back into its URL.
 * Pure delegation to {@code BaseExternalSubscription}.
 *
 * @param id the encoded subscription id.
 * @return the subscription URL.
 */
public String getSubscriptionUrlFromId(String id)
{
	return BaseExternalSubscription.getSubscriptionUrlFromId(id);
}
// ######################################################
// PRIVATE methods
// ######################################################
/**
 * Looks up the event type forced onto events of an institutional subscription.
 *
 * @param url the subscription URL.
 * @return the forced event type, or <code>null</code> when the URL is not an
 *         institutional subscription or no type is configured.
 */
String getEventType(String url)
{
	InsitutionalSubscription sub = getInstitutionalSubscription(url);
	if (sub == null)
	{
		return null;
	}
	return sub.eventType;
}
/**
 * Insitutional subscriptions loaded from configuration.
 * (The historical "Insitutional" spelling is kept: renaming would break callers.)
 */
class InsitutionalSubscription {
	// The subscription's remote iCal feed URL.
	String url;
	// Human-readable subscription name from configuration.
	String name;
	// Event type forced onto all imported events, or null when not configured.
	String eventType;
}
/**
 * Finds the configured institutional subscription for a URL.
 *
 * @param url the subscription URL to match.
 * @return the matching subscription, or null when the URL is not institutional.
 */
InsitutionalSubscription getInstitutionalSubscription(String url)
{
	// Linear scan of the (small) configured institutional list.
	for (InsitutionalSubscription candidate : getInstitutionalSubscriptions())
	{
		if (candidate.url.equals(url)) return candidate;
	}
	return null;
}
/**
 * Loads the institutional subscriptions from sakai.properties.
 * The URL, name and event-type lists are parallel arrays; entries without a
 * name are skipped.
 *
 * @return the configured institutional subscriptions (possibly empty).
 */
List<InsitutionalSubscription> getInstitutionalSubscriptions() {
	String[] subscriptionURLs = m_configurationService
			.getStrings(SAK_PROP_EXTSUBSCRIPTIONS_URL);
	String[] subscriptionNames = m_configurationService
			.getStrings(SAK_PROP_EXTSUBSCRIPTIONS_NAME);
	String[] subscriptionEventTypes = m_configurationService
			.getStrings(SAK_PROP_EXTSUBSCRIPTIONS_EVENTTYPE);
	ArrayList<InsitutionalSubscription> subs = new ArrayList<InsitutionalSubscription>();
	if (subscriptionURLs != null)
	{
		for (int i = 0; i < subscriptionURLs.length; i++)
		{
			// Guard against name/event-type lists that are missing or shorter than
			// the URL list (previously threw NPE/ArrayIndexOutOfBoundsException on
			// mismatched configuration).
			String name = (subscriptionNames != null && i < subscriptionNames.length)
					? subscriptionNames[i] : null;
			String eventType = (subscriptionEventTypes != null && i < subscriptionEventTypes.length)
					? subscriptionEventTypes[i] : null;
			if (name != null) {
				InsitutionalSubscription sub = new InsitutionalSubscription();
				sub.url = subscriptionURLs[i];
				sub.name = name;
				sub.eventType = eventType;
				subs.add(sub);
			}
		}
	}
	return subs;
}
/**
 * Loads a calendar subscription from a URL, applying the institutional name
 * and forced event type when the URL is institutionally configured.
 *
 * @param url the remote iCal feed URL.
 * @param context the site context.
 * @return the loaded subscription (never null; calendar may be absent on failure).
 */
ExternalSubscription loadCalendarSubscriptionFromUrl(String url,
		String context)
{
	InsitutionalSubscription sub = getInstitutionalSubscription(url);
	if (sub != null)
	{
		return loadCalendarSubscriptionFromUrl(url, context, sub.name, sub.eventType);
	}
	return loadCalendarSubscriptionFromUrl(url, context, null, null);
}
/**
 * Fetches an iCal feed over HTTP and imports it into an in-memory calendar.
 * Import failures produce an empty calendar so the URL is not immediately
 * re-fetched; network and unexpected failures leave the subscription without a
 * calendar (it will be retried on the next cache miss).
 *
 * @param url the remote iCal feed URL.
 * @param context the site context.
 * @param calendarName display name, or null to fall back to the URL's file part.
 * @param forcedEventType when non-null, overrides every imported event's type.
 * @return the subscription (never null; its calendar may be null on failure).
 */
ExternalSubscription loadCalendarSubscriptionFromUrl(String url,
		String context, String calendarName, String forcedEventType)
{
	ExternalSubscription subscription = new BaseExternalSubscription(calendarName,
			url, context, null, INSTITUTIONAL_CONTEXT.equals(context));
	ExternalCalendarSubscription calendar = null;
	List<CalendarEvent> events = null;
	BufferedInputStream stream = null;
	try
	{
		URL _url = new URL(url);
		if (calendarName == null) calendarName = _url.getFile();
		// connect
		URLConnection conn = _url.openConnection();
		// Must set user agent so we can detect loops.
		conn.addRequestProperty("User-Agent", m_calendarService.getUserAgent());
		conn.setConnectTimeout(TIMEOUT);
		conn.setReadTimeout(TIMEOUT);
		// Now make the connection.
		conn.connect();
		stream = new BufferedInputStream(conn.getInputStream());
		// import
		events = m_importerService.doImport(CalendarImporterService.ICALENDAR_IMPORT,
				stream, columnMap, null);
		String subscriptionId = getIdFromSubscriptionUrl(url);
		String reference = calendarSubscriptionReference(context, subscriptionId);
		calendar = new ExternalCalendarSubscription(reference);
		for (CalendarEvent event : events)
		{
			// An institutional subscription may force a single event type.
			String eventType = event.getType();
			if (forcedEventType != null) eventType = forcedEventType;
			calendar.addEvent(event.getRange(), event.getDisplayName(), event
					.getDescription(), eventType, event.getLocation(), event
					.getRecurrenceRule(), null);
		}
		calendar.setName(calendarName);
		subscription.setCalendar(calendar);
		subscription.setInstitutional(getInstitutionalSubscription(url) != null);
		m_log.info("Loaded calendar subscription: " + subscription.toString());
	}
	catch (ImportException e)
	{
		m_log.info("Error loading calendar subscription '" + calendarName
				+ "' (will NOT retry again): " + url, e);
		String subscriptionId = getIdFromSubscriptionUrl(url);
		String reference = calendarSubscriptionReference(context, subscriptionId);
		calendar = new ExternalCalendarSubscription(reference);
		calendar.setName(calendarName);
		// By setting the calendar to be an empty one we make sure that we don't attempt to re-retrieve it
		// When 2 hours are up it will get refreshed through.
		subscription.setCalendar(calendar);
	}
	catch (PermissionException e)
	{
		// Not expected for external feeds; log instead of printStackTrace().
		m_log.warn("Unexpected PermissionException loading calendar subscription '"
				+ calendarName + "': " + url, e);
	}
	catch (MalformedURLException e)
	{
		m_log.info("Mal-formed URL in calendar subscription '" + calendarName
				+ "': " + url, e);
	}
	catch (IOException e)
	{
		m_log.info("Unable to read calendar subscription '" + calendarName
				+ "' from URL (I/O Error): " + url, e);
	}
	catch (Exception e)
	{
		// Deliberate catch-all so one bad feed cannot break loading; keep the cause.
		m_log.info("Unknown error occurred while reading calendar subscription '"
				+ calendarName + "' from URL: " + url, e);
	}
	finally
	{
		if (stream != null) {
			// Also closes the underlying InputStream
			try {
				stream.close();
			} catch (IOException e) {
				// Ignore
			}
		}
	}
	return subscription;
}
/**
 * Tests whether a calendar reference belongs to a user's "My Workspace" site.
 *
 * @param primaryCalendarReference The primary calendar reference.
 * @return true if we are currently on the "My Workspace" tab.
 */
private boolean isMyWorkspace(String primaryCalendarReference)
{
	String siteId = m_entityManager.newReference(primaryCalendarReference).getContext();
	return m_siteService.isUserSite(siteId);
}
// ######################################################
// Support classes
// ######################################################
/**
 * In-memory, effectively read-only Calendar backed by an imported external
 * (iCalendar) feed. Events live in a map keyed by a deterministic,
 * content-derived id so re-imports of the same feed yield stable ids.
 * Mutating Calendar operations are either no-ops or disallowed
 * (allowAdd/Edit/Remove all return false).
 */
public class ExternalCalendarSubscription implements Calendar
{
	/** Memory storage */
	protected Map<String, CalendarEvent> m_storage = new HashMap<String, CalendarEvent>();

	/** The context in which this calendar exists. */
	protected String m_context = null;

	/** Store the unique-in-context calendar id. */
	protected String m_id = null;

	/** Store the calendar name. */
	protected String m_name = null;

	/** The properties. */
	protected ResourcePropertiesEdit m_properties = null;

	// Raw last-modified date string; converted to a Time in getModified().
	// NOTE(review): nothing in this class assigns it — confirm callers set it
	// before relying on getModified().
	protected String modifiedDateStr = null;

	/**
	 * Constructs the calendar from its entity reference string, extracting the
	 * context and id.
	 *
	 * @param ref the calendar subscription reference.
	 */
	public ExternalCalendarSubscription(String ref)
	{
		// set the ids
		Reference r = m_entityManager.newReference(ref);
		m_context = r.getContext();
		m_id = r.getId();
		// setup for properties
		m_properties = new BaseResourcePropertiesEdit();
	}

	// The access and groups parameters are ignored: external events carry no grouping.
	public CalendarEvent addEvent(TimeRange range, String displayName,
			String description, String type, String location, EventAccess access,
			Collection groups, List attachments) throws PermissionException
	{
		return addEvent(range, displayName, description, type, location, attachments);
	}

	// Convenience overload: no recurrence rule.
	public CalendarEvent addEvent(TimeRange range, String displayName,
			String description, String type, String location, List attachments)
			throws PermissionException
	{
		return addEvent(range, displayName, description, type, location, null,
				attachments);
	}

	/**
	 * Creates and stores an event. The id is derived from the event's fields
	 * (not random) so the same feed imports to the same ids every time.
	 */
	public CalendarEvent addEvent(TimeRange range, String displayName,
			String description, String type, String location, RecurrenceRule rrule,
			List attachments) throws PermissionException
	{
		// allocate a new unique event id
		// String id = getUniqueId();
		String id = getUniqueIdBasedOnFields(displayName, description, type, location, m_id);
		// create event
		ExternalCalendarEvent edit = new ExternalCalendarEvent(m_context, m_id, id);
		// set it up
		edit.setRange(range);
		edit.setDisplayName(displayName);
		edit.setDescription(description);
		edit.setType(type);
		edit.setLocation(location);
		edit.setCreator();
		if (rrule != null) edit.setRecurrenceRule(rrule);
		// put in storage
		m_storage.put(id, edit);
		return edit;
	}

	// Not supported for external calendars; always returns null.
	public CalendarEventEdit addEvent() throws PermissionException
	{
		// allocate a new unique event id
		// String id = getUniqueId();
		// create event
		// CalendarEventEdit event = new ExternalCalendarEvent(this, id);
		// put in storage
		// m_storage.put(id, event);
		return null;
	}

	// Stores an already-built event under its own id.
	public CalendarEvent addEvent(CalendarEvent event)
	{
		// allocate a new unique event id
		String id = event.getId();
		// put in storage
		m_storage.put(id, event);
		return event;
	}

	// Live view of the stored events (not a defensive copy).
	public Collection<CalendarEvent> getAllEvents()
	{
		return m_storage.values();
	}

	// External calendars are read-only: add/edit/remove are never allowed,
	// while read access is always allowed.
	public boolean allowAddCalendarEvent()
	{
		return false;
	}

	public boolean allowAddEvent()
	{
		return false;
	}

	public boolean allowEditEvent(String eventId)
	{
		return false;
	}

	public boolean allowGetEvent(String eventId)
	{
		return true;
	}

	public boolean allowGetEvents()
	{
		return true;
	}

	public boolean allowRemoveEvent(CalendarEvent event)
	{
		return false;
	}

	// Edit lifecycle is meaningless for a read-only calendar: intentional no-ops.
	public void cancelEvent(CalendarEventEdit edit)
	{
	}

	public void commitEvent(CalendarEventEdit edit, int intention)
	{
	}

	public void commitEvent(CalendarEventEdit edit)
	{
	}

	public String getContext()
	{
		return m_context;
	}

	// Editing is unsupported; always returns null rather than an edit handle.
	public CalendarEventEdit getEditEvent(String eventId, String editType)
			throws IdUnusedException, PermissionException, InUseException
	{
		return null;
	}

	// Returns null (rather than throwing IdUnusedException) for unknown ids.
	public CalendarEvent getEvent(String eventId) throws IdUnusedException,
			PermissionException
	{
		return m_storage.get(eventId);
	}

	public String getEventFields()
	{
		return m_properties
				.getPropertyFormatted(ResourceProperties.PROP_CALENDAR_EVENT_FIELDS);
	}

	// NOTE: the filter parameter is ignored — only the time range is applied.
	public List getEvents(TimeRange range, Filter filter) throws PermissionException
	{
		return filterEvents(new ArrayList<CalendarEvent>(m_storage.values()), range);
	}

	public boolean getExportEnabled()
	{
		return false;
	}

	// No group support: all group queries return empty collections.
	public Collection getGroupsAllowAddEvent()
	{
		return new ArrayList();
	}

	public Collection getGroupsAllowGetEvent()
	{
		return new ArrayList();
	}

	public Collection getGroupsAllowRemoveEvent(boolean own)
	{
		return new ArrayList();
	}

	// Parses modifiedDateStr as a GMT time; see the field's review note about
	// whether it is ever populated.
	public Time getModified()
	{
		return m_timeService.newTimeGmt(modifiedDateStr);
	}

	public CalendarEventEdit mergeEvent(Element el) throws PermissionException,
			IdUsedException
	{
		// TODO Implement mergeEvent()
		return null;
	}

	// Removal is unsupported: intentional no-ops.
	public void removeEvent(CalendarEventEdit edit, int intention)
			throws PermissionException
	{
	}

	public void removeEvent(CalendarEventEdit edit) throws PermissionException
	{
	}

	public void setExportEnabled(boolean enable)
	{
	}

	public void setModified()
	{
	}

	public String getId()
	{
		return m_id;
	}

	public ResourceProperties getProperties()
	{
		return m_properties;
	}

	public String getReference()
	{
		return m_calendarService.calendarSubscriptionReference(m_context, m_id);
	}

	/**
	 * Re-homes this calendar (and every stored event) into a new context.
	 * Used when a cached subscription is presented inside a different site.
	 */
	protected void setContext(String context)
	{
		// set the ids
		m_context = context;
		for (CalendarEvent e : m_storage.values())
		{
			// ((ExternalCalendarEvent) e).setCalendar(this);
			((ExternalCalendarEvent) e).setCalendarContext(m_context);
			((ExternalCalendarEvent) e).setCalendarId(m_id);
		}
	}

	public String getReference(String rootProperty)
	{
		return rootProperty + getReference();
	}

	public String getUrl()
	{
		// TODO Auto-generated method stub
		return null;
	}

	public String getUrl(String rootProperty)
	{
		// TODO Auto-generated method stub
		return null;
	}

	public Element toXml(Document doc, Stack stack)
	{
		// TODO Auto-generated method stub
		return null;
	}

	public String getName()
	{
		return m_name;
	}

	public void setName(String calendarName)
	{
		this.m_name = calendarName;
	}

	/**
	 * Access the id generating service and return a unique id.
	 *
	 * @return a unique id.
	 */
	protected String getUniqueId()
	{
		return m_idManager.createUuid();
	}

	/**
	 * Builds a deterministic event id by hashing the event's fields (SHA-1,
	 * hex-encoded; Base64 of the raw key if SHA-1 is unavailable). On a
	 * collision within this calendar, a counter is appended to the key and the
	 * hash retried until the id is unused.
	 */
	protected String getUniqueIdBasedOnFields(String displayName, String description,
			String type, String location, String calendarId)
	{
		StringBuilder key = new StringBuilder();
		key.append(displayName);
		key.append(description);
		key.append(type);
		key.append(location);
		key.append(calendarId);
		String id = null;
		int n = 0;
		boolean unique = false;
		while (!unique)
		{
			byte[] bytes = key.toString().getBytes();
			try{
				MessageDigest digest = MessageDigest.getInstance("SHA-1");
				digest.update(bytes);
				bytes = digest.digest();
				id = getHexStringFromBytes(bytes);
			}catch(NoSuchAlgorithmException e){
				// fall back to Base64
				byte[] encoded = Base64.encodeBase64(bytes);
				id = StringUtils.newStringUtf8(encoded);
			}
			if (!m_storage.containsKey(id)) unique = true;
			else key.append(n++);
		}
		return id;
	}

	/**
	 * Upper-case hex encoding of a byte array; returns null for null input.
	 */
	protected String getHexStringFromBytes(byte[] raw)
	{
		final String HEXES = "0123456789ABCDEF";
		if(raw == null)
		{
			return null;
		}
		final StringBuilder hex = new StringBuilder(2 * raw.length);
		for(final byte b : raw)
		{
			hex.append(HEXES.charAt((b & 0xF0) >> 4)).append(HEXES.charAt((b & 0x0F)));
		}
		return hex.toString();
	}

	/**
	 * Filter the events to only those in the time range.
	 *
	 * @param events
	 *        The full list of events.
	 * @param range
	 *        The time range.
	 * @return A list of events from the incoming list that overlap the
	 *         given time range.
	 */
	protected List<CalendarEvent> filterEvents(List<CalendarEvent> events,
			TimeRange range)
	{
		List<CalendarEvent> filtered = new ArrayList<CalendarEvent>();
		for (int i = 0; i < events.size(); i++)
		{
			CalendarEvent event = events.get(i);
			// resolve the event to the list of events in this range
			// TODO Support for recurring events
			List<CalendarEvent> resolved = ((ExternalCalendarEvent) event)
					.resolve(range);
			filtered.addAll(resolved);
		}
		return filtered;
	}

	/**
	 * Checks if user has permission to modify any event (or fields) in this calendar
	 * @param function
	 * @return
	 */
	@Override
	public boolean canModifyAnyEvent(String function){
		return CalendarService.AUTH_MODIFY_CALENDAR_ANY.equals(function);
	}
}
/**
 * CalendarEvent backed by properties imported from an external iCal feed.
 * Holds only the owning calendar's context and id (not the calendar object),
 * supports optional recurrence via a single rule plus an exclusion rule, and
 * expands recurring instances through {@link #resolve(TimeRange)}.
 */
public class ExternalCalendarEvent implements CalendarEvent
{
	// protected Calendar m_calendar = null;
	protected String m_calendar_context = null;

	protected String m_calendar_id = null;

	/** All textual event fields (name, description, type, ...) live here. */
	protected ResourcePropertiesEdit m_properties = null;

	protected String m_id = null;

	protected String calendarReference = null;

	/** The event's own time range. */
	protected TimeRange m_range = null;

	/** Original range of the base event when this is a recurrence instance. */
	protected TimeRange m_baseRange = null;

	/** The single recurrence rule, or null for non-recurring events. */
	protected RecurrenceRule m_singleRule = null;

	/** Instances excluded from the recurrence; created lazily. */
	protected RecurrenceRule m_exclusionRule = null;

	public ExternalCalendarEvent(String calendarContext, String calendarId, String id)
	{
		this(calendarContext, calendarId, id, null);
	}

	public ExternalCalendarEvent(String calendarContext, String calendarId,
			String id, String eventType)
	{
		m_id = id;
		// m_calendar = calendar;
		m_calendar_context = calendarContext;
		m_calendar_id = calendarId;
		m_properties = new BaseResourcePropertiesEdit();
		if (eventType != null)
			m_properties
					.addProperty(ResourceProperties.PROP_CALENDAR_TYPE, eventType);
	}

	/**
	 * Builds one expanded occurrence of a recurring event: shares the base
	 * event's properties and rules, but carries the instance's own range and an
	 * id that encodes the instance range and sequence.
	 */
	public ExternalCalendarEvent(CalendarEvent other, RecurrenceInstance ri)
	{
		// m_calendar = ((ExternalCalendarEvent) other).m_calendar;
		m_calendar_context = ((ExternalCalendarEvent) other).m_calendar_context;
		m_calendar_id = ((ExternalCalendarEvent) other).m_calendar_id;
		// encode the instance and the other's id into my id
		m_id = '!' + ri.getRange().toString() + '!' + ri.getSequence() + '!'
				+ ((ExternalCalendarEvent) other).m_id;
		// use the new range
		m_range = (TimeRange) ri.getRange().clone();
		m_baseRange = ((ExternalCalendarEvent) other).m_range;
		// point at the properties
		m_properties = ((ExternalCalendarEvent) other).m_properties;
		// point at the rules
		m_singleRule = ((ExternalCalendarEvent) other).m_singleRule;
		m_exclusionRule = ((ExternalCalendarEvent) other).m_exclusionRule;
	}

	// External events are always site-wide.
	public EventAccess getAccess()
	{
		return CalendarEvent.EventAccess.SITE;
	}

	public String getCalendarReference()
	{
		// return m_calendar.getReference();
		return m_calendarService.calendarSubscriptionReference(m_calendar_context,
				m_calendar_id);
	}

	// protected Calendar getCalendar(){
	// return m_calendar;
	// }

	// protected void setCalendar(Calendar calendar) {
	// m_calendar = calendar;
	// }

	// Re-homing hooks used by ExternalCalendarSubscription.setContext().
	protected void setCalendarContext(String calendarContext)
	{
		m_calendar_context = calendarContext;
	}

	protected void setCalendarId(String calendarId)
	{
		m_calendar_id = calendarId;
	}

	public String getCreator()
	{
		return m_properties.getProperty(ResourceProperties.PROP_CREATOR);
	}

	public String getDescription()
	{
		return FormattedText
				.convertFormattedTextToPlaintext(getDescriptionFormatted());
	}

	public String getDescriptionFormatted()
	{
		// %%% JANDERSE the calendar event description can now be formatted
		// text
		// first try to use the formatted text description; if that isn't
		// found, use the plaintext description
		String desc = m_properties
				.getPropertyFormatted(ResourceProperties.PROP_DESCRIPTION + "-html");
		if (desc != null && desc.length() > 0) return desc;
		desc = m_properties.getPropertyFormatted(ResourceProperties.PROP_DESCRIPTION
				+ "-formatted");
		desc = FormattedText.convertOldFormattedText(desc);
		if (desc != null && desc.length() > 0) return desc;
		desc = FormattedText.convertPlaintextToFormattedText(m_properties
				.getPropertyFormatted(ResourceProperties.PROP_DESCRIPTION));
		return desc;
	}

	public String getDisplayName()
	{
		return m_properties
				.getPropertyFormatted(ResourceProperties.PROP_DISPLAY_NAME);
	}

	public String getField(String name)
	{
		// names are prefixed to form a namespace
		name = ResourceProperties.PROP_CALENDAR_EVENT_FIELDS + "." + name;
		return m_properties.getPropertyFormatted(name);
	}

	// No group support for external events: empty collections / empty string.
	public Collection getGroupObjects()
	{
		return new ArrayList();
	}

	public String getGroupRangeForDisplay(Calendar calendar)
	{
		return "";
	}

	public Collection getGroups()
	{
		return new ArrayList();
	}

	public String getLocation()
	{
		return m_properties
				.getPropertyFormatted(ResourceProperties.PROP_CALENDAR_LOCATION);
	}

	public String getModifiedBy()
	{
		return m_properties.getPropertyFormatted(ResourceProperties.PROP_MODIFIED_BY);
	}

	public TimeRange getRange()
	{
		// range might be null in the creation process, before the fields
		// are set in an edit, but
		// after the storage has registered the event and it's id.
		if (m_range == null)
		{
			return m_timeService.newTimeRange(m_timeService.newTime(0));
		}

		// return (TimeRange) m_range.clone();
		return m_range;
	}

	public RecurrenceRule getRecurrenceRule()
	{
		return m_singleRule;
	}

	// Lazily creates an empty exclusion rule so callers can always exclude.
	public RecurrenceRule getExclusionRule()
	{
		if (m_exclusionRule == null)
			m_exclusionRule = new ExclusionSeqRecurrenceRule();
		return m_exclusionRule;
	}

	/**
	 * Expands this event into the concrete occurrences overlapping a range:
	 * the event itself when non-recurring, otherwise one clone per
	 * non-excluded recurrence instance.
	 */
	protected List resolve(TimeRange range)
	{
		List rv = new Vector();

		// for no rules, use the event if it's in range
		if (m_singleRule == null)
		{
			// the actual event
			if (range.overlaps(getRange()))
			{
				rv.add(this);
			}
		}

		// for rules...
		else
		{
			List instances = m_singleRule.generateInstances(this.getRange(), range,
					m_timeService.getLocalTimeZone());
			// remove any excluded
			getExclusionRule().excludeInstances(instances);
			for (Iterator iRanges = instances.iterator(); iRanges.hasNext();)
			{
				RecurrenceInstance ri = (RecurrenceInstance) iRanges.next();

				// generate an event object that is exactly like me but with
				// this range and no rules
				CalendarEvent clone = new ExternalCalendarEvent(this, ri);

				rv.add(clone);
			}
		}

		return rv;
	}

	public void setRecurrenceRule(RecurrenceRule rule)
	{
		m_singleRule = rule;
	}

	public void setExclusionRule(RecurrenceRule rule)
	{
		m_exclusionRule = rule;
	}

	public String getType()
	{
		return m_properties
				.getPropertyFormatted(ResourceProperties.PROP_CALENDAR_TYPE);
	}

	public boolean isUserOwner()
	{
		return false;
	}

	public String getId()
	{
		return m_id;
	}

	protected void setId(String id)
	{
		m_id = id;
	}

	public ResourceProperties getProperties()
	{
		return m_properties;
	}

	public String getReference()
	{
		// return m_calendar.getReference() + Entity.SEPARATOR + m_id;
		return m_calendarService.eventSubscriptionReference(m_calendar_context,
				m_calendar_id, m_id);
	}

	public String getReference(String rootProperty)
	{
		return rootProperty + getReference();
	}

	public String getUrl()
	{
		return null;// m_calendar.getUrl() + getId();
	}

	public String getUrl(String rootProperty)
	{
		return rootProperty + getUrl();
	}

	public Element toXml(Document doc, Stack stack)
	{
		// TODO Auto-generated method stub
		return null;
	}

	// Orders events by the start of their time range.
	public int compareTo(Object o)
	{
		if (!(o instanceof CalendarEvent)) throw new ClassCastException();
		Time mine = getRange().firstTime();
		Time other = ((CalendarEvent) o).getRange().firstTime();

		if (mine.before(other)) return -1;
		if (mine.after(other)) return +1;
		return 0;
	}

	public List getAttachments()
	{
		// TODO Auto-generated method stub
		return null;
	}

	// Stamps creator and creation date from the current session/time.
	public void setCreator()
	{
		String currentUser = m_sessionManager.getCurrentSessionUserId();
		String now = m_timeService.newTime().toString();
		m_properties.addProperty(ResourceProperties.PROP_CREATOR, currentUser);
		m_properties.addProperty(ResourceProperties.PROP_CREATION_DATE, now);
	}

	public void setLocation(String location)
	{
		m_properties.addProperty(ResourceProperties.PROP_CALENDAR_LOCATION, location);
	}

	public void setType(String type)
	{
		m_properties.addProperty(ResourceProperties.PROP_CALENDAR_TYPE, type);
	}

	public void setDescription(String description)
	{
		setDescriptionFormatted(FormattedText
				.convertPlaintextToFormattedText(description));
	}

	public void setDescriptionFormatted(String description)
	{
		// %%% JANDERSE the calendar event description can now be formatted
		// text
		// save both a formatted and a plaintext version of the description
		m_properties.addProperty(ResourceProperties.PROP_DESCRIPTION + "-html",
				description);
		m_properties.addProperty(ResourceProperties.PROP_DESCRIPTION, FormattedText
				.convertFormattedTextToPlaintext(description));
	}

	public void setDisplayName(String displayName)
	{
		m_properties.addProperty(ResourceProperties.PROP_DISPLAY_NAME, displayName);
	}

	// Defensive copy: callers cannot mutate our range afterwards.
	public void setRange(TimeRange range)
	{
		m_range = (TimeRange) range.clone();
	}

	/**
	 * Gets a site name for this calendar event
	 */
	public String getSiteName()
	{
		String calendarName = "";
		if (m_calendar_context != null)
		{
			try
			{
				Site site = m_siteService.getSite(m_calendar_context);
				if (site != null)
					calendarName = site.getTitle();
			}
			catch (IdUnusedException e)
			{
				m_log.warn(".getSiteName(): " + e);
			}
		}
		return calendarName;
	}
}
}
| |
package org.apache.lucene.queryparser.complexPhrase;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.queryparser.classic.ParseException;
import org.apache.lucene.queryparser.classic.QueryParser;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.search.spans.SpanNearQuery;
import org.apache.lucene.search.spans.SpanNotQuery;
import org.apache.lucene.search.spans.SpanOrQuery;
import org.apache.lucene.search.spans.SpanQuery;
import org.apache.lucene.search.spans.SpanTermQuery;
/**
* QueryParser which permits complex phrase query syntax eg "(john jon
* jonathan~) peters*".
* <p>
* Performs potentially multiple passes over Query text to parse any nested
* logic in PhraseQueries. - First pass takes any PhraseQuery content between
* quotes and stores for subsequent pass. All other query content is parsed as
* normal - Second pass parses any stored PhraseQuery content, checking all
* embedded clauses are referring to the same field and therefore can be
* rewritten as Span queries. All PhraseQuery clauses are expressed as
* ComplexPhraseQuery objects
* </p>
* <p>
* This could arguably be done in one pass using a new QueryParser but here I am
* working within the constraints of the existing parser as a base class. This
* currently simply feeds all phrase content through an analyzer to select
* phrase terms - any "special" syntax such as * ~ * etc are not given special
* status
* </p>
*
*/
public class ComplexPhraseQueryParser extends QueryParser {
private ArrayList<ComplexPhraseQuery> complexPhrases = null;
private boolean isPass2ResolvingPhrases;
private boolean inOrder = true;
/**
 * When <code>inOrder</code> is true, the search terms must
 * exist in the documents in the same order as in the query.
 *
 * @param inOrder parameter to choose between ordered or un-ordered proximity search
 */
public void setInOrder(final boolean inOrder) {
  this.inOrder = inOrder;
}
private ComplexPhraseQuery currentPhraseQuery = null;
/**
 * Creates a parser with the given default field and analyzer.
 *
 * @param f default field for terms without an explicit field prefix.
 * @param a analyzer used to process query text.
 */
public ComplexPhraseQueryParser(String f, Analyzer a) {
  super(f, a);
}
// Pass 1 hook: each quoted phrase becomes a ComplexPhraseQuery placeholder and
// is recorded so its contents can be parsed separately in pass 2 (see parse()).
@Override
protected Query getFieldQuery(String field, String queryText, int slop) {
  ComplexPhraseQuery cpq = new ComplexPhraseQuery(field, queryText, slop, inOrder);
  complexPhrases.add(cpq); // add to list of phrases to be parsed once
  // we
  // are through with this pass
  return cpq;
}
/**
 * Two-pass parse. Pass 1 parses the full query, collecting quoted phrases as
 * ComplexPhraseQuery placeholders via getFieldQuery. Pass 2 re-enters this
 * method (isPass2ResolvingPhrases set) once per phrase to parse the phrase
 * contents, temporarily forcing BooleanQuery rewrite so multi-term queries
 * expose their terms for conversion into SpanQueries.
 */
@Override
public Query parse(String query) throws ParseException {
  if (isPass2ResolvingPhrases) {
    MultiTermQuery.RewriteMethod oldMethod = getMultiTermRewriteMethod();
    try {
      // Temporarily force BooleanQuery rewrite so that Parser will
      // generate visible
      // collection of terms which we can convert into SpanQueries.
      // ConstantScoreRewrite mode produces an
      // opaque ConstantScoreQuery object which cannot be interrogated for
      // terms in the same way a BooleanQuery can.
      // QueryParser is not guaranteed threadsafe anyway so this temporary
      // state change should not
      // present an issue
      setMultiTermRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
      return super.parse(query);
    } finally {
      setMultiTermRewriteMethod(oldMethod);
    }
  }

  // First pass - parse the top-level query recording any PhraseQuerys
  // which will need to be resolved
  complexPhrases = new ArrayList<>();
  Query q = super.parse(query);

  // Perform second pass, using this QueryParser to parse any nested
  // PhraseQueries with different
  // set of syntax restrictions (i.e. all fields must be same)
  isPass2ResolvingPhrases = true;
  try {
    for (Iterator<ComplexPhraseQuery> iterator = complexPhrases.iterator(); iterator.hasNext();) {
      currentPhraseQuery = iterator.next();
      // in each phrase, now parse the contents between quotes as a
      // separate parse operation
      currentPhraseQuery.parsePhraseElements(this);
    }
  } finally {
    // Always restore pass-1 state, even if a phrase fails to parse.
    isPass2ResolvingPhrases = false;
  }
  return q;
}
// There is no "getTermQuery throws ParseException" method to override so
// unfortunately we need to throw a runtime exception here if a term for
// another field is embedded in a phrase query.
@Override
protected Query newTermQuery(Term term) {
    if (isPass2ResolvingPhrases) {
        try {
            checkPhraseClauseIsForSameField(term.field());
        } catch (ParseException pe) {
            // Wrap: this method's signature cannot declare the checked
            // ParseException, and the cause is preserved for callers.
            throw new RuntimeException("Error parsing complex phrase", pe);
        }
    }
    return super.newTermQuery(term);
}
/**
 * Helper used to reject any clause inside a phrase that targets a field
 * other than the phrase's own field.
 *
 * @param field the field of the clause being parsed
 * @throws ParseException if {@code field} differs from the current phrase's field
 */
private void checkPhraseClauseIsForSameField(String field)
        throws ParseException {
    if (!field.equals(currentPhraseQuery.field)) {
        // Fix: the message was previously built from two adjacent literals
        // ("... nested in phrase " + " for field ...") which produced a
        // doubled space in the rendered error text.
        throw new ParseException("Cannot have clause for field \"" + field
            + "\" nested in phrase for field \"" + currentPhraseQuery.field
            + "\"");
    }
}
/**
 * Pass-2 guard for wildcard clauses: inside a phrase, a wildcard must target
 * the phrase's own field. Otherwise delegates to the standard handling.
 */
@Override
protected Query getWildcardQuery(String field, String termStr) throws ParseException {
    if (isPass2ResolvingPhrases) {
        checkPhraseClauseIsForSameField(field);
    }
    return super.getWildcardQuery(field, termStr);
}
/**
 * Pass-2 guard for range clauses: inside a phrase, a range must target the
 * phrase's own field before delegating to the standard range handling.
 */
@Override
protected Query getRangeQuery(String field, String part1, String part2,
        boolean startInclusive, boolean endInclusive) throws ParseException {
    if (isPass2ResolvingPhrases) {
        checkPhraseClauseIsForSameField(field);
    }
    return super.getRangeQuery(field, part1, part2, startInclusive, endInclusive);
}
/**
 * During pass 2, builds a range query that rewrites to a BooleanQuery of
 * matching terms so it can later be converted into SpanOr clauses; outside
 * pass 2, defers to the superclass.
 */
@Override
protected Query newRangeQuery(String field, String part1, String part2,
        boolean startInclusive, boolean endInclusive) {
    if (isPass2ResolvingPhrases) {
        // Must use old-style RangeQuery in order to produce a BooleanQuery
        // that can be turned into SpanOr clause
        TermRangeQuery rangeQuery = TermRangeQuery.newStringRange(field, part1, part2, startInclusive, endInclusive);
        rangeQuery.setRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
        return rangeQuery;
    }
    return super.newRangeQuery(field, part1, part2, startInclusive, endInclusive);
}
/**
 * Pass-2 guard for fuzzy clauses: inside a phrase, a fuzzy term must target
 * the phrase's own field. Otherwise delegates to the standard handling.
 */
@Override
protected Query getFuzzyQuery(String field, String termStr, float minSimilarity) throws ParseException {
    if (isPass2ResolvingPhrases) {
        checkPhraseClauseIsForSameField(field);
    }
    return super.getFuzzyQuery(field, termStr, minSimilarity);
}
/*
 * Used to handle the query content in between quotes and produce Span-based
 * interpretations of the clauses.
 */
static class ComplexPhraseQuery extends Query {

    final String field;                         // field all phrase clauses must target
    final String phrasedQueryStringContents;    // raw text between the phrase quotes
    final int slopFactor;                       // allowed positional slop between clauses
    private final boolean inOrder;              // whether clauses must match in query order
    private Query contents;                     // parsed form of the phrase, set in pass 2

    public ComplexPhraseQuery(String field, String phrasedQueryStringContents,
            int slopFactor, boolean inOrder) {
        super();
        this.field = field;
        this.phrasedQueryStringContents = phrasedQueryStringContents;
        this.slopFactor = slopFactor;
        this.inOrder = inOrder;
    }

    // Called by ComplexPhraseQueryParser for each phrase after the main
    // parse thread is through.
    protected void parsePhraseElements(ComplexPhraseQueryParser qp) throws ParseException {
        // TODO ensure that field-sensitivity is preserved ie the query
        // string below is parsed as
        // field+":("+phrasedQueryStringContents+")"
        // but this will need code in rewrite to unwrap the first layer of
        // boolean query
        String oldDefaultParserField = qp.field;
        try {
            // Temporarily set the QueryParser to parse with this phrase's
            // field as the default, e.g. author:"fred* smith".
            qp.field = this.field;
            contents = qp.parse(phrasedQueryStringContents);
        }
        finally {
            // Always restore the parser's previous default field.
            qp.field = oldDefaultParserField;
        }
    }

    /**
     * Converts the parsed phrase contents into Span queries: a plain term
     * passes through unchanged; a BooleanQuery of clauses becomes a
     * SpanNearQuery (or SpanNotQuery when MUST_NOT clauses are present).
     */
    @Override
    public Query rewrite(IndexReader reader) throws IOException {
        if (contents instanceof TermQuery) {
            return contents;
        }
        // Build a sequence of Span clauses arranged in a SpanNear - child
        // clauses can be complex Booleans e.g. nots and ors etc
        int numNegatives = 0;
        if (!(contents instanceof BooleanQuery)) {
            throw new IllegalArgumentException("Unknown query type \""
                + contents.getClass().getName()
                + "\" found in phrase query string \"" + phrasedQueryStringContents
                + "\"");
        }
        BooleanQuery bq = (BooleanQuery) contents;
        BooleanClause[] bclauses = bq.getClauses();
        SpanQuery[] allSpanClauses = new SpanQuery[bclauses.length];
        // For all clauses e.g. one* two~
        for (int i = 0; i < bclauses.length; i++) {
            Query qc = bclauses[i].getQuery();
            // Rewrite this clause e.g one* becomes (one OR onerous)
            qc = qc.rewrite(reader);
            if (bclauses[i].getOccur().equals(BooleanClause.Occur.MUST_NOT)) {
                numNegatives++;
            }
            if (qc instanceof BooleanQuery) {
                ArrayList<SpanQuery> sc = new ArrayList<>();
                addComplexPhraseClause(sc, (BooleanQuery) qc);
                if (sc.size() > 0) {
                    allSpanClauses[i] = sc.get(0);
                } else {
                    // Insert fake term e.g. phrase query was for "Fred Smithe*" and
                    // there were no "Smithe*" terms - need to
                    // prevent match on just "Fred".
                    allSpanClauses[i] = new SpanTermQuery(new Term(field,
                        "Dummy clause because no terms found - must match nothing"));
                }
            } else {
                if (qc instanceof TermQuery) {
                    TermQuery tq = (TermQuery) qc;
                    allSpanClauses[i] = new SpanTermQuery(tq.getTerm());
                } else {
                    throw new IllegalArgumentException("Unknown query type \""
                        + qc.getClass().getName()
                        + "\" found in phrase query string \""
                        + phrasedQueryStringContents + "\"");
                }
            }
        }
        if (numNegatives == 0) {
            // The simple case - no negative elements in phrase
            return new SpanNearQuery(allSpanClauses, slopFactor, inOrder);
        }
        // Complex case - we have mixed positives and negatives in the
        // sequence. Need to return a SpanNotQuery.
        ArrayList<SpanQuery> positiveClauses = new ArrayList<>();
        for (int j = 0; j < allSpanClauses.length; j++) {
            if (!bclauses[j].getOccur().equals(BooleanClause.Occur.MUST_NOT)) {
                positiveClauses.add(allSpanClauses[j]);
            }
        }
        SpanQuery[] includeClauses = positiveClauses
            .toArray(new SpanQuery[positiveClauses.size()]);
        SpanQuery include = null;
        if (includeClauses.length == 1) {
            include = includeClauses[0]; // only one positive clause
        } else {
            // need to increase slop factor based on gaps introduced by
            // negatives
            include = new SpanNearQuery(includeClauses, slopFactor + numNegatives,
                inOrder);
        }
        // Use sequence of positive and negative values as the exclude.
        SpanNearQuery exclude = new SpanNearQuery(allSpanClauses, slopFactor,
            inOrder);
        SpanNotQuery snot = new SpanNotQuery(include, exclude);
        return snot;
    }

    // Flattens one rewritten Boolean clause into span form: positive
    // sub-terms become a SpanOr; MUST_NOT sub-terms become the exclude side
    // of a SpanNot. Appends at most one SpanQuery to spanClauses.
    private void addComplexPhraseClause(List<SpanQuery> spanClauses, BooleanQuery qc) {
        ArrayList<SpanQuery> ors = new ArrayList<>();
        ArrayList<SpanQuery> nots = new ArrayList<>();
        BooleanClause[] bclauses = qc.getClauses();
        // For all clauses e.g. one* two~
        for (int i = 0; i < bclauses.length; i++) {
            Query childQuery = bclauses[i].getQuery();
            // select the list to which we will add these options
            ArrayList<SpanQuery> chosenList = ors;
            if (bclauses[i].getOccur() == BooleanClause.Occur.MUST_NOT) {
                chosenList = nots;
            }
            if (childQuery instanceof TermQuery) {
                TermQuery tq = (TermQuery) childQuery;
                SpanTermQuery stq = new SpanTermQuery(tq.getTerm());
                stq.setBoost(tq.getBoost());
                chosenList.add(stq);
            } else if (childQuery instanceof BooleanQuery) {
                BooleanQuery cbq = (BooleanQuery) childQuery;
                addComplexPhraseClause(chosenList, cbq);
            } else {
                // TODO alternatively could call extract terms here?
                throw new IllegalArgumentException("Unknown query type:"
                    + childQuery.getClass().getName());
            }
        }
        if (ors.size() == 0) {
            // No positive clauses at all - nothing to add.
            return;
        }
        SpanOrQuery soq = new SpanOrQuery(ors
            .toArray(new SpanQuery[ors.size()]));
        if (nots.size() == 0) {
            spanClauses.add(soq);
        } else {
            SpanOrQuery snqs = new SpanOrQuery(nots
                .toArray(new SpanQuery[nots.size()]));
            SpanNotQuery snq = new SpanNotQuery(soq, snqs);
            spanClauses.add(snq);
        }
    }

    @Override
    public String toString(String field) {
        return "\"" + phrasedQueryStringContents + "\"";
    }

    // hashCode/equals are kept consistent: both include field, the raw
    // phrase contents, slopFactor and inOrder, plus the superclass state.
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = super.hashCode();
        result = prime * result + ((field == null) ? 0 : field.hashCode());
        result = prime
            * result
            + ((phrasedQueryStringContents == null) ? 0
                : phrasedQueryStringContents.hashCode());
        result = prime * result + slopFactor;
        result = prime * result + (inOrder ? 1 : 0);
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        if (!super.equals(obj)) {
            return false;
        }
        ComplexPhraseQuery other = (ComplexPhraseQuery) obj;
        if (field == null) {
            if (other.field != null)
                return false;
        } else if (!field.equals(other.field))
            return false;
        if (phrasedQueryStringContents == null) {
            if (other.phrasedQueryStringContents != null)
                return false;
        } else if (!phrasedQueryStringContents
            .equals(other.phrasedQueryStringContents))
            return false;
        if (slopFactor != other.slopFactor)
            return false;
        return inOrder == other.inOrder;
    }
}
}
| |
package com.admarvel.android.ads.internal;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.media.MediaPlayer;
import android.media.MediaPlayer.OnBufferingUpdateListener;
import android.media.MediaPlayer.OnCompletionListener;
import android.media.MediaPlayer.OnErrorListener;
import android.media.MediaPlayer.OnInfoListener;
import android.media.MediaPlayer.OnPreparedListener;
import android.media.MediaPlayer.OnVideoSizeChangedListener;
import android.net.Uri;
import android.support.v4.media.TransportMediator;
import android.support.v4.widget.ExploreByTouchHelper;
import android.util.Log;
import android.view.KeyEvent;
import android.view.MotionEvent;
import android.view.Surface;
import android.view.TextureView;
import android.view.TextureView.SurfaceTextureListener;
import android.view.View;
import android.view.View.MeasureSpec;
import android.view.ViewGroup.LayoutParams;
import android.view.accessibility.AccessibilityEvent;
import android.view.accessibility.AccessibilityNodeInfo;
import android.widget.MediaController;
import android.widget.MediaController.MediaPlayerControl;
import android.widget.RelativeLayout;
import com.admarvel.android.ads.internal.AdMarvelWebView.AdMarvelWebView;
import com.admarvel.android.ads.internal.util.Logging;
import java.lang.ref.WeakReference;
import java.util.Map;
/* renamed from: com.admarvel.android.ads.internal.j */
/**
 * Decompiled TextureView-based video player (original: com.admarvel.android.ads.internal.j).
 *
 * NOTE(review): this is decompiler output. All nested listener classes and the
 * callback interface were renamed to the same identifier "AdMarvelUniversalVideoView"
 * by the decompiler, which is not valid Java as written; the original names are
 * lost. Field names (f462A..f504z) are obfuscation residue. Comments below
 * describe behavior only where the code itself demonstrates it; anything
 * inferred is marked as an assumption.
 *
 * State codes used in f494p/f495q (see the constants -1..5 assigned in the
 * constructors): -1 error, 0 idle, 1 preparing, 2 prepared, 3 playing,
 * 4 paused, 5 playback completed.
 */
public class AdMarvelUniversalVideoView extends TextureView implements MediaPlayerControl, AdMarvelWebView {
    private int f462A;                            // last buffering percentage from onBufferingUpdate
    private OnErrorListener f463B;                // client-supplied error listener
    private OnInfoListener f464C;                 // client-supplied info listener
    private int f465D;                            // position (ms) to seek to once prepared (see seekTo)
    private boolean f466E;                        // canPause (set true in onPrepared)
    private boolean f467F;                        // canSeekBackward
    private boolean f468G;                        // canSeekForward
    private Context f469H;
    private int f470I;                            // last known playback position (display/resume)
    private AdMarvelUniversalVideoView f471J;     // callback interface instance (setListener)
    private boolean f472K;                        // surface destroyed / pending-cleanup flag
    private boolean f473L;                        // NOTE(review): selects an alternate sizing path - confirm semantics
    private WeakReference<AdMarvelWebView> f474M; // owning web view, held weakly
    private OnCompletionListener f475N;           // internal completion listener
    private OnInfoListener f476O;                 // internal info listener
    private OnErrorListener f477P;                // internal error listener
    private OnBufferingUpdateListener f478Q;      // internal buffering listener
    public SurfaceTexture f479a;                  // current surface texture
    public MediaPlayer f480b;                     // the underlying MediaPlayer (null when released)
    OnVideoSizeChangedListener f481c;
    OnPreparedListener f482d;
    SurfaceTextureListener f483e;
    private String f484f;                         // log tag
    private Uri f485g;                            // video URI
    private Map<String, String> f486h;            // headers for setDataSource
    private final int f487i;                      // state code -1 (error)
    private final int f488j;                      // state code 0 (idle)
    private final int f489k;                      // state code 1 (preparing)
    private final int f490l;                      // state code 2 (prepared)
    private final int f491m;                      // state code 3 (playing)
    private final int f492n;                      // state code 4 (paused)
    private final int f493o;                      // state code 5 (completed)
    private int f494p;                            // current state
    private int f495q;                            // target state
    private int f496r;                            // video width
    private int f497s;                            // video height
    private int f498t;                            // requested left margin (dp), see m257a/m259b
    private int f499u;                            // requested top margin (dp)
    private int f500v;                            // surface width
    private int f501w;                            // surface height
    private MediaController f502x;                // attached media controller (may be null)
    private OnCompletionListener f503y;           // client-supplied completion listener
    private OnPreparedListener f504z;             // client-supplied prepared listener

    // Inner listener 1: caches the reported video dimensions and triggers
    // either a buffer-size update or a relayout via m259b().
    /* renamed from: com.admarvel.android.ads.internal.j.1 */
    class AdMarvelUniversalVideoView implements OnVideoSizeChangedListener {
        final /* synthetic */ AdMarvelUniversalVideoView f455a;
        AdMarvelUniversalVideoView(AdMarvelUniversalVideoView adMarvelUniversalVideoView) {
            this.f455a = adMarvelUniversalVideoView;
        }
        public void onVideoSizeChanged(MediaPlayer mp, int width, int height) {
            try {
                this.f455a.f496r = mp.getVideoWidth();
                this.f455a.f497s = mp.getVideoHeight();
                if (this.f455a.f496r != 0 && this.f455a.f497s != 0) {
                    if (this.f455a.f473L) {
                        this.f455a.getSurfaceTexture().setDefaultBufferSize(this.f455a.f496r, this.f455a.f497s);
                        this.f455a.requestLayout();
                        return;
                    }
                    this.f455a.m259b();
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }

    // Inner listener 2: transitions to the prepared state, applies any pending
    // seek, and starts playback when the target state requests it.
    /* renamed from: com.admarvel.android.ads.internal.j.2 */
    class AdMarvelUniversalVideoView implements OnPreparedListener {
        final /* synthetic */ AdMarvelUniversalVideoView f456a;
        AdMarvelUniversalVideoView(AdMarvelUniversalVideoView adMarvelUniversalVideoView) {
            this.f456a = adMarvelUniversalVideoView;
        }
        public void onPrepared(MediaPlayer mp) {
            Logging.log("MediaPreparedListener : onPrepared");
            this.f456a.f494p = 2;
            this.f456a.f466E = this.f456a.f467F = this.f456a.f468G = true;
            if (this.f456a.f504z != null) {
                this.f456a.f504z.onPrepared(this.f456a.f480b);
            }
            if (this.f456a.f502x != null) {
                this.f456a.f502x.setEnabled(true);
            }
            this.f456a.f496r = mp.getVideoWidth();
            this.f456a.f497s = mp.getVideoHeight();
            int f = this.f456a.f465D;
            if (f != 0) {
                // Apply the seek that was requested before prepare completed.
                this.f456a.seekTo(f);
            }
            if (this.f456a.f496r == 0 || this.f456a.f497s == 0 || this.f456a.getSurfaceTexture() == null) {
                if (this.f456a.f495q == 3) {
                    this.f456a.start();
                }
            } else if (this.f456a.f473L) {
                try {
                    this.f456a.getSurfaceTexture().setDefaultBufferSize(this.f456a.f496r, this.f456a.f497s);
                    this.f456a.requestLayout();
                    if (this.f456a.f500v != this.f456a.f496r || this.f456a.f501w != this.f456a.f497s) {
                        return;
                    }
                    if (this.f456a.f495q == 3) {
                        this.f456a.start();
                        if (this.f456a.f502x != null) {
                            this.f456a.f502x.show();
                        }
                    } else if (!this.f456a.isPlaying()) {
                        if ((f != 0 || this.f456a.getCurrentPosition() > 0) && this.f456a.f502x != null) {
                            this.f456a.f502x.show(0);
                        }
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                }
            } else {
                this.f456a.m259b();
            }
        }
    }

    // Inner listener 3: marks playback completed (state 5), hides the
    // controller and forwards to the client completion listener.
    /* renamed from: com.admarvel.android.ads.internal.j.3 */
    class AdMarvelUniversalVideoView implements OnCompletionListener {
        final /* synthetic */ AdMarvelUniversalVideoView f457a;
        AdMarvelUniversalVideoView(AdMarvelUniversalVideoView adMarvelUniversalVideoView) {
            this.f457a = adMarvelUniversalVideoView;
        }
        public void onCompletion(MediaPlayer mp) {
            try {
                Log.d(this.f457a.f484f, "onCompletion: ");
                if (this.f457a.f494p != 5) {
                    this.f457a.f494p = 5;
                    this.f457a.f495q = 5;
                    if (this.f457a.f502x != null) {
                        this.f457a.f502x.hide();
                    }
                    if (this.f457a.f503y != null) {
                        this.f457a.f503y.onCompletion(this.f457a.f480b);
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }

    // Inner listener 4: forwards info events to the client listener; always
    // reports the event as handled.
    /* renamed from: com.admarvel.android.ads.internal.j.4 */
    class AdMarvelUniversalVideoView implements OnInfoListener {
        final /* synthetic */ AdMarvelUniversalVideoView f458a;
        AdMarvelUniversalVideoView(AdMarvelUniversalVideoView adMarvelUniversalVideoView) {
            this.f458a = adMarvelUniversalVideoView;
        }
        public boolean onInfo(MediaPlayer mp, int arg1, int arg2) {
            if (this.f458a.f464C != null) {
                this.f458a.f464C.onInfo(mp, arg1, arg2);
            }
            return true;
        }
    }

    // Inner listener 5: records the error state and notifies the client
    // error listener.
    /* renamed from: com.admarvel.android.ads.internal.j.5 */
    class AdMarvelUniversalVideoView implements OnErrorListener {
        final /* synthetic */ AdMarvelUniversalVideoView f459a;
        AdMarvelUniversalVideoView(AdMarvelUniversalVideoView adMarvelUniversalVideoView) {
            this.f459a = adMarvelUniversalVideoView;
        }
        public boolean onError(MediaPlayer mp, int framework_err, int impl_err) {
            Log.d(this.f459a.f484f, "Error: " + framework_err + "," + impl_err);
            this.f459a.f494p = -1;
            this.f459a.f495q = -1;
            if (this.f459a.f502x != null) {
                this.f459a.f502x.hide();
            }
            // NOTE(review): decompiler artifact - both ternary branches are
            // `true`, so this always returns true; the expression is kept
            // only for the side effect of invoking the client listener.
            return (this.f459a.f463B == null || this.f459a.f463B.onError(this.f459a.f480b, framework_err, impl_err)) ? true : true;
        }
    }

    // Inner listener 6: records the latest buffering percentage.
    /* renamed from: com.admarvel.android.ads.internal.j.6 */
    class AdMarvelUniversalVideoView implements OnBufferingUpdateListener {
        final /* synthetic */ AdMarvelUniversalVideoView f460a;
        AdMarvelUniversalVideoView(AdMarvelUniversalVideoView adMarvelUniversalVideoView) {
            this.f460a = adMarvelUniversalVideoView;
        }
        public void onBufferingUpdate(MediaPlayer mp, int percent) {
            this.f460a.f462A = percent;
        }
    }

    // Inner listener 7: binds/unbinds the MediaPlayer to the TextureView's
    // SurfaceTexture and drives (re)opening and cleanup of the video.
    /* renamed from: com.admarvel.android.ads.internal.j.7 */
    class AdMarvelUniversalVideoView implements SurfaceTextureListener {
        final /* synthetic */ AdMarvelUniversalVideoView f461a;
        AdMarvelUniversalVideoView(AdMarvelUniversalVideoView adMarvelUniversalVideoView) {
            this.f461a = adMarvelUniversalVideoView;
        }
        public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
            Logging.log("onSurfaceTextureAvailable");
            try {
                this.f461a.f479a = surface;
                this.f461a.f500v = width;
                this.f461a.f501w = height;
                if (this.f461a.f480b != null) {
                    this.f461a.f480b.setSurface(new Surface(surface));
                }
                if (this.f461a.f494p != 0) {
                    // Player exists but is not idle - (re)open the video.
                    this.f461a.m244i();
                }
                if (this.f461a.f473L && this.f461a.f471J != null) {
                    this.f461a.f471J.m214c();
                }
                this.f461a.f472K = false;
            } catch (Exception e) {
                e.printStackTrace();
                if (this.f461a.f471J != null) {
                    this.f461a.f471J.m218g();
                }
            }
        }
        public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
            Logging.log("onSurfaceTextureDestroyed");
            if (!this.f461a.f473L) {
                this.f461a.f472K = true;
                if (this.f461a.f474M != null && this.f461a.f474M.get() != null && ((AdMarvelWebView) this.f461a.f474M.get()).aa && this.f461a.m263f()) {
                    if (surface != null) {
                        surface.release();
                    }
                    if (this.f461a.f502x != null) {
                        this.f461a.f502x.hide();
                    }
                    this.f461a.m262e();
                    this.f461a.f472K = false;
                }
            }
            // Returning false keeps ownership of the SurfaceTexture here.
            return false;
        }
        public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
            Logging.log("onSurfaceTextureSizeChanged");
            try {
                if (this.f461a.f480b != null) {
                    this.f461a.f496r = this.f461a.f480b.getVideoWidth();
                    this.f461a.f497s = this.f461a.f480b.getVideoHeight();
                    if (this.f461a.f496r != 0 && this.f461a.f497s != 0) {
                        surface.setDefaultBufferSize(this.f461a.f496r, this.f461a.f497s);
                        this.f461a.requestLayout();
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
                if (this.f461a.f471J != null) {
                    this.f461a.f471J.m218g();
                }
            }
        }
        public void onSurfaceTextureUpdated(SurfaceTexture surface) {
        }
    }

    // Callback interface reported to the host. Semantics inferred from call
    // sites (hedged - original names were lost to obfuscation):
    //   m211a: invoked on pause; m212a: invoked on cleanup (passes this view);
    //   m213b: invoked after stop/release; m214c: invoked on resume/surface ready;
    //   m215d: invoked on fresh start; m216e: invoked on touch;
    //   m217f: invoked when m244i() finds no connectivity; m218g: invoked on errors.
    /* renamed from: com.admarvel.android.ads.internal.j.a */
    public interface AdMarvelUniversalVideoView {
        void m211a();
        void m212a(AdMarvelUniversalVideoView adMarvelUniversalVideoView);
        void m213b();
        void m214c();
        void m215d();
        void m216e();
        void m217f();
        void m218g();
    }

    /** Basic constructor: initializes state constants and listeners; f473L defaults to true. */
    public AdMarvelUniversalVideoView(Context context) {
        super(context);
        this.f484f = "AdMarvelUniversalVideoView";
        this.f487i = -1;
        this.f488j = 0;
        this.f489k = 1;
        this.f490l = 2;
        this.f491m = 3;
        this.f492n = 4;
        this.f493o = 5;
        this.f470I = 0;
        this.f472K = false;
        this.f474M = null;
        this.f481c = new AdMarvelUniversalVideoView(this);
        this.f482d = new AdMarvelUniversalVideoView(this);
        this.f475N = new AdMarvelUniversalVideoView(this);
        this.f476O = new AdMarvelUniversalVideoView(this);
        this.f477P = new AdMarvelUniversalVideoView(this);
        this.f478Q = new AdMarvelUniversalVideoView(this);
        this.f483e = new AdMarvelUniversalVideoView(this);
        this.f473L = true;
        m240g();
    }

    /** Full constructor: also parses the video URI and keeps a weak ref to the owning web view. */
    public AdMarvelUniversalVideoView(Context context, boolean z, String str, AdMarvelWebView adMarvelWebView) {
        super(context);
        this.f484f = "AdMarvelUniversalVideoView";
        this.f487i = -1;
        this.f488j = 0;
        this.f489k = 1;
        this.f490l = 2;
        this.f491m = 3;
        this.f492n = 4;
        this.f493o = 5;
        this.f470I = 0;
        this.f472K = false;
        this.f474M = null;
        this.f481c = new AdMarvelUniversalVideoView(this);
        this.f482d = new AdMarvelUniversalVideoView(this);
        this.f475N = new AdMarvelUniversalVideoView(this);
        this.f476O = new AdMarvelUniversalVideoView(this);
        this.f477P = new AdMarvelUniversalVideoView(this);
        this.f478Q = new AdMarvelUniversalVideoView(this);
        this.f483e = new AdMarvelUniversalVideoView(this);
        try {
            this.f469H = context;
            this.f473L = z;
            m240g();
            this.f485g = Uri.parse(str);
            this.f474M = new WeakReference(adMarvelWebView);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // Stores the URI/headers, resets the pending seek, and (re)opens the video.
    private void m222a(Uri uri, Map<String, String> map) {
        try {
            this.f485g = uri;
            this.f486h = map;
            this.f465D = 0;
            m244i();
            requestLayout();
            invalidate();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // Releases the MediaPlayer; when z is true the target state is reset too.
    private void m226b(boolean z) {
        try {
            if (this.f480b != null) {
                this.f480b.reset();
                this.f480b.release();
                this.f480b = null;
                this.f494p = 0;
                if (z) {
                    this.f495q = 0;
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // Common view initialization: attach the surface listener, make the view
    // focusable, and reset state if no player exists yet.
    private void m240g() {
        this.f469H = getContext();
        this.f496r = 0;
        this.f497s = 0;
        if (this.f483e != null) {
            setSurfaceTextureListener(this.f483e);
        }
        setFocusable(true);
        setFocusableInTouchMode(true);
        requestFocus();
        if (this.f480b == null) {
            this.f494p = 0;
            this.f495q = 0;
        }
    }

    // Wires all internal listeners onto the current MediaPlayer.
    private void m242h() {
        try {
            if (this.f480b != null) {
                this.f480b.setOnPreparedListener(this.f482d);
                this.f480b.setOnVideoSizeChangedListener(this.f481c);
                this.f480b.setOnCompletionListener(this.f475N);
                this.f480b.setOnErrorListener(this.f477P);
                this.f480b.setOnInfoListener(this.f476O);
                this.f480b.setOnBufferingUpdateListener(this.f478Q);
                this.f462A = 0;
                this.f480b.setScreenOnWhilePlaying(true);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // Opens the video: creates and prepares the MediaPlayer asynchronously.
    // NOTE(review): decompiler artifact - two identical `catch (Throwable e)`
    // blocks; the second is unreachable/illegal in valid Java source.
    private void m244i() {
        if (this.f485g != null) {
            if (Utils.m536r(this.f469H)) {
                if (this.f480b == null) {
                    try {
                        this.f480b = new MediaPlayer();
                        m242h();
                        this.f480b.setDataSource(this.f469H, this.f485g, this.f486h);
                        this.f480b.setAudioStreamType(3);
                        this.f480b.prepareAsync();
                        this.f494p = 1;
                        m246j();
                        return;
                    } catch (Throwable e) {
                        Log.w(this.f484f, "Unable to open content: " + this.f485g, e);
                        this.f494p = -1;
                        this.f495q = -1;
                        this.f477P.onError(this.f480b, 1, 0);
                        return;
                    } catch (Throwable e2) {
                        Log.w(this.f484f, "Unable to open content: " + this.f485g, e2);
                        this.f494p = -1;
                        this.f495q = -1;
                        this.f477P.onError(this.f480b, 1, 0);
                        return;
                    }
                }
                m242h();
            } else if (this.f471J != null) {
                // No connectivity (per Utils.m536r) - notify the host.
                this.f471J.m217f();
            }
        }
    }

    // Binds the media controller to this player and anchors it to the parent.
    private void m246j() {
        try {
            if (this.f480b != null && this.f502x != null) {
                this.f502x.setMediaPlayer(this);
                this.f502x.setAnchorView(getParent() instanceof View ? (View) getParent() : this);
                this.f502x.setEnabled(m250l());
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // Toggles the media controller's visibility.
    private void m248k() {
        try {
            if (this.f502x == null) {
                return;
            }
            if (this.f502x.isShowing()) {
                this.f502x.hide();
            } else {
                this.f502x.show();
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // True when the player exists and is in a playable state
    // (not error/idle/preparing).
    private boolean m250l() {
        return (this.f480b == null || this.f494p == -1 || this.f494p == 0 || this.f494p == 1) ? false : true;
    }

    // Stops and fully releases the player, notifying the host via m213b().
    public void m256a() {
        try {
            if (this.f480b != null) {
                this.f480b.stop();
                this.f480b.reset();
                this.f480b.release();
                this.f480b = null;
                this.f494p = 0;
                this.f495q = 0;
                if (this.f471J != null) {
                    this.f471J.m213b();
                }
            }
        } catch (Throwable e) {
            if (this.f471J != null) {
                this.f471J.m218g();
            }
            Logging.log(Log.getStackTraceString(e));
        }
    }

    // Records the desired size (i, i2) and margins (i3, i4) used by m259b().
    public void m257a(int i, int i2, int i3, int i4) {
        this.f496r = i;
        this.f497s = i2;
        this.f498t = i3;
        this.f499u = i4;
    }

    // Conditional cleanup when the owning web view is flagged (aa) and this
    // view is still visible after its surface was destroyed.
    public void m258a(boolean z) {
        if (z && this.f474M != null && this.f474M.get() != null && ((AdMarvelWebView) this.f474M.get()).aa && this.f472K && m263f()) {
            this.f479a = null;
            if (this.f502x != null) {
                this.f502x.hide();
            }
            m262e();
            this.f472K = false;
        }
    }

    // Applies RelativeLayout params scaled by display density; negative
    // margins fall back to centering rules (14/15/13).
    public void m259b() {
        float f = getContext().getResources().getDisplayMetrics().density;
        LayoutParams layoutParams = (this.f496r <= 0 || this.f497s <= 0) ? new RelativeLayout.LayoutParams(-2, -2) : new RelativeLayout.LayoutParams((int) (((float) this.f496r) * f), (int) (((float) this.f497s) * f));
        if (this.f498t >= 0) {
            if (this.f499u >= 0) {
                layoutParams.leftMargin = (int) (((float) this.f498t) * f);
                layoutParams.topMargin = (int) (f * ((float) this.f499u));
            } else {
                layoutParams.leftMargin = (int) (f * ((float) this.f498t));
                layoutParams.addRule(15);
            }
        } else if (this.f499u >= 0) {
            layoutParams.topMargin = (int) (f * ((float) this.f499u));
            layoutParams.addRule(14);
        } else {
            layoutParams.addRule(13);
        }
        setLayoutParams(layoutParams);
    }

    // Mutes playback.
    public void m260c() {
        if (m250l() && this.f480b != null) {
            this.f480b.setVolume(0.0f, 0.0f);
        }
    }

    public boolean canPause() {
        return this.f466E;
    }

    public boolean canSeekBackward() {
        return this.f467F;
    }

    public boolean canSeekForward() {
        return this.f468G;
    }

    // Restores full volume.
    public void m261d() {
        if (m250l() && this.f480b != null) {
            this.f480b.setVolume(1.0f, 1.0f);
        }
    }

    // Releases the player and tells the host this view is gone.
    void m262e() {
        Logging.log("AdMarvelUniversalVideoView : cleanUpVideoView - VideoView Destroyed");
        m226b(true);
        if (this.f471J != null) {
            this.f471J.m212a(this);
        }
    }

    // Returns true when the (vertical center of the) owning web view appears
    // to be on screen; true as well when the web view reference is gone.
    boolean m263f() {
        boolean z = false;
        AdMarvelWebView adMarvelWebView = (AdMarvelWebView) this.f474M.get();
        if (adMarvelWebView == null) {
            return true;
        }
        int[] iArr = new int[]{-1, -1};
        adMarvelWebView.getLocationInWindow(iArr);
        int height = adMarvelWebView.getHeight() > 0 ? adMarvelWebView.getHeight() / 2 : 0;
        int i = (adMarvelWebView.f921w == ExploreByTouchHelper.INVALID_ID || adMarvelWebView.f921w <= 0) ? 0 : adMarvelWebView.f921w;
        if (iArr[1] > 0 && (iArr[1] - i) + height >= 0 && height + iArr[1] < Utils.m528n(adMarvelWebView.getContext())) {
            z = true;
        }
        return z;
    }

    public int getAudioSessionId() {
        // Not supported; always 0.
        return 0;
    }

    public int getBufferPercentage() {
        return this.f480b != null ? this.f462A : 0;
    }

    public int getCurrentPosition() {
        return (!m250l() || this.f480b == null) ? 0 : this.f480b.getCurrentPosition();
    }

    // Like getCurrentPosition but caches the value (f470I) so a sensible
    // position can still be displayed when the player is not in playback state.
    public int getCurrentPositionToDisplay() {
        try {
            if (!m250l()) {
                return this.f470I;
            }
            int currentPosition = this.f480b.getCurrentPosition();
            this.f470I = currentPosition;
            return currentPosition;
        } catch (Exception e) {
            e.printStackTrace();
            this.f470I = 0;
            return 0;
        }
    }

    public int getDuration() {
        return (!m250l() || this.f480b == null) ? -1 : this.f480b.getDuration();
    }

    // NOTE(review): despite the name, this returns the current STATE code
    // (f494p), not a playback position.
    public int getLastCurrentPosition() {
        return this.f494p;
    }

    public boolean isPlaying() {
        return m250l() && this.f480b.isPlaying();
    }

    public void onInitializeAccessibilityEvent(AccessibilityEvent event) {
        super.onInitializeAccessibilityEvent(event);
        event.setClassName(AdMarvelUniversalVideoView.class.getName());
    }

    public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) {
        super.onInitializeAccessibilityNodeInfo(info);
        info.setClassName(AdMarvelUniversalVideoView.class.getName());
    }

    // Standard VideoView-style media-key handling: play/pause toggling for
    // media keys, controller toggling for everything else relevant.
    public boolean onKeyDown(int keyCode, KeyEvent event) {
        boolean z = (keyCode == 4 || keyCode == 24 || keyCode == 25 || keyCode == 164 || keyCode == 82 || keyCode == 5 || keyCode == 6) ? false : true;
        if (m250l() && z && this.f502x != null) {
            if (keyCode == 79 || keyCode == 85) {
                try {
                    if (this.f480b.isPlaying()) {
                        pause();
                        this.f502x.show();
                        return true;
                    }
                    start();
                    this.f502x.hide();
                    return true;
                } catch (Exception e) {
                    e.printStackTrace();
                }
            } else if (keyCode == TransportMediator.KEYCODE_MEDIA_PLAY) {
                if (this.f480b.isPlaying()) {
                    return true;
                }
                start();
                this.f502x.hide();
                return true;
            } else if (keyCode != 86 && keyCode != TransportMediator.KEYCODE_MEDIA_PAUSE) {
                m248k();
            } else if (!this.f480b.isPlaying()) {
                return true;
            } else {
                pause();
                this.f502x.show();
                return true;
            }
        }
        return super.onKeyDown(keyCode, event);
    }

    // Aspect-ratio-preserving measurement, mirroring framework VideoView.
    // NOTE(review): decompiler artifact - `r1` below is an undeclared
    // register name leaked by the decompiler; this method will not compile
    // as written. Code left byte-identical.
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        try {
            int defaultSize = AdMarvelUniversalVideoView.getDefaultSize(this.f496r, widthMeasureSpec);
            int defaultSize2 = AdMarvelUniversalVideoView.getDefaultSize(this.f497s, heightMeasureSpec);
            if (this.f496r > 0 && this.f497s > 0) {
                int mode = MeasureSpec.getMode(widthMeasureSpec);
                int size = MeasureSpec.getSize(widthMeasureSpec);
                int mode2 = MeasureSpec.getMode(heightMeasureSpec);
                defaultSize2 = MeasureSpec.getSize(heightMeasureSpec);
                if (mode == 1073741824 && mode2 == 1073741824) {
                    if (this.f496r * defaultSize2 < this.f497s * size) {
                        defaultSize = (this.f496r * defaultSize2) / this.f497s;
                    } else if (this.f496r * defaultSize2 > this.f497s * size) {
                        defaultSize2 = (this.f497s * size) / this.f496r;
                        defaultSize = size;
                    } else {
                        defaultSize = size;
                    }
                } else if (mode == 1073741824) {
                    defaultSize = (this.f497s * size) / this.f496r;
                    if (mode2 != ExploreByTouchHelper.INVALID_ID || defaultSize <= defaultSize2) {
                        defaultSize2 = defaultSize;
                        defaultSize = size;
                    } else {
                        defaultSize = size;
                    }
                } else if (mode2 == 1073741824) {
                    defaultSize = (this.f496r * defaultSize2) / this.f497s;
                    if (mode == ExploreByTouchHelper.INVALID_ID && defaultSize > size) {
                        defaultSize = size;
                    }
                } else {
                    int i = this.f496r;
                    defaultSize = this.f497s;
                    if (mode2 != ExploreByTouchHelper.INVALID_ID || defaultSize <= defaultSize2) {
                        defaultSize2 = defaultSize;
                        defaultSize = i;
                    } else {
                        defaultSize = (this.f496r * defaultSize2) / this.f497s;
                    }
                    if (mode == ExploreByTouchHelper.INVALID_ID && r1 > size) {
                        defaultSize2 = (this.f497s * size) / this.f496r;
                        defaultSize = size;
                    }
                }
            }
            setMeasuredDimension(defaultSize, defaultSize2);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // Touch toggles the controller and notifies the host (m216e).
    public boolean onTouchEvent(MotionEvent ev) {
        if (m250l() && this.f502x != null) {
            m248k();
        }
        if (this.f471J != null) {
            this.f471J.m216e();
        }
        return false;
    }

    public boolean onTrackballEvent(MotionEvent ev) {
        if (m250l() && this.f502x != null) {
            m248k();
        }
        return false;
    }

    // Pauses playback, records the position for resume, and always records
    // target state 4 even when not currently playable.
    public void pause() {
        try {
            if (m250l() && this.f480b != null && this.f480b.isPlaying()) {
                this.f480b.pause();
                this.f494p = 4;
                if (this.f471J != null) {
                    this.f471J.m211a();
                }
                this.f470I = getCurrentPosition();
            }
            this.f495q = 4;
        } catch (Throwable e) {
            if (this.f471J != null) {
                this.f471J.m218g();
            }
            Logging.log(Log.getStackTraceString(e));
        }
    }

    // Seeks immediately when playable; otherwise defers the seek (f465D)
    // until onPrepared.
    public void seekTo(int msec) {
        try {
            if (m250l()) {
                this.f480b.seekTo(msec);
                this.f465D = 0;
                return;
            }
            this.f465D = msec;
        } catch (IllegalStateException e) {
            e.printStackTrace();
        }
    }

    public void setListener(AdMarvelUniversalVideoView listener) {
        this.f471J = listener;
    }

    public void setMediaController(MediaController controller) {
        try {
            if (this.f502x != null) {
                this.f502x.hide();
            }
            this.f502x = controller;
            m246j();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public void setOnCompletionListener(OnCompletionListener l) {
        this.f503y = l;
    }

    public void setOnErrorListener(OnErrorListener l) {
        this.f463B = l;
    }

    public void setOnInfoListener(OnInfoListener l) {
        this.f464C = l;
    }

    public void setOnPreparedListener(OnPreparedListener l) {
        this.f504z = l;
    }

    public void setVideoPath(String path) {
        try {
            setVideoURI(Uri.parse(path));
        } catch (Throwable e) {
            if (this.f471J != null) {
                this.f471J.m218g();
            }
            Logging.log(Log.getStackTraceString(e));
        }
    }

    public void setVideoURI(Uri uri) {
        try {
            m222a(uri, null);
        } catch (Throwable e) {
            if (this.f471J != null) {
                this.f471J.m218g();
            }
            Logging.log(Log.getStackTraceString(e));
        }
    }

    // Starts (or resumes) playback when playable; always records target
    // state 3 so a deferred start can happen after prepare.
    public void start() {
        try {
            if (m250l() && this.f480b != null) {
                if (this.f470I <= 0 || this.f495q != 4) {
                    if (this.f471J != null) {
                        this.f471J.m215d();
                    }
                } else if (this.f471J != null) {
                    this.f471J.m214c();
                }
                this.f480b.start();
                this.f494p = 3;
            }
            this.f495q = 3;
        } catch (Throwable e) {
            if (this.f471J != null) {
                this.f471J.m218g();
            }
            Logging.log(Log.getStackTraceString(e));
        }
    }
}
| |
/*
* Copyright (c) 2007, 2011, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/**
* @test
* @bug 4898484 6604496
* @summary basic test for symmetric ciphers with no padding
* @author Valerie Peng
* @library ..
*/
import java.io.*;
import java.nio.*;
import java.util.*;
import java.security.*;
import java.security.spec.AlgorithmParameterSpec;
import javax.crypto.*;
import javax.crypto.spec.IvParameterSpec;
public class TestSymmCiphersNoPad extends PKCS11Test {

    /** Holder for the Cipher Information of one test case. */
    private static class CI {
        String transformation; // JCE transformation string, e.g. "AES/CBC/NoPadding"
        String keyAlgo;        // algorithm name used to generate the key
        int dataSize;          // plaintext length in bytes

        CI(String transformation, String keyAlgo, int dataSize) {
            this.transformation = transformation;
            this.keyAlgo = keyAlgo;
            this.dataSize = dataSize;
        }
    }

    // Transformations under test. For the block-cipher entries the data size
    // is a multiple of the block size, since no padding is applied.
    private static final CI[] TEST_LIST = {
        new CI("ARCFOUR", "ARCFOUR", 400),
        new CI("RC4", "RC4", 401),
        new CI("DES/CBC/NoPadding", "DES", 400),
        new CI("DESede/CBC/NoPadding", "DESede", 160),
        new CI("AES/CBC/NoPadding", "AES", 4800),
        new CI("Blowfish/CBC/NoPadding", "Blowfish", 24),
        new CI("AES/CTR/NoPadding", "AES", 1600)
    };

    // Diagnostics for the test currently running; dumped only on failure.
    // All access is single-threaded, so the unsynchronized StringBuilder
    // is preferable to StringBuffer.
    private static StringBuilder debugBuf;

    /**
     * Runs every transformation in {@code TEST_LIST} against the given
     * provider, using SunJCE as the reference implementation for both
     * encryption and decryption.
     *
     * @param p the PKCS#11 provider under test
     * @throws Exception if any output mismatches the SunJCE reference
     */
    public void main(Provider p) throws Exception {
        Random random = new Random();
        try {
            for (CI currTest : TEST_LIST) {
                System.out.println("===" + currTest.transformation + "===");
                try {
                    KeyGenerator kg =
                            KeyGenerator.getInstance(currTest.keyAlgo, p);
                    SecretKey key = kg.generateKey();
                    Cipher c1 = Cipher.getInstance(currTest.transformation, p);
                    // SunJCE produces the expected ("answer") output.
                    Cipher c2 = Cipher.getInstance(currTest.transformation,
                            "SunJCE");
                    byte[] plainTxt = new byte[currTest.dataSize];
                    random.nextBytes(plainTxt);
                    System.out.println("Testing inLen = " + plainTxt.length);

                    c2.init(Cipher.ENCRYPT_MODE, key);
                    AlgorithmParameters params = c2.getParameters();
                    byte[] answer = c2.doFinal(plainTxt);
                    test(c1, Cipher.ENCRYPT_MODE, key, params,
                            plainTxt, answer);
                    System.out.println("Encryption tests: DONE");

                    c2.init(Cipher.DECRYPT_MODE, key, params);
                    byte[] answer2 = c2.doFinal(answer);
                    test(c1, Cipher.DECRYPT_MODE, key, params,
                            answer, answer2);
                    System.out.println("Decryption tests: DONE");
                } catch (NoSuchAlgorithmException nsae) {
                    System.out.println("Skipping unsupported algorithm: " +
                            nsae);
                }
            }
        } catch (Exception ex) {
            // print out debug info when an exception is encountered
            if (debugBuf != null) {
                System.out.println(debugBuf.toString());
            }
            throw ex;
        }
    }

    /**
     * Exercises one cipher direction through five input/output combinations
     * (byte[] and direct/non-direct ByteBuffers) and checks each result
     * against {@code answer}.
     *
     * @param cipher the cipher under test
     * @param mode   Cipher.ENCRYPT_MODE or Cipher.DECRYPT_MODE
     * @param key    the secret key
     * @param params algorithm parameters from the reference cipher
     * @param in     the input data
     * @param answer the expected output (from SunJCE)
     * @throws Exception if any combination produces a mismatching result
     */
    private static void test(Cipher cipher, int mode, SecretKey key,
                             AlgorithmParameters params,
                             byte[] in, byte[] answer) throws Exception {
        // test setup
        debugBuf = new StringBuilder();
        cipher.init(mode, key, params);
        int outLen = cipher.getOutputSize(in.length);
        debugBuf.append("Estimated output size = " + outLen + "\n");

        // test data preparation
        ByteBuffer inBuf = ByteBuffer.allocate(in.length);
        inBuf.put(in);
        inBuf.position(0);
        ByteBuffer inDirectBuf = ByteBuffer.allocateDirect(in.length);
        inDirectBuf.put(in);
        inDirectBuf.position(0);
        ByteBuffer outBuf = ByteBuffer.allocate(outLen);
        ByteBuffer outDirectBuf = ByteBuffer.allocateDirect(outLen);

        // test#1: byte[] in + byte[] out, split across update() and doFinal()
        debugBuf.append("Test#1:\n");
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        byte[] testOut1 = cipher.update(in, 0, 16);
        if (testOut1 != null) baos.write(testOut1, 0, testOut1.length);
        testOut1 = cipher.doFinal(in, 16, in.length - 16);
        if (testOut1 != null) baos.write(testOut1, 0, testOut1.length);
        testOut1 = baos.toByteArray();
        match(testOut1, answer);

        // test#2: non-direct Buffer in + non-direct Buffer out
        debugBuf.append("Test#2:\n");
        debugBuf.append("inputBuf: " + inBuf + "\n");
        debugBuf.append("outputBuf: " + outBuf + "\n");
        cipher.update(inBuf, outBuf);
        cipher.doFinal(inBuf, outBuf);
        match(outBuf, answer);

        // test#3: direct Buffer in + direct Buffer out
        debugBuf.append("Test#3:\n");
        debugBuf.append("(pre) inputBuf: " + inDirectBuf + "\n");
        debugBuf.append("(pre) outputBuf: " + outDirectBuf + "\n");
        cipher.update(inDirectBuf, outDirectBuf);
        cipher.doFinal(inDirectBuf, outDirectBuf);
        debugBuf.append("(post) inputBuf: " + inDirectBuf + "\n");
        debugBuf.append("(post) outputBuf: " + outDirectBuf + "\n");
        match(outDirectBuf, answer);

        // test#4: direct Buffer in + non-direct Buffer out
        debugBuf.append("Test#4:\n");
        inDirectBuf.position(0);
        outBuf.position(0);
        debugBuf.append("inputBuf: " + inDirectBuf + "\n");
        debugBuf.append("outputBuf: " + outBuf + "\n");
        cipher.update(inDirectBuf, outBuf);
        cipher.doFinal(inDirectBuf, outBuf);
        match(outBuf, answer);

        // test#5: non-direct Buffer in + direct Buffer out
        debugBuf.append("Test#5:\n");
        inBuf.position(0);
        outDirectBuf.position(0);
        debugBuf.append("(pre) inputBuf: " + inBuf + "\n");
        debugBuf.append("(pre) outputBuf: " + outDirectBuf + "\n");
        cipher.update(inBuf, outDirectBuf);
        cipher.doFinal(inBuf, outDirectBuf);
        debugBuf.append("(post) inputBuf: " + inBuf + "\n");
        debugBuf.append("(post) outputBuf: " + outDirectBuf + "\n");
        match(outDirectBuf, answer);

        // All combinations matched: drop the diagnostics.
        debugBuf = null;
    }

    /**
     * Compares two byte arrays and throws (after recording diagnostics)
     * on the first difference in length or content.
     */
    private static void match(byte[] b1, byte[] b2) throws Exception {
        if (b1.length != b2.length) {
            debugBuf.append("got len : " + b1.length + "\n");
            debugBuf.append("expect len: " + b2.length + "\n");
            throw new Exception("mismatch - different length!\n");
        } else {
            for (int i = 0; i < b1.length; i++) {
                if (b1[i] != b2[i]) {
                    debugBuf.append("got : " + toString(b1) + "\n");
                    debugBuf.append("expect: " + toString(b2) + "\n");
                    throw new Exception("mismatch");
                }
            }
        }
    }

    /**
     * Compares the written content of a ByteBuffer (position 0 up to its
     * current position) against the expected answer.
     */
    private static void match(ByteBuffer bb, byte[] answer) throws Exception {
        byte[] bbTemp = new byte[bb.position()];
        bb.position(0);
        bb.get(bbTemp, 0, bbTemp.length);
        match(bbTemp, answer);
    }

    public static void main(String[] args) throws Exception {
        main(new TestSymmCiphersNoPad());
    }
}
| |
package com.github.javactic.futures;
/*
* ___ _ _
* |_ | | | (_)
* | | __ ___ ____ _ ___| |_ _ ___
* | |/ _` \ \ / / _` |/ __| __| |/ __|
* /\__/ / (_| |\ V / (_| | (__| |_| | (__ -2015-
* \____/ \__,_| \_/ \__,_|\___|\__|_|\___|
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import com.github.javactic.Bad;
import com.github.javactic.Every;
import com.github.javactic.Or;
import com.github.javactic.Validation;
import io.vavr.Tuple;
import io.vavr.Tuple2;
import io.vavr.control.Option;
import java.time.Duration;
import java.util.Objects;
import java.util.Queue;
import java.util.concurrent.CompletionException;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.function.Function;
/**
 * Default {@link OrFuture} implementation: a write-once holder for an
 * {@link Or} result that runs registered callbacks on the executor supplied
 * by its {@link ExecutionContext}.
 *
 * Thread-safety: the result is published via an AtomicReference CAS plus a
 * CountDownLatch; callbacks registered before completion are kept in a queue
 * guarded by synchronizing on {@code actions} so each runs exactly once.
 */
class OrFutureImpl<G, B> implements OrFuture<G, B> {
    // Supplies the executor for callbacks and creates dependent promises.
    private final ExecutionContext<?> executionContext;
    // The result; null means "not completed" (results themselves are never null).
    private final AtomicReference<Or<G, B>> value = new AtomicReference<>();
    // Counted down exactly once on completion; blocking get(...) awaits it.
    private final CountDownLatch finished = new CountDownLatch(1);
    // Callbacks registered before completion; drained once by complete(...).
    private final Queue<Consumer<? super Or<G, B>>> actions = new ConcurrentLinkedQueue<>();

    OrFutureImpl(ExecutionContext<?> executionContext) {
        this.executionContext = executionContext;
    }

    /**
     * Attempts to complete this future with the given result.
     *
     * @return true if this call completed the future, false if it was already complete
     */
    @SuppressWarnings("unchecked")
    boolean tryComplete(Or<? extends G, ? extends B> value) {
        // Safe widening cast: the Or is only read from this point on.
        return complete((Or<G, B>) value);
    }

    @Override
    public boolean isCompleted() {
        return value.get() != null;
    }

    /**
     * Completes this future with {@code result} if not already completed,
     * releasing waiters and running all queued callbacks.
     *
     * @return true if this call won the completion race, false otherwise
     */
    boolean complete(Or<G, B> result) {
        Objects.requireNonNull(result, "cannot complete with null");
        // CAS guarantees a single winner even under concurrent completion.
        if (value.compareAndSet(null, result)) {
            finished.countDown();
            // sync necessary so onComplete does not get called twice for an action
            synchronized (actions) {
                actions.forEach(this::perform);
            }
            return true;
        } else {
            return false;
        }
    }

    /**
     * Registers a callback to run with the result; runs immediately (on the
     * executor) if the future is already complete.
     */
    @Override
    public void onComplete(Consumer<? super Or<G, B>> action) {
        Objects.requireNonNull(action, "action is null");
        if (isCompleted()) {
            perform(action);
        } else {
            // sync necessary so onComplete does not get called twice for an action
            synchronized (actions) {
                // Re-check under the lock: completion may have raced ahead
                // between the unsynchronized check above and acquiring the lock.
                if (isCompleted()) {
                    perform(action);
                } else {
                    actions.add(action);
                }
            }
        }
    }

    // Runs one callback on the context's executor, routing any throwable to
    // an uncaught-exception handler rather than losing it in the executor.
    private void perform(Consumer<? super Or<G, B>> action) {
        executionContext.getExecutor().execute(() -> {
            try {
                action.accept(value.get());
            } catch (Throwable t) {
                handleUncaughtThrowable(t);
            }
        });
    }

    // Best-effort reporting of a throwable escaping a callback: current
    // thread's handler, then the default handler, then stderr.
    // NOTE(review): Thread.getUncaughtExceptionHandler() is documented to fall
    // back to the thread group, so the null branches may be defensive only —
    // confirm before simplifying.
    private void handleUncaughtThrowable(Throwable t) {
        Thread.UncaughtExceptionHandler handler = Thread.currentThread().getUncaughtExceptionHandler();
        if(handler != null) handler.uncaughtException(Thread.currentThread(), t);
        else {
            handler = Thread.getDefaultUncaughtExceptionHandler();
            if(handler != null) handler.uncaughtException(Thread.currentThread(), t);
            else System.err.println("no default or other UncaughtExceptionHandler found for Throwable " + t.toString());
        }
    }

    /** Returns the result if completed, otherwise {@code Option.none()}. */
    @Override
    public Option<Or<G, B>> getOption() {
        return Option.of(value.get());
    }

    /**
     * Blocks up to {@code timeout} for the result.
     *
     * @throws TimeoutException if the future does not complete in time
     * @throws InterruptedException if interrupted while waiting
     */
    @Override
    public Or<G, B> get(Duration timeout) throws InterruptedException, TimeoutException {
        if (finished.await(timeout.toMillis(), TimeUnit.MILLISECONDS)) return value.get();
        else throw new TimeoutException("timeout waiting for result");
    }

    /**
     * Blocks up to {@code timeout} for the result, returning
     * {@code Bad.of(timeoutBad)} instead of throwing on timeout.
     */
    @Override
    public Or<G, B> get(Duration timeout, B timeoutBad) throws InterruptedException {
        if (finished.await(timeout.toMillis(), TimeUnit.MILLISECONDS)) return value.get();
        else return Bad.of(timeoutBad);
    }

    /**
     * Blocks effectively forever for the result, wrapping interruption (or the
     * practically unreachable timeout) in a CompletionException.
     */
    @Override
    public Or<G, B> getUnsafe() throws CompletionException {
        try {
            return get(Duration.ofMillis(Long.MAX_VALUE));
        } catch (InterruptedException | TimeoutException e) {
            throw new CompletionException(e);
        }
    }

    @Override
    public String toString() {
        return "OrFuture(" + getOption().map(Object::toString).getOrElse("N/A") + ")";
    }

    /**
     * Runs {@code consumer} for its side effects and returns a future that
     * completes with the same result; consumer failures are reported, never
     * propagated into the returned future.
     */
    @Override
    public OrFuture<G, B> andThen(Consumer<? super Or<G, B>> consumer) {
        OrPromise<G, B> p = executionContext.promise();
        onComplete(or -> {
            try {
                consumer.accept(or);
            } catch (Throwable t) {
                handleUncaughtThrowable(t);
            } finally {
                // Complete with the original result regardless of consumer outcome.
                p.complete(or);
            }
        });
        return p.future();
    }

    /** Applies {@code validator} to a Good result; see {@code Or.filter}. */
    @Override
    public OrFuture<G, B> filter(Function<? super G, ? extends Validation<? extends B>> validator) {
        OrPromise<G, B> promise = executionContext.promise();
        onComplete(or -> promise.complete(or.filter(validator)));
        return promise.future();
    }

    /** Monadic bind: maps a Good result into a new future, passes Bad through. */
    @Override
    public <H> OrFuture<H, B> flatMap(Function<? super G, ? extends OrFuture<? extends H, ? extends B>> mapper) {
        OrPromise<H, B> promise = executionContext.promise();
        onComplete(or ->
            or.forEach(
                g -> promise.completeWith(mapper.apply(g)),
                promise::failure)
        );
        return promise.future();
    }

    /** Converts a Bad result into a Good via {@code fn}; Good passes through. */
    @Override
    public OrFuture<G, B> recover(Function<? super B, ? extends G> fn) {
        OrPromise<G, B> promise = executionContext.promise();
        onComplete(or -> promise.complete(or.recover(fn)));
        return promise.future();
    }

    /**
     * Like {@link #recover} but with a future-producing function; a Good
     * result short-circuits to this same future (cast is safe because a Good
     * never exposes its Bad type).
     */
    @SuppressWarnings("unchecked")
    @Override
    public <C> OrFuture<G, C> recoverWith(Function<? super B, ? extends OrFuture<? extends G, ? extends C>> fn) {
        return transformWith(or -> {
            if(or.isGood()) return (OrFuture<G,C>)this;
            else return fn.apply(or.getBad());
        });
    }

    /** Maps the completed Or (Good or Bad) into a new Or. */
    @Override
    public <H, C> OrFuture<H, C> transform(Function<? super Or<? extends G, ? extends B>, ? extends Or<? extends H, ? extends C>> f) {
        OrPromise<H, C> promise = executionContext.promise();
        onComplete(or -> promise.complete(f.apply(or)));
        return promise.future();
    }

    /** Maps the completed Or (Good or Bad) into a new future. */
    @Override
    public <H, C> OrFuture<H, C> transformWith(Function<? super Or<? extends G, ? extends B>, ? extends OrFuture<? extends H, ? extends C>> f) {
        OrPromise<H, C> promise = executionContext.promise();
        onComplete(or -> promise.completeWith(f.apply(or)));
        return promise.future();
    }

    /** Pairs two Good results into a Tuple2, accumulating Bads; see zipWith. */
    @Override
    public <H> OrFuture<Tuple2<G, H>, Every<B>> zip(OrFuture<? extends H, ? extends B> that) {
        return zipWith(that, Tuple::of);
    }

    /**
     * Combines the Good results of both futures with {@code f}; Bad results
     * are accumulated into an Every via {@code Or.zipWith}.
     */
    @Override
    public <H, X> OrFuture<X, Every<B>> zipWith(OrFuture<? extends H, ? extends B> that, BiFunction<? super G, ? super H, ? extends X> f) {
        OrPromise<X, Every<B>> promise = executionContext.promise();
        onComplete(thisOr -> that.onComplete(thatOr -> promise.complete(thisOr.zipWith(thatOr, f))));
        return promise.future();
    }
}
| |
/*
*
* Copyright 2015 Robert Winkler
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
package io.github.robwin.swagger2markup;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.github.robwin.markup.builder.MarkupLanguage;
import io.github.robwin.swagger2markup.builder.document.DefinitionsDocument;
import io.github.robwin.swagger2markup.builder.document.OverviewDocument;
import io.github.robwin.swagger2markup.builder.document.PathsDocument;
import io.swagger.models.Swagger;
import io.swagger.parser.SwaggerParser;
import io.swagger.util.Json;
import io.swagger.util.Yaml;
import org.apache.commons.lang3.Validate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
/**
* @author Robert Winkler
*/
/**
 * Converts a Swagger specification into markup documents (overview, paths
 * and definitions), either written to a folder or returned as a single
 * String.
 *
 * @author Robert Winkler
 */
public class Swagger2MarkupConverter {
    private static final Logger LOG = LoggerFactory.getLogger(Swagger2MarkupConverter.class);
    private final Swagger swagger;
    private final MarkupLanguage markupLanguage;
    private final String examplesFolderPath;
    private final String schemasFolderPath;
    private final String descriptionsFolderPath;
    private final boolean separatedDefinitions;
    private static final String OVERVIEW_DOCUMENT = "overview";
    private static final String PATHS_DOCUMENT = "paths";
    private static final String DEFINITIONS_DOCUMENT = "definitions";

    /**
     * @param markupLanguage the markup language which is used to generate the files
     * @param swagger the Swagger object
     * @param examplesFolderPath the folderPath where examples are stored
     * @param schemasFolderPath the folderPath where (XML, JSON)-Schema files are stored
     * @param descriptionsFolderPath the folderPath where descriptions are stored
     * @param separatedDefinitions create separate definition files for each model definition.
     */
    Swagger2MarkupConverter(MarkupLanguage markupLanguage, Swagger swagger, String examplesFolderPath, String schemasFolderPath, String descriptionsFolderPath, boolean separatedDefinitions){
        this.markupLanguage = markupLanguage;
        this.swagger = swagger;
        this.examplesFolderPath = examplesFolderPath;
        this.schemasFolderPath = schemasFolderPath;
        this.descriptionsFolderPath = descriptionsFolderPath;
        this.separatedDefinitions = separatedDefinitions;
    }

    /**
     * Creates a Swagger2MarkupConverter.Builder using a given Swagger source.
     *
     * @param swaggerLocation the Swagger location. Can be a HTTP url or a path to a local file.
     * @return a Swagger2MarkupConverter
     */
    public static Builder from(String swaggerLocation){
        Validate.notEmpty(swaggerLocation, "swaggerLocation must not be empty!");
        return new Builder(swaggerLocation);
    }

    /**
     * Creates a Swagger2MarkupConverter.Builder from a given Swagger model.
     *
     * @param swagger the Swagger source.
     * @return a Swagger2MarkupConverter
     */
    public static Builder from(Swagger swagger){
        Validate.notNull(swagger, "swagger must not be null!");
        return new Builder(swagger);
    }

    /**
     * Creates a Swagger2MarkupConverter.Builder from a given Swagger YAML or JSON String.
     *
     * @param swagger the Swagger YAML or JSON String.
     * @return a Swagger2MarkupConverter
     * @throws java.io.IOException if String can not be parsed
     */
    public static Builder fromString(String swagger) throws IOException {
        // FIX: message now matches the notEmpty check (was "must not be null!").
        Validate.notEmpty(swagger, "swagger must not be empty!");
        // A leading '{' indicates JSON; anything else is treated as YAML.
        ObjectMapper mapper;
        if(swagger.trim().startsWith("{")) {
            mapper = Json.mapper();
        }else {
            mapper = Yaml.mapper();
        }
        JsonNode rootNode = mapper.readTree(swagger);
        // must have swagger node set
        JsonNode swaggerNode = rootNode.get("swagger");
        if(swaggerNode == null)
            throw new IllegalArgumentException("Swagger String is in the wrong format");
        return new Builder(mapper.convertValue(rootNode, Swagger.class));
    }

    /**
     * Builds the document with the given markup language and stores
     * the files in the given folder.
     *
     * @param targetFolderPath the target folder
     * @throws IOException if the files cannot be written
     */
    public void intoFolder(String targetFolderPath) throws IOException {
        // FIX: message now matches the parameter name and notEmpty check.
        Validate.notEmpty(targetFolderPath, "targetFolderPath must not be empty!");
        buildDocuments(targetFolderPath);
    }

    /**
     * Builds the document with the given markup language and returns it as a String
     *
     * @return the document as a String
     * @throws java.io.IOException if files can not be read
     */
    public String asString() throws IOException{
        return buildDocuments();
    }

    /**
     * Builds all documents and writes them to a directory
     * @param directory the directory where the generated file should be stored
     * @throws IOException if a file cannot be written
     */
    private void buildDocuments(String directory) throws IOException {
        new OverviewDocument(swagger, markupLanguage).build().writeToFile(directory, OVERVIEW_DOCUMENT, StandardCharsets.UTF_8);
        new PathsDocument(swagger, markupLanguage, examplesFolderPath, descriptionsFolderPath).build().writeToFile(directory, PATHS_DOCUMENT, StandardCharsets.UTF_8);
        new DefinitionsDocument(swagger, markupLanguage, schemasFolderPath, descriptionsFolderPath, separatedDefinitions, directory).build().writeToFile(directory, DEFINITIONS_DOCUMENT, StandardCharsets.UTF_8);
    }

    /**
     * Returns all documents concatenated as a single String.
     *
     * Separated definition files are disabled (no target directory exists
     * when rendering to a String).
     *
     * @return the document as a String
     * @throws IOException if referenced files can not be read
     */
    private String buildDocuments() throws IOException {
        // FIX: previously descriptionsFolderPath's slots were filled with
        // schemasFolderPath, unlike buildDocuments(String directory) and the
        // constructor's parameter roles.
        return new OverviewDocument(swagger, markupLanguage).build().toString().concat(
            new PathsDocument(swagger, markupLanguage, examplesFolderPath, descriptionsFolderPath).build().toString()
            .concat(new DefinitionsDocument(swagger, markupLanguage, schemasFolderPath, descriptionsFolderPath, false, null).build().toString()));
    }

    /** Fluent builder configuring optional folder paths and markup language. */
    public static class Builder{
        private final Swagger swagger;
        private String examplesFolderPath;
        private String schemasFolderPath;
        private String descriptionsFolderPath;
        private boolean separatedDefinitions;
        private MarkupLanguage markupLanguage = MarkupLanguage.ASCIIDOC;

        /**
         * Creates a Builder using a given Swagger source.
         *
         * @param swaggerLocation the Swagger location. Can be a HTTP url or a path to a local file.
         */
        Builder(String swaggerLocation){
            swagger = new SwaggerParser().read(swaggerLocation);
            if(swagger == null){
                throw new IllegalArgumentException("Failed to read the Swagger file. ");
            }
        }

        /**
         * Creates a Builder using a given Swagger model.
         *
         * @param swagger the Swagger source.
         */
        Builder(Swagger swagger){
            this.swagger = swagger;
        }

        public Swagger2MarkupConverter build(){
            return new Swagger2MarkupConverter(markupLanguage, swagger, examplesFolderPath, schemasFolderPath, descriptionsFolderPath, separatedDefinitions);
        }

        /**
         * Specifies the markup language which should be used to generate the files
         *
         * @param markupLanguage the markup language which is used to generate the files
         * @return the Swagger2MarkupConverter.Builder
         */
        public Builder withMarkupLanguage(MarkupLanguage markupLanguage){
            this.markupLanguage = markupLanguage;
            return this;
        }

        /**
         * Include hand-written descriptions into the Paths and Definitions document
         *
         * @param descriptionsFolderPath the path to the folder where the description documents reside
         * @return the Swagger2MarkupConverter.Builder
         */
        public Builder withDescriptions(String descriptionsFolderPath){
            this.descriptionsFolderPath = descriptionsFolderPath;
            return this;
        }

        /**
         * In addition to the definitions file, also create separate definition files for each model definition.
         * @return the Swagger2MarkupConverter.Builder
         */
        public Builder withSeparatedDefinitions() {
            this.separatedDefinitions = true;
            return this;
        }

        /**
         * Include examples into the Paths document
         *
         * @param examplesFolderPath the path to the folder where the example documents reside
         * @return the Swagger2MarkupConverter.Builder
         */
        public Builder withExamples(String examplesFolderPath){
            this.examplesFolderPath = examplesFolderPath;
            return this;
        }

        /**
         * Include (JSON, XML) schemas into the Definitions document
         *
         * @param schemasFolderPath the path to the folder where the schema documents reside
         * @return the Swagger2MarkupConverter.Builder
         */
        public Builder withSchemas(String schemasFolderPath){
            this.schemasFolderPath = schemasFolderPath;
            return this;
        }
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.groboclown.idea.p4ic.ui.config;
import com.intellij.uiDesigner.core.GridConstraints;
import com.intellij.uiDesigner.core.GridLayoutManager;
import com.intellij.uiDesigner.core.Spacer;
import net.groboclown.idea.p4ic.config.UserProjectPreferences;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import java.awt.*;
import java.util.ArrayList;
import java.util.List;
import java.util.ResourceBundle;
/**
 * Settings panel for user-level Perforce plugin preferences: the maximum
 * number of simultaneous server connections and the connection wait timeout.
 *
 * The {@code $$$...$$$} members are generated by the IntelliJ IDEA GUI
 * Designer and must not be edited by hand.
 */
public class UserPreferencesPanel {
    private JSpinner myMaxServerConnections;
    private JSpinner myMaxTimeout;
    private JPanel myRootPanel;

    public UserPreferencesPanel() {
        // Clamp both spinners to the ranges declared in UserProjectPreferences.
        myMaxServerConnections.setModel(new MinMaxSpinnerModel(
                UserProjectPreferences.MIN_SERVER_CONNECTIONS,
                UserProjectPreferences.MAX_SERVER_CONNECTIONS,
                1,
                UserProjectPreferences.DEFAULT_SERVER_CONNECTIONS));
        myMaxTimeout.setModel(new MinMaxSpinnerModel(
                UserProjectPreferences.MIN_CONNECTION_WAIT_TIME_MILLIS,
                UserProjectPreferences.MAX_CONNECTION_WAIT_TIME_MILLIS,
                500,
                UserProjectPreferences.DEFAULT_CONNECTION_WAIT_TIME_MILLIS));
    }

    /** Copies the stored preference values into the spinners. */
    protected void loadSettingsIntoGUI(@NotNull UserProjectPreferences userPrefs) {
        myMaxServerConnections.setValue(userPrefs.getMaxServerConnections());
        myMaxTimeout.setValue(userPrefs.getMaxConnectionWaitTimeMillis());
    }

    /** Writes the current spinner values back into the stored preferences. */
    protected void saveSettingsToConfig(@NotNull UserProjectPreferences userPrefs) {
        userPrefs.setMaxServerConnections(getMaxServerConnections());
        userPrefs.setMaxConnectionWaitTimeMillis(getMaxTimeout());
    }

    /** @return true if either spinner differs from the stored preferences. */
    boolean isModified(@NotNull final UserProjectPreferences preferences) {
        return
                getMaxServerConnections() != preferences.getMaxServerConnections() ||
                getMaxTimeout() != preferences.getMaxConnectionWaitTimeMillis();
    }

    int getMaxServerConnections() {
        return (Integer) myMaxServerConnections.getModel().getValue();
    }

    int getMaxTimeout() {
        return (Integer) myMaxTimeout.getModel().getValue();
    }

    {
        // GUI initializer generated by IntelliJ IDEA GUI Designer
        // >>> IMPORTANT!! <<<
        // DO NOT EDIT OR ADD ANY CODE HERE!
        $$$setupUI$$$();
    }

    /**
     * Method generated by IntelliJ IDEA GUI Designer
     * >>> IMPORTANT!! <<<
     * DO NOT edit this method OR call it in your code!
     *
     * @noinspection ALL
     */
    private void $$$setupUI$$$() {
        myRootPanel = new JPanel();
        myRootPanel.setLayout(new GridLayoutManager(3, 2, new Insets(0, 0, 0, 0), -1, -1));
        final JLabel label1 = new JLabel();
        this.$$$loadLabelText$$$(label1,
                ResourceBundle.getBundle("net/groboclown/idea/p4ic/P4Bundle").getString("user.prefs.max_connections"));
        myRootPanel.add(label1, new GridConstraints(0, 0, 1, 1, GridConstraints.ANCHOR_EAST, GridConstraints.FILL_NONE,
                GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
        final Spacer spacer1 = new Spacer();
        myRootPanel.add(spacer1,
                new GridConstraints(2, 0, 1, 1, GridConstraints.ANCHOR_CENTER, GridConstraints.FILL_VERTICAL, 1,
                        GridConstraints.SIZEPOLICY_WANT_GROW, null, null, null, 0, false));
        myMaxServerConnections = new JSpinner();
        myRootPanel.add(myMaxServerConnections,
                new GridConstraints(0, 1, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE,
                        GridConstraints.SIZEPOLICY_WANT_GROW, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0,
                        false));
        final JLabel label2 = new JLabel();
        this.$$$loadLabelText$$$(label2,
                ResourceBundle.getBundle("net/groboclown/idea/p4ic/P4Bundle").getString("user.prefs.max_timeout"));
        myRootPanel.add(label2, new GridConstraints(1, 0, 1, 1, GridConstraints.ANCHOR_EAST, GridConstraints.FILL_NONE,
                GridConstraints.SIZEPOLICY_FIXED, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
        myMaxTimeout = new JSpinner();
        myRootPanel.add(myMaxTimeout,
                new GridConstraints(1, 1, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE,
                        GridConstraints.SIZEPOLICY_WANT_GROW, GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0,
                        false));
        label1.setLabelFor(myMaxServerConnections);
        label2.setLabelFor(myMaxTimeout);
    }

    /**
     * @noinspection ALL
     */
    private void $$$loadLabelText$$$(JLabel component, String text) {
        StringBuffer result = new StringBuffer();
        boolean haveMnemonic = false;
        char mnemonic = '\0';
        int mnemonicIndex = -1;
        for (int i = 0; i < text.length(); i++) {
            if (text.charAt(i) == '&') {
                i++;
                if (i == text.length()) {
                    break;
                }
                if (!haveMnemonic && text.charAt(i) != '&') {
                    haveMnemonic = true;
                    mnemonic = text.charAt(i);
                    mnemonicIndex = result.length();
                }
            }
            result.append(text.charAt(i));
        }
        component.setText(result.toString());
        if (haveMnemonic) {
            component.setDisplayedMnemonic(mnemonic);
            component.setDisplayedMnemonicIndex(mnemonicIndex);
        }
    }

    /**
     * @noinspection ALL
     */
    public JComponent $$$getRootComponent$$$() {
        return myRootPanel;
    }

    /**
     * A SpinnerModel that clamps its value to a [minValue, maxValue] range
     * and steps by a fixed increment. Listener registration is synchronized
     * so it can be used safely across threads.
     */
    static class MinMaxSpinnerModel implements SpinnerModel {
        private final List<ChangeListener> listeners = new ArrayList<ChangeListener>();
        private final int minValue;
        private final int maxValue;
        private final int step;
        private int value;

        MinMaxSpinnerModel(final int minValue, final int maxValue, final int step, final int initialValue) {
            this.minValue = minValue;
            this.maxValue = maxValue;
            this.step = step;
            this.value = initialValue;
        }

        @Override
        public Object getValue() {
            return value;
        }

        /**
         * Accepts any Number, clamps it into [minValue, maxValue], and
         * notifies listeners only when the clamped value actually changed.
         * Non-Number (including null) values are silently ignored.
         */
        @Override
        public void setValue(final Object value) {
            // FIX: "value == null ||" removed — instanceof is already
            // null-safe, so the extra check was redundant.
            if (!(value instanceof Number)) {
                return;
            }
            int newValue = Math.min(
                    maxValue,
                    Math.max(
                            minValue,
                            ((Number) value).intValue()));
            if (newValue != this.value) {
                this.value = newValue;
                synchronized (listeners) {
                    for (ChangeListener listener : listeners) {
                        listener.stateChanged(new ChangeEvent(this));
                    }
                }
            }
        }

        /** @return the next value, clamped at maxValue (never wraps). */
        @Override
        public Object getNextValue() {
            return Math.min(maxValue, value + step);
        }

        /** @return the previous value, clamped at minValue (never wraps). */
        @Override
        public Object getPreviousValue() {
            return Math.max(minValue, value - step);
        }

        @Override
        public void addChangeListener(final ChangeListener l) {
            // Silently ignore null listeners rather than throwing.
            if (l != null) {
                synchronized (listeners) {
                    listeners.add(l);
                }
            }
        }

        @Override
        public void removeChangeListener(final ChangeListener l) {
            synchronized (listeners) {
                listeners.remove(l);
            }
        }
    }
}
| |
/*
* Copyright (c) 2008, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.carbon.governance.api.common.dataobjects;
import org.wso2.carbon.governance.api.exception.GovernanceException;
import javax.xml.namespace.QName;
import java.util.List;
import java.util.Map;
public interface GovernanceArtifact {
/**
* Returns the QName of the artifact.
*
* @return the QName of the artifact
*/
QName getQName();
/**
* Set the QName of the artifact.
*
* @param qName of the artifact
*/
void setQName(QName qName) throws GovernanceException;
/**
* Returns the id of the artifact
*
* @return the id
*/
String getId();
/**
* Returns the media type of the artifact.
*
* @return the media type of the artifact
*/
@SuppressWarnings("unused")
String getMediaType();
/**
* Set the id
*
* @param id the id
*/
void setId(String id);
/**
* Returns the path of the artifact, need to save the artifact before
* getting the path.
*
* @return here we return the path of the artifact.
* @throws org.wso2.carbon.governance.api.exception.GovernanceException if an error occurred.
*/
String getPath() throws GovernanceException;
/**
* Returns the names of the lifecycle associated with this artifact.
*
* @return the names of the lifecycle associated with this artifact as an array
* @throws org.wso2.carbon.governance.api.exception.GovernanceException if an error occurred.
*/
@SuppressWarnings("unused")
String[] getLifecycleNames() throws GovernanceException;
/**
* Returns the name of the default lifecycle associated with this artifact.
*
* @return the names of the lifecycle associated with this artifact.
* @throws org.wso2.carbon.governance.api.exception.GovernanceException if an error occurred.
*/
String getLifecycleName() throws GovernanceException;
/**
* Associates the named lifecycle with the artifact
*
* @param name the name of the lifecycle to be associated with this artifact.
* @throws org.wso2.carbon.governance.api.exception.GovernanceException if an error occurred.
*/
void attachLifecycle(String name) throws GovernanceException;
/**
* Returns the state of the default lifecycle associated with this artifact.
*
* @return the state of the default lifecycle associated with this artifact.
* @throws org.wso2.carbon.governance.api.exception.GovernanceException if an error occurred.
*/
String getLifecycleState() throws GovernanceException;
    /**
     * Returns the state of the named lifecycle associated with this artifact.
     *
     * @param lifeCycleName the name of the lifecycle of which the state is required
     * @return the state of the lifecycle associated with this artifact.
     * @throws org.wso2.carbon.governance.api.exception.GovernanceException if an error occurred.
     */
    @SuppressWarnings("unused")
    String getLifecycleState(String lifeCycleName) throws GovernanceException;
    /**
     * Adds an attribute to the artifact. The artifact should be saved for the change to take
     * effect. In the case of a single-valued attribute, this method will set or replace the
     * existing attribute with the provided value. In the case of a multi-valued attribute, this
     * method will append the provided value to the existing list.
     *
     * @param key the key.
     * @param value the value.
     * @throws org.wso2.carbon.governance.api.exception.GovernanceException throws if the operation failed.
     */
    void addAttribute(String key, String value) throws GovernanceException;
    /**
     * Sets/updates an attribute with multiple values. The artifact should be saved for the
     * change to take effect.
     *
     * @param key the key
     * @param newValues the new values for the attribute
     * @throws org.wso2.carbon.governance.api.exception.GovernanceException throws if the operation failed.
     */
    void setAttributes(String key, String[] newValues) throws GovernanceException;
    /**
     * Sets/updates an attribute with a single value. The artifact should be saved for the change
     * to take effect. This method will replace the existing attribute with the provided value. In
     * the case of a multi-valued attribute this will remove all existing values. If you want to
     * append the provided value to the list of values of a multi-valued attribute, use the
     * addAttribute method instead.
     *
     * @param key the key
     * @param newValue the value
     * @throws org.wso2.carbon.governance.api.exception.GovernanceException throws if the operation failed.
     */
    void setAttribute(String key, String newValue) throws GovernanceException;
    /**
     * Returns the attribute value for a given key.
     *
     * @param key the key
     * @return the value of the attribute; if there is more than one value for the key, this
     *         returns the first value.
     * @throws org.wso2.carbon.governance.api.exception.GovernanceException throws if the operation failed.
     */
    String getAttribute(String key) throws GovernanceException;
    /**
     * Returns the available attribute keys.
     *
     * @return an array of attribute keys.
     * @throws org.wso2.carbon.governance.api.exception.GovernanceException throws if the operation failed.
     */
    String[] getAttributeKeys() throws GovernanceException;
    /**
     * Returns all attribute values for a key.
     *
     * @param key the key.
     * @return attribute values for the key.
     * @throws org.wso2.carbon.governance.api.exception.GovernanceException throws if the operation failed.
     */
    String[] getAttributes(String key) throws GovernanceException;
    /**
     * Removes the attribute with the given key. The artifact should be saved for the change to
     * take effect.
     *
     * @param key the key
     * @throws org.wso2.carbon.governance.api.exception.GovernanceException throws if the operation failed.
     */
    @SuppressWarnings("unused")
    void removeAttribute(String key) throws GovernanceException;
    /**
     * Gets the dependencies of this artifact. The artifact should be saved before calling this
     * method.
     *
     * @return an array of dependencies of this artifact.
     * @throws org.wso2.carbon.governance.api.exception.GovernanceException throws if the operation failed.
     */
    GovernanceArtifact[] getDependencies() throws GovernanceException;
    /**
     * Gets the dependents of this artifact. The artifact should be saved before calling this
     * method.
     *
     * @return an array of artifacts that are dependent on this artifact.
     * @throws org.wso2.carbon.governance.api.exception.GovernanceException throws if the operation failed.
     */
    GovernanceArtifact[] getDependents() throws GovernanceException;
    /**
     * Gets all lifecycle actions available for the current state of the lifecycle.
     *
     * @param lifeCycleName lifecycle name of which actions are needed
     * @return the set of actions which can be invoked
     * @throws org.wso2.carbon.governance.api.exception.GovernanceException throws if the operation failed.
     */
    @SuppressWarnings("unused")
    public String[] getAllLifecycleActions(String lifeCycleName) throws GovernanceException;
    /**
     * Invokes the given lifecycle action, promoting the artifact to the next state of the
     * lifecycle.
     *
     * @param action lifecycle action to be invoked
     * @param aspectName lifecycle name of which the action is to be invoked
     * @throws org.wso2.carbon.governance.api.exception.GovernanceException throws if the operation failed.
     */
    @SuppressWarnings("unused")
    void invokeAction(String action, String aspectName) throws GovernanceException;
    /**
     * Invokes the given lifecycle action with extra parameters, promoting the artifact to the
     * next state of the lifecycle.
     *
     * @param action lifecycle action to be invoked
     * @param parameters extra parameters needed when promoting
     * @param aspectName lifecycle name of which the action is to be invoked
     * @throws org.wso2.carbon.governance.api.exception.GovernanceException throws if the operation failed.
     */
    @SuppressWarnings("unused")
    void invokeAction(String action, Map<String, String> parameters, String aspectName) throws GovernanceException;
    /**
     * Retrieves the name set of the checklist items.
     *
     * @param aspectName lifecycle name of which the checklist items are retrieved
     * @return checklist item name set
     * @throws org.wso2.carbon.governance.api.exception.GovernanceException throws if the operation failed.
     */
    @SuppressWarnings("unused")
    String[] getAllCheckListItemNames(String aspectName) throws GovernanceException;
    /**
     * Checks (ticks) the checklist item at the given position.
     *
     * @param order order of the checklist item to be checked
     * @param aspectName lifecycle name to which the checklist item belongs
     * @throws org.wso2.carbon.governance.api.exception.GovernanceException throws if the operation failed.
     */
    @SuppressWarnings("unused")
    void checkLCItem(int order, String aspectName) throws GovernanceException;
    /**
     * Checks whether the given ordered lifecycle checklist item is checked or not.
     *
     * @param order order of the checklist item to be queried
     * @param aspectName lifecycle name to which the checklist item belongs
     * @return whether the given ordered lifecycle checklist item is checked or not
     * @throws org.wso2.carbon.governance.api.exception.GovernanceException throws if the operation failed.
     */
    @SuppressWarnings("unused")
    public boolean isLCItemChecked(int order, String aspectName) throws GovernanceException;
    /**
     * Un-checks the checklist item at the given position.
     *
     * @param order order of the checklist item to be unchecked
     * @param aspectName lifecycle name to which the checklist item belongs
     * @throws org.wso2.carbon.governance.api.exception.GovernanceException throws if the operation failed.
     */
    @SuppressWarnings("unused")
    void uncheckLCItem(int order, String aspectName) throws GovernanceException;
    /**
     * Retrieves the action set which needs votes.
     *
     * @return the set of actions which can be voted for
     * @throws org.wso2.carbon.governance.api.exception.GovernanceException throws if the operation failed.
     */
    @SuppressWarnings("unused")
    public String[] getAllVotingItems() throws GovernanceException;
    /**
     * Votes for an action.
     *
     * @param order order of the action to be voted for
     * @throws org.wso2.carbon.governance.api.exception.GovernanceException throws if the operation failed.
     */
    public void vote(int order) throws GovernanceException;
    /**
     * Checks whether the current user voted for the given order event.
     *
     * @param order order of the action to be queried
     * @return whether the current user voted for the given order event
     * @throws org.wso2.carbon.governance.api.exception.GovernanceException throws if the operation failed.
     */
    @SuppressWarnings("unused")
    public boolean isVoted(int order) throws GovernanceException;
    /**
     * Withdraws a vote for an action.
     *
     * @param order order of the action to be unvoted
     * @throws org.wso2.carbon.governance.api.exception.GovernanceException throws if the operation failed.
     */
    @SuppressWarnings("unused")
    public void unvote(int order) throws GovernanceException;
    /**
     * Returns the available property keys.
     *
     * @return an array of property keys.
     * @throws org.wso2.carbon.governance.api.exception.GovernanceException throws if the operation failed.
     */
    String[] getPropertyKeys() throws GovernanceException;
    /**
     * Attaches the current artifact to another artifact. Both artifacts should be saved before
     * calling this method. This method works with generic artifact types; there are specific
     * methods for typed artifacts.
     *
     * @param attachedToArtifact the artifact the current artifact is attached to
     * @throws GovernanceException throws if the operation failed.
     */
    public void attach(GovernanceArtifact attachedToArtifact) throws GovernanceException;
    /**
     * Attaches the current artifact to the artifact with the given id. Both artifacts should be
     * saved before calling this method.
     *
     * @param artifactId the id of the artifact to attach to
     * @throws GovernanceException throws if the operation failed.
     */
    public void attach(String artifactId) throws GovernanceException;
    /**
     * Detaches the current artifact from the provided artifact. Both artifacts should be saved
     * before calling this method.
     *
     * @param artifactId the artifact id of the attached artifact
     * @throws GovernanceException throws if the operation failed.
     */
    public void detach(String artifactId) throws GovernanceException;
    /** Adds an association of the given type from this artifact to the given artifact. */
    public void addAssociation(String associationType, GovernanceArtifact attachedToArtifact) throws GovernanceException;
    /** Adds an association of the given type from this artifact to the artifact with the given id. */
    public void addAssociation(String associationType, String artifactId) throws GovernanceException;
    /** Removes the association of the given type to the artifact with the given id. */
    public void removeAssociation(String associationType, String artifactId) throws GovernanceException;
    /** Removes the association to the artifact with the given id. */
    public void removeAssociation(String artifactId) throws GovernanceException;
    /** Returns the associated artifacts, keyed by association type. */
    public Map<String, List<GovernanceArtifact>> getAssociations() throws GovernanceException;
    /** Returns the associated artifact ids, keyed by association type. */
    public Map<String, List<String>> getAssociatedArtifactIds() throws GovernanceException;
    /** Returns whether this artifact is backed by the registry. */
    public boolean isRegistryAwareArtifact();
    /** Adds a single tag to this artifact. */
    public void addTag(String tag) throws GovernanceException;
    /** Adds multiple tags to this artifact. */
    public void addTags(List<String> tags) throws GovernanceException;
    /** Lists the tags applied to this artifact. */
    public List<String> listTags() throws GovernanceException;
    /** Removes a single tag from this artifact. */
    public void removeTag(String tag) throws GovernanceException;
    /** Removes multiple tags from this artifact. */
    public void removeTags(List<String> tags) throws GovernanceException;
    /** Adds a pair of associations (forward and backward) between this artifact and the given artifact. */
    public void addBidirectionalAssociation(String forwardType, String backwardType, GovernanceArtifact attachedToArtifact) throws GovernanceException;
    // NOTE(review): exact equality semantics of the two comparison methods below are defined by
    // implementations; they are not documented here — confirm against the implementing classes.
    public boolean uniqueTo(GovernanceArtifact artifact);
    public boolean compareTo(GovernanceArtifact artifact);
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.lang3.text;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.text.DecimalFormatSymbols;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import org.junit.jupiter.api.Test;
/**
* Unit tests for {@link org.apache.commons.lang3.text.StrBuilder}.
*/
@Deprecated
public class StrBuilderAppendInsertTest {
    /** The system line separator, used as the expected new-line text in assertions. */
    private static final String SEP = System.lineSeparator();
    /** Test subclass of Object, with a toString method; used to verify Object appends go through toString(). */
    private static final Object FOO = new Object() {
        @Override
        public String toString() {
            return "foo";
        }
    };
//-----------------------------------------------------------------------
@Test
public void testAppendNewLine() {
StrBuilder sb = new StrBuilder("---");
sb.appendNewLine().append("+++");
assertEquals("---" + SEP + "+++", sb.toString());
sb = new StrBuilder("---");
sb.setNewLineText("#").appendNewLine().setNewLineText(null).appendNewLine();
assertEquals("---#" + SEP, sb.toString());
}
//-----------------------------------------------------------------------
    /** Tests that the configured null text is appended for null inputs of every overload. */
    @Test
    public void testAppendWithNullText() {
        final StrBuilder sb = new StrBuilder();
        sb.setNullText("NULL");
        assertEquals("", sb.toString());
        sb.appendNull();
        assertEquals("NULL", sb.toString());
        sb.append((Object) null);
        assertEquals("NULLNULL", sb.toString());
        sb.append(FOO);
        assertEquals("NULLNULLfoo", sb.toString());
        sb.append((String) null);
        assertEquals("NULLNULLfooNULL", sb.toString());
        sb.append(""); // empty input is a no-op, not treated as null
        assertEquals("NULLNULLfooNULL", sb.toString());
        sb.append("bar");
        assertEquals("NULLNULLfooNULLbar", sb.toString());
        sb.append((StringBuffer) null);
        assertEquals("NULLNULLfooNULLbarNULL", sb.toString());
        sb.append(new StringBuffer("baz"));
        assertEquals("NULLNULLfooNULLbarNULLbaz", sb.toString());
    }
//-----------------------------------------------------------------------
    /** Tests append(Object): without null text configured, null appends nothing. */
    @Test
    public void testAppend_Object() {
        final StrBuilder sb = new StrBuilder();
        sb.appendNull();
        assertEquals("", sb.toString());
        sb.append((Object) null);
        assertEquals("", sb.toString());
        sb.append(FOO);
        assertEquals("foo", sb.toString());
        sb.append((StringBuffer) null);
        assertEquals("foo", sb.toString());
        sb.append(new StringBuffer("baz"));
        assertEquals("foobaz", sb.toString());
        sb.append(new StrBuilder("yes"));
        assertEquals("foobazyes", sb.toString());
        sb.append((CharSequence) "Seq");
        assertEquals("foobazyesSeq", sb.toString());
        sb.append(new StringBuilder("bld")); // Check it supports StringBuilder
        assertEquals("foobazyesSeqbld", sb.toString());
    }
//-----------------------------------------------------------------------
@Test
public void testAppend_StringBuilder() {
StrBuilder sb = new StrBuilder();
sb.setNullText("NULL").append((String) null);
assertEquals("NULL", sb.toString());
sb = new StrBuilder();
sb.append(new StringBuilder("foo"));
assertEquals("foo", sb.toString());
sb.append(new StringBuilder(""));
assertEquals("foo", sb.toString());
sb.append(new StringBuilder("bar"));
assertEquals("foobar", sb.toString());
}
//-----------------------------------------------------------------------
@Test
public void testAppend_String() {
StrBuilder sb = new StrBuilder();
sb.setNullText("NULL").append((String) null);
assertEquals("NULL", sb.toString());
sb = new StrBuilder();
sb.append("foo");
assertEquals("foo", sb.toString());
sb.append("");
assertEquals("foo", sb.toString());
sb.append("bar");
assertEquals("foobar", sb.toString());
}
//-----------------------------------------------------------------------
@Test
public void testAppend_String_int_int() {
StrBuilder sb = new StrBuilder();
sb.setNullText("NULL").append((String) null, 0, 1);
assertEquals("NULL", sb.toString());
sb = new StrBuilder();
sb.append("foo", 0, 3);
assertEquals("foo", sb.toString());
final StrBuilder sb1 = sb;
assertThrows(
IndexOutOfBoundsException.class,
() -> sb1.append("bar", -1, 1),
"append(char[], -1,) expected IndexOutOfBoundsException");
assertThrows(
IndexOutOfBoundsException.class,
() -> sb1.append("bar", 3, 1),
"append(char[], 3,) expected IndexOutOfBoundsException");
assertThrows(
IndexOutOfBoundsException.class,
() -> sb1.append("bar", 1, -1),
"append(char[],, -1) expected IndexOutOfBoundsException");
assertThrows(
IndexOutOfBoundsException.class,
() -> sb1.append("bar", 1, 3),
"append(char[], 1, 3) expected IndexOutOfBoundsException");
assertThrows(
IndexOutOfBoundsException.class,
() -> sb1.append("bar", -1, 3),
"append(char[], -1, 3) expected IndexOutOfBoundsException");
assertThrows(
IndexOutOfBoundsException.class,
() -> sb1.append("bar", 4, 0),
"append(char[], 4, 0) expected IndexOutOfBoundsException");
sb.append("bar", 3, 0);
assertEquals("foo", sb.toString());
sb.append("abcbardef", 3, 3);
assertEquals("foobar", sb.toString());
sb.append((CharSequence) "abcbardef", 4, 3);
assertEquals("foobarard", sb.toString());
}
//-----------------------------------------------------------------------
@Test
public void testAppend_StringBuilder_int_int() {
StrBuilder sb = new StrBuilder();
sb.setNullText("NULL").append((String) null, 0, 1);
assertEquals("NULL", sb.toString());
sb = new StrBuilder();
sb.append(new StringBuilder("foo"), 0, 3);
assertEquals("foo", sb.toString());
final StrBuilder sb1 = sb;
assertThrows(
IndexOutOfBoundsException.class,
() -> sb1.append(new StringBuilder("bar"), -1, 1),
"append(StringBuilder, -1,) expected IndexOutOfBoundsException");
assertThrows(
IndexOutOfBoundsException.class,
() -> sb1.append(new StringBuilder("bar"), 3, 1),
"append(StringBuilder, 3,) expected IndexOutOfBoundsException");
assertThrows(
IndexOutOfBoundsException.class,
() -> sb1.append(new StringBuilder("bar"), 1, -1),
"append(StringBuilder,, -1) expected IndexOutOfBoundsException");
assertThrows(
IndexOutOfBoundsException.class,
() -> sb1.append(new StringBuilder("bar"), 1, 3),
"append(StringBuilder, 1, 3) expected IndexOutOfBoundsException");
assertThrows(
IndexOutOfBoundsException.class,
() -> sb1.append(new StringBuilder("bar"), -1, 3),
"append(StringBuilder, -1, 3) expected IndexOutOfBoundsException");
assertThrows(
IndexOutOfBoundsException.class,
() -> sb1.append(new StringBuilder("bar"), 4, 0),
"append(StringBuilder, 4, 0) expected IndexOutOfBoundsException");
sb.append(new StringBuilder("bar"), 3, 0);
assertEquals("foo", sb.toString());
sb.append(new StringBuilder("abcbardef"), 3, 3);
assertEquals("foobar", sb.toString());
sb.append( new StringBuilder("abcbardef"), 4, 3);
assertEquals("foobarard", sb.toString());
}
//-----------------------------------------------------------------------
@Test
public void testAppend_StringBuffer() {
StrBuilder sb = new StrBuilder();
sb.setNullText("NULL").append((StringBuffer) null);
assertEquals("NULL", sb.toString());
sb = new StrBuilder();
sb.append(new StringBuffer("foo"));
assertEquals("foo", sb.toString());
sb.append(new StringBuffer(""));
assertEquals("foo", sb.toString());
sb.append(new StringBuffer("bar"));
assertEquals("foobar", sb.toString());
}
//-----------------------------------------------------------------------
@Test
public void testAppend_StringBuffer_int_int() {
StrBuilder sb = new StrBuilder();
sb.setNullText("NULL").append((StringBuffer) null, 0, 1);
assertEquals("NULL", sb.toString());
sb = new StrBuilder();
sb.append(new StringBuffer("foo"), 0, 3);
assertEquals("foo", sb.toString());
final StrBuilder sb1 = sb;
assertThrows(
IndexOutOfBoundsException.class,
() -> sb1.append(new StringBuffer("bar"), -1, 1),
"append(char[], -1,) expected IndexOutOfBoundsException");
assertThrows(
IndexOutOfBoundsException.class,
() -> sb1.append(new StringBuffer("bar"), 3, 1),
"append(char[], 3,) expected IndexOutOfBoundsException");
assertThrows(
IndexOutOfBoundsException.class,
() -> sb1.append(new StringBuffer("bar"), 1, -1),
"append(char[],, -1) expected IndexOutOfBoundsException");
assertThrows(
IndexOutOfBoundsException.class,
() -> sb1.append(new StringBuffer("bar"), 1, 3),
"append(char[], 1, 3) expected IndexOutOfBoundsException");
assertThrows(
IndexOutOfBoundsException.class,
() -> sb1.append(new StringBuffer("bar"), -1, 3),
"append(char[], -1, 3) expected IndexOutOfBoundsException");
assertThrows(
IndexOutOfBoundsException.class,
() -> sb1.append(new StringBuffer("bar"), 4, 0),
"append(char[], 4, 0) expected IndexOutOfBoundsException");
sb.append(new StringBuffer("bar"), 3, 0);
assertEquals("foo", sb.toString());
sb.append(new StringBuffer("abcbardef"), 3, 3);
assertEquals("foobar", sb.toString());
}
//-----------------------------------------------------------------------
@Test
public void testAppend_StrBuilder() {
StrBuilder sb = new StrBuilder();
sb.setNullText("NULL").append((StrBuilder) null);
assertEquals("NULL", sb.toString());
sb = new StrBuilder();
sb.append(new StrBuilder("foo"));
assertEquals("foo", sb.toString());
sb.append(new StrBuilder(""));
assertEquals("foo", sb.toString());
sb.append(new StrBuilder("bar"));
assertEquals("foobar", sb.toString());
}
//-----------------------------------------------------------------------
@Test
public void testAppend_StrBuilder_int_int() {
StrBuilder sb = new StrBuilder();
sb.setNullText("NULL").append((StrBuilder) null, 0, 1);
assertEquals("NULL", sb.toString());
sb = new StrBuilder();
sb.append(new StrBuilder("foo"), 0, 3);
assertEquals("foo", sb.toString());
final StrBuilder sb1 = sb;
assertThrows(
IndexOutOfBoundsException.class,
() -> sb1.append(new StrBuilder("bar"), -1, 1),
"append(char[], -1,) expected IndexOutOfBoundsException");
assertThrows(
IndexOutOfBoundsException.class,
() -> sb1.append(new StrBuilder("bar"), 3, 1),
"append(char[], 3,) expected IndexOutOfBoundsException");
assertThrows(
IndexOutOfBoundsException.class,
() -> sb1.append(new StrBuilder("bar"), 1, -1),
"append(char[],, -1) expected IndexOutOfBoundsException");
assertThrows(
IndexOutOfBoundsException.class,
() -> sb1.append(new StrBuilder("bar"), 1, 3),
"append(char[], 1, 3) expected IndexOutOfBoundsException");
assertThrows(
IndexOutOfBoundsException.class,
() -> sb1.append(new StrBuilder("bar"), -1, 3),
"append(char[], -1, 3) expected IndexOutOfBoundsException");
assertThrows(
IndexOutOfBoundsException.class,
() -> sb1.append(new StrBuilder("bar"), 4, 0),
"append(char[], 4, 0) expected IndexOutOfBoundsException");
sb.append(new StrBuilder("bar"), 3, 0);
assertEquals("foo", sb.toString());
sb.append(new StrBuilder("abcbardef"), 3, 3);
assertEquals("foobar", sb.toString());
}
//-----------------------------------------------------------------------
@Test
public void testAppend_CharArray() {
StrBuilder sb = new StrBuilder();
sb.setNullText("NULL").append((char[]) null);
assertEquals("NULL", sb.toString());
sb = new StrBuilder();
sb.append(new char[0]);
assertEquals("", sb.toString());
sb.append(new char[]{'f', 'o', 'o'});
assertEquals("foo", sb.toString());
}
//-----------------------------------------------------------------------
    /** Tests append(char[], int, int): null text handling, bounds checking and sub-array appends. */
    @Test
    public void testAppend_CharArray_int_int() {
        StrBuilder sb = new StrBuilder();
        sb.setNullText("NULL").append((char[]) null, 0, 1);
        assertEquals("NULL", sb.toString());
        sb = new StrBuilder();
        sb.append(new char[]{'f', 'o', 'o'}, 0, 3);
        assertEquals("foo", sb.toString());
        final StrBuilder sb1 = sb;
        // Invalid (startIndex, length) combinations must fail fast.
        assertThrows(
            IndexOutOfBoundsException.class,
            () -> sb1.append(new char[]{'b', 'a', 'r'}, -1, 1),
            "append(char[], -1,) expected IndexOutOfBoundsException");
        assertThrows(
            IndexOutOfBoundsException.class,
            () -> sb1.append(new char[]{'b', 'a', 'r'}, 3, 1),
            "append(char[], 3,) expected IndexOutOfBoundsException");
        assertThrows(
            IndexOutOfBoundsException.class,
            () -> sb1.append(new char[]{'b', 'a', 'r'}, 1, -1),
            "append(char[],, -1) expected IndexOutOfBoundsException");
        assertThrows(
            IndexOutOfBoundsException.class,
            () -> sb1.append(new char[]{'b', 'a', 'r'}, 1, 3),
            "append(char[], 1, 3) expected IndexOutOfBoundsException");
        assertThrows(
            IndexOutOfBoundsException.class,
            () -> sb1.append(new char[]{'b', 'a', 'r'}, -1, 3),
            "append(char[], -1, 3) expected IndexOutOfBoundsException");
        assertThrows(
            IndexOutOfBoundsException.class,
            () -> sb1.append(new char[]{'b', 'a', 'r'}, 4, 0),
            "append(char[], 4, 0) expected IndexOutOfBoundsException");
        sb.append(new char[]{'b', 'a', 'r'}, 3, 0); // zero length at the end index is a no-op
        assertEquals("foo", sb.toString());
        sb.append(new char[]{'a', 'b', 'c', 'b', 'a', 'r', 'd', 'e', 'f'}, 3, 3);
        assertEquals("foobar", sb.toString());
    }
//-----------------------------------------------------------------------
@Test
public void testAppend_Boolean() {
final StrBuilder sb = new StrBuilder();
sb.append(true);
assertEquals("true", sb.toString());
sb.append(false);
assertEquals("truefalse", sb.toString());
sb.append('!');
assertEquals("truefalse!", sb.toString());
}
//-----------------------------------------------------------------------
@Test
public void testAppend_PrimitiveNumber() {
final StrBuilder sb = new StrBuilder();
sb.append(0);
assertEquals("0", sb.toString());
sb.append(1L);
assertEquals("01", sb.toString());
sb.append(2.3f);
assertEquals("012.3", sb.toString());
sb.append(4.5d);
assertEquals("012.34.5", sb.toString());
}
//-----------------------------------------------------------------------
    /**
     * Verifies appendln(format, args) formats the string and then delegates to append(String)
     * and appendNewLine() (counted via an instrumented subclass).
     */
    @Test
    public void testAppendln_FormattedString() {
        final int[] count = new int[2];
        final StrBuilder sb = new StrBuilder() {
            private static final long serialVersionUID = 1L;
            @Override
            public StrBuilder append(final String str) {
                count[0]++;
                return super.append(str);
            }
            @Override
            public StrBuilder appendNewLine() {
                count[1]++;
                return super.appendNewLine();
            }
        };
        sb.appendln("Hello %s", "Alice");
        assertEquals("Hello Alice" + SEP, sb.toString());
        assertEquals(2, count[0]); // appendNewLine() calls append(String)
        assertEquals(1, count[1]);
    }
//-----------------------------------------------------------------------
@Test
public void testAppendln_Object() {
final StrBuilder sb = new StrBuilder();
sb.appendln((Object) null);
assertEquals("" + SEP, sb.toString());
sb.appendln(FOO);
assertEquals(SEP + "foo" + SEP, sb.toString());
sb.appendln(Integer.valueOf(6));
assertEquals(SEP + "foo" + SEP + "6" + SEP, sb.toString());
}
//-----------------------------------------------------------------------
    /**
     * Verifies appendln(String) delegates to append(String) plus appendNewLine()
     * (counted via an instrumented subclass).
     */
    @Test
    public void testAppendln_String() {
        final int[] count = new int[2];
        final StrBuilder sb = new StrBuilder() {
            private static final long serialVersionUID = 1L;
            @Override
            public StrBuilder append(final String str) {
                count[0]++;
                return super.append(str);
            }
            @Override
            public StrBuilder appendNewLine() {
                count[1]++;
                return super.appendNewLine();
            }
        };
        sb.appendln("foo");
        assertEquals("foo" + SEP, sb.toString());
        assertEquals(2, count[0]); // appendNewLine() calls append(String)
        assertEquals(1, count[1]);
    }
    //-----------------------------------------------------------------------
    /**
     * Verifies appendln(String, int, int) delegates to append(String, int, int) exactly once
     * plus one appendNewLine().
     */
    @Test
    public void testAppendln_String_int_int() {
        final int[] count = new int[2];
        final StrBuilder sb = new StrBuilder() {
            private static final long serialVersionUID = 1L;
            @Override
            public StrBuilder append(final String str, final int startIndex, final int length) {
                count[0]++;
                return super.append(str, startIndex, length);
            }
            @Override
            public StrBuilder appendNewLine() {
                count[1]++;
                return super.appendNewLine();
            }
        };
        sb.appendln("foo", 0, 3);
        assertEquals("foo" + SEP, sb.toString());
        assertEquals(1, count[0]);
        assertEquals(1, count[1]);
    }
//-----------------------------------------------------------------------
    /** Verifies appendln(StringBuffer) delegates to append(StringBuffer) plus appendNewLine(). */
    @Test
    public void testAppendln_StringBuffer() {
        final int[] count = new int[2];
        final StrBuilder sb = new StrBuilder() {
            private static final long serialVersionUID = 1L;
            @Override
            public StrBuilder append(final StringBuffer str) {
                count[0]++;
                return super.append(str);
            }
            @Override
            public StrBuilder appendNewLine() {
                count[1]++;
                return super.appendNewLine();
            }
        };
        sb.appendln(new StringBuffer("foo"));
        assertEquals("foo" + SEP, sb.toString());
        assertEquals(1, count[0]);
        assertEquals(1, count[1]);
    }
    //-----------------------------------------------------------------------
    /** Verifies appendln(StringBuilder) delegates to append(StringBuilder) plus appendNewLine(). */
    @Test
    public void testAppendln_StringBuilder() {
        final int[] count = new int[2];
        final StrBuilder sb = new StrBuilder() {
            private static final long serialVersionUID = 1L;
            @Override
            public StrBuilder append(final StringBuilder str) {
                count[0]++;
                return super.append(str);
            }
            @Override
            public StrBuilder appendNewLine() {
                count[1]++;
                return super.appendNewLine();
            }
        };
        sb.appendln(new StringBuilder("foo"));
        assertEquals("foo" + SEP, sb.toString());
        assertEquals(1, count[0]);
        assertEquals(1, count[1]);
    }
    //-----------------------------------------------------------------------
    /** Verifies appendln(StringBuffer, int, int) delegates to the indexed append plus appendNewLine(). */
    @Test
    public void testAppendln_StringBuffer_int_int() {
        final int[] count = new int[2];
        final StrBuilder sb = new StrBuilder() {
            private static final long serialVersionUID = 1L;
            @Override
            public StrBuilder append(final StringBuffer str, final int startIndex, final int length) {
                count[0]++;
                return super.append(str, startIndex, length);
            }
            @Override
            public StrBuilder appendNewLine() {
                count[1]++;
                return super.appendNewLine();
            }
        };
        sb.appendln(new StringBuffer("foo"), 0, 3);
        assertEquals("foo" + SEP, sb.toString());
        assertEquals(1, count[0]);
        assertEquals(1, count[1]);
    }
    //-----------------------------------------------------------------------
    /** Verifies appendln(StringBuilder, int, int) delegates to the indexed append plus appendNewLine(). */
    @Test
    public void testAppendln_StringBuilder_int_int() {
        final int[] count = new int[2];
        final StrBuilder sb = new StrBuilder() {
            private static final long serialVersionUID = 1L;
            @Override
            public StrBuilder append(final StringBuilder str, final int startIndex, final int length) {
                count[0]++;
                return super.append(str, startIndex, length);
            }
            @Override
            public StrBuilder appendNewLine() {
                count[1]++;
                return super.appendNewLine();
            }
        };
        sb.appendln(new StringBuilder("foo"), 0, 3);
        assertEquals("foo" + SEP, sb.toString());
        assertEquals(1, count[0]);
        assertEquals(1, count[1]);
    }
//-----------------------------------------------------------------------
    /** Verifies appendln(StrBuilder) delegates to append(StrBuilder) plus appendNewLine(). */
    @Test
    public void testAppendln_StrBuilder() {
        final int[] count = new int[2];
        final StrBuilder sb = new StrBuilder() {
            private static final long serialVersionUID = 1L;
            @Override
            public StrBuilder append(final StrBuilder str) {
                count[0]++;
                return super.append(str);
            }
            @Override
            public StrBuilder appendNewLine() {
                count[1]++;
                return super.appendNewLine();
            }
        };
        sb.appendln(new StrBuilder("foo"));
        assertEquals("foo" + SEP, sb.toString());
        assertEquals(1, count[0]);
        assertEquals(1, count[1]);
    }
    //-----------------------------------------------------------------------
    /** Verifies appendln(StrBuilder, int, int) delegates to the indexed append plus appendNewLine(). */
    @Test
    public void testAppendln_StrBuilder_int_int() {
        final int[] count = new int[2];
        final StrBuilder sb = new StrBuilder() {
            private static final long serialVersionUID = 1L;
            @Override
            public StrBuilder append(final StrBuilder str, final int startIndex, final int length) {
                count[0]++;
                return super.append(str, startIndex, length);
            }
            @Override
            public StrBuilder appendNewLine() {
                count[1]++;
                return super.appendNewLine();
            }
        };
        sb.appendln(new StrBuilder("foo"), 0, 3);
        assertEquals("foo" + SEP, sb.toString());
        assertEquals(1, count[0]);
        assertEquals(1, count[1]);
    }
    //-----------------------------------------------------------------------
    /** Verifies appendln(char[]) delegates to append(char[]) plus appendNewLine(). */
    @Test
    public void testAppendln_CharArray() {
        final int[] count = new int[2];
        final StrBuilder sb = new StrBuilder() {
            private static final long serialVersionUID = 1L;
            @Override
            public StrBuilder append(final char[] str) {
                count[0]++;
                return super.append(str);
            }
            @Override
            public StrBuilder appendNewLine() {
                count[1]++;
                return super.appendNewLine();
            }
        };
        sb.appendln("foo".toCharArray());
        assertEquals("foo" + SEP, sb.toString());
        assertEquals(1, count[0]);
        assertEquals(1, count[1]);
    }
    //-----------------------------------------------------------------------
    /** Verifies appendln(char[], int, int) delegates to the indexed append plus appendNewLine(). */
    @Test
    public void testAppendln_CharArray_int_int() {
        final int[] count = new int[2];
        final StrBuilder sb = new StrBuilder() {
            private static final long serialVersionUID = 1L;
            @Override
            public StrBuilder append(final char[] str, final int startIndex, final int length) {
                count[0]++;
                return super.append(str, startIndex, length);
            }
            @Override
            public StrBuilder appendNewLine() {
                count[1]++;
                return super.appendNewLine();
            }
        };
        sb.appendln("foo".toCharArray(), 0, 3);
        assertEquals("foo" + SEP, sb.toString());
        assertEquals(1, count[0]);
        assertEquals(1, count[1]);
    }
//-----------------------------------------------------------------------
@Test
public void testAppendln_Boolean() {
final StrBuilder sb = new StrBuilder();
sb.appendln(true);
assertEquals("true" + SEP, sb.toString());
sb.clear();
sb.appendln(false);
assertEquals("false" + SEP, sb.toString());
}
//-----------------------------------------------------------------------
@Test
public void testAppendln_PrimitiveNumber() {
final StrBuilder sb = new StrBuilder();
sb.appendln(0);
assertEquals("0" + SEP, sb.toString());
sb.clear();
sb.appendln(1L);
assertEquals("1" + SEP, sb.toString());
sb.clear();
sb.appendln(2.3f);
assertEquals("2.3" + SEP, sb.toString());
sb.clear();
sb.appendln(4.5d);
assertEquals("4.5" + SEP, sb.toString());
}
//-----------------------------------------------------------------------
@Test
public void testAppendPadding() {
final StrBuilder sb = new StrBuilder();
sb.append("foo");
assertEquals("foo", sb.toString());
sb.appendPadding(-1, '-');
assertEquals("foo", sb.toString());
sb.appendPadding(0, '-');
assertEquals("foo", sb.toString());
sb.appendPadding(1, '-');
assertEquals("foo-", sb.toString());
sb.appendPadding(16, '-');
assertEquals(20, sb.length());
// 12345678901234567890
assertEquals("foo-----------------", sb.toString());
}
//-----------------------------------------------------------------------
    /**
     * Tests appendFixedWidthPadLeft(String): widths shorter than the value keep the right-most
     * characters; longer widths pad on the left; null uses the null text.
     */
    @Test
    public void testAppendFixedWidthPadLeft() {
        final StrBuilder sb = new StrBuilder();
        sb.appendFixedWidthPadLeft("foo", -1, '-');
        assertEquals("", sb.toString());
        sb.clear();
        sb.appendFixedWidthPadLeft("foo", 0, '-');
        assertEquals("", sb.toString());
        sb.clear();
        sb.appendFixedWidthPadLeft("foo", 1, '-');
        assertEquals("o", sb.toString());
        sb.clear();
        sb.appendFixedWidthPadLeft("foo", 2, '-');
        assertEquals("oo", sb.toString());
        sb.clear();
        sb.appendFixedWidthPadLeft("foo", 3, '-');
        assertEquals("foo", sb.toString());
        sb.clear();
        sb.appendFixedWidthPadLeft("foo", 4, '-');
        assertEquals("-foo", sb.toString());
        sb.clear();
        sb.appendFixedWidthPadLeft("foo", 10, '-');
        assertEquals(10, sb.length());
        //           1234567890
        assertEquals("-------foo", sb.toString());
        sb.clear();
        sb.setNullText("null");
        sb.appendFixedWidthPadLeft(null, 5, '-');
        assertEquals("-null", sb.toString());
    }
//-----------------------------------------------------------------------
    /**
     * Tests appendFixedWidthPadLeft(int): widths shorter than the rendered value keep the
     * right-most digits; longer widths pad on the left.
     */
    @Test
    public void testAppendFixedWidthPadLeft_int() {
        final StrBuilder sb = new StrBuilder();
        sb.appendFixedWidthPadLeft(123, -1, '-');
        assertEquals("", sb.toString());
        sb.clear();
        sb.appendFixedWidthPadLeft(123, 0, '-');
        assertEquals("", sb.toString());
        sb.clear();
        sb.appendFixedWidthPadLeft(123, 1, '-');
        assertEquals("3", sb.toString());
        sb.clear();
        sb.appendFixedWidthPadLeft(123, 2, '-');
        assertEquals("23", sb.toString());
        sb.clear();
        sb.appendFixedWidthPadLeft(123, 3, '-');
        assertEquals("123", sb.toString());
        sb.clear();
        sb.appendFixedWidthPadLeft(123, 4, '-');
        assertEquals("-123", sb.toString());
        sb.clear();
        sb.appendFixedWidthPadLeft(123, 10, '-');
        assertEquals(10, sb.length());
        //           1234567890
        assertEquals("-------123", sb.toString());
    }
//-----------------------------------------------------------------------
@Test
public void testAppendFixedWidthPadRight() {
    final StrBuilder builder = new StrBuilder();
    // Non-positive widths append nothing at all.
    builder.appendFixedWidthPadRight("foo", -1, '-');
    assertEquals("", builder.toString());
    builder.clear();
    builder.appendFixedWidthPadRight("foo", 0, '-');
    assertEquals("", builder.toString());
    // Widths smaller than the value keep only the left-most characters.
    builder.clear();
    builder.appendFixedWidthPadRight("foo", 1, '-');
    assertEquals("f", builder.toString());
    builder.clear();
    builder.appendFixedWidthPadRight("foo", 2, '-');
    assertEquals("fo", builder.toString());
    builder.clear();
    builder.appendFixedWidthPadRight("foo", 3, '-');
    assertEquals("foo", builder.toString());
    // Widths larger than the value pad on the right.
    builder.clear();
    builder.appendFixedWidthPadRight("foo", 4, '-');
    assertEquals("foo-", builder.toString());
    builder.clear();
    builder.appendFixedWidthPadRight("foo", 10, '-');
    assertEquals(10, builder.length());
    assertEquals("foo-------", builder.toString());
    // A null value is rendered via the configured null text, then padded.
    builder.clear();
    builder.setNullText("null");
    builder.appendFixedWidthPadRight(null, 5, '-');
    assertEquals("null-", builder.toString());
}
// See: https://issues.apache.org/jira/browse/LANG-299
// Regression test: appendFixedWidthPadRight used to fail when the builder's
// internal buffer was smaller than the requested fixed width.
@Test
public void testLang299() {
    final StrBuilder sb = new StrBuilder(1);
    sb.appendFixedWidthPadRight("foo", 1, '-');
    assertEquals("f", sb.toString());
}
//-----------------------------------------------------------------------
@Test
public void testAppendFixedWidthPadRight_int() {
    final StrBuilder builder = new StrBuilder();
    // Non-positive widths append nothing.
    builder.appendFixedWidthPadRight(123, -1, '-');
    assertEquals("", builder.toString());
    builder.clear();
    builder.appendFixedWidthPadRight(123, 0, '-');
    assertEquals("", builder.toString());
    // Widths smaller than the rendered number keep only the left-most digits.
    builder.clear();
    builder.appendFixedWidthPadRight(123, 1, '-');
    assertEquals("1", builder.toString());
    builder.clear();
    builder.appendFixedWidthPadRight(123, 2, '-');
    assertEquals("12", builder.toString());
    builder.clear();
    builder.appendFixedWidthPadRight(123, 3, '-');
    assertEquals("123", builder.toString());
    // Larger widths pad on the right.
    builder.clear();
    builder.appendFixedWidthPadRight(123, 4, '-');
    assertEquals("123-", builder.toString());
    builder.clear();
    builder.appendFixedWidthPadRight(123, 10, '-');
    assertEquals(10, builder.length());
    assertEquals("123-------", builder.toString());
}
//-----------------------------------------------------------------------
@Test
public void testAppend_FormattedString() {
    // Null argument array: format string is appended as-is.
    StrBuilder builder = new StrBuilder();
    builder.append("Hi", (Object[]) null);
    assertEquals("Hi", builder.toString());
    // Format string without placeholders ignores the arguments.
    builder = new StrBuilder();
    builder.append("Hi", "Alice");
    assertEquals("Hi", builder.toString());
    // Simple %s substitution.
    builder = new StrBuilder();
    builder.append("Hi %s", "Alice");
    assertEquals("Hi Alice", builder.toString());
    // %,d uses the default locale's grouping separator, so compute the
    // expected value instead of hard-coding it.
    builder = new StrBuilder();
    builder.append("Hi %s %,d", "Alice", 5000);
    final char groupingSeparator = DecimalFormatSymbols.getInstance().getGroupingSeparator();
    assertEquals("Hi Alice 5" + groupingSeparator + "000", builder.toString());
}
//-----------------------------------------------------------------------
@Test
public void testAppendAll_Array() {
    final StrBuilder sb = new StrBuilder();
    // A null array appends nothing.
    sb.appendAll((Object[]) null);
    assertEquals("", sb.toString());
    // An empty varargs call appends nothing.
    sb.clear();
    sb.appendAll();
    assertEquals("", sb.toString());
    // Varargs invocation.
    sb.clear();
    sb.appendAll("foo", "bar", "baz");
    assertEquals("foobarbaz", sb.toString());
    // Explicit array invocation: exercises the array form of the overload
    // instead of repeating the identical varargs call, which added no coverage.
    sb.clear();
    sb.appendAll((Object[]) new String[]{"foo", "bar", "baz"});
    assertEquals("foobarbaz", sb.toString());
}
//-----------------------------------------------------------------------
@Test
public void testAppendAll_Collection() {
    final StrBuilder builder = new StrBuilder();
    // Null and empty collections append nothing.
    builder.appendAll((Collection<?>) null);
    assertEquals("", builder.toString());
    builder.clear();
    builder.appendAll(Collections.EMPTY_LIST);
    assertEquals("", builder.toString());
    // Elements are appended in iteration order with no separator.
    builder.clear();
    builder.appendAll(Arrays.asList("foo", "bar", "baz"));
    assertEquals("foobarbaz", builder.toString());
}
//-----------------------------------------------------------------------
@Test
public void testAppendAll_Iterator() {
    final StrBuilder builder = new StrBuilder();
    // Null and exhausted/empty iterators append nothing.
    builder.appendAll((Iterator<?>) null);
    assertEquals("", builder.toString());
    builder.clear();
    builder.appendAll(Collections.EMPTY_LIST.iterator());
    assertEquals("", builder.toString());
    // Elements are appended in iteration order with no separator.
    builder.clear();
    builder.appendAll(Arrays.asList("foo", "bar", "baz").iterator());
    assertEquals("foobarbaz", builder.toString());
}
//-----------------------------------------------------------------------
@Test
public void testAppendWithSeparators_Array() {
    final StrBuilder builder = new StrBuilder();
    // Null and empty arrays append nothing.
    builder.appendWithSeparators((Object[]) null, ",");
    assertEquals("", builder.toString());
    builder.clear();
    builder.appendWithSeparators(new Object[0], ",");
    assertEquals("", builder.toString());
    // The separator goes between elements only.
    builder.clear();
    builder.appendWithSeparators(new Object[]{"foo", "bar", "baz"}, ",");
    assertEquals("foo,bar,baz", builder.toString());
    // A null separator joins the elements directly.
    builder.clear();
    builder.appendWithSeparators(new Object[]{"foo", "bar", "baz"}, null);
    assertEquals("foobarbaz", builder.toString());
    // Null elements render as the empty string by default.
    builder.clear();
    builder.appendWithSeparators(new Object[]{"foo", null, "baz"}, ",");
    assertEquals("foo,,baz", builder.toString());
}
//-----------------------------------------------------------------------
@Test
public void testAppendWithSeparators_Collection() {
    final StrBuilder builder = new StrBuilder();
    // Null and empty collections append nothing.
    builder.appendWithSeparators((Collection<?>) null, ",");
    assertEquals("", builder.toString());
    builder.clear();
    builder.appendWithSeparators(Collections.EMPTY_LIST, ",");
    assertEquals("", builder.toString());
    // The separator goes between elements only.
    builder.clear();
    builder.appendWithSeparators(Arrays.asList("foo", "bar", "baz"), ",");
    assertEquals("foo,bar,baz", builder.toString());
    // A null separator joins the elements directly.
    builder.clear();
    builder.appendWithSeparators(Arrays.asList("foo", "bar", "baz"), null);
    assertEquals("foobarbaz", builder.toString());
    // Null elements render as the empty string by default.
    builder.clear();
    builder.appendWithSeparators(Arrays.asList("foo", null, "baz"), ",");
    assertEquals("foo,,baz", builder.toString());
}
//-----------------------------------------------------------------------
@Test
public void testAppendWithSeparators_Iterator() {
    final StrBuilder builder = new StrBuilder();
    // Null and empty iterators append nothing.
    builder.appendWithSeparators((Iterator<?>) null, ",");
    assertEquals("", builder.toString());
    builder.clear();
    builder.appendWithSeparators(Collections.EMPTY_LIST.iterator(), ",");
    assertEquals("", builder.toString());
    // The separator goes between elements only.
    builder.clear();
    builder.appendWithSeparators(Arrays.asList("foo", "bar", "baz").iterator(), ",");
    assertEquals("foo,bar,baz", builder.toString());
    // A null separator joins the elements directly.
    builder.clear();
    builder.appendWithSeparators(Arrays.asList("foo", "bar", "baz").iterator(), null);
    assertEquals("foobarbaz", builder.toString());
    // Null elements render as the empty string by default.
    builder.clear();
    builder.appendWithSeparators(Arrays.asList("foo", null, "baz").iterator(), ",");
    assertEquals("foo,,baz", builder.toString());
}
//-----------------------------------------------------------------------
@Test
public void testAppendWithSeparatorsWithNullText() {
    final StrBuilder builder = new StrBuilder();
    builder.setNullText("null");
    // Null elements are rendered using the configured null text,
    // for both the array and the collection overloads.
    builder.appendWithSeparators(new Object[]{"foo", null, "baz"}, ",");
    assertEquals("foo,null,baz", builder.toString());
    builder.clear();
    builder.appendWithSeparators(Arrays.asList("foo", null, "baz"), ",");
    assertEquals("foo,null,baz", builder.toString());
}
//-----------------------------------------------------------------------
@Test
public void testAppendSeparator_String() {
    final StrBuilder builder = new StrBuilder();
    // The separator is suppressed while the builder is empty.
    builder.appendSeparator(",");
    assertEquals("", builder.toString());
    builder.append("foo");
    assertEquals("foo", builder.toString());
    // Once the builder has content, the separator is appended.
    builder.appendSeparator(",");
    assertEquals("foo,", builder.toString());
}
//-----------------------------------------------------------------------
@Test
public void testAppendSeparator_String_String() {
    final StrBuilder builder = new StrBuilder();
    final String startSeparator = "order by ";
    final String standardSeparator = ",";
    final String foo = "foo";
    // Both separators null: nothing is appended.
    builder.appendSeparator(null, null);
    assertEquals("", builder.toString());
    // No start separator: nothing is appended while the builder is empty.
    builder.appendSeparator(standardSeparator, null);
    assertEquals("", builder.toString());
    // The start separator is used on an empty builder.
    builder.appendSeparator(standardSeparator, startSeparator);
    assertEquals(startSeparator, builder.toString());
    // A null standard separator appends nothing to a non-empty builder.
    builder.appendSeparator(null, null);
    assertEquals(startSeparator, builder.toString());
    builder.appendSeparator(null, startSeparator);
    assertEquals(startSeparator, builder.toString());
    builder.append(foo);
    assertEquals(startSeparator + foo, builder.toString());
    // With content present, the standard separator is used.
    builder.appendSeparator(standardSeparator, startSeparator);
    assertEquals(startSeparator + foo + standardSeparator, builder.toString());
}
//-----------------------------------------------------------------------
@Test
public void testAppendSeparator_char() {
    final StrBuilder builder = new StrBuilder();
    // The separator is suppressed while the builder is empty.
    builder.appendSeparator(',');
    assertEquals("", builder.toString());
    builder.append("foo");
    assertEquals("foo", builder.toString());
    // Once the builder has content, the separator is appended.
    builder.appendSeparator(',');
    assertEquals("foo,", builder.toString());
}
@Test
public void testAppendSeparator_char_char() {
    final StrBuilder sb = new StrBuilder();
    final char startSeparator = ':';
    final char standardSeparator = ',';
    final String foo = "foo";
    // On an empty builder the START separator is appended (the standard one
    // would only be used once the builder has content).
    sb.appendSeparator(standardSeparator, startSeparator);
    assertEquals(String.valueOf(startSeparator), sb.toString());
    sb.append(foo);
    assertEquals(String.valueOf(startSeparator) + foo, sb.toString());
    // With content present, the standard separator is used instead.
    sb.appendSeparator(standardSeparator, startSeparator);
    assertEquals(String.valueOf(startSeparator) + foo + standardSeparator, sb.toString());
}
//-----------------------------------------------------------------------
@Test
public void testAppendSeparator_String_int() {
    final StrBuilder builder = new StrBuilder();
    // Loop index 0 suppresses the separator.
    builder.appendSeparator(",", 0);
    assertEquals("", builder.toString());
    builder.append("foo");
    assertEquals("foo", builder.toString());
    // A positive loop index appends the separator.
    builder.appendSeparator(",", 1);
    assertEquals("foo,", builder.toString());
    // A negative loop index is ignored.
    builder.appendSeparator(",", -1);
    assertEquals("foo,", builder.toString());
}
//-----------------------------------------------------------------------
@Test
public void testAppendSeparator_char_int() {
    final StrBuilder builder = new StrBuilder();
    // Loop index 0 suppresses the separator.
    builder.appendSeparator(',', 0);
    assertEquals("", builder.toString());
    builder.append("foo");
    assertEquals("foo", builder.toString());
    // A positive loop index appends the separator.
    builder.appendSeparator(',', 1);
    assertEquals("foo,", builder.toString());
    // A negative loop index is ignored.
    builder.appendSeparator(',', -1);
    assertEquals("foo,", builder.toString());
}
//-----------------------------------------------------------------------
@Test
public void testInsert() {
    final StrBuilder sb = new StrBuilder();
    sb.append("barbaz");
    assertEquals("barbaz", sb.toString());
    // insert(int, Object): bounds checks, then null handling and a real insert.
    assertThrows(
            IndexOutOfBoundsException.class,
            () -> sb.insert(-1, FOO),
            "insert(-1, Object) expected StringIndexOutOfBoundsException");
    assertThrows(
            IndexOutOfBoundsException.class,
            () -> sb.insert(7, FOO),
            "insert(7, Object) expected StringIndexOutOfBoundsException");
    sb.insert(0, (Object) null);
    assertEquals("barbaz", sb.toString());
    sb.insert(0, FOO);
    assertEquals("foobarbaz", sb.toString());
    // insert(int, String)
    sb.clear();
    sb.append("barbaz");
    assertEquals("barbaz", sb.toString());
    assertThrows(
            IndexOutOfBoundsException.class,
            () -> sb.insert(-1, "foo"),
            "insert(-1, String) expected StringIndexOutOfBoundsException");
    assertThrows(
            IndexOutOfBoundsException.class,
            () -> sb.insert(7, "foo"),
            "insert(7, String) expected StringIndexOutOfBoundsException");
    sb.insert(0, (String) null);
    assertEquals("barbaz", sb.toString());
    sb.insert(0, "foo");
    assertEquals("foobarbaz", sb.toString());
    // insert(int, char[])
    sb.clear();
    sb.append("barbaz");
    assertEquals("barbaz", sb.toString());
    assertThrows(
            IndexOutOfBoundsException.class,
            () -> sb.insert(-1, new char[]{'f', 'o', 'o'}),
            "insert(-1, char[]) expected StringIndexOutOfBoundsException");
    assertThrows(
            IndexOutOfBoundsException.class,
            () -> sb.insert(7, new char[]{'f', 'o', 'o'}),
            "insert(7, char[]) expected StringIndexOutOfBoundsException");
    sb.insert(0, (char[]) null);
    assertEquals("barbaz", sb.toString());
    sb.insert(0, new char[0]);
    assertEquals("barbaz", sb.toString());
    sb.insert(0, new char[]{'f', 'o', 'o'});
    assertEquals("foobarbaz", sb.toString());
    // insert(int, char[], int, int): insert index, offset and length are all checked.
    sb.clear();
    sb.append("barbaz");
    assertEquals("barbaz", sb.toString());
    assertThrows(
            IndexOutOfBoundsException.class,
            () -> sb.insert(-1, new char[]{'a', 'b', 'c', 'f', 'o', 'o', 'd', 'e', 'f'}, 3, 3),
            "insert(-1, char[], 3, 3) expected StringIndexOutOfBoundsException");
    assertThrows(
            IndexOutOfBoundsException.class,
            () -> sb.insert(7, new char[]{'a', 'b', 'c', 'f', 'o', 'o', 'd', 'e', 'f'}, 3, 3),
            "insert(7, char[], 3, 3) expected StringIndexOutOfBoundsException");
    sb.insert(0, null, 0, 0);
    assertEquals("barbaz", sb.toString());
    sb.insert(0, new char[0], 0, 0);
    assertEquals("barbaz", sb.toString());
    assertThrows(
            IndexOutOfBoundsException.class,
            () -> sb.insert(0, new char[]{'a', 'b', 'c', 'f', 'o', 'o', 'd', 'e', 'f'}, -1, 3),
            "insert(0, char[], -1, 3) expected StringIndexOutOfBoundsException");
    assertThrows(
            IndexOutOfBoundsException.class,
            () -> sb.insert(0, new char[]{'a', 'b', 'c', 'f', 'o', 'o', 'd', 'e', 'f'}, 10, 3),
            "insert(0, char[], 10, 3) expected StringIndexOutOfBoundsException");
    assertThrows(
            IndexOutOfBoundsException.class,
            () -> sb.insert(0, new char[]{'a', 'b', 'c', 'f', 'o', 'o', 'd', 'e', 'f'}, 0, -1),
            "insert(0, char[], 0, -1) expected StringIndexOutOfBoundsException");
    assertThrows(
            IndexOutOfBoundsException.class,
            () -> sb.insert(0, new char[]{'a', 'b', 'c', 'f', 'o', 'o', 'd', 'e', 'f'}, 0, 10),
            "insert(0, char[], 0, 10) expected StringIndexOutOfBoundsException");
    sb.insert(0, new char[]{'a', 'b', 'c', 'f', 'o', 'o', 'd', 'e', 'f'}, 0, 0);
    assertEquals("barbaz", sb.toString());
    sb.insert(0, new char[]{'a', 'b', 'c', 'f', 'o', 'o', 'd', 'e', 'f'}, 3, 3);
    assertEquals("foobarbaz", sb.toString());
    // insert(int, boolean)
    sb.clear();
    sb.append("barbaz");
    assertEquals("barbaz", sb.toString());
    assertThrows(
            IndexOutOfBoundsException.class,
            () -> sb.insert(-1, true),
            "insert(-1, boolean) expected StringIndexOutOfBoundsException");
    assertThrows(
            IndexOutOfBoundsException.class,
            () -> sb.insert(7, true),
            "insert(7, boolean) expected StringIndexOutOfBoundsException");
    sb.insert(0, true);
    assertEquals("truebarbaz", sb.toString());
    sb.insert(0, false);
    assertEquals("falsetruebarbaz", sb.toString());
    // insert(int, char)
    sb.clear();
    sb.append("barbaz");
    assertEquals("barbaz", sb.toString());
    assertThrows(
            IndexOutOfBoundsException.class,
            () -> sb.insert(-1, '!'),
            "insert(-1, char) expected StringIndexOutOfBoundsException");
    assertThrows(
            IndexOutOfBoundsException.class,
            () -> sb.insert(7, '!'),
            "insert(7, char) expected StringIndexOutOfBoundsException");
    sb.insert(0, '!');
    assertEquals("!barbaz", sb.toString());
    // insert(int, int)
    sb.clear();
    sb.append("barbaz");
    assertEquals("barbaz", sb.toString());
    assertThrows(
            IndexOutOfBoundsException.class,
            () -> sb.insert(-1, 0),
            "insert(-1, int) expected StringIndexOutOfBoundsException");
    assertThrows(
            IndexOutOfBoundsException.class,
            () -> sb.insert(7, 0),
            "insert(7, int) expected StringIndexOutOfBoundsException");
    // Use the int overload here (was the char literal '0', which exercised
    // insert(int, char) again and left the int path untested on success).
    sb.insert(0, 0);
    assertEquals("0barbaz", sb.toString());
    // insert(int, long)
    sb.clear();
    sb.append("barbaz");
    assertEquals("barbaz", sb.toString());
    assertThrows(
            IndexOutOfBoundsException.class,
            () -> sb.insert(-1, 1L),
            "insert(-1, long) expected StringIndexOutOfBoundsException");
    assertThrows(
            IndexOutOfBoundsException.class,
            () -> sb.insert(7, 1L),
            "insert(7, long) expected StringIndexOutOfBoundsException");
    sb.insert(0, 1L);
    assertEquals("1barbaz", sb.toString());
    // insert(int, float)
    sb.clear();
    sb.append("barbaz");
    assertEquals("barbaz", sb.toString());
    assertThrows(
            IndexOutOfBoundsException.class,
            () -> sb.insert(-1, 2.3F),
            "insert(-1, float) expected StringIndexOutOfBoundsException");
    assertThrows(
            IndexOutOfBoundsException.class,
            () -> sb.insert(7, 2.3F),
            "insert(7, float) expected StringIndexOutOfBoundsException");
    sb.insert(0, 2.3F);
    assertEquals("2.3barbaz", sb.toString());
    // insert(int, double)
    sb.clear();
    sb.append("barbaz");
    assertEquals("barbaz", sb.toString());
    assertThrows(
            IndexOutOfBoundsException.class,
            () -> sb.insert(-1, 4.5D),
            "insert(-1, double) expected StringIndexOutOfBoundsException");
    assertThrows(
            IndexOutOfBoundsException.class,
            () -> sb.insert(7, 4.5D),
            "insert(7, double) expected StringIndexOutOfBoundsException");
    sb.insert(0, 4.5D);
    assertEquals("4.5barbaz", sb.toString());
}
//-----------------------------------------------------------------------
@Test
public void testInsertWithNullText() {
    final StrBuilder builder = new StrBuilder();
    builder.setNullText("null");
    builder.append("barbaz");
    assertEquals("barbaz", builder.toString());
    // insert(int, Object): bounds are still checked, null renders as "null".
    assertThrows(
            IndexOutOfBoundsException.class,
            () -> builder.insert(-1, FOO),
            "insert(-1, Object) expected StringIndexOutOfBoundsException");
    assertThrows(
            IndexOutOfBoundsException.class,
            () -> builder.insert(7, FOO),
            "insert(7, Object) expected StringIndexOutOfBoundsException");
    builder.insert(0, (Object) null);
    assertEquals("nullbarbaz", builder.toString());
    builder.insert(0, FOO);
    assertEquals("foonullbarbaz", builder.toString());
    // insert(int, String): same behavior for a null String.
    builder.clear();
    builder.append("barbaz");
    assertEquals("barbaz", builder.toString());
    assertThrows(
            IndexOutOfBoundsException.class,
            () -> builder.insert(-1, "foo"),
            "insert(-1, String) expected StringIndexOutOfBoundsException");
    assertThrows(
            IndexOutOfBoundsException.class,
            () -> builder.insert(7, "foo"),
            "insert(7, String) expected StringIndexOutOfBoundsException");
    builder.insert(0, (String) null);
    assertEquals("nullbarbaz", builder.toString());
    builder.insert(0, "foo");
    assertEquals("foonullbarbaz", builder.toString());
    // Null char arrays also render as the null text.
    builder.insert(0, (char[]) null);
    assertEquals("nullfoonullbarbaz", builder.toString());
    builder.insert(0, null, 0, 0);
    assertEquals("nullnullfoonullbarbaz", builder.toString());
}
}
| |
/**
* Copyright (C) 2014-2016 LinkedIn Corp. (pinot-core@linkedin.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.linkedin.pinot.core.operator;
import com.linkedin.pinot.common.exception.QueryException;
import com.linkedin.pinot.common.request.BrokerRequest;
import com.linkedin.pinot.core.common.Block;
import com.linkedin.pinot.core.common.BlockId;
import com.linkedin.pinot.core.common.Operator;
import com.linkedin.pinot.core.operator.blocks.IntermediateResultsBlock;
import com.linkedin.pinot.core.query.reduce.CombineService;
import com.linkedin.pinot.core.util.trace.TraceCallable;
import com.linkedin.pinot.core.util.trace.TraceRunnable;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The <code>MCombineOperator</code> class is the operator to combine selection results and aggregation only results.
*/
public class MCombineOperator extends BaseOperator {
  private static final Logger LOGGER = LoggerFactory.getLogger(MCombineOperator.class);
  private static final String OPERATOR_NAME = "MCombineOperator";

  // Child operators to combine, typically one per segment.
  private final List<Operator> _operators;
  private final BrokerRequest _brokerRequest;
  private final ExecutorService _executorService;
  // Total time budget for the query; bounds every blocking wait below.
  private final long _timeOutMs;

  // TODO: Make this configurable
  // These two control the parallelism on a per query basis, depending on the number of segments to process
  private static final int MIN_THREADS_PER_QUERY;
  private static final int MAX_THREADS_PER_QUERY;
  private static final int MIN_SEGMENTS_PER_THREAD = 10;

  static {
    int numCores = Runtime.getRuntime().availableProcessors();
    MIN_THREADS_PER_QUERY = Math.max(1, (int) (numCores * .5));
    // Don't have more than 10 threads per query, but never let the maximum drop below the
    // minimum: on a single-core machine (int) (numCores * .5) is 0, which previously made
    // MAX_THREADS_PER_QUERY zero (i.e. smaller than MIN_THREADS_PER_QUERY).
    MAX_THREADS_PER_QUERY = Math.max(MIN_THREADS_PER_QUERY, Math.min(10, (int) (numCores * .5)));
  }

  /**
   * @param operators operators to combine, one per segment
   * @param executorService executor used to run segment groups and the merger in parallel
   * @param timeOutMs query timeout in milliseconds
   * @param brokerRequest the broker request being served
   */
  public MCombineOperator(List<Operator> operators, ExecutorService executorService, long timeOutMs,
      BrokerRequest brokerRequest) {
    _operators = operators;
    _executorService = executorService;
    _brokerRequest = brokerRequest;
    _timeOutMs = timeOutMs;
  }

  @Override
  public boolean open() {
    for (Operator op : _operators) {
      op.open();
    }
    return true;
  }

  /**
   * Splits the child operators into groups, runs each group on the executor, merges the
   * per-group results in a separate merger task, and returns the combined block (with the
   * merged execution statistics attached). Any failure or timeout is converted into an
   * {@link IntermediateResultsBlock} carrying the corresponding {@link QueryException}.
   */
  @Override
  public Block getNextBlock() {
    final long startTime = System.currentTimeMillis();
    final long queryEndTime = System.currentTimeMillis() + _timeOutMs;
    final int numOperators = _operators.size();
    // Ensure that the number of groups is not more than the number of segments
    final int numGroups = Math.min(numOperators, Math.max(MIN_THREADS_PER_QUERY,
        Math.min(MAX_THREADS_PER_QUERY, (numOperators + MIN_SEGMENTS_PER_THREAD - 1) / MIN_SEGMENTS_PER_THREAD)));
    final List<List<Operator>> operatorGroups = new ArrayList<>(numGroups);
    for (int i = 0; i < numGroups; i++) {
      operatorGroups.add(new ArrayList<Operator>());
    }
    // Round-robin assignment of operators to groups.
    for (int i = 0; i < numOperators; i++) {
      operatorGroups.get(i % numGroups).add(_operators.get(i));
    }
    // Each group offers exactly one block, so the queue capacity never blocks the producers.
    final BlockingQueue<Block> blockingQueue = new ArrayBlockingQueue<>(numGroups);
    // Submit operators.
    for (final List<Operator> operatorGroup : operatorGroups) {
      _executorService.submit(new TraceRunnable() {
        @Override
        public void runJob() {
          IntermediateResultsBlock mergedBlock = null;
          try {
            for (Operator operator : operatorGroup) {
              IntermediateResultsBlock blockToMerge = (IntermediateResultsBlock) operator.nextBlock();
              if (mergedBlock == null) {
                mergedBlock = blockToMerge;
              } else {
                try {
                  CombineService.mergeTwoBlocks(_brokerRequest, mergedBlock, blockToMerge);
                } catch (Exception e) {
                  LOGGER.error("Caught exception while merging two blocks (step 1).", e);
                  mergedBlock.addToProcessingExceptions(
                      QueryException.getException(QueryException.MERGE_RESPONSE_ERROR, e));
                }
              }
            }
          } catch (Exception e) {
            LOGGER.error("Caught exception while executing query.", e);
            mergedBlock = new IntermediateResultsBlock(e);
          }
          assert mergedBlock != null;
          blockingQueue.offer(mergedBlock);
        }
      });
    }
    LOGGER.debug("Submitting operators to be run in parallel and it took:" + (System.currentTimeMillis() - startTime));
    // Submit merger job:
    Future<IntermediateResultsBlock> mergedBlockFuture =
        _executorService.submit(new TraceCallable<IntermediateResultsBlock>() {
          @Override
          public IntermediateResultsBlock callJob()
              throws Exception {
            int mergedBlocksNumber = 0;
            IntermediateResultsBlock mergedBlock = null;
            while (mergedBlocksNumber < numGroups) {
              if (mergedBlock == null) {
                mergedBlock = (IntermediateResultsBlock) blockingQueue.poll(queryEndTime - System.currentTimeMillis(),
                    TimeUnit.MILLISECONDS);
                if (mergedBlock != null) {
                  mergedBlocksNumber++;
                }
                LOGGER.debug("Got response from operator 0 after: {}", (System.currentTimeMillis() - startTime));
              } else {
                IntermediateResultsBlock blockToMerge =
                    (IntermediateResultsBlock) blockingQueue.poll(queryEndTime - System.currentTimeMillis(),
                        TimeUnit.MILLISECONDS);
                if (blockToMerge != null) {
                  try {
                    LOGGER.debug("Got response from operator {} after: {}", mergedBlocksNumber,
                        (System.currentTimeMillis() - startTime));
                    CombineService.mergeTwoBlocks(_brokerRequest, mergedBlock, blockToMerge);
                    LOGGER.debug("Merged response from operator {} after: {}", mergedBlocksNumber,
                        (System.currentTimeMillis() - startTime));
                  } catch (Exception e) {
                    LOGGER.error("Caught exception while merging two blocks (step 2).", e);
                    mergedBlock.addToProcessingExceptions(
                        QueryException.getException(QueryException.MERGE_RESPONSE_ERROR, e));
                  }
                  mergedBlocksNumber++;
                }
              }
            }
            return mergedBlock;
          }
        });
    // Get merge results; on timeout, cancel the merger so it does not keep polling.
    IntermediateResultsBlock mergedBlock;
    try {
      mergedBlock = mergedBlockFuture.get(queryEndTime - System.currentTimeMillis(), TimeUnit.MILLISECONDS);
    } catch (InterruptedException e) {
      LOGGER.error("Caught InterruptedException.", e);
      mergedBlock = new IntermediateResultsBlock(QueryException.getException(QueryException.FUTURE_CALL_ERROR, e));
    } catch (ExecutionException e) {
      LOGGER.error("Caught ExecutionException.", e);
      mergedBlock = new IntermediateResultsBlock(QueryException.getException(QueryException.MERGE_RESPONSE_ERROR, e));
    } catch (TimeoutException e) {
      LOGGER.error("Caught TimeoutException", e);
      mergedBlockFuture.cancel(true);
      mergedBlock =
          new IntermediateResultsBlock(QueryException.getException(QueryException.EXECUTION_TIMEOUT_ERROR, e));
    }
    // Update execution statistics.
    ExecutionStatistics executionStatistics = new ExecutionStatistics();
    for (Operator operator : _operators) {
      ExecutionStatistics executionStatisticsToMerge = operator.getExecutionStatistics();
      if (executionStatisticsToMerge != null) {
        executionStatistics.merge(executionStatisticsToMerge);
      }
    }
    mergedBlock.setNumDocsScanned(executionStatistics.getNumDocsScanned());
    mergedBlock.setNumEntriesScannedInFilter(executionStatistics.getNumEntriesScannedInFilter());
    mergedBlock.setNumEntriesScannedPostFilter(executionStatistics.getNumEntriesScannedPostFilter());
    mergedBlock.setNumTotalRawDocs(executionStatistics.getNumTotalRawDocs());
    return mergedBlock;
  }

  @Override
  public Block getNextBlock(BlockId blockId) {
    throw new UnsupportedOperationException();
  }

  @Override
  public String getOperatorName() {
    return OPERATOR_NAME;
  }

  @Override
  public boolean close() {
    for (Operator op : _operators) {
      op.close();
    }
    return true;
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.test.karaf;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.PrintStream;
import java.net.URL;
import java.security.Principal;
import java.security.PrivilegedExceptionAction;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Dictionary;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.FutureTask;
import java.util.concurrent.TimeUnit;
import javax.inject.Inject;
import javax.management.remote.JMXConnector;
import javax.management.remote.JMXConnectorFactory;
import javax.management.remote.JMXServiceURL;
import javax.security.auth.Subject;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.apache.camel.util.ObjectHelper;
import org.apache.felix.service.command.CommandProcessor;
import org.apache.felix.service.command.CommandSession;
import org.apache.karaf.features.Feature;
import org.apache.karaf.features.FeaturesService;
import org.ops4j.pax.exam.Option;
import org.ops4j.pax.exam.ProbeBuilder;
import org.ops4j.pax.exam.TestProbeBuilder;
import org.osgi.framework.BundleContext;
import org.osgi.framework.Constants;
import org.osgi.framework.Filter;
import org.osgi.framework.FrameworkUtil;
import org.osgi.framework.InvalidSyntaxException;
import org.osgi.framework.ServiceReference;
import org.osgi.util.tracker.ServiceTracker;
public class CamelKarafTestSupport extends CamelTestSupport {
// Maximum time (ms) to wait for a shell command to complete.
static final Long COMMAND_TIMEOUT = 30000L;
// Maximum time (ms) to wait for an OSGi service to become available.
static final Long SERVICE_TIMEOUT = 30000L;
// Executor used to run shell commands so they can be bounded by COMMAND_TIMEOUT.
protected ExecutorService executor = Executors.newCachedThreadPool();
@Inject
protected BundleContext bundleContext;
@Inject
protected FeaturesService featuresService;
@ProbeBuilder
public TestProbeBuilder probeConfiguration(TestProbeBuilder probe) {
    // Make the test probe dynamically import everything, including the
    // provisional Felix Gogo shell packages, so shell commands can be
    // executed from the tests.
    probe.setHeader(Constants.DYNAMICIMPORT_PACKAGE, "*,org.apache.felix.service.*;status=provisional");
    return probe;
}
/**
 * Resolves a classpath resource to a {@link File}.
 *
 * @param path the classpath-relative resource path
 * @return the resource as a file
 * @throws RuntimeException if the resource does not exist
 */
public File getConfigFile(String path) {
    URL res = this.getClass().getResource(path);
    if (res == null) {
        throw new RuntimeException("Config resource " + path + " not found");
    }
    try {
        // URL.getFile() keeps percent-encoding (e.g. %20 for spaces), which
        // produces a broken File path; convert through the URI instead.
        return new File(res.toURI());
    } catch (java.net.URISyntaxException e) {
        // Fall back to the raw path if the URL is not a strictly valid URI.
        return new File(res.getFile());
    }
}
/**
 * Default Pax Exam container configuration, delegating to
 * {@code AbstractFeatureTest.configure(String...)}.
 *
 * @param extra additional features/options to install in the container
 */
public static Option[] configure(String... extra) {
    return AbstractFeatureTest.configure(extra);
}
/**
 * Executes a shell command and returns output as a String.
 * Commands have a default timeout of {@code COMMAND_TIMEOUT} milliseconds
 * (30 seconds).
 *
 * @param command The command to execute
 * @param principals The principals (e.g. RolePrincipal objects) to run the command under
 */
protected String executeCommand(final String command, Principal... principals) {
    return executeCommand(command, COMMAND_TIMEOUT, false, principals);
}
/**
 * Executes a shell command and returns output as a String.
 *
 * @param command The command to execute.
 * @param timeout The amount of time in millis to wait for the command to execute.
 * @param silent Specifies if the command should be displayed in the screen.
 * @param principals The principals (e.g. RolePrincipal objects) to run the command under
 */
protected String executeCommand(final String command, final Long timeout, final Boolean silent, final Principal... principals) {
    waitForCommandService(command);
    String response;
    final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    final PrintStream printStream = new PrintStream(byteArrayOutputStream);
    final Callable<String> commandCallable = new Callable<String>() {
        @Override
        public String call() throws Exception {
            try {
                if (!silent) {
                    System.err.println(command);
                }
                final CommandProcessor commandProcessor = getOsgiService(CommandProcessor.class);
                final CommandSession commandSession = commandProcessor.createSession(System.in, printStream, System.err);
                try {
                    commandSession.execute(command);
                } finally {
                    // Release the session; it was previously leaked on every call.
                    commandSession.close();
                }
            } catch (Exception e) {
                throw new RuntimeException(e.getMessage(), e);
            }
            printStream.flush();
            return byteArrayOutputStream.toString();
        }
    };
    FutureTask<String> commandFuture;
    if (principals.length == 0) {
        commandFuture = new FutureTask<String>(commandCallable);
    } else {
        // If principals are defined, run the command callable via Subject.doAs()
        commandFuture = new FutureTask<String>(new Callable<String>() {
            @Override
            public String call() throws Exception {
                Subject subject = new Subject();
                subject.getPrincipals().addAll(Arrays.asList(principals));
                return Subject.doAs(subject, new PrivilegedExceptionAction<String>() {
                    @Override
                    public String run() throws Exception {
                        return commandCallable.call();
                    }
                });
            }
        });
    }
    try {
        executor.submit(commandFuture);
        response = commandFuture.get(timeout, TimeUnit.MILLISECONDS);
    } catch (Exception e) {
        // NOTE: this catch covers any failure, not only timeouts, but the
        // returned marker string is kept for backward compatibility.
        e.printStackTrace(System.err);
        response = "SHELL COMMAND TIMED OUT: ";
    }
    return response;
}
private void waitForCommandService(String command) {
    // Commands are registered as OSGi services and, services being
    // asynchronous, may not be available immediately. Block until the
    // command's service shows up in its secured form, i.e. with its
    // roles defined.
    if (command == null || command.length() == 0) {
        return;
    }
    // Strip any arguments, keeping only the command name itself.
    String name = command;
    final int spaceIndex = name.indexOf(' ');
    if (spaceIndex > 0) {
        name = name.substring(0, spaceIndex);
    }
    final int colonIndex = name.indexOf(':');
    try {
        if (colonIndex > 0) {
            // "scope:function" form.
            final String scope = name.substring(0, colonIndex);
            final String function = name.substring(colonIndex + 1);
            waitForService("(&(osgi.command.scope=" + scope + ")(osgi.command.function=" + function + "))", SERVICE_TIMEOUT);
        } else {
            waitForService("(osgi.command.function=" + name + ")", SERVICE_TIMEOUT);
        }
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
@SuppressWarnings("unchecked")
private void waitForService(String filter, long timeout) throws InvalidSyntaxException,
        InterruptedException {
    // Track the service matching the filter and block until it appears (or
    // the timeout elapses); the tracker is always closed afterwards.
    final ServiceTracker tracker = new ServiceTracker(bundleContext,
            bundleContext.createFilter(filter),
            null);
    try {
        tracker.open();
        tracker.waitForService(timeout);
    } finally {
        tracker.close();
    }
}
// Looks up an OSGi service of the given type with no extra filter,
// waiting up to the given timeout.
protected <T> T getOsgiService(Class<T> type, long timeout) {
    return getOsgiService(type, null, timeout);
}
// Looks up an OSGi service of the given type, waiting up to SERVICE_TIMEOUT.
protected <T> T getOsgiService(Class<T> type) {
    return getOsgiService(type, null, SERVICE_TIMEOUT);
}
/**
 * Looks up an OSGi service of the given type, optionally narrowed by an
 * additional LDAP filter, waiting up to the given timeout.
 *
 * @param type the service interface to look up
 * @param filter extra LDAP filter (with or without surrounding parentheses), may be null
 * @param timeout maximum time in millis to wait for the service
 * @return the service instance
 * @throws IllegalArgumentException if the resulting filter is syntactically invalid
 * @throws RuntimeException if the service does not appear within the timeout
 */
@SuppressWarnings("unchecked")
protected <T> T getOsgiService(Class<T> type, String filter, long timeout) {
    try {
        // Build an LDAP filter matching the service type plus any extra filter.
        String flt;
        if (filter != null) {
            if (filter.startsWith("(")) {
                flt = "(&(" + Constants.OBJECTCLASS + "=" + type.getName() + ")" + filter + ")";
            } else {
                flt = "(&(" + Constants.OBJECTCLASS + "=" + type.getName() + ")(" + filter + "))";
            }
        } else {
            flt = "(" + Constants.OBJECTCLASS + "=" + type.getName() + ")";
        }
        Filter osgiFilter = FrameworkUtil.createFilter(flt);
        ServiceTracker tracker = new ServiceTracker(bundleContext, osgiFilter, null);
        tracker.open(true);
        // Note that the tracker is not closed to keep the reference
        // This is buggy, as the service reference may change i think
        Object svc = type.cast(tracker.waitForService(timeout));
        if (svc == null) {
            // Dump diagnostic information before giving up.
            Dictionary dic = bundleContext.getBundle().getHeaders();
            System.err.println("Test bundle headers: " + explode(dic));
            for (ServiceReference ref : asCollection(bundleContext.getAllServiceReferences(null, null))) {
                System.err.println("ServiceReference: " + ref);
            }
            for (ServiceReference ref : asCollection(bundleContext.getAllServiceReferences(null, flt))) {
                System.err.println("Filtered ServiceReference: " + ref);
            }
            throw new RuntimeException("Gave up waiting for service " + flt);
        }
        return type.cast(svc);
    } catch (InvalidSyntaxException e) {
        throw new IllegalArgumentException("Invalid filter", e);
    } catch (InterruptedException e) {
        // Restore the interrupt status before propagating (it was silently lost).
        Thread.currentThread().interrupt();
        throw new RuntimeException(e);
    }
}
/*
* Explode the dictionary into a ,-delimited list of key=value pairs
*/
private static String explode(Dictionary dictionary) {
Enumeration keys = dictionary.keys();
StringBuilder sb = new StringBuilder();
while (keys.hasMoreElements()) {
Object key = keys.nextElement();
sb.append(String.format("%s=%s", key, dictionary.get(key)));
if (keys.hasMoreElements()) {
sb.append(", ");
}
}
return sb.toString();
}
/**
* Provides an iterable collection of references, even if the original array is null
*/
private static Collection<ServiceReference> asCollection(ServiceReference[] references) {
return references != null ? Arrays.asList(references) : Collections.<ServiceReference>emptyList();
}
    /** Opens a JMX connection to the local Karaf root instance using the default karaf/karaf credentials. */
    public JMXConnector getJMXConnector() throws Exception {
        return getJMXConnector("karaf", "karaf");
    }
public JMXConnector getJMXConnector(String userName, String passWord) throws Exception {
JMXServiceURL url = new JMXServiceURL("service:jmx:rmi:///jndi/rmi://localhost:1099/karaf-root");
Hashtable<String, Object> env = new Hashtable<>();
String[] credentials = new String[]{userName, passWord};
env.put("jmx.remote.credentials", credentials);
JMXConnector connector = JMXConnectorFactory.connect(url, env);
return connector;
}
public void assertFeatureInstalled(String featureName) {
try {
Feature[] features = featuresService.listInstalledFeatures();
for (Feature feature : features) {
if (featureName.equals(feature.getName())) {
return;
}
}
fail("Feature " + featureName + " should be installed but is not");
} catch (Exception e) {
throw ObjectHelper.wrapRuntimeCamelException(e);
}
}
    /** Fails the current test unless a feature with the given name AND version is installed. */
    public void assertFeatureInstalled(String featureName, String featureVersion) {
        try {
            Feature[] features = featuresService.listInstalledFeatures();
            for (Feature feature : features) {
                if (featureName.equals(feature.getName()) && featureVersion.equals(feature.getVersion())) {
                    return;
                }
            }
            fail("Feature " + featureName + "/" + featureVersion + " should be installed but is not");
        } catch (Exception e) {
            throw ObjectHelper.wrapRuntimeCamelException(e);
        }
    }
    /** Installs the named feature and asserts the installation succeeded. */
    protected void installAndAssertFeature(String feature) throws Exception {
        featuresService.installFeature(feature);
        assertFeatureInstalled(feature);
    }
    /** Installs the named feature at a specific version and asserts the installation succeeded. */
    protected void installAndAssertFeature(String feature, String version) throws Exception {
        featuresService.installFeature(feature, version);
        assertFeatureInstalled(feature, version);
    }
    /**
     * Installs the named feature and asserts it, then always removes any features
     * that were newly added (including transitively pulled-in dependencies).
     */
    protected void installAssertAndUninstallFeature(String feature) throws Exception {
        // Snapshot what was installed beforehand so cleanup only removes additions.
        Set<Feature> featuresBefore = new HashSet<Feature>(Arrays.asList(featuresService.listInstalledFeatures()));
        try {
            featuresService.installFeature(feature);
            assertFeatureInstalled(feature);
        } finally {
            uninstallNewFeatures(featuresBefore);
        }
    }
    /**
     * Installs the named feature at a specific version and asserts it, then always
     * removes any features that were newly added.
     */
    protected void installAssertAndUninstallFeature(String feature, String version) throws Exception {
        // Snapshot what was installed beforehand so cleanup only removes additions.
        Set<Feature> featuresBefore = new HashSet<Feature>(Arrays.asList(featuresService.listInstalledFeatures()));
        try {
            featuresService.installFeature(feature, version);
            assertFeatureInstalled(feature, version);
        } finally {
            uninstallNewFeatures(featuresBefore);
        }
    }
protected void installAssertAndUninstallFeatures(String... feature) throws Exception {
Set<Feature> featuresBefore = new HashSet<Feature>(Arrays.asList(featuresService.listInstalledFeatures()));
try {
for (String curFeature : feature) {
featuresService.installFeature(curFeature);
assertFeatureInstalled(curFeature);
}
} finally {
uninstallNewFeatures(featuresBefore);
}
}
    /**
     * The feature service does not uninstall feature dependencies when uninstalling a single feature.
     * So we need to make sure we uninstall all features that were newly installed.
     *
     * @param featuresBefore the set of features installed before the test began;
     *                       anything not in this set is removed
     */
    protected void uninstallNewFeatures(Set<Feature> featuresBefore) {
        try {
            Feature[] features = featuresService.listInstalledFeatures();
            for (Feature curFeature : features) {
                if (!featuresBefore.contains(curFeature)) {
                    try {
                        System.out.println("Uninstalling " + curFeature.getName());
                        featuresService.uninstallFeature(curFeature.getName(), curFeature.getVersion());
                    } catch (Exception e) {
                        // ignore: cleanup is best-effort; a failed uninstall must not mask the test result
                    }
                }
            }
        } catch (Exception e) {
            throw ObjectHelper.wrapRuntimeCamelException(e);
        }
    }
}
| |
/*-
*
* * Copyright 2015 Skymind,Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*
*/
package org.nd4j.linalg.convolution;
import lombok.extern.slf4j.Slf4j;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.nd4j.linalg.BaseNd4jTest;
import org.nd4j.linalg.api.buffer.DataBuffer;
import org.nd4j.linalg.api.buffer.util.AllocUtil;
import org.nd4j.linalg.api.buffer.util.DataTypeUtil;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.api.ops.DynamicCustomOp;
import org.nd4j.linalg.api.ops.impl.layers.convolution.Pooling2D;
import org.nd4j.linalg.checkutil.NDArrayCreationUtil;
import org.nd4j.linalg.exception.ND4JIllegalStateException;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.factory.Nd4jBackend;
import org.nd4j.linalg.ops.transforms.Transforms;
import org.nd4j.linalg.primitives.Pair;
import java.util.Arrays;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.nd4j.linalg.checkutil.NDArrayCreationUtil.getAll4dTestArraysWithShape;
/**
 * Tests for the {@code Convolution} helpers (im2col, col2im, pooling2d), validated
 * against hand-computed expectations and the legacy {@code OldConvolution}
 * reference implementation.
 *
 * Created by agibsonccc on 9/6/14.
 */
@Slf4j
@RunWith(Parameterized.class)
public class ConvolutionTestsC extends BaseNd4jTest {
    public ConvolutionTestsC(Nd4jBackend backend) {
        super(backend);
    }

    /** Sanity check of the output-size formula for one small configuration. */
    @Test
    public void testConvOutWidthAndHeight() {
        int outSize = Convolution.outSize(2, 1, 1, 2, 1, false);
        assertEquals(6, outSize);
    }

    /** im2col against a hand-computed expectation, then a col2im round-trip back to the input. */
    @Test
    public void testIm2Col() {
        INDArray linspaced = Nd4j.linspace(1, 16, 16).reshape(2, 2, 2, 2);
        INDArray ret = Convolution.im2col(linspaced, 1, 1, 1, 1, 2, 2, 0, false);
        INDArray im2colAssertion = Nd4j.create(new double[] {0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
                        0.0, 0.0, 1.0, 2.0, 0.0, 0.0, 0.0, 0.0, 3.0, 4.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
                        0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
                        5.0, 6.0, 0.0, 0.0, 0.0, 0.0, 7.0, 8.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
                        0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 9.0, 10.0,
                        0.0, 0.0, 0.0, 0.0, 11.0, 12.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
                        0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 13.0, 14.0, 0.0, 0.0,
                        0.0, 0.0, 15.0, 16.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0},
                        new int[] {2, 2, 1, 1, 6, 6});
        assertEquals(im2colAssertion, ret);
        // col2im of the im2col result must reproduce the original linspace values.
        INDArray col2ImAssertion = Nd4j.create(new double[] {1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0,
                        12.0, 13.0, 14.0, 15.0, 16.0
        }, new int[] {2, 2, 2, 2});
        INDArray otherConv = Convolution.col2im(ret, 1, 1, 2, 2, 2, 2);
        assertEquals(col2ImAssertion, otherConv);
    }

    /** im2col with 2x2 kernel and stride 2, no padding, against a hand-computed expectation. */
    @Test
    public void testIm2Col2() {
        int kh = 2;
        int kw = 2;
        int ph = 0;
        int pw = 0;
        int sy = 2;
        int sx = 2;
        int depth = 2;
        INDArray assertion = Nd4j.create(new double[] {1, 1, 1, 1, 3, 3, 3, 3, 1, 1, 1, 1, 3, 3, 3, 3, 1, 1, 1, 1, 3, 3,
                        3, 3, 1, 1, 1, 1, 3, 3, 3, 3, 2, 2, 2, 2, 4, 4, 4, 4, 2, 2, 2, 2, 4, 4, 4, 4, 2, 2, 2, 2, 4, 4,
                        4, 4, 2, 2, 2, 2, 4, 4, 4, 4}, new int[] {1, 1, 2, 2, 4, 4});
        INDArray ret = Nd4j.create(new double[] {1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3,
                        4, 4, 4, 4, 4, 4, 4, 4, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3,
                        4, 4, 4, 4, 4, 4, 4, 4}, new int[] {1, 1, 8, 8});
        INDArray test = Convolution.im2col(ret, kh, kw, sy, sx, ph, pw, 0, false);
        assertEquals(assertion, test);
    }

    /**
     * Exhaustively compares the current im2col implementation against the legacy one
     * over many shapes, strides, kernels, paddings, data types and allocation modes.
     * Ignored by default: it is very slow.
     */
    @Test
    @Ignore
    public void testCompareIm2ColImpl() {
        int[] miniBatches = {1, 3, 5};
        int[] depths = {1, 3, 5};
        int[] inHeights = {5, 21};
        int[] inWidths = {5, 21};
        int[] strideH = {1, 2};
        int[] strideW = {1, 2};
        int[] sizeW = {1, 2, 3};
        int[] sizeH = {1, 2, 3};
        int[] padH = {0, 1, 2};
        int[] padW = {0, 1, 2};
        boolean[] coverall = {false, true};
        DataBuffer.Type[] types = new DataBuffer.Type[] {DataBuffer.Type.FLOAT, DataBuffer.Type.DOUBLE,
                        DataBuffer.Type.FLOAT, DataBuffer.Type.DOUBLE};
        DataBuffer.AllocationMode[] modes =
                        new DataBuffer.AllocationMode[] {DataBuffer.AllocationMode.HEAP, DataBuffer.AllocationMode.HEAP,
                                        DataBuffer.AllocationMode.DIRECT, DataBuffer.AllocationMode.DIRECT};
        String factoryClassName = Nd4j.factory().getClass().toString().toLowerCase();
        if (factoryClassName.contains("jcublas") || factoryClassName.contains("cuda")) {
            //Only test direct for CUDA; test all for CPU
            types = new DataBuffer.Type[] {DataBuffer.Type.FLOAT, DataBuffer.Type.DOUBLE};
            modes = new DataBuffer.AllocationMode[] {DataBuffer.AllocationMode.DIRECT,
                            DataBuffer.AllocationMode.DIRECT};
        }
        // Remember the data type so it can be restored after the test mutates global state.
        DataBuffer.Type initialType = Nd4j.dataType();
        for (int i = 0; i < types.length; i++) {
            DataBuffer.Type type = types[i];
            DataBuffer.AllocationMode mode = modes[i];
            DataTypeUtil.setDTypeForContext(type);
            Nd4j.alloc = mode;
            AllocUtil.setAllocationModeForContext(mode);
            for (int m : miniBatches) {
                for (int d : depths) {
                    for (int h : inHeights) {
                        for (int w : inWidths) {
                            for (int sh : strideH) {
                                for (int sw : strideW) {
                                    for (int kh : sizeH) {
                                        for (int kw : sizeW) {
                                            for (int ph : padH) {
                                                for (int pw : padW) {
                                                    if ((w - kw + 2 * pw) % sw != 0 || (h - kh + 2 * ph) % sh != 0)
                                                        continue; //(w-kw+2*pw)/sw + 1 is not an integer, i.e., number of outputs doesn't fit
                                                    System.out.println("Running " + m + " " + d + " " + h + " " + w);
                                                    for (boolean cAll : coverall) {
                                                        INDArray in = Nd4j.rand(new int[] {m, d, h, w});
                                                        //assertEquals(in.data().allocationMode(), mode);
                                                        //assertEquals(in.data().dataType(), opType);
                                                        INDArray outOrig = OldConvolution.im2col(in, kh, kw, sh, sw, ph,
                                                                        pw, -1, cAll); //Old implementation
                                                        INDArray outNew = Convolution.im2col(in, kh, kw, sh, sw, ph, pw,
                                                                        cAll); //Current implementation
                                                        assertEquals(outOrig, outNew);
                                                    }
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
        DataTypeUtil.setDTypeForContext(initialType);
    }

    /**
     * Validates pooling2d (MAX/AVG/PNORM) in 'same' mode against an im2col-based
     * reference: the pooled output must equal the corresponding column-wise reduction.
     */
    @Test
    public void testPooling2D_Same() {
        int[] miniBatches = {1, 3, 5};
        int[] depths = {1, 3, 5};
        int[] inHeights = {5, 21};
        int[] inWidths = {5, 21};
        int[] strideH = {1, 2};
        int[] strideW = {1, 2};
        int[] sizeW = {1, 2, 3};
        int[] sizeH = {1, 2, 3};
        int[] padH = {0};
        int[] padW = {0};
        Pooling2D.Pooling2DType[] types = new Pooling2D.Pooling2DType[]{Pooling2D.Pooling2DType.AVG, Pooling2D.Pooling2DType.PNORM, Pooling2D.Pooling2DType.MAX,};
        for (Pooling2D.Pooling2DType type : types) {
            log.info("Trying pooling type: [{}]", type);
            for (int m : miniBatches) {
                for (int d : depths) {
                    for (int h : inHeights) {
                        for (int w : inWidths) {
                            for (int sh : strideH) {
                                for (int sw : strideW) {
                                    for (int kh : sizeH) {
                                        for (int kw : sizeW) {
                                            INDArray in = Nd4j.rand(new int[]{m, d, h, w});
                                            int[] outSize = getOutputSize(in, new int[]{kh, kw}, new int[]{sh, sw}, null, true);
                                            //Calculate padding for same mode:
                                            int pHTotal = (outSize[0]-1)*sh + kh - h;
                                            int pWTotal = (outSize[1]-1)*sw + kw - w;
                                            int padTop = pHTotal / 2;
                                            int padLeft = pWTotal / 2;
                                            INDArray col = Nd4j.createUninitialized(new int[]{m, d, outSize[0], outSize[1], kh, kw}, 'c');
                                            // im2col writes into a permuted view so 'col' ends up [m,d,outH,outW,kh,kw].
                                            INDArray col2 = col.permute(0, 1, 4, 5, 2, 3);
                                            Convolution.im2col(in, kh, kw, sh, sw, padTop, padLeft, true, col2);
                                            INDArray col2d = col.reshape('c', m * d * outSize[0] * outSize[1], kh * kw);
                                            INDArray output = Nd4j.create(m * d * outSize[0] * outSize[1]);
                                            INDArray reduced = null;
                                            switch (type) {
                                                case PNORM:
                                                    int pnorm = 3;
                                                    // Reference: (sum |x|^p)^(1/p) over each kernel window.
                                                    Transforms.abs(col2d, false);
                                                    Transforms.pow(col2d, pnorm, false);
                                                    reduced = col2d.sum(1);
                                                    Transforms.pow(reduced, (1.0 / pnorm), false);
                                                    Convolution.pooling2D(in, kh, kw, sh, sw, padTop, padLeft, 1, 1,
                                                            true, Pooling2D.Pooling2DType.PNORM, Pooling2D.Divisor.INCLUDE_PADDING,
                                                            (double) pnorm, outSize[0], outSize[1], output);
                                                    break;
                                                case MAX:
                                                    Convolution.pooling2D(in, kh, kw, sh, sw, padTop, padLeft, 1, 1,
                                                            true, Pooling2D.Pooling2DType.MAX, Pooling2D.Divisor.INCLUDE_PADDING,
                                                            0.0, outSize[0], outSize[1], output);
                                                    reduced = col2d.max(1);
                                                    break;
                                                case AVG:
                                                    Convolution.pooling2D(in, kh, kw, sh, sw, padTop, padLeft, 1, 1,
                                                            true, Pooling2D.Pooling2DType.AVG, Pooling2D.Divisor.INCLUDE_PADDING,
                                                            0.0, outSize[0], outSize[1], output);
                                                    reduced = col2d.mean(1);
                                                    break;
                                            }
                                            assertEquals("Failed opType: " + type, reduced, output);
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }

    /** Same as {@link #testIm2Col2()} with assertion and input construction reversed in order. */
    @Test
    public void testMoreIm2Col2() {
        int kh = 2;
        int kw = 2;
        int ph = 0;
        int pw = 0;
        int sy = 2;
        int sx = 2;
        INDArray ret = Nd4j.create(new double[] {1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3,
                        4, 4, 4, 4, 4, 4, 4, 4, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3,
                        4, 4, 4, 4, 4, 4, 4, 4}, new int[] {1, 1, 8, 8});
        INDArray assertion = Nd4j.create(new double[] {1, 1, 1, 1, 3, 3, 3, 3, 1, 1, 1, 1, 3, 3, 3, 3, 1, 1, 1, 1, 3, 3,
                        3, 3, 1, 1, 1, 1, 3, 3, 3, 3, 2, 2, 2, 2, 4, 4, 4, 4, 2, 2, 2, 2, 4, 4, 4, 4, 2, 2, 2, 2, 4, 4,
                        4, 4, 2, 2, 2, 2, 4, 4, 4, 4}, new int[] {1, 1, 2, 2, 4, 4});
        INDArray im2colTest = Convolution.im2col(ret, kh, kw, sy, sx, ph, pw, 0, false);
        assertEquals(assertion, im2colTest);
    }

    /** col2im of a 6d linspace, checked against the legacy implementation. */
    @Test
    public void testCol2Im() {
        int kh = 1;
        int kw = 1;
        int sy = 1;
        int sx = 1;
        int ph = 1;
        int pw = 1;
        INDArray linspaced = Nd4j.linspace(1, 64, 64).reshape(2, 2, 2, 2, 2, 2);
        INDArray newTest = Convolution.col2im(linspaced, sy, sx, ph, pw, 2, 2);
        INDArray assertion = OldConvolution.col2im(linspaced, sy, sx, ph, pw, 2, 2);
        System.out.println("Assertion dimensions: " + Arrays.toString(assertion.shape()));
        assertEquals(assertion, newTest);
    }

    /** im2col followed by col2im, both checked against the legacy implementation. */
    @Test
    public void testimcolim() {
        int nEx = 2;
        int depth = 3;
        int width = 7;
        int height = 7;
        int[] kernel = {3, 2};
        int[] stride = {2, 3};
        int[] padding = {1, 2};
        int prod = nEx * depth * width * height;
        INDArray in = Nd4j.linspace(1, prod, prod).reshape(nEx, depth, width, height);
        INDArray assertim2col = OldConvolution.im2col(in, kernel, stride, padding);
        INDArray im2col = Convolution.im2col(in, kernel, stride, padding);
        assertEquals(assertim2col, im2col);
        INDArray assertcol2im = OldConvolution.col2im(im2col, stride, padding, height, width);
        INDArray col2im = Convolution.col2im(im2col, stride, padding, height, width);
        assertEquals(assertcol2im, col2im);
    }

    /**
     * Compares the native maxpool2d_bp op against the Java reference gradient
     * ({@link #expGradMaxPoolBackPropSame}) over many test arrays in 'same' mode.
     */
    @Test
    public void testMaxPoolBackprop(){
        Nd4j.getRandom().setSeed(12345);
        for( int i=0; i<5; i++ ) {
            int[] inputShape = {1, 1, 4, 3};
            int[] kernel = {2, 2};
            int[] strides = {1, 1};
            int[] pad = {0, 0};
            int[] dilation = {1, 1};        //TODO non 1-1 dilation
            boolean same = true;
            String fn = "maxpool2d_bp";
            int nIArgs = 11;
            // Integer arguments for the native op, in the order the op expects.
            int[] a = new int[nIArgs];
            a[0] = kernel[0];
            a[1] = kernel[1];
            a[2] = strides[0];
            a[3] = strides[1];
            a[4] = pad[0];
            a[5] = pad[1];
            a[6] = dilation[0];
            a[7] = dilation[1];
            a[8] = same ? 1 : 0;
            //a[9]: Not used with max pooling
            a[10] = 0;  //For NCHW
            List<Pair<INDArray, String>> inputs = NDArrayCreationUtil.getAll4dTestArraysWithShape(12345, inputShape);
            for(Pair<INDArray,String> pIn : inputs){
                INDArray input = pIn.getFirst();
                int[] outShapeHW = getOutputSize(input, kernel, strides, pad, same);
                List<Pair<INDArray, String>> eps = NDArrayCreationUtil.getAll4dTestArraysWithShape(12345, inputShape[0], inputShape[1], outShapeHW[0], outShapeHW[1]);
                for(Pair<INDArray,String> pEps : eps){
                    INDArray epsilon = pEps.getFirst();
                    INDArray epsNext = Nd4j.create(inputShape, 'c');
                    //Runs fine with dups:
                    //            input = input.dup('c');
                    epsilon = epsilon.dup('c');
                    DynamicCustomOp op = DynamicCustomOp.builder(fn)
                            .addInputs(input, epsilon)
                            .addOutputs(epsNext)
                            .addIntegerArguments(a)
                            .build();
                    Nd4j.getExecutioner().exec(op);
                    INDArray expEpsNext = expGradMaxPoolBackPropSame(input, epsilon, kernel, strides, same);
                    String msg = "input=" + pIn.getSecond() + ", eps=" + pEps.getSecond();
                    assertEquals(msg, expEpsNext, epsNext);
                }
            }
        }
    }

    /**
     * Java reference implementation of the max-pooling backward pass in 'same' mode:
     * each gradient element is routed to the position of the max within its window
     * (ties broken by first occurrence in scan order), accumulating overlaps.
     *
     * @param input    forward-pass input, NCHW
     * @param gradient upstream gradient, shaped [n, c, outH, outW]
     * @param k        kernel [kH, kW]
     * @param s        strides [sH, sW]
     * @param same     must be true; only 'same' mode is implemented
     * @return gradient with respect to the input, same shape as {@code input}
     */
    public static INDArray expGradMaxPoolBackPropSame(INDArray input, INDArray gradient, int[] k, int[] s, boolean same){
        input = input.dup();
        if(!same){
            throw new UnsupportedOperationException("non-Same mode not yet supported here");
        }
        int outH = (int)Math.ceil(input.size(2)/(double)s[0]);
        int outW = (int)Math.ceil(input.size(3)/(double)s[1]);
        int totalPadH = (outH-1)*s[0] + k[0] - input.size(2);
        int totalPadW = (outW-1)*s[1] + k[1] - input.size(3);
        int topPad = totalPadH/2;   //bottom pad is totalPadH - topPad (implicit in bounds checks below)
        int leftPad = totalPadW/2;  //right pad is totalPadW - leftPad (implicit in bounds checks below)
        INDArray outGrad = Nd4j.create(input.shape());
        for( int m=0; m<input.size(0); m++ ){
            for( int d=0; d<input.size(1); d++ ){
                for( int y=0; y<outH; y++ ){
                    for( int x=0; x<outW; x++){
                        //First: work out the *original* position for this kernel...
                        int kTLy = y*s[0] - topPad;
                        int kTLx = x*s[1] - leftPad;
                        int[] maxPos = {kTLy,kTLx};
                        double max = -Double.MAX_VALUE;
                        for( int kY=0; kY<k[0]; kY++){
                            for( int kX=0; kX<k[1]; kX++){
                                if(kTLy + kY < 0 || kTLy + kY >= input.size(2) || kTLx + kX < 0 || kTLx + kX >= input.size(3)){
                                    //Is padding
                                    continue;
                                }
                                double v = input.getDouble(m, d, kTLy + kY, kTLx + kX);
                                if(v > max){
                                    max = v;
                                    maxPos = new int[]{kTLy + kY, kTLx + kX};
                                }
                            }
                        }
                        if(max == -Double.MAX_VALUE){
                            //All input values are padding, so can skip this input (should rarely happen)
                            continue;
                        }
                        //Now that we know *where* the max is from: add the gradient
                        double v = outGrad.getDouble(m, d, maxPos[0], maxPos[1]);
                        double toAdd = gradient.getDouble(m,d,y,x);
                        outGrad.putScalar(m, d, maxPos[0], maxPos[1], v + toAdd);
                    }
                }
            }
        }
        return outGrad;
    }

    /**
     * Computes the spatial output size [outH, outW] for a 4d NCHW input.
     *
     * @param inputData           input array; only dims 2 (height) and 3 (width) are read
     * @param kernel              kernel [kH, kW]
     * @param strides             strides [sH, sW]
     * @param padding             padding [pH, pW]; ignored (may be null) in 'same' mode
     * @param convolutionModeSame true for 'same' mode (ceil division), false for strict mode
     * @throws ND4JIllegalStateException if the configuration is invalid in strict mode
     */
    protected static int[] getOutputSize(INDArray inputData, int[] kernel, int[] strides, int[] padding, boolean convolutionModeSame) {
        int inH = inputData.size(2);
        int inW = inputData.size(3);
        if (!convolutionModeSame && (kernel[0] <= 0 || kernel[0] > inH + 2 * padding[0])) {
            throw new ND4JIllegalStateException("Invalid kernel height " + kernel[0] + " for input height " + inH
                            + " with padding " + padding[0]);
        }
        if (!convolutionModeSame && (kernel[1] <= 0 || kernel[1] > inW + 2 * padding[1])) {
            throw new ND4JIllegalStateException("Invalid kernel width " + kernel[1] + " for input width " + inW
                            + " with padding " + padding[1]);
        }
        if (convolutionModeSame) {
            //'Same' padding mode:
            //outH = ceil(inHeight / strideH)           decimal division
            //outW = ceil(inWidth / strideW)            decimal division
            //padHeightSum = ((outH - 1) * strideH + kH - inHeight)
            //padTop = padHeightSum / 2                 integer division
            //padBottom = padHeightSum - padTop
            //padWidthSum = ((outW - 1) * strideW + kW - inWidth)
            //padLeft = padWidthSum / 2                 integer division
            //padRight = padWidthSum - padLeft
            int outH = (int) Math.ceil(inH / ((double) strides[0]));
            int outW = (int) Math.ceil(inW / ((double) strides[1]));
            return new int[] {outH, outW};
        }
        // Strict mode: the kernel must tile the padded input exactly; otherwise the
        // configuration is invalid, and we report what the caller probably intended.
        if ((inH - kernel[0] + 2 * padding[0]) % strides[0] != 0) {
            double d = (inH - kernel[0] + 2 * padding[0]) / ((double) strides[0]) + 1.0;
            String str = String.format("%.2f", d);
            int truncated = (int) d;
            int sameSize = (int) Math.ceil(inH / ((double) strides[0]));
            throw new ND4JIllegalStateException("Output height is not an integer: " + str + " (truncated: "
                            + truncated + "); 'same' mode would give height " + sameSize);
        }
        if ((inW - kernel[1] + 2 * padding[1]) % strides[1] != 0) {
            double d = (inW - kernel[1] + 2 * padding[1]) / ((double) strides[1]) + 1.0;
            String str = String.format("%.2f", d);
            int truncated = (int) d;
            int sameSize = (int) Math.ceil(inW / ((double) strides[1]));
            throw new ND4JIllegalStateException("Output width is not an integer: " + str + " (truncated: "
                            + truncated + "); 'same' mode would give width " + sameSize);
        }
        int hOut = (inH - kernel[0] + 2 * padding[0]) / strides[0] + 1;
        int wOut = (inW - kernel[1] + 2 * padding[1]) / strides[1] + 1;
        return new int[] {hOut, wOut};
    }

    @Override
    public char ordering() {
        return 'c';
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.felix.ipojo.test.composite.provides;
import java.util.Properties;
import org.apache.felix.ipojo.ComponentFactory;
import org.apache.felix.ipojo.ComponentInstance;
import org.apache.felix.ipojo.Factory;
import org.apache.felix.ipojo.architecture.Architecture;
import org.apache.felix.ipojo.junit4osgi.OSGiTestCase;
import org.apache.felix.ipojo.test.composite.component.TotoProvider;
import org.apache.felix.ipojo.test.composite.service.Tota;
import org.apache.felix.ipojo.test.composite.service.Toto;
import org.apache.felix.ipojo.test.composite.util.Utils;
import org.osgi.framework.ServiceReference;
public class TestComp0 extends OSGiTestCase {
private ComponentFactory tataFactory;
private ComponentFactory totoFactory;
private ComponentFactory tataFactory2;
private ComponentInstance totoProv, totoProv2;
private ComponentInstance under;
public void setUp() {
tataFactory = (ComponentFactory) Utils.getFactoryByName(getContext(), "tata");
totoFactory = (ComponentFactory) Utils.getFactoryByName(getContext(), "toto");
tataFactory2 = (ComponentFactory) Utils.getFactoryByName(getContext(), "comp-6");
tataFactory2.stop();
tataFactory.stop();
Properties props = new Properties();
props.put("instance.name","toto provider");
try {
totoProv = totoFactory.createComponentInstance(props);
} catch(Exception e) {
fail("Cannot create an instance : " + e.getMessage());
}
Properties props3 = new Properties();
props3.put("instance.name","toto provider 2");
try {
totoProv2 = totoFactory.createComponentInstance(props3);
} catch(Exception e) {
fail("Cannot create an instance : " + e.getMessage());
}
totoProv.stop();
totoProv2.stop();
Factory factory = Utils.getFactoryByName(getContext(), "comp-0");
Properties props2 = new Properties();
props2.put("instance.name","ff");
try {
under = factory.createComponentInstance(props2);
} catch(Exception e) {
e.printStackTrace();
fail("Cannot create an instance : " + e.getMessage());
}
}
public void tearDown() {
tataFactory.start();
totoProv.dispose();
totoProv = null;
totoProv2.dispose();
totoProv2 = null;
tataFactory2.start();
// Reset counters
TotoProvider.toto = 0;
TotoProvider.toto_2 = 0;
TotoProvider.toto_3 = 0;
TotoProvider.toto_4 = 0;
TotoProvider.toto1 = 0;
}
public void testSimple() {
// Neither factory nor instance
assertTrue("Assert under state - 1", under.getState() == ComponentInstance.INVALID);
assertNull("Assert no tota service - 1", getContext().getServiceReference(Tota.class.getName()));
// Start the importer
totoProv.start();
assertNotNull("Assert toto service - 1", getContext().getServiceReference(Toto.class.getName()));
assertTrue("Assert under state - 2", under.getState() == ComponentInstance.INVALID);
assertNull("Assert no tota service - 2", getContext().getServiceReference(Tota.class.getName()));
// Start the factory
tataFactory.start();
assertTrue("Assert under state - 3", under.getState() == ComponentInstance.VALID);
assertNotNull("Assert tota service - 3", getContext().getServiceReference(Tota.class.getName()));
ServiceReference ref = getContext().getServiceReference(Tota.class.getName());
Tota tota = (Tota) getContext().getService(ref);
invokeAll(tota);
// Check toto
Properties props = tota.getProps();
Integer toto = (Integer) props.get("toto");
Integer toto_2 = (Integer) props.get("toto_2");
Integer toto_3 = (Integer) props.get("toto_3");
Integer toto_4 = (Integer) props.get("toto_4");
assertEquals("Assert toto - 3", toto.intValue(), 1);
assertEquals("Assert toto_2 - 3", toto_2.intValue(), 1);
assertEquals("Assert toto_3 - 3", toto_3.intValue(), 1);
assertEquals("Assert toto_4 - 3", toto_4.intValue(), 0);
//Check tata
props = tota.getPropsTata();
Integer tata = (Integer) props.get("tata");
Integer tataStr = (Integer) props.get("tataStr");
Integer tataStrs = (Integer) props.get("tataStrs");
Integer tata_2 = (Integer) props.get("tata_2");
Integer tata_3 = (Integer) props.get("tata_3");
Integer tata1 = (Integer) props.get("tata1");
Integer tata1_1 = (Integer) props.get("tata1_1");
Integer tata5 = (Integer) props.get("tata5");
Integer tata5_1 = (Integer) props.get("tata5_1");
Integer tata5_2 = (Integer) props.get("tata5_2");
assertEquals("Assert tata - 3", tata.intValue(), 1);
assertEquals("Assert tataStr - 3", tataStr.intValue(), 1);
assertEquals("Assert tataStrs - 3", tataStrs.intValue(), 0);
assertEquals("Assert tata_2 - 3", tata_2.intValue(), 1);
assertEquals("Assert tata_3 - 3", tata_3.intValue(), 1);
assertEquals("Assert tata1 - 3", tata1.intValue(), 1);
assertEquals("Assert tata1_1 - 3", tata1_1.intValue(), 1);
assertEquals("Assert tata5 - 3", tata5.intValue(), 1);
assertEquals("Assert tata5_1 - 3", tata5_1.intValue(), 1);
assertEquals("Assert tata5_2 - 3", tata5_2.intValue(), 1);
getContext().ungetService(ref);
tota = null;
// Start a second import
totoProv2.start();
assertTrue("Assert under state - 4", under.getState() == ComponentInstance.VALID);
assertNotNull("Assert tota service - 4", getContext().getServiceReference(Tota.class.getName()));
ref = getContext().getServiceReference(Tota.class.getName());
tota = (Tota) getContext().getService(ref);
invokeAll(tota);
// Check toto
props = tota.getProps();
toto = (Integer) props.get("toto");
toto_2 = (Integer) props.get("toto_2");
toto_3 = (Integer) props.get("toto_3");
toto_4 = (Integer) props.get("toto_4");
assertEquals("Assert toto - 4", toto.intValue(), 2);
assertEquals("Assert toto_2 - 4", toto_2.intValue(), 2);
assertEquals("Assert toto_3 - 4", toto_3.intValue(), 2);
assertEquals("Assert toto_4 - 4", toto_4.intValue(), 0);
//Check tata
props = tota.getPropsTata();
tata = (Integer) props.get("tata");
tataStr = (Integer) props.get("tataStr");
tataStrs = (Integer) props.get("tataStrs");
tata_2 = (Integer) props.get("tata_2");
tata_3 = (Integer) props.get("tata_3");
tata1 = (Integer) props.get("tata1");
tata1_1 = (Integer) props.get("tata1_1");
tata5 = (Integer) props.get("tata5");
tata5_1 = (Integer) props.get("tata5_1");
tata5_2 = (Integer) props.get("tata5_2");
assertEquals("Assert tata - 4", tata.intValue(), 2);
assertEquals("Assert tataStr - 4", tataStr.intValue(), 2);
assertEquals("Assert tataStrs - 4", tataStrs.intValue(), 0);
assertEquals("Assert tata_2 - 4", tata_2.intValue(), 2);
assertEquals("Assert tata_3 - 4", tata_3.intValue(), 2);
assertEquals("Assert tata1 - 4", tata1.intValue(), 2);
assertEquals("Assert tata1_1 - 4", tata1_1.intValue(), 2);
assertEquals("Assert tata5 - 4", tata5.intValue(), 2);
assertEquals("Assert tata5_1 - 4", tata5_1.intValue(), 2);
assertEquals("Assert tata5_2 - 4", tata5_2.intValue(), 2);
getContext().ungetService(ref);
tota = null;
tataFactory.stop();
assertTrue("Assert under state - 5", under.getState() == ComponentInstance.INVALID);
assertNull("Assert no tota service - 5", getContext().getServiceReference(Tota.class.getName()));
totoProv2.stop();
tataFactory.start();
assertTrue("Assert under state - 6", under.getState() == ComponentInstance.VALID);
assertNotNull("Assert tota service - 6", getContext().getServiceReference(Tota.class.getName()));
ref = getContext().getServiceReference(Tota.class.getName());
tota = (Tota) getContext().getService(ref);
invokeAll(tota);
// Check toto
props = tota.getProps();
toto = (Integer) props.get("toto");
toto_2 = (Integer) props.get("toto_2");
toto_3 = (Integer) props.get("toto_3");
toto_4 = (Integer) props.get("toto_4");
assertEquals("Assert toto - 6", toto.intValue(), 3);
assertEquals("Assert toto_2 - 6", toto_2.intValue(), 3);
assertEquals("Assert toto_3 - 6", toto_3.intValue(), 3);
assertEquals("Assert toto_4 - 6", toto_4.intValue(), 0);
//Check tata
props = tota.getPropsTata();
tata = (Integer) props.get("tata");
tataStr = (Integer) props.get("tataStr");
tataStrs = (Integer) props.get("tataStrs");
tata_2 = (Integer) props.get("tata_2");
tata_3 = (Integer) props.get("tata_3");
tata1 = (Integer) props.get("tata1");
tata1_1 = (Integer) props.get("tata1_1");
tata5 = (Integer) props.get("tata5");
tata5_1 = (Integer) props.get("tata5_1");
tata5_2 = (Integer) props.get("tata5_2");
assertEquals("Assert tata - 6", tata.intValue(), 1);
assertEquals("Assert tataStr - 6", tataStr.intValue(), 1);
assertEquals("Assert tataStrs - 6", tataStrs.intValue(), 0);
assertEquals("Assert tata_2 - 6", tata_2.intValue(), 1);
assertEquals("Assert tata_3 - 6", tata_3.intValue(), 1);
assertEquals("Assert tata1 - 6", tata1.intValue(), 1);
assertEquals("Assert tata1_1 - 6", tata1_1.intValue(), 1);
assertEquals("Assert tata5 - 6", tata5.intValue(), 1);
assertEquals("Assert tata5_1 - 6", tata5_1.intValue(), 1);
assertEquals("Assert tata5_2 - 6", tata5_2.intValue(), 1);
getContext().ungetService(ref);
tota = null;
// Is arch exposed
assertNotNull("Test arch", Utils.getServiceReferenceByName(getContext(), Architecture.class.getName(), "ff"));
totoProv.stop();
assertTrue("Assert under state - 7", under.getState() == ComponentInstance.INVALID);
assertNotNull("Test arch-2", Utils.getServiceReferenceByName(getContext(), Architecture.class.getName(), "ff"));
assertNull("Assert no tota service - 7", getContext().getServiceReference(Tota.class.getName()));
under.dispose();
under = null;
}
private void invoke(Tota tota) {
tota.tata();
assertEquals("Assert invoke tataint", tota.tataInt(2), 2);
assertEquals("Assert invoke tataLong", tota.tataLong(2), 2);
assertEquals("Assert invoke tataDouble", tota.tataDouble(2), 2);
assertEquals("Assert invoke tataChar", tota.tataChar('a'), 'a');
assertTrue("Assert invoke tataBoolean", tota.tataBoolean(true));
assertEquals("Assert invoke tataByte", tota.tataByte((byte)2), 2);
assertEquals("Assert invoke tataShort", tota.tataShort((short)5), 5);
assertEquals("Assert invoke tataFloat", tota.tataFloat(5), 5);
}
private void invokeArrays(Tota tota) {
int[] a = new int[] {1,2,3};
assertEquals("Assert invoke tataint[]", tota.tataInts(a), a);
long[] b = new long[] {1,2,3};
assertEquals("Assert invoke tataLong[]", tota.tataLongs(b), b);
double[] c = new double[] {1,2,3};
assertEquals("Assert invoke tataDouble[]", tota.tataDoubles(c), c);
char[] d = new char[] {'a','b', 'c'};
assertEquals("Assert invoke tataChar[]", tota.tataChars(d), d);
boolean[] e = new boolean[] {true, false};
assertEquals("Assert invoke tataBoolean[]", tota.tataBooleans(e), e);
byte[] f = new byte[] {(byte) 1};
assertEquals("Assert invoke tataByte[]", tota.tataBytes(f), f);
short[] g = new short[] {(short) 1};
assertEquals("Assert invoke tataShort[]", tota.tataShorts(g), g);
float[] h = new float[] {5,6,7};
assertEquals("Assert invoke tataFloat[]", tota.tataFloats(h), h);
}
// Invokes the String-flavoured no-arg operation on the Tota service; the
// call is only expected to complete without throwing (no return value is
// checked here).
private void invokeStr(Tota tota) {
tota.tataStr();
}
// Invokes the overloaded tata(..) operations (two ints, then a String).
// Only successful dispatch through the delegate is exercised; any return
// values are intentionally ignored.
private void invokeTata(Tota tota) {
tota.tata(1,2);
tota.tata("tototototo");
}
/**
 * Checks the tata1 overloads (String and char[] input, String result).
 * Expected values are passed first so assertEquals failure messages read
 * correctly (the original code had expected/actual swapped).
 */
private void invokeTata1(Tota tota) {
    assertEquals("Assert tata1", "foo", tota.tata1("foo"));
    assertEquals("Assert tata1 - 2", "abc", tota.tata1(new char[] {'a', 'b', 'c'}));
}
/**
 * Checks the tata5 overloads (String+int, String[]+int, String+int[]).
 * Expected values are passed first so assertEquals failure messages read
 * correctly (the original code had expected/actual swapped).
 */
private void invokeTata5(Tota tota) {
    assertEquals("Assert tata5 -1", "foo" + 1, tota.tata5("foo", 1));
    assertEquals("Assert tata5 - 2", "31", tota.tata5(new String[] {"a", "b", "c"}, 1));
    assertEquals("Assert tata5 - 3", "foo3", tota.tata5("foo", new int[] {1, 2, 3}));
}
/**
 * Checks the three-argument add operation. The expected value (3) is passed
 * first so assertEquals failure messages read correctly (the original code
 * had expected/actual swapped).
 */
private void invokeAdd(Tota tota) {
    assertEquals("Assert add", 3, tota.add(1, 1, 1));
}
/**
 * Invokes the toto overloads. Only the String overload has a checked return
 * value; its expected value is passed first so assertEquals failure messages
 * read correctly (the original code had expected/actual swapped).
 */
private void invokeToto(Tota tota) {
    tota.toto();
    assertEquals("Assert toto", "foo", tota.toto("foo"));
    tota.toto(1, 2);
}
// Runs the full battery of delegation checks against the given Tota service
// so callers can exercise every method group with a single call. The call
// order is preserved as-is.
private void invokeAll(Tota tota) {
invoke(tota);
invokeArrays(tota);
invokeStr(tota);
invokeTata(tota);
invokeTata1(tota);
invokeTata5(tota);
invokeAdd(tota);
invokeToto(tota);
}
}
| |
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.util.xmlb;
import com.intellij.openapi.util.JDOMUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.xmlb.annotations.MapAnnotation;
import org.jdom.Attribute;
import org.jdom.Content;
import org.jdom.Element;
import org.jdom.Text;
import org.jetbrains.annotations.NotNull;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Map;
import java.util.Set;
import static com.intellij.util.xmlb.Constants.*;
class MapBinding implements Binding {
private final Binding myKeyBinding;
private final Binding myValueBinding;
private final MapAnnotation myMapAnnotation;
private static final Comparator<Object> KEY_COMPARATOR = new Comparator<Object>() {
@Override
public int compare(final Object o1, final Object o2) {
if (o1 instanceof Comparable && o2 instanceof Comparable) {
Comparable c1 = (Comparable)o1;
Comparable c2 = (Comparable)o2;
return c1.compareTo(c2);
}
return 0;
}
};
public MapBinding(ParameterizedType type, Accessor accessor) {
Type[] arguments = type.getActualTypeArguments();
Type keyType = arguments[0];
Type valueType = arguments[1];
myKeyBinding = XmlSerializerImpl.getBinding(keyType);
myValueBinding = XmlSerializerImpl.getBinding(valueType);
myMapAnnotation = XmlSerializerImpl.findAnnotation(accessor.getAnnotations(), MapAnnotation.class);
}
@Override
public Object serialize(Object o, Object context, SerializationFilter filter) {
Map map = (Map)o;
Element m;
if (myMapAnnotation == null || myMapAnnotation.surroundWithTag()) {
m = new Element(Constants.MAP);
}
else {
m = (Element)context;
}
final Set keySet = map.keySet();
final Object[] keys = ArrayUtil.toObjectArray(keySet);
if (myMapAnnotation == null || myMapAnnotation.sortBeforeSave()) {
Arrays.sort(keys, KEY_COMPARATOR);
}
for (Object k : keys) {
Object v = map.get(k);
Element entry = new Element(getEntryAttributeName());
m.addContent(entry);
Object kNode = myKeyBinding.serialize(k, entry, filter);
if (kNode instanceof Text) {
Text text = (Text)kNode;
entry.setAttribute(getKeyAttributeValue(), text.getText());
}
else {
if (myMapAnnotation != null && !myMapAnnotation.surroundKeyWithTag()) {
entry.addContent((Content)kNode);
}
else {
Element key = new Element(getKeyAttributeValue());
entry.addContent(key);
key.addContent((Content)kNode);
}
}
Object vNode = myValueBinding.serialize(v, entry, filter);
if (vNode instanceof Text) {
Text text = (Text)vNode;
entry.setAttribute(getValueAttributeName(), text.getText());
}
else {
if (myMapAnnotation != null && !myMapAnnotation.surroundValueWithTag()) {
entry.addContent((Element)vNode);
}
else {
Element value = new Element(getValueAttributeName());
entry.addContent(value);
value.addContent((Content)vNode);
}
}
}
return m;
}
private String getEntryAttributeName() {
return myMapAnnotation == null ? ENTRY : myMapAnnotation.entryTagName();
}
private String getValueAttributeName() {
return myMapAnnotation == null ? VALUE : myMapAnnotation.valueAttributeName();
}
private String getKeyAttributeValue() {
return myMapAnnotation == null ? KEY : myMapAnnotation.keyAttributeName();
}
@Override
public Object deserialize(Object o, @NotNull Object... nodes) {
Map map = (Map)o;
map.clear();
final Object[] childNodes;
if (myMapAnnotation == null || myMapAnnotation.surroundWithTag()) {
assert nodes.length == 1;
Element m = (Element)nodes[0];
childNodes = JDOMUtil.getContent(m);
}
else {
childNodes = nodes;
}
for (Object childNode : childNodes) {
if (XmlSerializerImpl.isIgnoredNode(childNode)) continue;
Element entry = (Element)childNode;
Object k = null;
Object v = null;
assert entry.getName().equals(getEntryAttributeName());
Attribute keyAttr = entry.getAttribute(getKeyAttributeValue());
if (keyAttr != null) {
k = myKeyBinding.deserialize(o, keyAttr);
}
else {
if (myMapAnnotation != null && !myMapAnnotation.surroundKeyWithTag()) {
final Object[] children = JDOMUtil.getContent(entry);
for (Object child : children) {
if (myKeyBinding.isBoundTo(child)) {
k = myKeyBinding.deserialize(o, child);
break;
}
}
assert k != null : "no key found";
}
else {
final Object keyNode = entry.getChildren(getKeyAttributeValue()).get(0);
k = myKeyBinding.deserialize(o, JDOMUtil.getContent((Element)keyNode));
}
}
Attribute valueAttr = entry.getAttribute(getValueAttributeName());
if (valueAttr != null) {
v = myValueBinding.deserialize(o, valueAttr);
}
else {
if (myMapAnnotation != null && !myMapAnnotation.surroundValueWithTag()) {
final Object[] children = JDOMUtil.getContent(entry);
for (Object child : children) {
if (myValueBinding.isBoundTo(child)) {
v = myValueBinding.deserialize(o, child);
break;
}
}
assert v != null : "no value found";
}
else {
final Object valueNode = entry.getChildren(getValueAttributeName()).get(0);
v = myValueBinding.deserialize(o, XmlSerializerImpl.getNotIgnoredContent((Element)valueNode));
}
}
//noinspection unchecked
map.put(k, v);
}
return map;
}
@Override
public boolean isBoundTo(Object node) {
if (!(node instanceof Element)) return false;
if (myMapAnnotation != null && !myMapAnnotation.surroundWithTag()) {
return myMapAnnotation.entryTagName().equals(((Element)node).getName());
}
return ((Element)node).getName().equals(Constants.MAP);
}
@Override
public Class getBoundNodeType() {
return Element.class;
}
@Override
public void init() {
}
}
| |
package uk.co.kasl.topicclient;
import java.awt.Color;
import java.awt.Container;
import java.awt.Cursor;
import java.awt.GridBagLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.util.Vector;
import javax.swing.BorderFactory;
import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import uk.co.kasl.topicclient.MyComponents.MyBagConstraints;
import uk.co.kasl.topicclient.MyComponents.MyJFrame;
/**
 * Swing dashboard window that lists subscription topics and lets the user
 * select an existing topic or request a new one. All user actions are
 * forwarded to a single registered {@link Listener}.
 */
public class TradeDetail extends MyJFrame {

    private static final long serialVersionUID = 1L;

    /** Scratch list of topic names; kept public for callers that pre-load topics. */
    public Vector<String> topicList = new Vector<String>();

    JFrame frame;
    /** Single registered callback for topic selection/creation and window close (may be null). */
    Listener listeners;
    /** Labels currently shown on the dashboard, one per topic. */
    Vector<JLabel> topics = new Vector<JLabel>();
    JButton addTopic;
    Container myPane;
    JPanel topicPanel;

    /**
     * Builds the dashboard pre-populated with the given topic names. The
     * supplied vector is emptied once its contents are copied into the view.
     */
    public TradeDetail(Vector<String> topics) {
        frame = new JFrame();
        frame.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
        frame.setTitle("Topic Dashboard");
        addComponentsToFrame(frame.getContentPane(), topics);
        topics.clear();
        frame.setResizable(false);
        frame.setVisible(true);
        frame.addWindowListener(new WindowAdapter() {
            @Override
            public void windowClosing(WindowEvent e) {
                super.windowClosing(e);
                // Guard: closing without a registered listener previously threw NPE.
                if (listeners != null) {
                    listeners.closing();
                }
            }
        });
        frame.pack();
    }

    /** Builds a stand-alone dashboard with three hard-coded demo topics. */
    public TradeDetail() {
        frame = new JFrame();
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        addComponentsToFrame(frame.getContentPane());
        frame.setResizable(false);
        frame.setVisible(true);
        frame.pack();
    }

    /** Lays out the topic panel from an existing list of topic names. */
    private void addComponentsToFrame(Container pane, Vector<String> oldTopics) {
        topicPanel = new JPanel();
        GridBagLayout gb = new GridBagLayout();
        pane.setLayout(gb);
        topicPanel.setLayout(gb);
        // addTopics() wires each label's mouse listener exactly once, so no
        // extra addEventListeners() call is needed here (the old one stacked
        // duplicate listeners on every label).
        addTopics(oldTopics);
        pane.add(topicPanel, new MyBagConstraints(0, 0, 1, 1));
        myPane = pane;
        addTopic(1);
    }

    /** Lays out the topic panel with the default demo topics. */
    private void addComponentsToFrame(Container pane) {
        topicPanel = new JPanel();
        GridBagLayout gb = new GridBagLayout();
        pane.setLayout(gb);
        topicPanel.setLayout(gb);
        // NOTE(review): the first label intentionally(?) has no border in the
        // original code - kept as-is to preserve the existing appearance.
        JLabel topic = new JLabel("Politics");
        topicPanel.add(topic, new MyBagConstraints(0, 0, 1, 1));
        topics.add(topic);
        topic = new JLabel("Sports");
        topic.setBorder(BorderFactory.createEmptyBorder(10, 10, 10, 0));
        topicPanel.add(topic, new MyBagConstraints(0, 1, 1, 1));
        topics.add(topic);
        topic = new JLabel("Entertainment");
        topic.setBorder(BorderFactory.createEmptyBorder(10, 10, 10, 0));
        topicPanel.add(topic, new MyBagConstraints(0, 2, 1, 1));
        topics.add(topic);
        pane.add(topicPanel, new MyBagConstraints(0, 0, 1, 1));
        myPane = pane;
        addTopic(1);
        addEventListeners();
    }

    /** Adds the "Add a new Topic" button at the given grid row and wires its action. */
    public void addTopic(int loc) {
        addTopic = new JButton("Add a new Topic");
        myPane.add(addTopic, new MyBagConstraints(0, loc, 1, 1));
        addTopic.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                String newTopic = JOptionPane.showInputDialog(getParent(), "What are you interested in?", "New Topic", JOptionPane.PLAIN_MESSAGE);
                // Ignore cancelled/blank input; guard against a missing listener.
                if (newTopic != null && newTopic.trim().length() > 0 && listeners != null) {
                    listeners.addTopic(newTopic);
                }
            }
        });
    }

    /** Attaches the hover/click behaviour to every label currently in {@link #topics}. */
    private void addEventListeners() {
        for (int i = 0; i < topics.size(); i++) {
            attachTopicListener(topics.get(i));
        }
    }

    /** Wires a single label: click notifies the listener, hover changes colour and cursor. */
    private void attachTopicListener(JLabel myLabel) {
        myLabel.addMouseListener(new MouseListener() {
            @Override
            public void mouseClicked(MouseEvent e) {
                if (listeners != null) {
                    listeners.selectTopic(myLabel.getText());
                }
            }
            @Override
            public void mousePressed(MouseEvent e) {
                myLabel.setForeground(Color.RED);
            }
            @Override
            public void mouseReleased(MouseEvent e) {
                // No-op: the colour is restored on mouseExited.
            }
            @Override
            public void mouseEntered(MouseEvent e) {
                myLabel.setForeground(Color.BLUE);
                myLabel.setCursor(new Cursor(Cursor.HAND_CURSOR));
            }
            @Override
            public void mouseExited(MouseEvent e) {
                myLabel.setForeground(Color.BLACK);
            }
        });
    }

    @Override
    public void disposeFrame() {
        // Route through the window-closing event so the closing() callback fires.
        frame.dispatchEvent(new WindowEvent(frame, WindowEvent.WINDOW_CLOSING));
    }

    /** Callback interface for dashboard events. */
    public interface Listener {
        public void selectTopic(String topic);
        public void addTopic(String topic);
        public void closing();
    }

    public void addListener(Listener l) {
        listeners = l;
    }

    public void removeListener(Listener l) {
        // Only detach when the caller passes the currently registered listener;
        // the previous code nulled the field regardless of the argument.
        if (listeners == l) {
            listeners = null;
        }
    }

    public static void main(String args[]) {
        TradeDetail tradeDetail = new TradeDetail();
        tradeDetail.addListener(new Listener() {
            @Override
            public void selectTopic(String topic) {
                JOptionPane.showMessageDialog(tradeDetail, "Stop Joking mate!! You can't access " + topic);
            }
            @Override
            public void addTopic(String topic) {
                JOptionPane.showMessageDialog(tradeDetail, "Do you even expect you can make a difference? " + topic);
            }
            @Override
            public void closing() {
                JOptionPane.showMessageDialog(tradeDetail, "Thanks for having fun! See you later!!");
            }
        });
    }

    /** Clears the panel and rebuilds it from the given topic names. */
    public void updateTopics(Vector<String> oldTopics) {
        topicPanel.removeAll();
        topicPanel.revalidate();
        topicPanel.repaint(); // removeAll()+revalidate() alone can leave stale pixels
        topics.clear();
        addTopics(oldTopics);
    }

    /** Creates one label per topic name and wires each label exactly once. */
    private void addTopics(Vector<String> oldTopics) {
        for (int i = 0; i < oldTopics.size(); i++) {
            JLabel topic = new JLabel(oldTopics.get(i));
            topic.setBorder(BorderFactory.createEmptyBorder(10, 10, 10, 0));
            topicPanel.add(topic, new MyBagConstraints(0, i, 1, 1));
            topics.add(topic);
            // Attach to the new label only. The original called addEventListeners()
            // inside this loop, stacking duplicate listeners on every existing label.
            attachTopicListener(topic);
        }
        topicPanel.revalidate();
        frame.pack(); // hoisted out of the loop: one relayout instead of one per topic
    }

    @Override
    public void setTitle(String title) {
        frame.setTitle(title);
    }
}
| |
/*
* Copyright (c) 2011, NORDUnet A/S
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* * Neither the name of the NORDUnet nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.nordu.crowd.sso;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.rmi.RemoteException;
import java.util.List;
import java.util.Set;
import javax.servlet.ServletException;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.atlassian.crowd.exception.DirectoryNotFoundException;
import com.atlassian.crowd.exception.InactiveAccountException;
import com.atlassian.crowd.exception.InvalidAuthenticationException;
import com.atlassian.crowd.exception.InvalidAuthorizationTokenException;
import com.atlassian.crowd.exception.ObjectNotFoundException;
import com.atlassian.crowd.exception.OperationFailedException;
import com.atlassian.crowd.exception.UserNotFoundException;
import com.atlassian.crowd.integration.Constants;
import com.atlassian.crowd.integration.http.HttpAuthenticator;
import com.atlassian.crowd.integration.springsecurity.CrowdSSOAuthenticationToken;
import com.atlassian.crowd.manager.application.ApplicationAccessDeniedException;
import com.atlassian.crowd.manager.application.ApplicationManager;
import com.atlassian.crowd.manager.application.ApplicationService;
import com.atlassian.crowd.manager.authentication.TokenAuthenticationManager;
import com.atlassian.crowd.model.application.Application;
import com.atlassian.crowd.model.application.RemoteAddress;
import com.atlassian.crowd.model.authentication.UserAuthenticationContext;
import com.atlassian.crowd.model.authentication.ValidationFactor;
import com.atlassian.crowd.model.user.User;
import com.atlassian.crowd.service.client.ClientProperties;
import com.atlassian.crowd.service.soap.client.SecurityServerClient;
import org.apache.log4j.Logger;
/**
 * Servlet for setting the Crowd SSO cookie and redirecting to the wanted destination.
 *
 * The authenticated user name is taken from the {@code REMOTE_USER} request
 * header (expected to be set by an upstream authentication layer); no password
 * is validated here.
 *
 * @author juha
 */
public class SSOCookieServlet extends HttpServlet {

    private static final long serialVersionUID = 1L;

    private static final Logger log = Logger.getLogger(SSOCookieServlet.class);

    private ApplicationService applicationService;
    private ApplicationManager applicationManager;
    private SecurityServerClient securityServerClient;
    private TokenAuthenticationManager tokenAuthenticationManager;
    private HttpAuthenticator httpAuthenticator;
    private ClientProperties clientProperties;

    /** Session attribute holding the URL to return to after an account claim. */
    public static final String REDIRECT_ATTRIBUTE = "ssocookie.redirect";

    /**
     * Whether to use the ClaimServlet (Nordunet-specific)
     */
    private boolean useClaimServlet = false;

    public SSOCookieServlet(ApplicationService applicationService, ApplicationManager applicationManager, SecurityServerClient securityServerClient, TokenAuthenticationManager tokenAuthenticationManager, HttpAuthenticator httpAuthenticator, ClientProperties clientProperties) {
        this.applicationService = applicationService;
        this.applicationManager = applicationManager;
        this.securityServerClient = securityServerClient;
        this.tokenAuthenticationManager = tokenAuthenticationManager;
        this.httpAuthenticator = httpAuthenticator;
        this.clientProperties = clientProperties;
    }

    @Override
    protected void doGet(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException {
        String requestedApplicationName = clientProperties.getApplicationName();
        String originalRequestUrl = req.getParameter("redirectTo");
        UserAuthenticationContext authCtx = new UserAuthenticationContext();
        String username = req.getHeader("REMOTE_USER");
        if (username == null || username.length() == 0) {
            log.error("No REMOTE_USER header");
            errorPage(res, "Unknown user");
            return;
        }
        List<Application> applications = null;
        try {
            final User user = applicationService.findUserByName(applicationManager.findByName(clientProperties.getApplicationName()), username);
            applications = tokenAuthenticationManager.findAuthorisedApplications(user, "crowd");
        } catch (ObjectNotFoundException e) {
            log.error("Could not find user", e);
        } catch (UserNotFoundException e) {
            log.error("Could not find user", e);
        } catch (DirectoryNotFoundException e) {
            log.error("Could not find directory", e);
        } catch (OperationFailedException e) {
            log.error(e);
        }
        // Try to guess the application we want to set the cookie for by matching
        // the redirect target's host against each application's remote addresses.
        // applications stays null when the lookup above failed; the original code
        // then NPEd in the for-each below.
        if (applications != null && originalRequestUrl != null) {
            try {
                URL reqURL = new URL(originalRequestUrl);
                for (Application app : applications) {
                    Set<RemoteAddress> remoteAddresses = app.getRemoteAddresses();
                    for (RemoteAddress address : remoteAddresses) {
                        if (address.getAddress().equals(reqURL.getHost())) {
                            requestedApplicationName = app.getName();
                            break;
                        }
                    }
                }
            } catch (MalformedURLException e) {
                // Not a parsable URL - fall back to the default application name.
            }
        }
        authCtx.setName(username);
        authCtx.setApplication(requestedApplicationName);
        ValidationFactor[] validationFactors = httpAuthenticator.getValidationFactors(req);
        authCtx.setValidationFactors(validationFactors);
        CrowdSSOAuthenticationToken crowdAuthRequest = null;
        try {
            crowdAuthRequest = new CrowdSSOAuthenticationToken(tokenAuthenticationManager.authenticateUserWithoutValidatingPassword(authCtx).getRandomHash());
        } catch (InvalidAuthenticationException e) {
            log.error(e);
            errorPage(res, e.getMessage());
            return;
        } catch (ApplicationAccessDeniedException e) {
            log.error(e);
            errorPage(res, "access denied to application " + requestedApplicationName);
            return;
        } catch (InactiveAccountException e) {
            log.error("Account is inactive: " + e.getMessage());
            errorPage(res, e.getMessage());
            return;
        } catch (ObjectNotFoundException e) {
            log.error("Object not found: " + e.getMessage());
            accessDeniedPage(res);
            return;
        } catch (OperationFailedException e) {
            log.error(e);
            errorPage(res, e.getMessage());
            // Previously fell through here and wrote a second error body below.
            return;
        }
        // fix for Confluence where the response filter is sometimes null.
        if (res != null && crowdAuthRequest != null && crowdAuthRequest.getCredentials() != null) {
            log.trace("Creating cookie");
            // create the cookie sent to the client
            Cookie tokenCookie = buildCookie(crowdAuthRequest.getCredentials().toString());
            if (log.isTraceEnabled()) {
                log.trace("Cookie: " + tokenCookie.getDomain() + " - " + tokenCookie.getName() + " " + tokenCookie.getValue());
            }
            res.addCookie(tokenCookie);
        } else {
            errorPage(res, null);
            return;
        }
        String referer = req.getHeader("referer");
        String gotoUrl;
        if (originalRequestUrl != null && originalRequestUrl.length() > 0) {
            gotoUrl = res.encodeRedirectURL(originalRequestUrl);
        } else if (referer != null) {
            gotoUrl = res.encodeRedirectURL(referer);
        } else {
            // No redirectTo parameter and no Referer header - the original code
            // passed null into encodeRedirectURL.
            errorPage(res, "no redirect target");
            return;
        }
        if (useClaimServlet && req.getSession().getAttribute("new.user") != null) {
            if (log.isDebugEnabled()) {
                log.debug("New user; redirecting to account claim servlet");
            }
            req.getSession().setAttribute("new.user", null);
            req.getSession().setAttribute(REDIRECT_ATTRIBUTE, gotoUrl);
            String claimAccountUrl = res.encodeRedirectURL("/crowd/plugins/servlet/claimAccount");
            res.sendRedirect(claimAccountUrl);
            return;
        }
        if (log.isTraceEnabled()) {
            log.trace("Redirecting to " + gotoUrl);
        }
        res.sendRedirect(gotoUrl);
    }

    /**
     * Creates the cookie and sets attributes such as path, domain, and "secure" flag.
     * @param token The SSO token to be included in the cookie
     */
    private Cookie buildCookie(String token) {
        Cookie tokenCookie = new Cookie(getCookieTokenKey(), token);
        // path
        tokenCookie.setPath(Constants.COOKIE_PATH);
        try {
            // The SSO domain is fetched from the Crowd server; on failure the
            // cookie is left with its default (request) domain.
            tokenCookie.setDomain(securityServerClient.getCookieInfo().getDomain());
        } catch (RemoteException e) {
            log.error(e);
        } catch (InvalidAuthorizationTokenException e) {
            log.error(e);
        } catch (InvalidAuthenticationException e) {
            log.error(e);
        }
        // Secure flag left off, so the cookie is also sent over plain HTTP.
        // NOTE(review): confirm this is intended for the deployment.
        tokenCookie.setSecure(false);
        return tokenCookie;
    }

    // TODO A real error page
    private void errorPage(HttpServletResponse res, String error) throws IOException {
        if (error != null) {
            res.getWriter().write("ERROR: " + error);
        } else {
            res.getWriter().write("Undefined error");
        }
    }

    private void accessDeniedPage(HttpServletResponse res) throws IOException {
        // SC_UNAUTHORIZED is a static constant - access it through the class,
        // not the instance.
        res.sendError(HttpServletResponse.SC_UNAUTHORIZED, "You do not have access to the application");
    }

    public String getCookieTokenKey() {
        return clientProperties.getCookieTokenKey();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache;
import java.nio.ByteBuffer;
import org.apache.ignite.internal.processors.cache.version.GridCacheVersion;
import org.apache.ignite.internal.processors.cache.version.GridCacheVersionEx;
import org.apache.ignite.internal.util.GridUnsafe;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteBiTuple;
import org.apache.ignite.lang.IgniteUuid;
import org.jetbrains.annotations.Nullable;
import sun.misc.Unsafe;
/**
 * Swap entry.
 *
 * Serialized layout produced by {@link #marshal()} and consumed by
 * {@link #unmarshal(byte[])} and the static accessors:
 * [0..8) TTL, [8..16) expire time, [16] version-ex flag (non-zero means
 * extended version), version bytes, value length (int), value type (byte),
 * value bytes, then the value and key class loader IDs. The static offset
 * constants below must stay in sync with this layout.
 */
public class GridCacheSwapEntryImpl implements GridCacheSwapEntry {
/** Unsafe instance used for direct byte-array reads and writes. */
private static final Unsafe UNSAFE = GridUnsafe.unsafe();
/** Offset of the first element of a byte[] from the array object's base address. */
private static final long BYTE_ARR_OFF = UNSAFE.arrayBaseOffset(byte[].class);
/** Byte offset of the expire-time field (follows the 8-byte TTL). */
static final int EXPIRE_TIME_OFFSET = 8;
/** Byte offset of the version-ex flag (follows TTL + expire time). */
static final int VERSION_OFFSET = 16;
/** Serialized size of a plain version (must match U.writeVersion/U.readVersion). */
static final int VERSION_SIZE = 24;
/** Serialized size of an extended version (GridCacheVersionEx). */
static final int VERSION_EX_SIZE = 48;
/** Serialized size of a non-null IgniteUuid (excluding the presence byte). */
static final int GUID_SIZE = 24;
/** Value bytes. */
private ByteBuffer valBytes;
/** Value. */
private CacheObject val;
/** Type. */
private byte type;
/** Class loader ID. */
private IgniteUuid keyClsLdrId;
/** Class loader ID. */
private IgniteUuid valClsLdrId;
/** Version. */
private GridCacheVersion ver;
/** Time to live. */
private long ttl;
/** Expire time. */
private long expireTime;
/**
 * @param valBytes Value.
 * @param type Type.
 * @param ver Version.
 * @param ttl Entry time to live.
 * @param expireTime Expire time.
 * @param keyClsLdrId Class loader ID for entry key (can be {@code null} for local class loader).
 * @param valClsLdrId Class loader ID for entry value (can be {@code null} for local class loader).
 */
public GridCacheSwapEntryImpl(
ByteBuffer valBytes,
byte type,
GridCacheVersion ver,
long ttl,
long expireTime,
@Nullable IgniteUuid keyClsLdrId,
@Nullable IgniteUuid valClsLdrId) {
assert ver != null;
this.valBytes = valBytes;
this.type = type;
this.ver = ver;
this.ttl = ttl;
this.expireTime = expireTime;
this.valClsLdrId = valClsLdrId;
this.keyClsLdrId = keyClsLdrId;
}
/**
 * Reads the leading 8-byte TTL without deserializing the whole entry.
 *
 * @param bytes Entry bytes.
 * @return TTL.
 */
public static long timeToLive(byte[] bytes) {
return UNSAFE.getLong(bytes, BYTE_ARR_OFF);
}
/**
 * Reads the 8-byte expire time without deserializing the whole entry.
 *
 * @param bytes Entry bytes.
 * @return Expire time.
 */
public static long expireTime(byte[] bytes) {
return UNSAFE.getLong(bytes, BYTE_ARR_OFF + EXPIRE_TIME_OFFSET);
}
/**
 * Reads the entry version without deserializing the whole entry.
 * Assumes the ex-flag byte precedes the version bytes, as written by marshal().
 *
 * @param bytes Entry bytes.
 * @return Version.
 */
public static GridCacheVersion version(byte[] bytes) {
int off = VERSION_OFFSET; // Skip ttl, expire time.
boolean verEx = bytes[off++] != 0;
return U.readVersion(bytes, off, verEx);
}
/**
 * Extracts the raw value bytes and value type without full deserialization.
 *
 * @param bytes Entry bytes.
 * @return Value if value is byte array, otherwise {@code null}.
 */
@Nullable public static IgniteBiTuple<byte[], Byte> getValue(byte[] bytes) {
long off = BYTE_ARR_OFF + VERSION_OFFSET; // Skip ttl, expire time.
boolean verEx = UNSAFE.getByte(bytes, off++) != 0;
off += verEx ? VERSION_EX_SIZE : VERSION_SIZE;
int arrLen = UNSAFE.getInt(bytes, off);
off += 4;
byte type = UNSAFE.getByte(bytes, off++);
byte[] valBytes = new byte[arrLen];
UNSAFE.copyMemory(bytes, off, valBytes, BYTE_ARR_OFF, arrLen);
return new IgniteBiTuple<>(valBytes, type);
}
/**
 * Computes the offset of the value bytes within a marshalled entry.
 *
 * @param bytes Entry bytes.
 * @return Value bytes offset.
 */
public static int valueOffset(byte[] bytes) {
assert bytes.length > 40 : bytes.length;
int off = VERSION_OFFSET; // Skip ttl, expire time.
boolean verEx = bytes[off++] != 0;
off += verEx ? VERSION_EX_SIZE : VERSION_SIZE;
off += 5; // Byte array flag + array size.
assert bytes.length >= off;
return off;
}
/** {@inheritDoc} */
@Override public byte[] valueBytes() {
if (valBytes != null) {
// The buffer is expected to exactly wrap its backing array
// (capacity == limit), so array() returns the value as-is.
assert valBytes.capacity() == valBytes.limit();
return valBytes.array();
}
return null;
}
/** {@inheritDoc} */
@Override public void valueBytes(@Nullable byte[] valBytes) {
this.valBytes = valBytes != null ? ByteBuffer.wrap(valBytes) : null;
}
/** {@inheritDoc} */
@Override public CacheObject value() {
return val;
}
/** {@inheritDoc} */
@Override public void value(CacheObject val) {
this.val = val;
}
/** {@inheritDoc} */
@Override public byte type() {
return type;
}
/** {@inheritDoc} */
@Override public GridCacheVersion version() {
return ver;
}
/** {@inheritDoc} */
@Override public long ttl() {
return ttl;
}
/** {@inheritDoc} */
@Override public long expireTime() {
return expireTime;
}
/** {@inheritDoc} */
@Nullable @Override public IgniteUuid keyClassLoaderId() {
return keyClsLdrId;
}
/** {@inheritDoc} */
@Nullable @Override public IgniteUuid valueClassLoaderId() {
return valClsLdrId;
}
/** {@inheritDoc} */
@Override public long offheapPointer() {
return 0;
}
/**
 * Serializes this entry to the byte layout documented on the class.
 * NOTE(review): requires valBytes to be non-null - confirm callers
 * never marshal an entry whose value bytes were cleared.
 *
 * @return Entry bytes.
 */
public byte[] marshal() {
// Ttl + expire time + Ex Version flag + Version.
int size = 16 + 1 + ((ver instanceof GridCacheVersionEx) ? VERSION_EX_SIZE : VERSION_SIZE);
size += 1; // Plain byte array flag.
int len = valBytes.limit();
size += len + 4; // Value bytes.
size += (valClsLdrId == null ? 1 : (1 + GUID_SIZE));
size += (keyClsLdrId == null ? 1 : (1 + GUID_SIZE));
byte[] arr = new byte[size];
long off = BYTE_ARR_OFF;
UNSAFE.putLong(arr, off, ttl);
off += 8;
UNSAFE.putLong(arr, off, expireTime);
off += 8;
// U.writeVersion is assumed to write the 1-byte ex-flag followed by the
// version bytes; this must stay in sync with version()/getValue()/unmarshal().
off = U.writeVersion(arr, off, ver);
UNSAFE.putInt(arr, off, len);
off += 4;
UNSAFE.putByte(arr, off++, type);
UNSAFE.copyMemory(valBytes.array(), BYTE_ARR_OFF, arr, off, len);
off += len;
// Note: value class loader ID is written before the key one;
// unmarshal() reads them back in the same order.
off = U.writeGridUuid(arr, off, valClsLdrId);
U.writeGridUuid(arr, off, keyClsLdrId);
return arr;
}
/**
 * Restores an entry from bytes produced by {@link #marshal()}.
 *
 * @param arr Entry bytes.
 * @return Entry.
 */
public static GridCacheSwapEntryImpl unmarshal(byte[] arr) {
long off = BYTE_ARR_OFF;
long ttl = UNSAFE.getLong(arr, off);
off += 8;
long expireTime = UNSAFE.getLong(arr, off);
off += 8;
boolean verEx = UNSAFE.getBoolean(arr, off++);
GridCacheVersion ver = U.readVersion(arr, off, verEx);
off += verEx ? VERSION_EX_SIZE : VERSION_SIZE;
int arrLen = UNSAFE.getInt(arr, off);
off += 4;
byte type = UNSAFE.getByte(arr, off++);
byte[] valBytes = new byte[arrLen];
UNSAFE.copyMemory(arr, off, valBytes, BYTE_ARR_OFF, arrLen);
off += arrLen;
// Read back in marshal()'s write order: value class loader ID first.
IgniteUuid valClsLdrId = U.readGridUuid(arr, off);
off += valClsLdrId == null ? 1 : (1 + GUID_SIZE);
IgniteUuid keyClsLdrId = U.readGridUuid(arr, off);
return new GridCacheSwapEntryImpl(ByteBuffer.wrap(valBytes),
type,
ver,
ttl,
expireTime,
keyClsLdrId,
valClsLdrId);
}
/** {@inheritDoc} */
@Override public String toString() {
return S.toString(GridCacheSwapEntryImpl.class, this);
}
}
| |
/* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is part of dcm4che, an implementation of DICOM(TM) in
* Java(TM), available at http://sourceforge.net/projects/dcm4che.
*
* The Initial Developer of the Original Code is
* Agfa HealthCare.
* Portions created by the Initial Developer are Copyright (C) 2006-2008
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* See listed authors below.
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
package org.dcm4chex.archive.ejb.session;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.Iterator;
import javax.ejb.CreateException;
import javax.ejb.EJBException;
import javax.ejb.FinderException;
import javax.ejb.ObjectNotFoundException;
import javax.ejb.RemoveException;
import javax.ejb.SessionBean;
import javax.ejb.SessionContext;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import org.apache.log4j.Logger;
import org.dcm4che.data.Dataset;
import org.dcm4che.data.DcmElement;
import org.dcm4che.data.DcmObjectFactory;
import org.dcm4che.dict.Status;
import org.dcm4che.dict.Tags;
import org.dcm4che.net.DcmServiceException;
import org.dcm4chex.archive.common.PatientMatching;
import org.dcm4chex.archive.ejb.interfaces.InstanceLocal;
import org.dcm4chex.archive.ejb.interfaces.InstanceLocalHome;
import org.dcm4chex.archive.ejb.interfaces.MPPSLocalHome;
import org.dcm4chex.archive.ejb.interfaces.MWLItemLocalHome;
import org.dcm4chex.archive.ejb.interfaces.PatientLocal;
import org.dcm4chex.archive.ejb.interfaces.PatientLocalHome;
import org.dcm4chex.archive.ejb.interfaces.SeriesLocal;
import org.dcm4chex.archive.ejb.interfaces.SeriesLocalHome;
import org.dcm4chex.archive.ejb.interfaces.StudyLocal;
import org.dcm4chex.archive.ejb.interfaces.StudyLocalHome;
import org.dcm4chex.archive.exceptions.NonUniquePatientException;
/**
* @author Gunter Zeilinger <gunterze@gmail.com>
* @version $Revision$ $Date$
* @since Aug 6, 2009
*
* @ejb.bean name="AttributesModification" type="Stateless" view-type="remote"
* jndi-name="ejb/AttributesModification"
* @ejb.ejb-ref ejb-name="Patient" view-type="local" ref-name="ejb/Patient"
* @ejb.ejb-ref ejb-name="Study" view-type="local" ref-name="ejb/Study"
* @ejb.ejb-ref ejb-name="Series" view-type="local" ref-name="ejb/Series"
* @ejb.ejb-ref ejb-name="Instance" view-type="local" ref-name="ejb/Instance"
* @ejb.ejb-ref ejb-name="MPPS" view-type="local" ref-name="ejb/MPPS"
* @ejb.ejb-ref ejb-name="MWLItem" view-type="local" ref-name="ejb/MWLItem"
*
* @ejb.transaction-type type="Container"
* @ejb.transaction type="Required"
*/
public abstract class AttributesModificationBean implements SessionBean {

    // FIX: was Logger.getLogger(StorageBean.class), which mis-attributed this
    // bean's log output to the StorageBean logger category.
    private static Logger log =
            Logger.getLogger(AttributesModificationBean.class);

    private PatientLocalHome patHome;
    private StudyLocalHome studyHome;
    private SeriesLocalHome seriesHome;
    private InstanceLocalHome instHome;
    private MPPSLocalHome mppsHome;
    private MWLItemLocalHome mwlHome;

    /**
     * Container callback: looks up the local home interfaces of all entity
     * beans this session bean collaborates with.
     *
     * @param ctx session context supplied by the EJB container (unused)
     */
    public void setSessionContext(SessionContext ctx) {
        Context jndiCtx = null;
        try {
            jndiCtx = new InitialContext();
            patHome = (PatientLocalHome) jndiCtx
                    .lookup("java:comp/env/ejb/Patient");
            studyHome = (StudyLocalHome) jndiCtx
                    .lookup("java:comp/env/ejb/Study");
            seriesHome = (SeriesLocalHome) jndiCtx
                    .lookup("java:comp/env/ejb/Series");
            instHome = (InstanceLocalHome) jndiCtx
                    .lookup("java:comp/env/ejb/Instance");
            mppsHome = (MPPSLocalHome) jndiCtx
                    .lookup("java:comp/env/ejb/MPPS");
            mwlHome = (MWLItemLocalHome) jndiCtx
                    .lookup("java:comp/env/ejb/MWLItem");
        } catch (NamingException e) {
            throw new EJBException(e);
        } finally {
            if (jndiCtx != null) {
                try {
                    jndiCtx.close();
                } catch (NamingException ignore) {
                    // Closing the JNDI context is best-effort only.
                }
            }
        }
    }

    /**
     * Container callback: releases all home references.
     * FIX: mppsHome and mwlHome were previously not cleared here, unlike the
     * other four homes acquired in {@link #setSessionContext}.
     */
    public void unsetSessionContext() {
        patHome = null;
        studyHome = null;
        seriesHome = null;
        instHome = null;
        mppsHome = null;
        mwlHome = null;
    }

    /**
     * Applies attribute modifications at STUDY, SERIES or IMAGE level,
     * dispatching on the Query/Retrieve Level of {@code attrs}.
     *
     * @param attrs dataset containing the Q/R level, entity UID and new values
     * @param time modification timestamp recorded in Original Attributes Seq
     * @param system identifier of the modifying system
     * @param reason reason code for the modification
     * @param updateOriginalAttributesSeq if true, record replaced values in
     *        the Original Attributes Sequence of affected instances
     * @param entityNotFoundErrorCode DICOM status to report when the entity
     *        does not exist; 0 means "log and ignore"
     * @return true if any attributes were actually changed
     * @throws DcmServiceException on invalid input or missing entity
     */
    public boolean modifyAttributes(Dataset attrs, Date time, String system,
            String reason, boolean updateOriginalAttributesSeq,
            int entityNotFoundErrorCode) throws DcmServiceException {
        try {
            String level = attrs.getString(Tags.QueryRetrieveLevel);
            if (level == null)
                throw new IllegalArgumentException(
                        "Missing Query/Retrieve Level");
            if ("IMAGE".equals(level))
                return updateInstanceAttrs(attrs, time, system, reason,
                        updateOriginalAttributesSeq);
            if ("SERIES".equals(level))
                return updateSeriesAttrs(attrs, time, system, reason,
                        updateOriginalAttributesSeq);
            if ("STUDY".equals(level))
                return updateStudyAttrs(attrs, time, system, reason,
                        updateOriginalAttributesSeq);
            throw new IllegalArgumentException(
                    "Illegal Query/Retrieve Level: " + level);
        } catch (IllegalArgumentException e) {
            throw new DcmServiceException(
                    Status.DataSetDoesNotMatchSOPClassError, e.getMessage());
        } catch (ObjectNotFoundException e) {
            if (entityNotFoundErrorCode != 0) {
                throw new DcmServiceException(entityNotFoundErrorCode,
                        "No entity with specified uid found");
            }
            log.info("No entity with specified uid found - ignore update:");
            log.debug(attrs);
            return false;
        } catch (FinderException e) {
            throw new EJBException(e);
        }
    }

    /** Updates attributes of one study; returns false if nothing changed. */
    private boolean updateStudyAttrs(Dataset attrs, Date time, String system,
            String reason, boolean updateOriginalAttributesSeq)
            throws FinderException {
        String styiuid = attrs.getString(Tags.StudyInstanceUID);
        if (styiuid == null)
            throw new IllegalArgumentException("Missing Study Instance UID");
        StudyLocal study = studyHome.findByStudyIuid(styiuid);
        // Collect replaced values only when they must be archived afterwards.
        Dataset origAttrs = updateOriginalAttributesSeq
                ? DcmObjectFactory.getInstance().newDataset()
                : null;
        if (!study.updateAttributes(attrs, origAttrs))
            return false;
        if (updateOriginalAttributesSeq)
            updateOriginalAttributesSeq(study, time, system, reason, origAttrs);
        return true;
    }

    /** Updates attributes of one series; returns false if nothing changed. */
    private boolean updateSeriesAttrs(Dataset attrs, Date time, String system,
            String reason, boolean updateOriginalAttributesSeq)
            throws FinderException {
        String seriuid = attrs.getString(Tags.SeriesInstanceUID);
        if (seriuid == null)
            throw new IllegalArgumentException("Missing Series Instance UID");
        SeriesLocal series = seriesHome.findBySeriesIuid(seriuid);
        Dataset origAttrs = updateOriginalAttributesSeq
                ? DcmObjectFactory.getInstance().newDataset()
                : null;
        if (!series.updateAttributes(attrs, true, origAttrs))
            return false;
        if (updateOriginalAttributesSeq)
            updateOriginalAttributesSeq(series, time, system, reason, origAttrs);
        return true;
    }

    /** Updates attributes of one instance; returns false if nothing changed. */
    private boolean updateInstanceAttrs(Dataset attrs, Date time,
            String system, String reason, boolean updateOriginalAttributesSeq)
            throws FinderException {
        String sopiuid = attrs.getString(Tags.SOPInstanceUID);
        if (sopiuid == null)
            throw new IllegalArgumentException("Missing SOP Instance UID");
        InstanceLocal inst = instHome.findBySopIuid(sopiuid);
        Dataset origAttrs = updateOriginalAttributesSeq
                ? DcmObjectFactory.getInstance().newDataset()
                : null;
        if (!inst.updateAttributes(attrs, origAttrs))
            return false;
        if (updateOriginalAttributesSeq)
            updateOriginalAttributesSeq(inst, time, system, reason, origAttrs);
        return true;
    }

    /**
     * Moves the studies listed in {@code attrs} (their MPPS and MWL items
     * included) to the patient matched/created from {@code attrs}.
     *
     * @return true if the studies were moved to a different patient
     * @throws CreateException
     * @throws RemoveException
     * @ejb.interface-method
     */
    public boolean moveStudyToPatient(Dataset attrs, PatientMatching matching, boolean create) throws FinderException, RemoveException, CreateException {
        String[] suids = attrs.getStrings(Tags.StudyInstanceUID);
        if (suids == null || suids.length == 0) {
            throw new IllegalArgumentException("Missing Study Instance UID for moveStudyToPatient");
        }
        StudyLocal study = studyHome.findByStudyIuid(suids[0]);
        PatientLocal pat = this.getPatient(attrs, matching, create);
        // Only move when the target patient differs from the current one.
        if (pat != null && !pat.isIdentical(study.getPatient())) {
            for (int i = 0 ; i < suids.length ; i++) {
                pat.getMpps().addAll(mppsHome.findByStudyIuid(suids[i]));
                pat.getMwlItems().addAll(mwlHome.findByStudyIuid(suids[i]));
                pat.getStudies().add(i==0 ? study : studyHome.findByStudyIuid(suids[i]));
            }
            return true;
        }
        return false;
    }

    /** Records replaced values for every instance of every series of the study. */
    private void updateOriginalAttributesSeq(StudyLocal study,
            Date time, String system, String reason, Dataset origAttrs) {
        for (Iterator iter = study.getSeries().iterator(); iter.hasNext();)
            updateOriginalAttributesSeq((SeriesLocal) iter.next(),
                    time, system, reason, origAttrs);
    }

    /** Records replaced values for every instance of the series. */
    private void updateOriginalAttributesSeq(SeriesLocal series,
            Date time, String system, String reason, Dataset origAttrs) {
        for (Iterator iter = series.getInstances().iterator(); iter.hasNext();)
            updateOriginalAttributesSeq((InstanceLocal) iter.next(),
                    time, system, reason, origAttrs);
    }

    /**
     * Appends an item to the instance's Original Attributes Sequence holding
     * the replaced values plus who/when/why metadata, then persists the
     * updated instance attributes.
     */
    private void updateOriginalAttributesSeq(InstanceLocal inst,
            Date time, String system, String reason, Dataset origAttrs) {
        Dataset attrs = inst.getAttributes(false);
        DcmElement origAttrsSeq = attrs.get(Tags.OriginalAttributesSeq);
        if (origAttrsSeq == null)
            origAttrsSeq = attrs.putSQ(Tags.OriginalAttributesSeq);
        Dataset origAttrsItem = origAttrsSeq.addNewItem();
        // Source Of Previous Values is written type-2 (present but empty).
        origAttrsItem.putLO(Tags.SourceOfPreviousValues);
        origAttrsItem.putDT(Tags.AttributeModificationDatetime, time);
        origAttrsItem.putLO(Tags.ModifyingSystem, system);
        origAttrsItem.putCS(Tags.ReasonForTheAttributeModification, reason);
        origAttrsItem.putSQ(Tags.ModifiedAttributesSeq).addItem(origAttrs);
        inst.setAttributes(attrs);
    }

    /**
     * Resolves the patient referenced by {@code attrs}, optionally creating
     * it. Handles races with concurrent threads inserting the same patient:
     * after a create, the patient is re-selected; a duplicate insert is
     * resolved by removing the just-created record.
     */
    private PatientLocal getPatient(Dataset attrs, PatientMatching matching, boolean create)
            throws FinderException, NonUniquePatientException, RemoveException, CreateException {
        PatientLocal pat;
        try {
            return patHome.selectPatient(attrs, matching, true);
        } catch (ObjectNotFoundException onfe) {
            if (create) {
                try {
                    pat = patHome.create(attrs);
                    // Check if patient record was also inserted by concurrent thread
                    try {
                        return patHome.selectPatient(attrs, matching, true);
                    } catch (NonUniquePatientException nupe) {
                        // Duplicate insert: drop ours and keep the winner.
                        pat.remove();
                        pat = patHome.selectPatient(attrs, matching, true);
                    } catch (ObjectNotFoundException onfe2) {
                        // Just inserted Patient not found because of missing value
                        // of attribute configured as required for Patient Matching
                        return pat;
                    }
                } catch (CreateException ce) {
                    // Check if patient record was inserted by concurrent thread
                    // with unique index on (pat_id, pat_id_issuer)
                    try {
                        pat = patHome.selectPatient(attrs, matching, true);
                    } catch (ObjectNotFoundException onfe2) {
                        throw ce;
                    }
                }
            } else {
                throw onfe;
            }
        } catch (NonUniquePatientException nupe) {
            if (create)
                return patHome.create(attrs);
            else
                throw nupe;
        }
        return pat;
    }
}
| |
package io.github.satr.yzwebshop.servlets;
import io.github.satr.yzwebshop.entities.Product;
import io.github.satr.yzwebshop.helpers.DispatchHelper;
import io.github.satr.yzwebshop.helpers.Env;
import io.github.satr.yzwebshop.helpers.ParameterHelper;
import io.github.satr.yzwebshop.repositories.ProductRepository;
import io.github.satr.yzwebshop.repositories.Repository;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import static io.github.satr.yzwebshop.helpers.Env.setRequestAttr;
import static io.github.satr.yzwebshop.helpers.StringHelper.isEmptyOrWhitespace;
@WebServlet(value = {"/products", "/product/detail/*", "/product/add/*", "/product/edit/*"})
public class ProductServlet extends HttpServlet {

    private Repository<Product> productRepository;

    @Override
    public void init(ServletConfig config) throws ServletException {
        super.init(config);
        productRepository = new ProductRepository();
    }

    /** POST: form submissions for add/edit; anything else shows the list. */
    protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        switch(request.getServletPath()) {
            case ActionPath.ADD:
                processAdd(request, response);
                break;
            case ActionPath.EDIT:
                processEdit(request, response);
                break;
            default:
                showList(request, response);
                break;
        }
    }

    /** GET: detail/add/edit pages; anything else shows the list. */
    protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        switch(request.getServletPath()) {
            case ActionPath.DETAIL:
                showDetail(request, response);
                break;
            case ActionPath.ADD:
                showAdd(request, response);
                break;
            case ActionPath.EDIT:
                showEdit(request, response);
                break;
            default:
                showList(request, response);
                break;
        }
    }

    /**
     * Validates and persists a new product. On validation failure the edit
     * page is re-shown with the entered values and error messages.
     * Note: amount is intentionally not read here (only on edit).
     */
    private void processAdd(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        ArrayList<String> errorMessages = new ArrayList<>();
        EditableProduct editableProduct = new EditableProduct();
        populateFromRequest(request, editableProduct, errorMessages);
        validateProduct(editableProduct, errorMessages);
        if (!errorMessages.isEmpty()) {
            dispatchEdit(request, response, editableProduct, Action.ADD, errorMessages);
            return;
        }
        try {
            Product product = new Product();
            updateProductFromEditable(product, editableProduct);
            productRepository.save(product);
        } catch (SQLException e) {
            DispatchHelper.dispatchError(request, response, e.getMessage());
            return;
        }
        showList(request, response);
    }

    /**
     * Validates and persists changes to an existing product.
     */
    private void processEdit(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        ArrayList<String> errorMessages = new ArrayList<>();
        EditableProduct editableProduct = new EditableProduct();
        populateFromRequest(request, editableProduct, errorMessages);
        editableProduct.setAmount(ParameterHelper.getInt(request, RequestParam.AMOUNT, errorMessages));
        validateProduct(editableProduct, errorMessages);
        Product product = null;
        if (errorMessages.isEmpty()) {
            product = getRequestedProduct(request, response, errorMessages);
            // FIX: on a repository error getRequestedProduct returns null
            // WITHOUT adding an error message (it already dispatched an error
            // page). Previously execution fell through and threw an NPE in
            // updateProductFromEditable(null, ...).
            if (product == null && errorMessages.isEmpty()) {
                return;
            }
        }
        if (!errorMessages.isEmpty()) {
            dispatchEdit(request, response, editableProduct, Action.EDIT, errorMessages);
            return;
        }
        try {
            updateProductFromEditable(product, editableProduct);
            productRepository.save(product);
        } catch (SQLException e) {
            DispatchHelper.dispatchError(request, response, e.getMessage());
            return;
        }
        showList(request, response);
    }

    /** Re-shows the edit page with the user's input and validation errors. */
    private void dispatchEdit(HttpServletRequest request, HttpServletResponse response, EditableProduct editableProduct, String action, ArrayList<String> errorMessages) throws ServletException, IOException {
        setRequestAttr(request, Env.RequestAttr.ERRORS, errorMessages);
        setRequestAttr(request, ContextAttr.PRODUCT, editableProduct);
        setRequestAttr(request, ContextAttr.ACTION, action);
        DispatchHelper.dispatchWebInf(request, response, ProductPage.EDIT);
    }

    /** Copies the editable (form-backed) fields onto the persistent entity. */
    private void updateProductFromEditable(Product product, EditableProduct editableProduct) {
        product.setName(editableProduct.getName());
        product.setPrice(editableProduct.getPrice());
        product.setAmount(editableProduct.getAmount());
    }

    /** Business validation; currently only the name is required. */
    private void validateProduct(EditableProduct editableProduct, ArrayList<String> errorMessages) {
        if(isEmptyOrWhitespace(editableProduct.getName()))
            errorMessages.add("Missed Name.");
    }

    /** Reads id/name/price from the request, collecting parse errors. */
    private void populateFromRequest(HttpServletRequest request, EditableProduct editableProduct, ArrayList<String> errorMessages) {
        editableProduct.setId(ParameterHelper.getInt(request, RequestParam.ID, errorMessages));
        editableProduct.setName(ParameterHelper.getString(request, RequestParam.NAME, errorMessages));
        editableProduct.setPrice(ParameterHelper.getDouble(request, RequestParam.PRICE, errorMessages));
    }

    /** Shows an empty edit form for creating a product. */
    private void showAdd(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        setRequestAttr(request, ContextAttr.PRODUCT, new Product());
        setRequestAttr(request, ContextAttr.ACTION, Action.ADD);
        DispatchHelper.dispatchWebInf(request, response, ProductPage.EDIT);
    }

    /** Shows the edit form pre-populated with the requested product. */
    private void showEdit(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        ArrayList<String> errorMessages = new ArrayList<>();
        Product product = getRequestedProduct(request, response, errorMessages);
        if (product == null) {
            // FIX: when getRequestedProduct already dispatched an error page
            // (repository failure, empty errorMessages), do not dispatch a
            // second response; otherwise show the collected errors.
            if (!errorMessages.isEmpty())
                DispatchHelper.dispatchError(request, response, errorMessages);
            return;
        }
        setRequestAttr(request, ContextAttr.PRODUCT, new EditableProduct().copyFrom(product));
        setRequestAttr(request, ContextAttr.ACTION, Action.EDIT);
        DispatchHelper.dispatchWebInf(request, response, ProductPage.EDIT);
    }

    /** Shows the read-only detail page for the requested product. */
    private void showDetail(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        ArrayList<String> errorMessages = new ArrayList<>();
        Product product = getRequestedProduct(request, response, errorMessages);
        if (product == null) {
            // Same double-dispatch fix as in showEdit.
            if (!errorMessages.isEmpty())
                DispatchHelper.dispatchError(request, response, errorMessages);
            return;
        }
        setRequestAttr(request, ContextAttr.PRODUCT, product);
        DispatchHelper.dispatchWebInf(request, response, ProductPage.DETAIL);
    }

    /**
     * Loads the product identified by the "id" request parameter.
     * Returns null and adds a message when the id is invalid or unknown;
     * returns null WITHOUT a message after dispatching an error page on
     * repository failure.
     */
    private Product getRequestedProduct(HttpServletRequest request, HttpServletResponse response, ArrayList<String> errorMessages) throws ServletException, IOException {
        int id = ParameterHelper.getInt(request, RequestParam.ID, errorMessages);
        if(!errorMessages.isEmpty())
            return null;
        try {
            Product product = productRepository.get(id);
            if(product == null) {
                errorMessages.add("Product not found by SKU");
                return null;
            }
            return product;
        } catch (SQLException e) {
            DispatchHelper.dispatchError(request, response, e.getMessage());
            return null;
        }
    }

    /** Shows the product list page. */
    private void showList(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        try {
            List<Product> products = productRepository.getList();
            setRequestAttr(request, ContextAttr.PRODUCT_LIST, products);
        } catch (SQLException e) {
            DispatchHelper.dispatchError(request, response, e.getMessage());
            return;
        }
        DispatchHelper.dispatchWebInf(request, response, ProductPage.LIST);
    }

    //-- Constants --
    // Constant holders made static: they carry no per-servlet state, and a
    // non-static inner class keeps a hidden reference to the enclosing
    // instance.
    private static final class ProductPage {
        public static final String EDIT = "product/ProductEdit.jsp";
        public static final String DETAIL = "product/ProductDetail.jsp";
        public static final String LIST = "product/ProductList.jsp";
    }
    private static final class ContextAttr {
        public final static String PRODUCT = "product";
        public final static String PRODUCT_LIST = "productList";
        public static final String ACTION = "action";
    }
    private static final class RequestParam {
        public static final String ID = "id";
        public static final String NAME = "name";
        public static final String PRICE = "price";
        public static final String AMOUNT = "amount";
    }
    private static final class ActionPath {
        public static final String ADD = "/product/add";
        public static final String EDIT = "/product/edit";
        public static final String DETAIL = "/product/detail";
    }
    private static final class Action {
        public static final String ADD = "add";
        public static final String EDIT = "edit";
    }

    /** Form-backing variant of Product; kept non-static as it is public API. */
    public class EditableProduct extends Product {
        public EditableProduct copyFrom(Product product) {
            setId(product.getId());
            setName(product.getName());
            setPrice(product.getPrice());
            setAmount(product.getAmount());
            return this;
        }
    }
}
| |
/*
* Copyright (C) 2008 The Android Open Source Project
* Copyright (C) 2013 Fairphone Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fairphone.launcher;
import java.util.ArrayList;
import java.util.List;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.pm.ActivityInfo;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
/**
* Stores the list of all applications for the all apps view.
*/
/**
 * Stores the list of all applications for the all apps view, plus the
 * added/removed/modified deltas accumulated since the last notify() call.
 */
class AllAppsList {
    public static final int DEFAULT_APPLICATIONS_NUMBER = 42;

    /** The list of all apps. */
    public ArrayList<ApplicationInfo> data =
            new ArrayList<ApplicationInfo>(DEFAULT_APPLICATIONS_NUMBER);
    /** The list of apps that have been added since the last notify() call. */
    public ArrayList<ApplicationInfo> added =
            new ArrayList<ApplicationInfo>(DEFAULT_APPLICATIONS_NUMBER);
    /** The list of apps that have been removed since the last notify() call. */
    public ArrayList<ApplicationInfo> removed = new ArrayList<ApplicationInfo>();
    /** The list of apps that have been modified since the last notify() call. */
    public ArrayList<ApplicationInfo> modified = new ArrayList<ApplicationInfo>();

    private IconCache mIconCache;

    /**
     * Boring constructor.
     */
    public AllAppsList(IconCache iconCache) {
        mIconCache = iconCache;
    }

    /**
     * Add the supplied ApplicationInfo objects to the list, and enqueue it into the
     * list to broadcast when notify() is called.
     *
     * If the app is already in the list, doesn't add it.
     */
    public void add(ApplicationInfo info) {
        if (findActivity(data, info.componentName)) {
            return;
        }
        data.add(info);
        added.add(info);
    }

    /** Clears the data list and all pending deltas. */
    public void clear() {
        data.clear();
        // TODO: do we clear these too?
        added.clear();
        removed.clear();
        modified.clear();
    }

    public int size() {
        return data.size();
    }

    public ApplicationInfo get(int index) {
        return data.get(index);
    }

    /**
     * Add the icons for the supplied apk called packageName.
     */
    public void addPackage(Context context, String packageName) {
        for (ResolveInfo info : findActivitiesForPackage(context, packageName)) {
            add(new ApplicationInfo(context.getPackageManager(), info, mIconCache, null));
        }
    }

    /**
     * Remove the apps for the given apk identified by packageName.
     */
    public void removePackage(String packageName) {
        // Iterate backwards so removal does not disturb unvisited indices.
        for (int i = data.size() - 1; i >= 0; i--) {
            ApplicationInfo info = data.get(i);
            final ComponentName component = info.intent.getComponent();
            if (packageName.equals(component.getPackageName())) {
                removed.add(info);
                data.remove(i);
            }
        }
        // This is more aggressive than it needs to be.
        mIconCache.flush();
    }

    /**
     * Add and remove icons for this package which has been updated.
     */
    public void updatePackage(Context context, String packageName) {
        final List<ResolveInfo> matches = findActivitiesForPackage(context, packageName);

        // Remove disabled/removed activities of this package. When matches is
        // empty this removes every activity of the package, since
        // findActivity over an empty list is always false — this collapses
        // the two near-identical removal loops of the original code while
        // preserving its behavior exactly.
        for (int i = data.size() - 1; i >= 0; i--) {
            final ApplicationInfo applicationInfo = data.get(i);
            final ComponentName component = applicationInfo.intent.getComponent();
            if (packageName.equals(component.getPackageName())
                    && !findActivity(matches, component)) {
                removed.add(applicationInfo);
                mIconCache.remove(component);
                data.remove(i);
            }
        }

        // Find enabled activities and add them to the adapter.
        // Also updates existing activities with new labels/icons.
        final int count = matches.size();
        for (int i = 0; i < count; i++) {
            final ResolveInfo info = matches.get(i);
            ApplicationInfo applicationInfo = findApplicationInfoLocked(
                    info.activityInfo.applicationInfo.packageName,
                    info.activityInfo.name);
            if (applicationInfo == null) {
                add(new ApplicationInfo(context.getPackageManager(), info, mIconCache, null));
            } else {
                mIconCache.remove(applicationInfo.componentName);
                mIconCache.getTitleAndIcon(applicationInfo, info, null);
                modified.add(applicationInfo);
            }
        }
    }

    /**
     * Query the package manager for MAIN/LAUNCHER activities in the supplied package.
     * Never returns null.
     */
    private static List<ResolveInfo> findActivitiesForPackage(Context context, String packageName) {
        final PackageManager packageManager = context.getPackageManager();
        final Intent mainIntent = new Intent(Intent.ACTION_MAIN, null);
        mainIntent.addCategory(Intent.CATEGORY_LAUNCHER);
        mainIntent.setPackage(packageName);
        final List<ResolveInfo> apps = packageManager.queryIntentActivities(mainIntent, 0);
        return apps != null ? apps : new ArrayList<ResolveInfo>();
    }

    /**
     * Returns whether <em>apps</em> contains an activity with
     * <em>component</em>'s class name.
     */
    private static boolean findActivity(List<ResolveInfo> apps, ComponentName component) {
        final String className = component.getClassName();
        for (ResolveInfo info : apps) {
            final ActivityInfo activityInfo = info.activityInfo;
            if (activityInfo.name.equals(className)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Returns whether <em>apps</em> contains <em>component</em>.
     */
    private static boolean findActivity(ArrayList<ApplicationInfo> apps, ComponentName component) {
        final int N = apps.size();
        for (int i = 0; i < N; i++) {
            final ApplicationInfo info = apps.get(i);
            if (info.componentName.equals(component)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Find an ApplicationInfo object for the given packageName and className.
     * Returns null when no match exists.
     */
    private ApplicationInfo findApplicationInfoLocked(String packageName, String className) {
        for (ApplicationInfo info : data) {
            final ComponentName component = info.intent.getComponent();
            if (packageName.equals(component.getPackageName())
                    && className.equals(component.getClassName())) {
                return info;
            }
        }
        return null;
    }
}
| |
package org.javasimon;
import org.javasimon.callback.Callback;
import org.javasimon.callback.CompositeCallback;
import org.javasimon.callback.CompositeCallbackImpl;
import org.javasimon.clock.SimonClock;
import org.javasimon.utils.SimonUtils;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* Implements fully functional {@link Manager} in the enabled state. Does not support
* {@link #enable()}/{@link #disable()} - for this use {@link SwitchingManager}.
*
* @author <a href="mailto:virgo47@gmail.com">Richard "Virgo" Richter</a>
*/
public final class EnabledManager implements Manager {

	// Root of the Simon hierarchy; replaced wholesale on clear().
	private UnknownSimon rootSimon;

	// Every Simon keyed by fully qualified name, including the root.
	private final Map<String, AbstractSimon> allSimons = new ConcurrentHashMap<>();

	// Composite callback fanning lifecycle events out to registered callbacks.
	private final CompositeCallback callback = new CompositeCallbackImpl();

	private final ManagerConfiguration configuration;

	// Time source; injectable via the SimonClock constructor for testing.
	private final SimonClock clock;

	/** Creates new enabled manager. */
	public EnabledManager() {
		this(SimonClock.SYSTEM);
	}

	/**
	 * Creates new enabled manager using the provided clock as its time source.
	 *
	 * @param clock clock used for nanoTime/milliTime
	 */
	public EnabledManager(SimonClock clock) {
		this.clock = clock;
		rootSimon = new UnknownSimon(ROOT_SIMON_NAME, this);
		allSimons.put(ROOT_SIMON_NAME, rootSimon);
		configuration = new ManagerConfiguration(this);
		callback.initialize(this);
	}

	/** Returns the Simon with the given name, or null if none exists. */
	@Override
	public Simon getSimon(String name) {
		return allSimons.get(name);
	}

	/**
	 * Destroys the named Simon. If it still has children it is replaced by an
	 * {@link UnknownSimon} placeholder so the hierarchy stays intact;
	 * otherwise it is unlinked from its parent.
	 *
	 * NOTE(review): allSimons.remove(name) returns null for an unknown name,
	 * so the following dereference would throw NPE — presumably callers only
	 * pass existing names; confirm before relying on this for user input.
	 */
	@Override
	public synchronized void destroySimon(String name) {
		if (name.equals(ROOT_SIMON_NAME)) {
			throw new SimonException("Root Simon cannot be destroyed!");
		}
		AbstractSimon simon = allSimons.remove(name);
		if (simon.getChildren().size() > 0) {
			// Children exist: keep the tree shape with an UnknownSimon stub.
			replaceUnknownSimon(simon, UnknownSimon.class);
		} else {
			((AbstractSimon) simon.getParent()).replaceChild(simon, null);
		}
		callback.onSimonDestroyed(simon);
	}

	/** Drops all Simons and recreates a fresh root. */
	@Override
	public synchronized void clear() {
		allSimons.clear();
		rootSimon = new UnknownSimon(ROOT_SIMON_NAME, this);
		allSimons.put(ROOT_SIMON_NAME, rootSimon);
		callback.onManagerClear();
	}

	@Override
	public Counter getCounter(String name) {
		return (Counter) getOrCreateSimon(name, CounterImpl.class);
	}

	@Override
	public Meter getMeter(String name) {
		return (Meter) getOrCreateSimon(name, MeterImpl.class);
	}

	@Override
	public Stopwatch getStopwatch(String name) {
		return (Stopwatch) getOrCreateSimon(name, StopwatchImpl.class);
	}

	@Override
	public Simon getRootSimon() {
		return rootSimon;
	}

	/** Returns an unmodifiable view of all Simon names. */
	@Override
	public Collection<String> getSimonNames() {
		return Collections.unmodifiableCollection(allSimons.keySet());
	}

	/**
	 * Returns all Simons accepted by the filter; a null filter returns an
	 * unmodifiable view of all Simons.
	 */
	@SuppressWarnings({"unchecked"})
	@Override
	public Collection<Simon> getSimons(SimonFilter simonFilter) {
		if (simonFilter == null) {
			return Collections.unmodifiableCollection((Collection) allSimons.values());
		}
		Collection<Simon> simons = new ArrayList<>();
		for (AbstractSimon simon : allSimons.values()) {
			if (simonFilter.accept(simon)) {
				simons.add(simon);
			}
		}
		return simons;
	}

	/**
	 * Even with ConcurrentHashMap we want to synchronize here, so newly created Simons can be fully
	 * set up with {@link Callback#onSimonCreated(Simon)}. ConcurrentHashMap still works fine for
	 * listing Simons, etc.
	 */
	private synchronized Simon getOrCreateSimon(String name, Class<? extends AbstractSimon> simonClass) {
		if (name == null) {
			// create an "anonymous" Simon - Manager does not care about it anymore
			return instantiateSimon(null, simonClass);
		}
		if (name.equals(ROOT_SIMON_NAME)) {
			throw new SimonException("Root Simon cannot be replaced or recreated!");
		}
		AbstractSimon simon = allSimons.get(name);
		if (simon != null && simonClass.isInstance(simon)) {
			// Already exists with the requested type: return as-is, no callback.
			return simon;
		} else if (simon == null) {
			SimonUtils.validateSimonName(name);
			simon = newSimon(name, simonClass);
		} else if (simon instanceof UnknownSimon) {
			// Placeholder created by addToHierarchy: upgrade to the real type.
			simon = replaceUnknownSimon(simon, simonClass);
		} else {
			throw new SimonException("Simon named '" + name + "' already exists and its type is '" + simon.getClass().getName() + "' while requested type is '" + simonClass.getName() + "'.");
		}
		callback.onSimonCreated(simon);
		return simon;
	}

	// called from synchronized method
	// Swaps an UnknownSimon placeholder for a concrete Simon of simonClass,
	// preserving enabled state, parent link and all child links.
	private AbstractSimon replaceUnknownSimon(AbstractSimon simon, Class<? extends AbstractSimon> simonClass) {
		AbstractSimon newSimon = instantiateSimon(simon.getName(), simonClass);
		newSimon.enabled = simon.enabled;
		// fixes parent link and parent's children list
		((AbstractSimon) simon.getParent()).replaceChild(simon, newSimon);
		// fixes children list and all children's parent link
		for (Simon child : simon.getChildren()) {
			newSimon.addChild((AbstractSimon) child);
			((AbstractSimon) child).setParent(newSimon);
		}
		allSimons.put(simon.getName(), newSimon);
		return newSimon;
	}

	// called from synchronized method
	// Creates a brand-new Simon, links it into the hierarchy, applies any
	// configured initial state and registers it (named Simons only).
	private AbstractSimon newSimon(String name, Class<? extends AbstractSimon> simonClass) {
		AbstractSimon simon = instantiateSimon(name, simonClass);
		if (name != null) {
			addToHierarchy(simon, name);
			SimonConfiguration config = configuration.getConfig(name);
			if (config.getState() != null) {
				simon.setState(config.getState(), false);
			}
			allSimons.put(name, simon);
		}
		return simon;
	}

	// Reflectively invokes the (String, Manager) constructor of simonClass.
	private AbstractSimon instantiateSimon(String name, Class<? extends AbstractSimon> simonClass) {
		AbstractSimon simon;
		try {
			Constructor<? extends AbstractSimon> constructor = simonClass.getDeclaredConstructor(String.class, Manager.class);
			simon = constructor.newInstance(name, this);
		} catch (NoSuchMethodException | InvocationTargetException | IllegalAccessException | InstantiationException e) {
			throw new SimonException(e);
		}
		return simon;
	}

	// Links simon under its parent (derived from the last HIERARCHY_DELIMITER
	// in name), creating missing ancestors recursively as UnknownSimons.
	private void addToHierarchy(AbstractSimon simon, String name) {
		int ix = name.lastIndexOf(HIERARCHY_DELIMITER);
		AbstractSimon parent = rootSimon;
		if (ix != -1) {
			String parentName = name.substring(0, ix);
			parent = allSimons.get(parentName);
			if (parent == null) {
				parent = new UnknownSimon(parentName, this);
				addToHierarchy(parent, parentName);
				allSimons.put(parentName, parent);
			}
		}
		parent.addChild(simon);
	}

	@Override
	public CompositeCallback callback() {
		return callback;
	}

	@Override
	public ManagerConfiguration configuration() {
		return configuration;
	}

	/** Throws {@link UnsupportedOperationException}. */
	@Override
	public void enable() {
		throw new UnsupportedOperationException("Only SwitchingManager supports this operation.");
	}

	/** Throws {@link UnsupportedOperationException}. */
	@Override
	public void disable() {
		throw new UnsupportedOperationException("Only SwitchingManager supports this operation.");
	}

	/**
	 * Returns true.
	 *
	 * @return true
	 */
	@Override
	public boolean isEnabled() {
		return true;
	}

	@Override
	public void message(String message) {
		callback.onManagerMessage(message);
	}

	@Override
	public void warning(String warning, Exception cause) {
		callback.onManagerWarning(warning, cause);
	}

	@Override
	public long nanoTime() {
		return clock.nanoTime();
	}

	@Override
	public long milliTime() {
		return clock.milliTime();
	}

	@Override
	public long millisForNano(long nanos) {
		return clock.millisForNano(nanos);
	}

	// Delegates incremental-sample purging to every Simon in the manager.
	// The instanceof check is defensive; allSimons values are AbstractSimon.
	synchronized void purgeIncrementalSimonsOlderThan(long thresholdMs) {
		for (Simon simon : allSimons.values()) {
			if (simon instanceof AbstractSimon) {
				AbstractSimon abstractSimon = (AbstractSimon) simon;
				abstractSimon.purgeIncrementalSimonsOlderThan(thresholdMs);
			}
		}
	}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.udf.generic;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
import org.apache.hadoop.io.Text;
import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.JsonParser.Feature;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.type.TypeFactory;
import org.codehaus.jackson.type.JavaType;
/**
 * GenericUDTFJSONTuple: a UDTF that parses a JSON document once per row and
 * emits a single output row whose columns are the string values found at each
 * of the supplied key names - like calling get_json_object once per key, but
 * the document is parsed (and cached) only once.
 */
@Description(name = "json_tuple",
    value = "_FUNC_(jsonStr, p1, p2, ..., pn) - like get_json_object, but it takes multiple names and return a tuple. " +
        "All the input parameters and output column types are string.")
public class GenericUDTFJSONTuple extends GenericUDTF {

  private static final Logger LOG = LoggerFactory.getLogger(GenericUDTFJSONTuple.class.getName());

  private static final JsonFactory JSON_FACTORY = new JsonFactory();
  static {
    // Allows for unescaped ASCII control characters in JSON values
    JSON_FACTORY.enable(Feature.ALLOW_UNQUOTED_CONTROL_CHARS);
    // Enabled to accept quoting of all character backslash quoting mechanism
    JSON_FACTORY.enable(Feature.ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER);
  }
  private static final ObjectMapper MAPPER = new ObjectMapper(JSON_FACTORY);
  private static final JavaType MAP_TYPE = TypeFactory.fromClass(Map.class);

  int numCols;    // number of output columns
  String[] paths; // array of path expressions, each of which corresponds to a column
  private transient Text[] retCols; // array of returned column values
  // object pool of non-null Text, avoid creating objects all the time
  private transient Text[] cols;
  private transient Object[] nullCols; // array of null column values
  private transient ObjectInspector[] inputOIs; // input ObjectInspectors
  boolean pathParsed = false; // key names are extracted from the first row only
  boolean seenErrors = false; // invalid JSON is logged once, then suppressed

  /**
   * A small LRU cache of parsed JSON documents keyed by the raw JSON string,
   * so consecutive identical inputs are parsed only once.
   */
  static class HashCache<K, V> extends LinkedHashMap<K, V> {

    private static final int CACHE_SIZE = 16;
    private static final int INIT_SIZE = 32;
    private static final float LOAD_FACTOR = 0.6f;

    HashCache() {
      // accessOrder must be true for LRU eviction; with the two-argument
      // constructor LinkedHashMap would evict in insertion (FIFO) order,
      // contradicting the documented LRU intent.
      super(INIT_SIZE, LOAD_FACTOR, true);
    }

    private static final long serialVersionUID = 1;

    @Override
    protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
      return size() > CACHE_SIZE;
    }
  }

  private transient Map<String, Object> jsonObjectCache;

  @Override
  public void close() throws HiveException {
  }

  /**
   * Validates the arguments (the JSON string plus at least one key, all
   * strings) and builds the output inspector: one writable string column per key.
   *
   * @param args inspectors of the UDTF arguments
   * @return a struct inspector with {@code numCols} string fields named c0..cN
   * @throws UDFArgumentException if fewer than two arguments are given or any
   *         argument is not of string type
   */
  @Override
  public StructObjectInspector initialize(ObjectInspector[] args)
      throws UDFArgumentException {
    inputOIs = args;
    numCols = args.length - 1;
    jsonObjectCache = new HashCache<>();
    if (numCols < 1) {
      throw new UDFArgumentException("json_tuple() takes at least two arguments: " +
          "the json string and a path expression");
    }
    for (int i = 0; i < args.length; ++i) {
      if (args[i].getCategory() != ObjectInspector.Category.PRIMITIVE ||
          !args[i].getTypeName().equals(serdeConstants.STRING_TYPE_NAME)) {
        throw new UDFArgumentException("json_tuple()'s arguments have to be string type");
      }
    }
    seenErrors = false;
    pathParsed = false;
    paths = new String[numCols];
    cols = new Text[numCols];
    retCols = new Text[numCols];
    nullCols = new Object[numCols];
    for (int i = 0; i < numCols; ++i) {
      cols[i] = new Text();
      retCols[i] = cols[i];
      nullCols[i] = null;
    }
    // construct output object inspector
    ArrayList<String> fieldNames = new ArrayList<String>(numCols);
    ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>(numCols);
    for (int i = 0; i < numCols; ++i) {
      // column name can be anything since it will be named by UDTF as clause
      fieldNames.add("c" + i);
      // all returned type will be Text
      fieldOIs.add(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
    }
    return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
  }

  /**
   * Parses the JSON document in {@code o[0]} (consulting the cache first) and
   * forwards one row with the value for each requested key. A row of nulls is
   * forwarded when the input is null, unparsable, or not a JSON object.
   */
  @SuppressWarnings("unchecked")
  @Override
  public void process(Object[] o) throws HiveException {
    if (o[0] == null) {
      forward(nullCols);
      return;
    }
    // get the path expression for the 1st row only
    if (!pathParsed) {
      for (int i = 0; i < numCols; ++i) {
        paths[i] = ((StringObjectInspector) inputOIs[i + 1]).getPrimitiveJavaObject(o[i + 1]);
      }
      pathParsed = true;
    }
    String jsonStr = ((StringObjectInspector) inputOIs[0]).getPrimitiveJavaObject(o[0]);
    if (jsonStr == null) {
      forward(nullCols);
      return;
    }
    try {
      Object jsonObj = jsonObjectCache.get(jsonStr);
      if (jsonObj == null) {
        try {
          jsonObj = MAPPER.readValue(jsonStr, MAP_TYPE);
        } catch (Exception e) {
          reportInvalidJson(jsonStr);
          forward(nullCols);
          return;
        }
        jsonObjectCache.put(jsonStr, jsonObj);
      }
      if (!(jsonObj instanceof Map)) {
        reportInvalidJson(jsonStr);
        forward(nullCols);
        return;
      }
      for (int i = 0; i < numCols; ++i) {
        if (retCols[i] == null) {
          retCols[i] = cols[i]; // use the object pool rather than creating a new object
        }
        Object extractObject = ((Map<String, Object>) jsonObj).get(paths[i]);
        if (extractObject instanceof Map || extractObject instanceof List) {
          // nested objects/arrays are re-serialized back to their JSON text
          retCols[i].set(MAPPER.writeValueAsString(extractObject));
        } else if (extractObject != null) {
          retCols[i].set(extractObject.toString());
        } else {
          retCols[i] = null;
        }
      }
      forward(retCols);
      return;
    } catch (Throwable e) {
      // Pass the throwable as a separate argument so SLF4J logs the stack
      // trace (string concatenation only logged e.toString()).
      LOG.error("JSON parsing/evaluation exception", e);
      forward(nullCols);
    }
  }

  @Override
  public String toString() {
    return "json_tuple";
  }

  /**
   * Logs an invalid-JSON input once; subsequent bad rows are skipped silently
   * to avoid flooding the log.
   */
  private void reportInvalidJson(String jsonStr) {
    if (!seenErrors) {
      LOG.error("The input is not a valid JSON string: " + jsonStr +
          ". Skipping such error messages in the future.");
      seenErrors = true;
    }
  }
}
| |
/*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.ml4j.nn.unsupervised;
import org.ml4j.Matrix;
import org.ml4j.MatrixFactory;
import org.ml4j.nn.neurons.NeuronsActivationContext;
import org.ml4j.nn.neurons.NeuronsActivationFeatureOrientation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Encapsulates the activation activities of a set of Neurons, optionally
 * including a leading bias feature whose entries are always 1.
 *
 * @author Michael Lavelle
 */
public class NeuronsActivationWithPossibleBiasUnit {

  private static final Logger LOGGER =
      LoggerFactory.getLogger(NeuronsActivationWithPossibleBiasUnit.class);

  /** The matrix of activations. */
  private Matrix activations;

  /**
   * Whether the features of the activations are represented by the columns or
   * the rows of the activations Matrix.
   */
  private NeuronsActivationFeatureOrientation featureOrientation;

  /** Whether the activations include an activation from a bias unit. */
  private boolean biasUnitIncluded;

  /**
   * Constructs a NeuronsActivation instance from a matrix of activations.
   *
   * @param activations a matrix of activations
   * @param biasUnitIncluded whether a bias unit is included in the activation features
   * @param featureOrientation the orientation of the features of the activation matrix
   * @param resetBiasValues whether to overwrite the bias entries with ones before validating
   */
  public NeuronsActivationWithPossibleBiasUnit(Matrix activations, boolean biasUnitIncluded,
      NeuronsActivationFeatureOrientation featureOrientation, boolean resetBiasValues) {
    LOGGER.debug("Creating new NeuronsActivationWithPossibleBiasUnit");
    this.activations = activations;
    this.biasUnitIncluded = biasUnitIncluded;
    this.featureOrientation = featureOrientation;
    if (!biasUnitIncluded) {
      return;
    }
    if (resetBiasValues) {
      resetBiasActivations(activations, featureOrientation);
    }
    validateBiasActivations(activations, featureOrientation);
  }

  /**
   * Obtain the feature orientation of the Matrix representing the activations.
   *
   * @return whether the features are represented by the rows or the columns
   */
  public NeuronsActivationFeatureOrientation getFeatureOrientation() {
    return featureOrientation;
  }

  /** @return the raw activations matrix as held (bias included when present). */
  public Matrix getActivations() {
    return activations;
  }

  /**
   * Getting activations without bias.
   *
   * @return the activations with any bias feature stripped off; the matrix is
   *         returned as-is when no bias unit is present
   */
  public Matrix getActivationsWithoutBias() {
    if (!biasUnitIncluded) {
      return activations;
    }
    boolean columnsSpanFeatures =
        featureOrientation == NeuronsActivationFeatureOrientation.COLUMNS_SPAN_FEATURE_SET;
    return columnsSpanFeatures ? removeFirstColumn(this.activations)
        : removeFirstRow(this.activations);
  }

  /** @return the activations matrix as held, bias included when present. */
  public Matrix getActivationsWithBias() {
    return activations;
  }

  /**
   * Indicates whether the features represented by this NeuronsActivation
   * include a bias unit.
   *
   * @return whether a bias unit is included
   */
  public boolean isBiasUnitIncluded() {
    return biasUnitIncluded;
  }

  /**
   * Obtain the number of features (including any bias) represented by this
   * NeuronsActivation.
   *
   * @return the feature count, counting the bias feature when present
   */
  public int getFeatureCountIncludingBias() {
    boolean columnsSpanFeatures =
        featureOrientation == NeuronsActivationFeatureOrientation.COLUMNS_SPAN_FEATURE_SET;
    return columnsSpanFeatures ? activations.getColumns() : activations.getRows();
  }

  /**
   * Obtain the number of features (excluding any bias) represented by this
   * NeuronsActivation.
   *
   * @return the feature count, not counting any bias feature
   */
  public int getFeatureCountExcludingBias() {
    int total = getFeatureCountIncludingBias();
    return biasUnitIncluded ? total - 1 : total;
  }

  /**
   * Returns this NeuronsActivation ensuring the presence of a bias unit is
   * consistent with the requested withBiasUnit parameter, adding or removing a
   * leading bias row/column as required.
   *
   * @param withBiasUnit whether a bias unit should be included in the returned activation
   * @param neuronsActivationContext the activation context supplying the matrix factory
   * @return this instance when already in the requested state, otherwise a new instance
   */
  public NeuronsActivationWithPossibleBiasUnit withBiasUnit(boolean withBiasUnit,
      NeuronsActivationContext neuronsActivationContext) {
    MatrixFactory matrixFactory = neuronsActivationContext.getMatrixFactory();
    if (withBiasUnit == isBiasUnitIncluded()) {
      // Already in the requested state - nothing to add or remove.
      return this;
    }
    if (isBiasUnitIncluded()) {
      // Strip the existing bias feature.
      if (featureOrientation == NeuronsActivationFeatureOrientation.COLUMNS_SPAN_FEATURE_SET) {
        LOGGER.debug("Removing bias unit from activations");
        return new NeuronsActivationWithPossibleBiasUnit(removeFirstColumn(activations), false,
            NeuronsActivationFeatureOrientation.COLUMNS_SPAN_FEATURE_SET, false);
      }
      if (featureOrientation == NeuronsActivationFeatureOrientation.ROWS_SPAN_FEATURE_SET) {
        LOGGER.debug("Removing bias unit from activations");
        return new NeuronsActivationWithPossibleBiasUnit(removeFirstRow(activations), false,
            NeuronsActivationFeatureOrientation.ROWS_SPAN_FEATURE_SET, false);
      }
      throw new IllegalStateException("Unsupported feature orientation type:" + featureOrientation);
    }
    // Prepend a row/column of ones as the new bias feature.
    if (featureOrientation == NeuronsActivationFeatureOrientation.COLUMNS_SPAN_FEATURE_SET) {
      Matrix bias = matrixFactory.createOnes(activations.getRows(), 1);
      LOGGER.debug("Adding bias unit to activations");
      Matrix activationsWithBias = bias.appendHorizontally(activations);
      return new NeuronsActivationWithPossibleBiasUnit(activationsWithBias, true,
          NeuronsActivationFeatureOrientation.COLUMNS_SPAN_FEATURE_SET, false);
    }
    if (featureOrientation == NeuronsActivationFeatureOrientation.ROWS_SPAN_FEATURE_SET) {
      Matrix bias = matrixFactory.createOnes(1, activations.getColumns());
      LOGGER.debug("Adding bias unit to activations");
      Matrix activationsWithBias = bias.appendVertically(activations);
      return new NeuronsActivationWithPossibleBiasUnit(activationsWithBias, true,
          NeuronsActivationFeatureOrientation.ROWS_SPAN_FEATURE_SET, false);
    }
    throw new IllegalStateException("Unsupported feature orientation type:" + featureOrientation);
  }

  /** Throws unless every bias entry of the given matrix is exactly 1. */
  private void validateBiasActivations(Matrix m,
      NeuronsActivationFeatureOrientation orientation) {
    if (!biasUnitIncluded) {
      throw new IllegalStateException("Cannot validate bias activations as bias unit not included");
    }
    LOGGER.debug("Validating bias activations");
    if (NeuronsActivationFeatureOrientation.COLUMNS_SPAN_FEATURE_SET == orientation) {
      Matrix biasColumn = m.getColumn(0);
      int rowCount = m.getRows();
      for (int row = 0; row < rowCount; row++) {
        if (biasColumn.get(row, 0) != 1d) {
          throw new IllegalArgumentException("Values of bias unit is not 1");
        }
      }
    } else if (NeuronsActivationFeatureOrientation.ROWS_SPAN_FEATURE_SET == orientation) {
      Matrix biasRow = m.getRow(0);
      int columnCount = m.getColumns();
      for (int column = 0; column < columnCount; column++) {
        if (biasRow.get(0, column) != 1d) {
          throw new IllegalArgumentException("Values of bias unit is not 1");
        }
      }
    } else {
      throw new IllegalStateException("Unsupported feature orientation type:" + orientation);
    }
  }

  /** @return a copy of the given matrix without its first column. */
  private Matrix removeFirstColumn(Matrix m) {
    int rowCount = m.getRows();
    int columnCount = m.getColumns();
    int[] allRows = new int[rowCount];
    for (int r = 0; r < rowCount; r++) {
      allRows[r] = r;
    }
    int[] tailColumns = new int[columnCount - 1];
    for (int c = 1; c < columnCount; c++) {
      tailColumns[c - 1] = c;
    }
    return m.get(allRows, tailColumns);
  }

  /** @return a copy of the given matrix without its first row. */
  private Matrix removeFirstRow(Matrix m) {
    int rowCount = m.getRows();
    int columnCount = m.getColumns();
    int[] tailRows = new int[rowCount - 1];
    for (int r = 1; r < rowCount; r++) {
      tailRows[r - 1] = r;
    }
    int[] allColumns = new int[columnCount];
    for (int c = 0; c < columnCount; c++) {
      allColumns[c] = c;
    }
    return m.get(tailRows, allColumns);
  }

  /** Overwrites the bias entries of the given matrix with ones, in place. */
  private void resetBiasActivations(Matrix m,
      NeuronsActivationFeatureOrientation orientation) {
    if (!biasUnitIncluded) {
      throw new IllegalStateException("Cannot reset bias activations as bias unit not included");
    }
    LOGGER.debug("Resetting bias activations");
    if (NeuronsActivationFeatureOrientation.COLUMNS_SPAN_FEATURE_SET == orientation) {
      int rowCount = m.getRows();
      for (int row = 0; row < rowCount; row++) {
        m.asEditableMatrix().put(row, 0, 1);
      }
    }
    if (NeuronsActivationFeatureOrientation.ROWS_SPAN_FEATURE_SET == orientation) {
      int columnCount = m.getColumns();
      for (int column = 0; column < columnCount; column++) {
        m.asEditableMatrix().put(0, column, 1);
      }
    }
  }
}
| |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.unsafe.batchinsert;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.neo4j.helpers.collection.IterableWrapper;
import org.neo4j.kernel.impl.store.AbstractRecordStore;
import org.neo4j.kernel.impl.store.record.AbstractBaseRecord;
import org.neo4j.kernel.impl.transaction.state.RecordAccess;
import org.neo4j.kernel.impl.util.statistics.IntCounter;
/**
 * Provides direct access to records in a store. Changes are batched up and written whenever {@link #commit()}
 * is called, or {@link #close()} for that matter.
 */
public class DirectRecordAccess<KEY extends Comparable<KEY>,RECORD extends AbstractBaseRecord,ADDITIONAL>
        implements RecordAccess<KEY,RECORD,ADDITIONAL>
{
    /** Store that changed records are flushed to on commit. */
    private final AbstractRecordStore<RECORD> store;
    /** Loads existing records and creates fresh/unused ones. */
    private final Loader<KEY, RECORD, ADDITIONAL> loader;
    /** Proxies that registered a change (or creation) since the last commit, by key. */
    private final Map<KEY,DirectRecordProxy> batch = new HashMap<>();
    /** Number of proxies currently registered in {@link #batch}. */
    private final IntCounter changeCounter = new IntCounter();

    public DirectRecordAccess( AbstractRecordStore<RECORD> store, Loader<KEY, RECORD, ADDITIONAL> loader )
    {
        this.store = store;
        this.loader = loader;
    }

    @Override
    public RecordProxy<KEY, RECORD, ADDITIONAL> getOrLoad( KEY key, ADDITIONAL additionalData )
    {
        // Prefer a proxy already registered as changed; otherwise load a fresh one.
        DirectRecordProxy pending = batch.get( key );
        if ( pending == null )
        {
            return proxy( key, loader.load( key, additionalData ), additionalData, false );
        }
        return pending;
    }

    private RecordProxy<KEY, RECORD, ADDITIONAL> putInBatch( KEY key, DirectRecordProxy proxy )
    {
        // Each proxy registers itself at most once, guarded by its 'changed' flag.
        DirectRecordProxy displaced = batch.put( key, proxy );
        assert displaced == null;
        return proxy;
    }

    @Override
    public RecordProxy<KEY, RECORD, ADDITIONAL> create( KEY key, ADDITIONAL additionalData )
    {
        return proxy( key, loader.newUnused( key, additionalData ), additionalData, true );
    }

    @Override
    public RecordProxy<KEY,RECORD,ADDITIONAL> getIfLoaded( KEY key )
    {
        return batch.get( key );
    }

    @Override
    public void setTo( KEY key, RECORD newRecord, ADDITIONAL additionalData )
    {
        throw new UnsupportedOperationException( "Not supported" );
    }

    @Override
    public int changeSize()
    {
        return changeCounter.value();
    }

    @Override
    public Iterable<RecordProxy<KEY,RECORD,ADDITIONAL>> changes()
    {
        // Adapt the batched proxies to the RecordProxy interface without copying.
        return new IterableWrapper<RecordProxy<KEY,RECORD,ADDITIONAL>,DirectRecordProxy>(
                batch.values() )
        {
            @Override
            protected RecordProxy<KEY,RECORD,ADDITIONAL> underlyingObjectToObject( DirectRecordProxy object )
            {
                return object;
            }
        };
    }

    private DirectRecordProxy proxy( final KEY key, final RECORD record, final ADDITIONAL additionalData, boolean created )
    {
        return new DirectRecordProxy( key, record, additionalData, created );
    }

    /**
     * Proxy around a single record. The first request to change the record
     * registers the proxy in {@link #batch}; unchanged proxies are never stored.
     */
    private class DirectRecordProxy implements RecordProxy<KEY,RECORD,ADDITIONAL>
    {
        private final KEY key;
        private final RECORD record;
        private final ADDITIONAL additionalData;
        /** Becomes true the first time a caller asks to change this record. */
        private boolean changed;
        private final boolean created;

        public DirectRecordProxy( KEY key, RECORD record, ADDITIONAL additionalData, boolean created )
        {
            this.key = key;
            this.record = record;
            this.additionalData = additionalData;
            if ( created )
            {
                // Newly created records count as changed right away.
                prepareChange();
            }
            this.created = created;
        }

        @Override
        public KEY getKey()
        {
            return key;
        }

        @Override
        public RECORD forChangingLinkage()
        {
            prepareChange();
            return record;
        }

        private void prepareChange()
        {
            if ( changed )
            {
                return;
            }
            changed = true;
            putInBatch( key, this );
            changeCounter.increment();
        }

        @Override
        public RECORD forChangingData()
        {
            loader.ensureHeavy( record );
            prepareChange();
            return record;
        }

        @Override
        public RECORD forReadingLinkage()
        {
            return record;
        }

        @Override
        public RECORD forReadingData()
        {
            loader.ensureHeavy( record );
            return record;
        }

        @Override
        public ADDITIONAL getAdditionalData()
        {
            return additionalData;
        }

        @Override
        public RECORD getBefore()
        {
            // Re-load from the store, i.e. the state before any batched change.
            return loader.load( key, additionalData );
        }

        @Override
        public String toString()
        {
            return record.toString();
        }

        public void store()
        {
            if ( changed )
            {
                store.updateRecord( record );
            }
        }

        @Override
        public boolean isChanged()
        {
            return changed;
        }

        @Override
        public boolean isCreated()
        {
            return created;
        }
    }

    @Override
    public void close()
    {
        commit();
    }

    public void commit()
    {
        if ( changeCounter.value() == 0 )
        {
            return;
        }
        List<DirectRecordProxy> ordered = new ArrayList<>( batch.values() );
        // Write in descending key order.
        Collections.sort( ordered, new Comparator<DirectRecordProxy>()
        {
            @Override
            public int compare( DirectRecordProxy left, DirectRecordProxy right )
            {
                return right.getKey().compareTo( left.getKey() );
            }
        } );
        for ( DirectRecordProxy proxy : ordered )
        {
            proxy.store();
        }
        changeCounter.clear();
        batch.clear();
    }
}
| |
/*******************************************************************************
* Copyright 2012 Ivan Shubin http://mindengine.net
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package net.mindengine.oculus.experior.test.descriptors;
import java.io.Serializable;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import net.mindengine.oculus.experior.exception.LoopedDependencyException;
import net.mindengine.oculus.experior.exception.TestConfigurationException;
import net.mindengine.oculus.experior.test.TestRunnerConfiguration;
import net.mindengine.oculus.experior.test.testloader.TestLoaderFactory;
/**
 * Defines the test meta-data such as: class-path, name, parameters,
 * dependencies
 *
 * @author Ivan Shubin
 */
public class TestDefinition implements Serializable {
    private static final long serialVersionUID = -345871240987352L;

    /**
     * The customId field is used only in suite when there is need to fetch test
     * dependencies
     */
    private String customId;
    private Long testId;
    /**
     * These tests will be run inside the TestRunner of the current test, giving
     * the ability to unite tests into a test-group sharing the same test-session.
     */
    private List<TestDefinition> injectedTests;
    /**
     * Contains the path to the test. By default is used as a "classpath". Format:
     * "loaderName:path"
     *
     * where
     * <ul>
     * <li>loaderName - is the name of loader configured in TestLoadFactory
     * <li>path - a path which will be later passed to the specified concrete
     * TestLoadFactory
     * </ul>
     */
    private String mapping;
    private String name;
    private String description; // Used for more information about the test run
    private String project;
    private Map<String, TestParameter> parameters = new HashMap<String, TestParameter>();
    private Collection<TestDependency> parameterDependencies = new LinkedList<TestDependency>();
    private Collection<String> dependencies;

    /**
     * Checks whether this test depends on the given test, either through a
     * parameter dependency or through the plain dependency id collection.
     *
     * @param testDefinition candidate prerequisite test
     * @return true if this test depends on the given test
     */
    public boolean hasDependencies(TestDefinition testDefinition) {
        // Null-check for consistency with getDependency: the setter may have
        // installed a null collection.
        Collection<TestDependency> paramDependencies = getParameterDependencies();
        if (paramDependencies != null) {
            for (TestDependency dependency : paramDependencies) {
                if (dependency.getRefTestId().equals(testDefinition.getCustomId())) {
                    return true;
                }
            }
        }
        return dependencies != null && dependencies.contains(testDefinition.getCustomId());
    }

    /**
     * Finds the parameter dependency feeding the given parameter, if any.
     *
     * @param parameterName name of the dependent parameter
     * @return the matching dependency, or null when none exists
     */
    public TestDependency getDependency(String parameterName) {
        if (getParameterDependencies() != null) {
            for (TestDependency testDependency : getParameterDependencies()) {
                if (parameterName.equals(testDependency.getDependentParameterName())) {
                    return testDependency;
                }
            }
        }
        return null;
    }

    public String getCustomId() {
        return customId;
    }

    public void setCustomId(String customId) {
        this.customId = customId;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getProject() {
        return project;
    }

    public void setProject(String project) {
        this.project = project;
    }

    public Map<String, TestParameter> getParameters() {
        return parameters;
    }

    public void setParameters(Map<String, TestParameter> parameters) {
        this.parameters = parameters;
    }

    /**
     * Resolves the test class for this definition. When no mapping is set the
     * configured dummy test class is returned; otherwise the mapping is split
     * into "loaderName:path" (falling back to the default loader when there is
     * no prefix) and the class is loaded through the matching test loader.
     *
     * @param configuration runner configuration supplying the dummy test class
     * @return the resolved test class
     */
    public Class<?> fetchTestClass(TestRunnerConfiguration configuration) {
        try {
            if (mapping == null || mapping.isEmpty()) {
                // Looking for dummy test
                Class<?> dummyClass = configuration.getDummyTestClass();
                if (dummyClass == null) {
                    throw new TestConfigurationException("Dummy test is not specified in configuration");
                }
                return dummyClass;
            }
            int pos = mapping.indexOf(":");
            String path;
            String testLoaderName;
            if (pos > 0) {
                // fetching the specified test loader
                testLoaderName = mapping.substring(0, pos);
                path = mapping.substring(pos + 1);
            } else {
                // Using the default test loader
                testLoaderName = TestLoaderFactory.getTestLoaderFactory().getDefaultTestLoaderName();
                path = mapping;
            }
            return TestLoaderFactory.forTestLoader(testLoaderName).loadTestClass(path);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Verifies that no earlier test in the list depends on a later one.
     *
     * @param testList tests in their intended execution order
     * @throws LoopedDependencyException if a test depends on a test appearing after it
     */
    public static void checkCrossReferences(List<TestDefinition> testList) throws LoopedDependencyException {
        if (testList != null && !testList.isEmpty()) {
            for (int i = 0; i < testList.size() - 1; i++) {
                for (int j = i + 1; j < testList.size(); j++) {
                    TestDefinition td1 = testList.get(i);
                    TestDefinition td2 = testList.get(j);
                    if (td1.hasDependencies(td2)) {
                        throw new LoopedDependencyException("Test #" + td1.getCustomId());
                    }
                }
            }
        }
    }

    /**
     * Sorts tests so each test appears after its prerequisites.
     *
     * <p>Uses a bubble-sort style pass over all pairs: whenever the earlier test
     * depends on the later one they are swapped. If two tests depend on each
     * other a LoopedDependencyException is thrown.
     *
     * @param tests tests to sort (sorted in place)
     * @return the same list, sorted
     * @throws LoopedDependencyException if two tests depend on each other
     */
    public static List<TestDefinition> sortTestsByDependencies(List<TestDefinition> tests) throws LoopedDependencyException {
        for (int i = 0; i < tests.size() - 1; i++) {
            for (int j = i + 1; j < tests.size(); j++) {
                TestDefinition ti = tests.get(i);
                TestDefinition tj = tests.get(j);
                boolean iDependsOnJ = ti.hasDependencies(tj);
                boolean jDependsOnI = tj.hasDependencies(ti);
                // Logical && instead of bitwise & (both operands are already
                // evaluated, so behavior is identical but intent is clearer).
                if (iDependsOnJ && jDependsOnI) {
                    throw new LoopedDependencyException("Tests: '" + ti.getName() + "' and '" + tj.getName() + "' have dependencies on each other");
                }
                if (iDependsOnJ) {
                    Collections.swap(tests, i, j);
                }
            }
        }
        return tests;
    }

    public String getMapping() {
        return mapping;
    }

    public void setMapping(String mapping) {
        this.mapping = mapping;
    }

    @Override
    public String toString() {
        return "TestDefinition: " + name + " (" + mapping + ")";
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public String getDescription() {
        return description;
    }

    public void setInjectedTests(List<TestDefinition> injectedTests) {
        this.injectedTests = injectedTests;
    }

    public List<TestDefinition> getInjectedTests() {
        return injectedTests;
    }

    public void setParameterDependencies(Collection<TestDependency> parameterDependencies) {
        this.parameterDependencies = parameterDependencies;
    }

    public Collection<TestDependency> getParameterDependencies() {
        return parameterDependencies;
    }

    public void setDependencies(Collection<String> dependencies) {
        this.dependencies = dependencies;
    }

    public Collection<String> getDependencies() {
        return dependencies;
    }

    public Long getTestId() {
        return testId;
    }

    public void setTestId(Long testId) {
        this.testId = testId;
    }
}
| |
/*******************************************************************************
* COPYRIGHT(c) 2015 STMicroelectronics
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. Neither the name of STMicroelectronics nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
******************************************************************************/
package com.st.BlueSTSDK;
import android.bluetooth.BluetoothGattCharacteristic;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import com.st.BlueSTSDK.Utils.BLENodeDefines;
import java.nio.charset.Charset;
import java.util.Arrays;
import java.util.UUID;
/**
 * Class used to write to and read from the debug console of a node.
 *
 * @author STMicroelectronics - Central Labs.
 * @version 1.0
 */
public class Debug {
/**
 * Node that will send the data to this class.
 */
private final Node mNode;
/**
 * Characteristic used for sending to stdin and being notified of stdout.
 */
private final BluetoothGattCharacteristic mTermChar;
/**
 * Characteristic used for being notified of the stdErr.
 */
private final BluetoothGattCharacteristic mErrChar;
/**
 * Handler (backed by a dedicated thread) used for calling the user listener.
 */
private Handler mNotifyThread;
/**
 * Listener notified when new data arrives on the debug console.
 */
private DebugOutputListener mListener;
/**
 * Max size of the payload sent in a single write on the terminal characteristic.
 */
public static final int MAX_STRING_SIZE_TO_SENT = 20;
/**
 * Creates the background handler used to dispatch listener callbacks.
 */
private void initHandler(){
    HandlerThread dispatchThread = new HandlerThread(Debug.class.getCanonicalName());
    // A high priority keeps callback dispatch from lagging when a lot of data
    // streams through the debug interface; such delays could trigger a timeout.
    dispatchThread.setPriority(Thread.MAX_PRIORITY);
    dispatchThread.start();
    mNotifyThread = new Handler(dispatchThread.getLooper());
}//initHandler
/**
 * Builds a Debug console bound to the given node's terminal and error characteristics.
 *
 * @param n node that will send the data
 * @param termChar characteristic used for write/notify the stdin/out
 * @param errChar characteristic used for notify the stderr
 */
Debug(Node n, BluetoothGattCharacteristic termChar,
      BluetoothGattCharacteristic errChar) {
    mNode = n;
    mTermChar = termChar;
    mErrChar = errChar;
    initHandler();
}//Debug
/**
 * Write a message to the stdIn; the message can be split in more ble writes.
 * The string is converted to bytes using the platform's default charset.
 * <p>
 * NOTE(review): the default charset is used here while received data is
 * decoded as ISO-8859-1 elsewhere in this class - confirm the asymmetry is
 * intended for non-ASCII text.
 *
 * @param message message to send
 * @return number of char sent in Terminal standard characteristic
 */
public int write(String message) {
    return write(message.getBytes());
}
/**
 * Write an array of byte into the stdIn; the array can be split in more ble
 * writes. Convenience overload sending the whole array from offset 0.
 *
 * @param data array to write
 * @return number of byte sent
 */
public int write(byte[] data){
    return write(data,0,data.length);
}
/**
 * Write an array of byte into the stdIn; the array can be split in more ble
 * writes of at most MAX_STRING_SIZE_TO_SENT bytes each.
 * <p>
 * NOTE(review): {@code byteToSend} is used as the EXCLUSIVE END INDEX into
 * {@code data} (see the copyOfRange calls and the loop condition), not as a
 * count from {@code offset}; the return value is that end index as well. The
 * two readings coincide only when {@code offset == 0} - confirm the intended
 * semantics before calling with a non-zero offset.
 *
 * @param data array to write
 * @param offset offset in the array where start read data
 * @param byteToSend exclusive end index of the region of data to send
 * @return the end index reached (always equal to byteToSend)
 */
public int write(byte[] data,int offset, int byteToSend){
    int byteSend=offset;
    //write the message with chunk of MAX_STRING_SIZE_TO_SENT bytes
    while((byteToSend-byteSend) > MAX_STRING_SIZE_TO_SENT){
        mNode.enqueueCharacteristicsWrite(mTermChar,
            Arrays.copyOfRange(data,byteSend, byteSend + MAX_STRING_SIZE_TO_SENT));
        byteSend+=MAX_STRING_SIZE_TO_SENT;
    }//while
    //send the remaining data
    if(byteSend!=byteToSend){
        mNode.enqueueCharacteristicsWrite(mTermChar,
            Arrays.copyOfRange(data,byteSend,byteToSend));
    }//if
    return byteToSend;
}
/**
 * Set the output listener; only one listener can be set in this class.
 * <p>
 * Passing null stops the notifications on both console characteristics.
 * </p>
 *
 * @param listener class with the callback when something appear in the debug console
 */
public void setDebugOutputListener(DebugOutputListener listener) {
    if (mListener == listener) {
        return; // same listener already registered, nothing to do
    }
    mListener = listener;
    boolean enableNotifications = (listener != null);
    mNode.changeNotificationStatus(mTermChar, enableNotifications);
    mNode.changeNotificationStatus(mErrChar, enableNotifications);
}
/**
 * Decode a raw characteristic payload into a string.
 *
 * @param value raw bytes received from the node
 * @return the bytes decoded with the ISO-8859-1 (Latin-1) charset
 */
private String encodeMessageString(byte[] value) {
    final Charset consoleCharset = Charset.forName("ISO-8859-1");
    return new String(value, consoleCharset);
}
/**
 * Called when the node receives an update on a characteristic; if it is a debug
 * characteristic, its payload is forwarded to the registered listener.
 * <p>
 * The callback is posted on the {@code mNotifyThread} handler, so the listener
 * runs on the dedicated notification thread rather than on the BLE callback thread.
 * </p>
 *
 * @param characteristic characteristic that has been updated
 */
void receiveCharacteristicsUpdate(final BluetoothGattCharacteristic characteristic) {
    if (mListener == null)
        return; // nobody is listening: drop the data
    UUID charUuid = characteristic.getUuid();
    // decode once on this thread; the posted runnables only deliver the result
    final String msg = encodeMessageString(characteristic.getValue());
    if (charUuid.equals(BLENodeDefines.Services.Debug.DEBUG_STDERR_UUID)) {
        mNotifyThread.post(new Runnable() {
            @Override
            public void run() {
                // re-check: the listener may have been removed while this was queued
                if(mListener!=null)
                    mListener.onStdErrReceived(Debug.this, msg);
            }
        });
    } else if (charUuid.equals(BLENodeDefines.Services.Debug.DEBUG_TERM_UUID)) {
        mNotifyThread.post(new Runnable() {
            @Override
            public void run() {
                // re-check: the listener may have been removed while this was queued
                if(mListener!=null)
                    mListener.onStdOutReceived(Debug.this, msg);
            }
        });
    }//if-else-if
}//receiveCharacteristicsUpdate
/**
 * Called when the node has finished writing a characteristic; notifies the
 * listener with the chunk of stdin data that was actually sent.
 *
 * @param characteristic characteristic that has been written
 * @param status true if the write ended correctly, false otherwise
 */
void receiveCharacteristicsWriteUpdate(final BluetoothGattCharacteristic characteristic,
        final boolean status) {
    if (mListener == null)
        return;
    UUID charUuid = characteristic.getUuid();
    if (charUuid.equals(BLENodeDefines.Services.Debug.DEBUG_TERM_UUID)) {
        final String str = encodeMessageString(characteristic.getValue());
        // a single BLE write carries at most MAX_STRING_SIZE_TO_SENT bytes: report
        // only the part that fits in one write. The two duplicated branches of the
        // original code are merged here; the delivered value is unchanged.
        final String sentChunk = (str.length() > MAX_STRING_SIZE_TO_SENT)
                ? str.substring(0, MAX_STRING_SIZE_TO_SENT)
                : str;
        mNotifyThread.post(new Runnable() {
            @Override
            public void run() {
                // re-check: the listener may have been removed while this was queued
                if (mListener == null)
                    return;
                mListener.onStdInSent(Debug.this, sentChunk, status);
            }
        });
    }//if
}//receiveCharacteristicsWriteUpdate
/**
 * Get the node that writes/listens on this debug console.
 *
 * @return the node attached to this debug console
 */
public Node getNode() {
    return mNode;
}//getNode
/**
 * Interface used to notify the user of console activity.
 * The data received from / sent to the node is encoded with the ISO-8859-1 charset.
 * @author STMicroelectronics - Central Labs.
 */
public interface DebugOutputListener {
    /**
     * A new message appeared on the standard output.
     *
     * @param debug object that sent the message
     * @param message message that someone wrote in the debug console
     */
    void onStdOutReceived(Debug debug, String message);
    /**
     * A new message appeared on the standard error.
     *
     * @param debug object that sent the message
     * @param message message that someone wrote in the error console
     */
    void onStdErrReceived(Debug debug, String message);
    /**
     * Called when a message is sent to the debug console.
     *
     * @param debug object that received the message
     * @param message message that someone wrote in the debug console
     * @param writeResult true if the message was correctly sent
     */
    void onStdInSent(Debug debug, String message, boolean writeResult);
}//DebugOutputListener
}//Debug
| |
package edu.lognet.reputation.model.user;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import edu.lognet.reputation.controller.simulations.Simulation;
import edu.lognet.reputation.controller.utils.Gaussian;
import edu.lognet.reputation.model.experience.Credibility;
import edu.lognet.reputation.model.experience.Experience;
import edu.lognet.reputation.model.service.Service;
/**
 * Represents a user in the system.
 * A User can act as a Provider, a Consumer or a Rater.
 *
 * @author Laurent Vanni, Thao Nguyen
 */
public class User extends AbstractUser implements IProvider, IConsumer, IRater {
    /** Collusion groups a collusive rater can belong to. */
    public static enum collusionGroup {
        C1, C2, C3
    }
    /** Victim groups a user can be targeted as. */
    public static enum victimGroup {
        V1, V2, V3
    }
    /* --------------------------------------------------------- */
    /* Attributes */
    /* --------------------------------------------------------- */
    private Service currentService;
    private providerType myProviderType;
    private raterType myRaterType;
    private collusionGroup collusionCode;
    // NOTE(review): victimCode is never assigned in this class, so getVictimCode()
    // returns null unless it is set elsewhere — TODO confirm against callers.
    private victimGroup victimCode;
    private double initQoS; // QoS the provider starts with; reference for changeBehaviour()
    private double QoS; // current QoS; may change over the provider's lifetime
    private int resourceAvailable;
    private int frequencyOfFluctuation;
    // experiences this user (as a consumer) had with each provider, per service
    private Map<Service, Map<IProvider, Experience>> experiences;
    // credibility this user (as a consumer) assigns to each rater, per service
    private Map<Service, Map<IRater, Credibility>> credibilityOfRater;
    private double reputedScore;
    private double ratingTol;
    public int numProvison;
    /* --------------------------------------------------------- */
    /* Constructors */
    /* --------------------------------------------------------- */
    /**
     * Create a user.
     *
     * @param id unique identifier
     * @param name display name
     * @param age user age
     * @param currentService service this user provides
     * @param pType provider behaviour type
     * @param rType rater behaviour type; drives the rating tolerance
     * @param cGroup collusion group this user belongs to (for collusive raters)
     * @param QoS initial quality of service
     * @param resourceAvailable number of provisions this provider can serve
     * @param frequencyOfFluctuation fluctuation period, as a percentage of resourceAvailable
     */
    public User(String id, String name, int age, Service currentService,
            providerType pType, raterType rType,
            collusionGroup cGroup, double QoS, int resourceAvailable, int frequencyOfFluctuation) {
        super(id, name, age);
        this.currentService = currentService;
        this.myProviderType = pType;
        this.myRaterType = rType;
        this.collusionCode = cGroup;
        this.initQoS = QoS;
        this.QoS = QoS;
        this.resourceAvailable = resourceAvailable;
        this.frequencyOfFluctuation = frequencyOfFluctuation;
        //initiate
        this.experiences = new HashMap<Service, Map<IProvider, Experience>>();
        this.credibilityOfRater = new HashMap<Service, Map<IRater, Credibility>>();
        this.reputedScore = 0;
        // the rating tolerance encodes how far from the truth this rater may rate
        if (rType.compareTo(raterType.HONEST) == 0) {
            this.ratingTol = 0.0;
        } else if (rType.compareTo(raterType.RANDOM) == 0) {
            this.ratingTol = 1.0; // difference is any random number [0,1)
        } else { // DISHONEST and COLLUSIVE
            this.ratingTol = 0.5;
        }
        this.numProvison = 0;
    }
    /* --------------------------------------------------------- */
    /* Implements IConsumer */
    /* --------------------------------------------------------- */
    /**
     * Get (lazily creating) the credibility map of the raters for a service.
     *
     * @param service service of interest
     * @return the per-rater credibility map for that service, never null
     */
    public Map<IRater, Credibility> getCredibilityOfRater(Service service) {
        Map<IRater, Credibility> cor = credibilityOfRater.get(service);
        if (cor == null) {
            cor = new HashMap<IRater, Credibility>();
            this.credibilityOfRater.put(service, cor);
        }
        return cor;
    }
    /**
     * Get the experience this consumer had with a provider for a service.
     *
     * @param provider provider of interest
     * @param service service of interest
     * @return the stored experience, or null if none
     */
    public Experience getConsumerExp(IProvider provider, Service service) {
        Map<IProvider, Experience> byProvider = experiences.get(service);
        return (byProvider == null) ? null : byProvider.get(provider);
    }
    /**
     * Store the experience this consumer had with a provider for a service.
     *
     * @param provider provider the experience refers to
     * @param service service the experience refers to
     * @param experience experience to store
     */
    public void setConsumerExp(IProvider provider, Service service,
            Experience experience) {
        Map<IProvider, Experience> byProvider = experiences.get(service);
        if (byProvider == null) {
            byProvider = new HashMap<IProvider, Experience>();
            experiences.put(service, byProvider);
        }
        byProvider.put(provider, experience);
    }
    /**
     * Choose a provider for the given service among the candidate list.
     *
     * @param reputedProviderList candidate providers, sorted by ascending reputation
     * @param service service the consumer wants
     * @param dataLostPercent unused here; kept for interface compatibility
     * @param choosingStrategy 1 = highest reputation, 2 = Gaussian pick on the
     *        cropped top of the list, 3 = Gaussian pick on the whole list,
     *        any other value = highest reputation
     * @return the chosen provider; its reputed score is updated as a side effect
     */
    public IProvider chooseProvider(List<ReputedProvider> reputedProviderList,
            Service service, int dataLostPercent, int choosingStrategy) {
        if (Simulation.LOG_ENABLED == 1) {
            System.out.println("INFO: " + getName() + ".chooseProvider("
                    + service.getName() + ")");
            System.out.println("INFO: Provider list:");
            System.out
                    .println("\tId\t\tName\t\tAge\t\tService\t\tQoS\t\tReputation\tStatistic");
            for (ReputedProvider reputedProvider : reputedProviderList) {
                System.out.println(reputedProvider);
            }
        }
        IProvider chosenProvider;
        double db;
        int size = reputedProviderList.size();
        switch (choosingStrategy) {
        case 2: { // crop the provider list first, then apply the Gaussian choosing
            int index = 0;
            db = reputedProviderList.get(size - 1).getReputation();
            // skip providers whose reputation is more than 0.5 below the best one
            while (index < size) {
                double temp = reputedProviderList.get(index).getReputation();
                if (Math.abs(db - temp) > 0.5) {
                    index++;
                } else {
                    break;
                }
            }
            // take the list from index up to (size-1); elements are shared, not copied
            List<ReputedProvider> croppedProvList = new ArrayList<ReputedProvider>();
            for (int i = index; i < size; i++) {
                croppedProvList.add(reputedProviderList.get(i));
            }
            int i = pickIndexByGaussian(croppedProvList, "Cropped Provider list:");
            db = croppedProvList.get(i).getReputation();
            chosenProvider = croppedProvList.get(i).getProvider();
            break;
        }
        case 3: { // Gaussian choosing on the whole list
            int i = pickIndexByGaussian(reputedProviderList,
                    "Provider list with updated statistic factor:");
            db = reputedProviderList.get(i).getReputation();
            chosenProvider = reputedProviderList.get(i).getProvider();
            break;
        }
        case 1: // choose the highest reputation (same as the fallback strategy)
        default: {
            db = reputedProviderList.get(size - 1).getReputation();
            chosenProvider = reputedProviderList.get(size - 1).getProvider();
            break;
        }
        }
        chosenProvider.setReputedScore(db);
        if (Simulation.LOG_ENABLED == 1) {
            System.out.println("INFO: "
                    + getName()
                    + " has choosen "
                    + ((User) chosenProvider).getName());
        }
        return chosenProvider;
    }
    /**
     * Assign a Gaussian-shaped selection weight to every provider of the list
     * (stored as statistic feedback/factor) and draw one index at random
     * accordingly. The list is assumed sorted by ascending reputation, so the
     * left side of the Gaussian gives more weight to the last (best) providers.
     * This factors out the code previously duplicated between strategies 2 and 3.
     *
     * @param providers candidate providers, sorted by ascending reputation
     * @param logLabel label printed before the provider dump when logging is on
     * @return index of the selected provider
     */
    private int pickIndexByGaussian(List<ReputedProvider> providers, String logLabel) {
        int size = providers.size();
        Gaussian gaussian = new Gaussian(Math.sqrt(size) / 2, 0.0);
        double[] weight = new double[size];
        for (int x = 0; x < size; x++) {
            weight[x] = gaussian.getY(x - size + 1) * 100;
        }
        double sum = 0.0;
        for (int i = 0; i < size; i++) {
            sum += weight[i];
        }
        double percentSum = 0.0;
        for (int i = 0; i < size; i++) {
            double percent = (100 * weight[i]) / sum;
            providers.get(i).setStatisticFeedBack(percent);
            percentSum += percent;
            providers.get(i).setStatisticFactor(percentSum);
        }
        if (Simulation.LOG_ENABLED == 1) {
            System.out.println("INFO: " + logLabel);
            for (ReputedProvider reputedProvider : providers) {
                System.out.println(reputedProvider);
            }
        }
        double statisticFactor = Math.random() * 100;
        // dichotomic search: find the provider whose cumulated statistic factor
        // is closest to the random value
        int i = size / 2;
        while (size != 0) {
            if (providers.get(i).getStatisticFactor() == statisticFactor) {
                break;
            } else if (providers.get(i).getStatisticFactor() < statisticFactor) {
                if (size % 2 == 1) {
                    i += size / 4 + 1;
                } else {
                    i += size / 4;
                }
            } else {
                i -= size / 4;
            }
            size /= 2;
        }
        return i;
    }
    /**
     * Make the provider QoS evolve according to its behaviour type, based on the
     * number of provisions already served.
     */
    public void changeBehaviour() {
        if (myProviderType == providerType.GOODTURNSBAD) {
            // good provider drops its QoS at half of its resources
            if (numProvison == resourceAvailable / 2) {
                QoS = Math.max(0, Math.round((initQoS - 0.7) * 100) / (double) 100); // actually always > 0 because initQoS>=0.75
            }
            return;
        }
        if (myProviderType == providerType.FLUCTUATE) {
            // toggle between the initial QoS and a degraded one every
            // frequencyOfFluctuation% of the available resources
            if ((numProvison != 0) && (numProvison % (frequencyOfFluctuation * resourceAvailable / 100) == 0)) {
                if (QoS == initQoS) {
                    QoS = Math.max(0, Math.round((initQoS - 0.7) * 100) / (double) 100);
                } else {
                    QoS = initQoS;
                }
            }
            return;
        }
        if (myProviderType == providerType.BADTURNSGOOD) {
            // bad provider raises its QoS at half of its resources
            if (numProvison == resourceAvailable / 2) {
                QoS = Math.min(1, Math.round((initQoS + 0.7) * 100) / (double) 100); // actually always <1 since initQoS<=0.25
            }
            return;
        }
    }
    /* --------------------------------------------------------- */
    /* Others Override Methods */
    /* --------------------------------------------------------- */
    @Override
    public String toString() {
        return "\t" + getId() + "\t\t" + getName() + "\t\t" + getAge() + "\t\t"
                + getProvidedService().getId() + "\t\t" + getQoS();
    }
    @Override
    public double getRatingTol() {
        return ratingTol;
    }
    @Override
    public collusionGroup getCollusionCode() {
        return collusionCode;
    }
    @Override
    public victimGroup getVictimCode() {
        return victimCode;
    }
    @Override
    public Service getProvidedService() {
        return currentService;
    }
    @Override
    public void setProvidedService(Service providedService) {
        this.currentService = providedService;
    }
    /** @return the QoS this provider was created with */
    public double getInitQoS() {
        return initQoS;
    }
    @Override
    public double getQoS() {
        return QoS;
    }
    @Override
    public void setQoS(double QoS) {
        this.QoS = QoS;
    }
    @Override
    public void setReputedScore(double db) {
        reputedScore = db;
    }
    @Override
    public double getReputedScore() {
        return reputedScore;
    }
    @Override
    public void increaseNumProvison() {
        numProvison++;
    }
    @Override
    public int getNumProvision() {
        return numProvison;
    }
    @Override
    public int getResourceAvailable() {
        return resourceAvailable;
    }
    @Override
    public providerType getProviderType() {
        return myProviderType;
    }
    public void setProviderType(providerType type) {
        myProviderType = type;
    }
    @Override
    public raterType getRaterType() {
        return myRaterType;
    }
    public void setRaterType(raterType type) {
        myRaterType = type;
    }
}
| |
package org.xbib.elasticsearch.util;
import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
import org.elasticsearch.client.support.AbstractClient;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.node.MockNode;
import org.elasticsearch.node.Node;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.sort.SortBuilder;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.transport.TransportInfo;
import org.testng.Assert;
import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
/**
 * Test helper that starts/stops embedded Elasticsearch nodes and exposes
 * clients plus a few assertion utilities for index contents.
 */
public class NodeTestUtils extends Assert {
    protected final static ESLogger logger = ESLoggerFactory.getLogger("test");
    private Map<String, Node> nodes = new HashMap<>();
    private Map<String, AbstractClient> clients = new HashMap<>();
    // note, this must be same name as in json specs
    protected final String index = "my_index";
    protected final String type = "my_type";
    // "host:port" transport addresses, filled by findNodeAddresses()
    private List<String> hosts;
    /** Start the test node and discover its transport address; logs instead of throwing. */
    public void startNodes() {
        try {
            startNode("1");
            findNodeAddresses();
            logger.info("ready");
        } catch (Throwable t) {
            logger.error("startNodes failed", t);
        }
    }
    /** Close all clients/nodes and wipe the data directory. */
    public void stopNodes() {
        try {
            closeNodes();
        } catch (Exception e) {
            logger.error("can not close nodes", e);
        } finally {
            try {
                deleteFiles();
                logger.info("data files wiped");
            } catch (IOException e) {
                logger.error(e.getMessage(), e);
            }
        }
    }
    /** @return the discovered "host:port" addresses; empty before startNodes() ran */
    protected String[] getHosts() {
        return hosts != null ? hosts.toArray(new String[hosts.size()]) : new String[]{};
    }
    /** @return base node settings; subclasses may override to customize */
    protected Settings getNodeSettings() {
        return settingsBuilder()
                .put("cluster.name", "elasticsearch")
                .put("http.enabled", false)
                .put("index.number_of_replicas", 0)
                .put("path.home", getHome())
                .build();
    }
    /** @return the node home directory, taken from the "path.home" system property */
    protected String getHome() {
        return System.getProperty("path.home");
    }
    /**
     * Build and start a node under the given id.
     *
     * @param id logical node id, used as key for {@link #client(String)}
     */
    public void startNode(String id) throws IOException {
        buildNode(id).start();
    }
    /**
     * @param id logical node id passed to {@link #startNode(String)}
     * @return the client attached to that node, or null if unknown
     */
    public AbstractClient client(String id) {
        return clients.get(id);
    }
    /** Close every client first, then every node. */
    private void closeNodes() throws IOException {
        logger.info("closing all clients");
        for (AbstractClient client : clients.values()) {
            client.close();
        }
        clients.clear();
        logger.info("closing all nodes");
        for (Node node : nodes.values()) {
            if (node != null) {
                node.close();
            }
        }
        nodes.clear();
        logger.info("all nodes closed");
    }
    /** Query node "1" for the transport addresses of all nodes in the cluster. */
    protected void findNodeAddresses() {
        NodesInfoRequest nodesInfoRequest = new NodesInfoRequest().transport(true);
        NodesInfoResponse response = client("1").admin().cluster().nodesInfo(nodesInfoRequest).actionGet();
        Iterator<NodeInfo> it = response.iterator();
        // fix: the list was assigned twice in a row; once is enough
        hosts = new LinkedList<>();
        while (it.hasNext()) {
            NodeInfo nodeInfo = it.next();
            TransportInfo transportInfo = nodeInfo.getTransport();
            TransportAddress address = transportInfo.getAddress().publishAddress();
            if (address instanceof InetSocketTransportAddress) {
                InetSocketTransportAddress inetSocketTransportAddress = (InetSocketTransportAddress) address;
                hosts.add(inetSocketTransportAddress.address().getHostName() + ":" + inetSocketTransportAddress.address().getPort());
            }
        }
    }
    /** Create a MockNode plus its client and register both under the given id. */
    private Node buildNode(String id) throws IOException {
        Settings nodeSettings = settingsBuilder()
                .put(getNodeSettings())
                .put("name", id)
                .build();
        logger.info("settings={}", nodeSettings.getAsMap());
        Node node = new MockNode(nodeSettings);
        AbstractClient client = (AbstractClient) node.client();
        nodes.put(id, node);
        clients.put(id, client);
        logger.info("clients={}", clients);
        return node;
    }
    /** Recursively delete the node data directory, if it exists. */
    private static void deleteFiles() throws IOException {
        Path directory = Paths.get(System.getProperty("path.home") + "/data");
        // fix: nothing to wipe (and no exception) if the node never created a data dir
        if (Files.notExists(directory)) {
            return;
        }
        Files.walkFileTree(directory, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                Files.delete(file);
                return FileVisitResult.CONTINUE;
            }
            @Override
            public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
                Files.delete(dir);
                return FileVisitResult.CONTINUE;
            }
        });
    }
    /**
     * Refresh the index and assert the total hit count of a match-all search.
     *
     * @param id node id whose client is used
     * @param expectedHits expected number of documents in index/type
     */
    protected void assertHits(String id, int expectedHits) {
        client(id).admin().indices().prepareRefresh(index).execute().actionGet();
        long hitsFound = client(id).prepareSearch(index).setTypes(type).execute().actionGet().getHits().getTotalHits();
        logger.info("{}/{} = {} hits", index, type, hitsFound);
        assertEquals(hitsFound, expectedHits);
    }
    /**
     * Assert that documents come back in descending {@code _timestamp} order
     * and that the total hit count matches.
     *
     * @param id node id whose client is used
     * @param expectedHits expected number of documents in index/type
     */
    protected void assertTimestampSort(String id, int expectedHits) {
        client(id).admin().indices().prepareRefresh(index).execute().actionGet();
        QueryBuilder queryBuilder = QueryBuilders.matchAllQuery();
        SortBuilder sortBuilder = SortBuilders.fieldSort("_timestamp").order(SortOrder.DESC);
        SearchHits hits = client(id).prepareSearch(index).setTypes(type)
                .setQuery(queryBuilder)
                .addSort(sortBuilder)
                .addFields("_source", "_timestamp")
                .setSize(expectedHits)
                .execute().actionGet().getHits();
        Long prev = Long.MAX_VALUE;
        for (SearchHit hit : hits) {
            // NOTE(review): if the mapping was not applied, the next line will NPE;
            // the warning only explains the failure mode
            if (hit.getFields().get("_timestamp") == null) {
                logger.warn("type mapping was not correctly applied for _timestamp field");
            }
            Long curr = hit.getFields().get("_timestamp").getValue();
            logger.info("timestamp = {}", curr);
            assertTrue(curr <= prev);
            prev = curr;
        }
        logger.info("{}/{} = {} hits", index, type, hits.getTotalHits());
        assertEquals(hits.getTotalHits(), expectedHits);
    }
}
| |
/*=========================================================================
* Copyright (c) 2010-2014 Pivotal Software, Inc. All Rights Reserved.
* This product is protected by U.S. and international copyright
* and intellectual property laws. Pivotal products are covered by
* one or more patents listed at http://www.pivotal.io/patents.
*=========================================================================
*/
/*
* RemoveAllMultiVmDUnitTest.java
*
* Adapted from PutAllMultiVmDUnitTest
*/
package com.gemstone.gemfire.cache30;
import dunit.*;
import com.gemstone.gemfire.cache.*;
import java.util.*;
import com.gemstone.gemfire.distributed.DistributedSystem;
/**
 * Multi-VM tests for Region.removeAll, with and without transactions, on a
 * local region and on a distributed (mirrored) region.
 *
 * @author darrel
 */
public class RemoveAllMultiVmDUnitTest extends DistributedTestCase {
    /** Creates a new instance of RemoveAllMultiVmDUnitTest */
    public RemoveAllMultiVmDUnitTest(String name) {
        super(name);
    }
    static Cache cache;
    static Properties props = new Properties();
    static Properties propsWork = new Properties();
    static DistributedSystem ds = null;
    static Region region;
    static Region mirroredRegion;
    static CacheTransactionManager cacheTxnMgr;
    @Override
    public void setUp() throws Exception {
        super.setUp();
        Host host = Host.getHost(0);
        VM vm0 = host.getVM(0);
        VM vm1 = host.getVM(1);
        vm0.invoke(RemoveAllMultiVmDUnitTest.class, "createCache");
        vm1.invoke(RemoveAllMultiVmDUnitTest.class, "createCache");
    }
    /** Tear down: close the cache in both VMs and clear the static reference everywhere. */
    public void tearDown2(){
        Host host = Host.getHost(0);
        VM vm0 = host.getVM(0);
        VM vm1 = host.getVM(1);
        vm0.invoke(RemoveAllMultiVmDUnitTest.class, "closeCache");
        vm1.invoke(RemoveAllMultiVmDUnitTest.class, "closeCache");
        cache = null;
        invokeInEveryVM(new SerializableRunnable() { public void run() { cache = null; } });
    }
    /** Create the cache and a DISTRIBUTED_ACK region named "map" in this VM. */
    public static void createCache(){
        try{
            ds = (new RemoveAllMultiVmDUnitTest("temp")).getSystem(props);
            cache = CacheFactory.create(ds);
            AttributesFactory factory = new AttributesFactory();
            factory.setScope(Scope.DISTRIBUTED_ACK);
            RegionAttributes attr = factory.create();
            region = cache.createRegion("map", attr);
        } catch (Exception ex){
            ex.printStackTrace();
        }
    }//end of createCache
    /** Create a replicated DISTRIBUTED_ACK region named "mirrored" in this VM. */
    public static void createMirroredRegion(){
        try{
            AttributesFactory factory = new AttributesFactory();
            factory.setDataPolicy(DataPolicy.REPLICATE);
            factory.setScope(Scope.DISTRIBUTED_ACK);
            RegionAttributes attr = factory.create();
            mirroredRegion = cache.createRegion("mirrored", attr);
        } catch (Exception ex){
            ex.printStackTrace();
        }
    }//end of createMirroredRegion
    /** Close the cache and disconnect from the distributed system in this VM. */
    public static void closeCache(){
        try{
            cache.close();
            ds.disconnect();
        } catch (Exception ex){
            ex.printStackTrace();
        }
    }//end of closeCache
    //tests methods
    /** removeAll on a plain local region: empty set, missing key, single key, two keys. */
    public void testLocalRemoveAll(){
        Host host = Host.getHost(0);
        VM vm0 = host.getVM(0);
        vm0.invoke(new CacheSerializableRunnable("testLocalRemoveAll"){
            public void run2() throws CacheException {
                for(int i=1; i<6; i++) {
                    region.put(Integer.valueOf(i), "testLocalRemoveAll"+i);
                }
                int size1 = region.size();
                assertEquals(5, size1);
                // removing nothing changes nothing
                region.removeAll(Collections.EMPTY_SET);
                assertEquals(size1, region.size());
                // removing an absent key changes nothing
                region.removeAll(Collections.singleton(Integer.valueOf(666)));
                assertEquals(size1, region.size());
                // removing one present key shrinks the region by one
                assertEquals(true, region.containsKey(Integer.valueOf(1)));
                region.removeAll(Collections.singleton(Integer.valueOf(1)));
                assertEquals(false, region.containsKey(Integer.valueOf(1)));
                assertEquals(size1-1, region.size());
                size1--;
                // removing two present keys shrinks the region by two
                region.removeAll(Arrays.asList(Integer.valueOf(2), Integer.valueOf(3)));
                assertEquals(size1-2, region.size());
            }
        } );
    }
    /** removeAll inside a transaction: rollback restores, commit removes present keys. */
    public void testLocalTxRemoveAll(){
        Host host = Host.getHost(0);
        VM vm0 = host.getVM(0);
        vm0.invoke(new CacheSerializableRunnable("testSimpleRemoveAllTx"){
            public void run2() throws CacheException {
                cacheTxnMgr = cache.getCacheTransactionManager();
                for(int i=1; i<6; i++) {
                    region.put(Integer.valueOf(i), "testLocalTxRemoveAll"+i);
                }
                int size1 = region.size();
                assertEquals(5, size1);
                // rollback: the removals must not be visible
                cacheTxnMgr.begin();
                region.removeAll(Arrays.asList(Integer.valueOf(1), Integer.valueOf(2)));
                cacheTxnMgr.rollback();
                assertEquals(size1, region.size());
                // commit: present keys removed, absent key (666) ignored
                cacheTxnMgr.begin();
                region.removeAll(Arrays.asList(Integer.valueOf(666), Integer.valueOf(1), Integer.valueOf(2)));
                cacheTxnMgr.commit();
                int size2 = region.size();
                assertEquals(size1-2, size2);
                assertEquals(true, region.containsKey(Integer.valueOf(3)));
                assertEquals(false, region.containsKey(Integer.valueOf(2)));
                assertEquals(false, region.containsKey(Integer.valueOf(1)));
            }
        } );
    }
    /** removeAll on a mirrored region, verifying the removals replicate to the other VM. */
    public void testDistributedRemoveAll(){
        Host host = Host.getHost(0);
        VM vm0 = host.getVM(0);
        VM vm1 = host.getVM(1);
        vm1.invoke(new CacheSerializableRunnable("create mirrored region"){
            public void run2() throws CacheException {
                createMirroredRegion();
            }
        });
        vm0.invoke(new CacheSerializableRunnable("testDistributedRemoveAll1"){
            public void run2() throws CacheException {
                createMirroredRegion();
                for(int i=1; i<6; i++) {
                    mirroredRegion.put(Integer.valueOf(i), "testDistributedRemoveAll"+i);
                }
                int size1 = mirroredRegion.size();
                assertEquals(5, size1);
                mirroredRegion.removeAll(Collections.EMPTY_SET);
                assertEquals(size1, mirroredRegion.size());
                mirroredRegion.removeAll(Collections.singleton(Integer.valueOf(666)));
                assertEquals(size1, mirroredRegion.size());
                assertEquals(true, mirroredRegion.containsKey(Integer.valueOf(1)));
                mirroredRegion.removeAll(Collections.singleton(Integer.valueOf(1)));
                assertEquals(false, mirroredRegion.containsKey(Integer.valueOf(1)));
                assertEquals(size1-1, mirroredRegion.size());
                size1--;
                mirroredRegion.removeAll(Arrays.asList(Integer.valueOf(2), Integer.valueOf(3)));
                assertEquals(size1-2, mirroredRegion.size());
            }
        } );
        // the other VM must observe exactly the same final state
        vm1.invoke(new CacheSerializableRunnable("testDistributedRemoveAllVerifyRemote"){
            public void run2() throws CacheException {
                assertEquals(true, mirroredRegion.containsKey(Integer.valueOf(5)));
                assertEquals(true, mirroredRegion.containsKey(Integer.valueOf(4)));
                assertEquals(false, mirroredRegion.containsKey(Integer.valueOf(3)));
                assertEquals(false, mirroredRegion.containsKey(Integer.valueOf(2)));
                assertEquals(false, mirroredRegion.containsKey(Integer.valueOf(1)));
                assertEquals(2, mirroredRegion.size());
            }
        } );
    }
    /** Transactional removeAll on a mirrored region, verifying replication after commits. */
    public void testDistributedTxRemoveAll(){
        Host host = Host.getHost(0);
        VM vm0 = host.getVM(0);
        VM vm1 = host.getVM(1);
        vm1.invoke(new CacheSerializableRunnable("create mirrored region"){
            public void run2() throws CacheException {
                createMirroredRegion();
            }
        });
        vm0.invoke(new CacheSerializableRunnable("testDistributedTxRemoveAll1"){
            public void run2() throws CacheException {
                createMirroredRegion();
                for(int i=1; i<6; i++) {
                    mirroredRegion.put(Integer.valueOf(i), "testDistributedTxRemoveAll"+i);
                }
                int size1 = mirroredRegion.size();
                assertEquals(5, size1);
                cacheTxnMgr = cache.getCacheTransactionManager();
                cacheTxnMgr.begin();
                mirroredRegion.removeAll(Collections.EMPTY_SET);
                cacheTxnMgr.commit();
                assertEquals(size1, mirroredRegion.size());
                cacheTxnMgr.begin();
                mirroredRegion.removeAll(Collections.singleton(Integer.valueOf(666)));
                cacheTxnMgr.commit();
                assertEquals(size1, mirroredRegion.size());
                assertEquals(true, mirroredRegion.containsKey(Integer.valueOf(1)));
                cacheTxnMgr.begin();
                mirroredRegion.removeAll(Collections.singleton(Integer.valueOf(1)));
                cacheTxnMgr.commit();
                assertEquals(false, mirroredRegion.containsKey(Integer.valueOf(1)));
                assertEquals(size1-1, mirroredRegion.size());
                size1--;
                cacheTxnMgr.begin();
                mirroredRegion.removeAll(Arrays.asList(Integer.valueOf(2), Integer.valueOf(3)));
                cacheTxnMgr.commit();
                assertEquals(size1-2, mirroredRegion.size());
            }
        } );
        // the other VM must observe exactly the same final state
        vm1.invoke(new CacheSerializableRunnable("testDistributedTxRemoveAllVerifyRemote"){
            public void run2() throws CacheException {
                assertEquals(true, mirroredRegion.containsKey(Integer.valueOf(5)));
                assertEquals(true, mirroredRegion.containsKey(Integer.valueOf(4)));
                assertEquals(false, mirroredRegion.containsKey(Integer.valueOf(3)));
                assertEquals(false, mirroredRegion.containsKey(Integer.valueOf(2)));
                assertEquals(false, mirroredRegion.containsKey(Integer.valueOf(1)));
                assertEquals(2, mirroredRegion.size());
            }
        } );
    }
}//end of RemoveAllMultiVmDUnitTest
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.tools;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.tools.DistCpOptions.FileAttribute;
import org.apache.hadoop.tools.util.DistCpUtils;
import org.apache.hadoop.tools.util.ProducerConsumer;
import org.apache.hadoop.tools.util.WorkReport;
import org.apache.hadoop.tools.util.WorkRequest;
import org.apache.hadoop.tools.util.WorkRequestProcessor;
import org.apache.hadoop.mapreduce.security.TokenCache;
import org.apache.hadoop.security.Credentials;
import com.google.common.annotations.VisibleForTesting;
import java.io.*;
import java.util.ArrayList;
import static org.apache.hadoop.tools.DistCpConstants
.HDFS_RESERVED_RAW_DIRECTORY_NAME;
/**
* The SimpleCopyListing is responsible for making the exhaustive list of
* all files/directories under its specified list of input-paths.
* These are written into the specified copy-listing file.
* Note: The SimpleCopyListing doesn't handle wild-cards in the input-paths.
*/
public class SimpleCopyListing extends CopyListing {
private static final Log LOG = LogFactory.getLog(SimpleCopyListing.class);
private long totalPaths = 0;
private long totalDirs = 0;
private long totalBytesToCopy = 0;
private int numListstatusThreads = 1;
private final int maxRetries = 3;
/**
 * Protected constructor, to initialize the configuration.
 *
 * @param configuration the input configuration, with which the source/target
 *                      FileSystems may be accessed
 * @param credentials credentials object on which the FS delegation tokens are
 *                    cached; if null, delegation token caching is skipped
 */
protected SimpleCopyListing(Configuration configuration, Credentials credentials) {
    super(configuration, credentials);
    // number of threads used for listStatus on the sources; configurable, with a default
    numListstatusThreads = getConf().getInt(
            DistCpConstants.CONF_LABEL_LISTSTATUS_THREADS,
            DistCpConstants.DEFAULT_LISTSTATUS_THREADS);
}
/**
 * Test-only constructor that fixes the number of listStatus threads instead of
 * reading it from the configuration.
 *
 * @param configuration the input configuration
 * @param credentials credentials object for FS delegation tokens; may be null
 * @param numListstatusThreads number of threads to use for listStatus
 */
@VisibleForTesting
protected SimpleCopyListing(Configuration configuration, Credentials credentials,
                            int numListstatusThreads) {
    super(configuration, credentials);
    this.numListstatusThreads = numListstatusThreads;
}
@Override
protected void validatePaths(DistCpOptions options)
throws IOException, InvalidInputException {
Path targetPath = options.getTargetPath();
FileSystem targetFS = targetPath.getFileSystem(getConf());
boolean targetIsFile = targetFS.isFile(targetPath);
targetPath = targetFS.makeQualified(targetPath);
final boolean targetIsReservedRaw =
Path.getPathWithoutSchemeAndAuthority(targetPath).toString().
startsWith(HDFS_RESERVED_RAW_DIRECTORY_NAME);
//If target is a file, then source has to be single file
if (targetIsFile) {
if (options.getSourcePaths().size() > 1) {
throw new InvalidInputException("Multiple source being copied to a file: " +
targetPath);
}
Path srcPath = options.getSourcePaths().get(0);
FileSystem sourceFS = srcPath.getFileSystem(getConf());
if (!sourceFS.isFile(srcPath)) {
throw new InvalidInputException("Cannot copy " + srcPath +
", which is not a file to " + targetPath);
}
}
if (options.shouldAtomicCommit() && targetFS.exists(targetPath)) {
throw new InvalidInputException("Target path for atomic-commit already exists: " +
targetPath + ". Cannot atomic-commit to pre-existing target-path.");
}
for (Path path: options.getSourcePaths()) {
FileSystem fs = path.getFileSystem(getConf());
if (!fs.exists(path)) {
throw new InvalidInputException(path + " doesn't exist");
}
if (Path.getPathWithoutSchemeAndAuthority(path).toString().
startsWith(HDFS_RESERVED_RAW_DIRECTORY_NAME)) {
if (!targetIsReservedRaw) {
final String msg = "The source path '" + path + "' starts with " +
HDFS_RESERVED_RAW_DIRECTORY_NAME + " but the target path '" +
targetPath + "' does not. Either all or none of the paths must " +
"have this prefix.";
throw new InvalidInputException(msg);
}
} else if (targetIsReservedRaw) {
final String msg = "The target path '" + targetPath + "' starts with " +
HDFS_RESERVED_RAW_DIRECTORY_NAME + " but the source path '" +
path + "' does not. Either all or none of the paths must " +
"have this prefix.";
throw new InvalidInputException(msg);
}
}
if (targetIsReservedRaw) {
options.preserveRawXattrs();
getConf().setBoolean(DistCpConstants.CONF_LABEL_PRESERVE_RAWXATTRS, true);
}
/* This is requires to allow map tasks to access each of the source
clusters. This would retrieve the delegation token for each unique
file system and add them to job's private credential store
*/
Credentials credentials = getCredentials();
if (credentials != null) {
Path[] inputPaths = options.getSourcePaths().toArray(new Path[1]);
TokenCache.obtainTokensForNamenodes(credentials, inputPaths, getConf());
}
}
/** {@inheritDoc} */
@Override
public void doBuildListing(Path pathToListingFile, DistCpOptions options) throws IOException {
doBuildListing(getWriter(pathToListingFile), options);
}
/**
* Collect the list of
* {@literal <sourceRelativePath, sourceFileStatus>}
* to be copied and write to the sequence file. In essence, any file or
* directory that need to be copied or sync-ed is written as an entry to the
* sequence file, with the possible exception of the source root:
* when either -update (sync) or -overwrite switch is specified, and if
* the the source root is a directory, then the source root entry is not
* written to the sequence file, because only the contents of the source
* directory need to be copied in this case.
* See {@link org.apache.hadoop.tools.util.DistCpUtils#getRelativePath} for
* how relative path is computed.
* See computeSourceRootPath method for how the root path of the source is
* computed.
* @param fileListWriter
* @param options
* @throws IOException
*/
@VisibleForTesting
public void doBuildListing(SequenceFile.Writer fileListWriter,
DistCpOptions options) throws IOException {
if (options.getNumListstatusThreads() > 0) {
numListstatusThreads = options.getNumListstatusThreads();
}
try {
for (Path path: options.getSourcePaths()) {
FileSystem sourceFS = path.getFileSystem(getConf());
final boolean preserveAcls = options.shouldPreserve(FileAttribute.ACL);
final boolean preserveXAttrs = options.shouldPreserve(FileAttribute.XATTR);
final boolean preserveRawXAttrs = options.shouldPreserveRawXattrs();
path = makeQualified(path);
FileStatus rootStatus = sourceFS.getFileStatus(path);
Path sourcePathRoot = computeSourceRootPath(rootStatus, options);
FileStatus[] sourceFiles = sourceFS.listStatus(path);
boolean explore = (sourceFiles != null && sourceFiles.length > 0);
if (!explore || rootStatus.isDirectory()) {
CopyListingFileStatus rootCopyListingStatus =
DistCpUtils.toCopyListingFileStatus(sourceFS, rootStatus,
preserveAcls, preserveXAttrs, preserveRawXAttrs);
writeToFileListingRoot(fileListWriter, rootCopyListingStatus,
sourcePathRoot, options);
}
if (explore) {
ArrayList<FileStatus> sourceDirs = new ArrayList<FileStatus>();
for (FileStatus sourceStatus: sourceFiles) {
if (LOG.isDebugEnabled()) {
LOG.debug("Recording source-path: " + sourceStatus.getPath() + " for copy.");
}
CopyListingFileStatus sourceCopyListingStatus =
DistCpUtils.toCopyListingFileStatus(sourceFS, sourceStatus,
preserveAcls && sourceStatus.isDirectory(),
preserveXAttrs && sourceStatus.isDirectory(),
preserveRawXAttrs && sourceStatus.isDirectory());
writeToFileListing(fileListWriter, sourceCopyListingStatus,
sourcePathRoot, options);
if (sourceStatus.isDirectory()) {
if (LOG.isDebugEnabled()) {
LOG.debug("Adding source dir for traverse: " + sourceStatus.getPath());
}
sourceDirs.add(sourceStatus);
}
}
traverseDirectory(fileListWriter, sourceFS, sourceDirs,
sourcePathRoot, options);
}
}
fileListWriter.close();
printStats();
LOG.info("Build file listing completed.");
fileListWriter = null;
} finally {
IOUtils.cleanup(LOG, fileListWriter);
}
}
private Path computeSourceRootPath(FileStatus sourceStatus,
DistCpOptions options) throws IOException {
Path target = options.getTargetPath();
FileSystem targetFS = target.getFileSystem(getConf());
final boolean targetPathExists = options.getTargetPathExists();
boolean solitaryFile = options.getSourcePaths().size() == 1
&& !sourceStatus.isDirectory();
if (solitaryFile) {
if (targetFS.isFile(target) || !targetPathExists) {
return sourceStatus.getPath();
} else {
return sourceStatus.getPath().getParent();
}
} else {
boolean specialHandling = (options.getSourcePaths().size() == 1 && !targetPathExists) ||
options.shouldSyncFolder() || options.shouldOverwrite();
return specialHandling && sourceStatus.isDirectory() ? sourceStatus.getPath() :
sourceStatus.getPath().getParent();
}
}
/**
* Provide an option to skip copy of a path, Allows for exclusion
* of files such as {@link org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter#SUCCEEDED_FILE_NAME}
* @param path - Path being considered for copy while building the file listing
* @param options - Input options passed during DistCp invocation
* @return - True if the path should be considered for copy, false otherwise
*/
protected boolean shouldCopy(Path path, DistCpOptions options) {
return true;
}
/** {@inheritDoc} */
@Override
protected long getBytesToCopy() {
return totalBytesToCopy;
}
/** {@inheritDoc} */
@Override
protected long getNumberOfPaths() {
return totalPaths;
}
private Path makeQualified(Path path) throws IOException {
final FileSystem fs = path.getFileSystem(getConf());
return path.makeQualified(fs.getUri(), fs.getWorkingDirectory());
}
private SequenceFile.Writer getWriter(Path pathToListFile) throws IOException {
FileSystem fs = pathToListFile.getFileSystem(getConf());
if (fs.exists(pathToListFile)) {
fs.delete(pathToListFile, false);
}
return SequenceFile.createWriter(getConf(),
SequenceFile.Writer.file(pathToListFile),
SequenceFile.Writer.keyClass(Text.class),
SequenceFile.Writer.valueClass(CopyListingFileStatus.class),
SequenceFile.Writer.compression(SequenceFile.CompressionType.NONE));
}
/*
* Private class to implement WorkRequestProcessor interface. It processes
* each directory (represented by FileStatus item) and returns a list of all
* file-system objects in that directory (files and directories). In case of
* retriable exceptions it increments retry counter and returns the same
* directory for later retry.
*/
private static class FileStatusProcessor
implements WorkRequestProcessor<FileStatus, FileStatus[]> {
private FileSystem fileSystem;
public FileStatusProcessor(FileSystem fileSystem) {
this.fileSystem = fileSystem;
}
/*
* Processor for FileSystem.listStatus().
*
* @param workRequest Input work item that contains FileStatus item which
* is a parent directory we want to list.
* @return Outputs WorkReport<FileStatus[]> with a list of objects in the
* directory (array of objects, empty if parent directory is
* empty). In case of intermittent exception we increment retry
* counter and return the list containing the parent directory).
*/
public WorkReport<FileStatus[]> processItem(
WorkRequest<FileStatus> workRequest) {
FileStatus parent = workRequest.getItem();
int retry = workRequest.getRetry();
WorkReport<FileStatus[]> result = null;
try {
if (retry > 0) {
int sleepSeconds = 2;
for (int i = 1; i < retry; i++) {
sleepSeconds *= 2;
}
try {
Thread.sleep(1000 * sleepSeconds);
} catch (InterruptedException ie) {
LOG.debug("Interrupted while sleeping in exponential backoff.");
}
}
result = new WorkReport<FileStatus[]>(
fileSystem.listStatus(parent.getPath()), retry, true);
} catch (FileNotFoundException fnf) {
LOG.error("FileNotFoundException exception in listStatus: " +
fnf.getMessage());
result = new WorkReport<FileStatus[]>(new FileStatus[0], retry, true,
fnf);
} catch (Exception e) {
LOG.error("Exception in listStatus. Will send for retry.");
FileStatus[] parentList = new FileStatus[1];
parentList[0] = parent;
result = new WorkReport<FileStatus[]>(parentList, retry + 1, false, e);
}
return result;
}
}
private void printStats() {
LOG.info("Paths (files+dirs) cnt = " + totalPaths +
"; dirCnt = " + totalDirs);
}
private void maybePrintStats() {
if (totalPaths % 100000 == 0) {
printStats();
}
}
private void traverseDirectory(SequenceFile.Writer fileListWriter,
FileSystem sourceFS,
ArrayList<FileStatus> sourceDirs,
Path sourcePathRoot,
DistCpOptions options)
throws IOException {
final boolean preserveAcls = options.shouldPreserve(FileAttribute.ACL);
final boolean preserveXAttrs = options.shouldPreserve(FileAttribute.XATTR);
final boolean preserveRawXattrs = options.shouldPreserveRawXattrs();
assert numListstatusThreads > 0;
LOG.debug("Starting thread pool of " + numListstatusThreads +
" listStatus workers.");
ProducerConsumer<FileStatus, FileStatus[]> workers =
new ProducerConsumer<FileStatus, FileStatus[]>(numListstatusThreads);
for (int i = 0; i < numListstatusThreads; i++) {
workers.addWorker(
new FileStatusProcessor(sourcePathRoot.getFileSystem(getConf())));
}
for (FileStatus status : sourceDirs) {
workers.put(new WorkRequest<FileStatus>(status, 0));
}
while (workers.hasWork()) {
try {
WorkReport<FileStatus[]> workResult = workers.take();
int retry = workResult.getRetry();
for (FileStatus child: workResult.getItem()) {
if (LOG.isDebugEnabled()) {
LOG.debug("Recording source-path: " + child.getPath() + " for copy.");
}
if (workResult.getSuccess()) {
CopyListingFileStatus childCopyListingStatus =
DistCpUtils.toCopyListingFileStatus(sourceFS, child,
preserveAcls && child.isDirectory(),
preserveXAttrs && child.isDirectory(),
preserveRawXattrs && child.isDirectory());
writeToFileListing(fileListWriter, childCopyListingStatus,
sourcePathRoot, options);
}
if (retry < maxRetries) {
if (child.isDirectory()) {
if (LOG.isDebugEnabled()) {
LOG.debug("Traversing into source dir: " + child.getPath());
}
workers.put(new WorkRequest<FileStatus>(child, retry));
}
} else {
LOG.error("Giving up on " + child.getPath() +
" after " + retry + " retries.");
}
}
} catch (InterruptedException ie) {
LOG.error("Could not get item from childQueue. Retrying...");
}
}
workers.shutdown();
}
private void writeToFileListingRoot(SequenceFile.Writer fileListWriter,
CopyListingFileStatus fileStatus, Path sourcePathRoot,
DistCpOptions options) throws IOException {
boolean syncOrOverwrite = options.shouldSyncFolder() ||
options.shouldOverwrite();
if (fileStatus.getPath().equals(sourcePathRoot) &&
fileStatus.isDirectory() && syncOrOverwrite) {
// Skip the root-paths when syncOrOverwrite
if (LOG.isDebugEnabled()) {
LOG.debug("Skip " + fileStatus.getPath());
}
return;
}
writeToFileListing(fileListWriter, fileStatus, sourcePathRoot, options);
}
private void writeToFileListing(SequenceFile.Writer fileListWriter,
CopyListingFileStatus fileStatus,
Path sourcePathRoot,
DistCpOptions options) throws IOException {
if (LOG.isDebugEnabled()) {
LOG.debug("REL PATH: " + DistCpUtils.getRelativePath(sourcePathRoot,
fileStatus.getPath()) + ", FULL PATH: " + fileStatus.getPath());
}
FileStatus status = fileStatus;
if (!shouldCopy(fileStatus.getPath(), options)) {
return;
}
fileListWriter.append(new Text(DistCpUtils.getRelativePath(sourcePathRoot,
fileStatus.getPath())), status);
fileListWriter.sync();
if (!fileStatus.isDirectory()) {
totalBytesToCopy += fileStatus.getLen();
} else {
totalDirs++;
}
totalPaths++;
maybePrintStats();
}
}
| |
package com.thaze.peakmatch.processors;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.thaze.peakmatch.EventProcessorConf;
import com.thaze.peakmatch.Util;
import com.thaze.peakmatch.event.Event;
import com.thaze.peakmatch.event.EventException;
import fj.Effect;
import fj.F;
import fj.F2;
import fj.P;
import fj.P2;
import fj.P3;
import fj.data.Array;
import org.jgrapht.UndirectedGraph;
import org.jgrapht.alg.ConnectivityInspector;
import org.jgrapht.graph.DefaultEdge;
import org.jgrapht.graph.SimpleGraph;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* @author srodgers
* created: 14/06/13
*/
/**
 * Clusters events by their normalised frequency-band feature vectors.
 *
 * Two modes, chosen by configuration:
 * - if cluster centres are named in the config, each centre is printed with every
 *   event whose distance to it is within the configured threshold (nearest first);
 * - otherwise a k-nearest-neighbour graph clustering (KNNClusterer) is run over
 *   all events and each connected component is printed as a cluster.
 *
 * @author srodgers
 * created: 14/06/13
 */
public class ClusteringProcessor implements Processor {

	private final EventProcessorConf _conf;

	public ClusteringProcessor(EventProcessorConf conf) throws EventException {
		_conf = conf;
	}

	public void process() throws EventException {

		System.out.println("clustering ...");

		// Centre names come as a comma-separated config value; linked set keeps order.
		final Set<String> centreNames = Sets.newLinkedHashSet(Lists.newArrayList(_conf.getClusterCentres().split(",")));

		final List<EventAndFeatures> eventFeatures = Lists.newArrayList();
		final List<EventAndFeatures> centres = Lists.newArrayList();

		// NOTE(review): this prints the set itself, e.g. "[a, b] centres defined";
		// centreNames.size() was probably intended — confirm.
		System.out.println(centreNames + " centres defined");

		// Load every event once, computing its feature vector; remember those
		// that are named as centres.
		Util.executePerEvent(_conf, new Util.EventAction() {
			@Override
			public void run(Event event) throws EventException {
				EventAndFeatures eaf = new EventAndFeatures(_conf, event);
				eventFeatures.add(eaf);
				if (centreNames.contains(event.getName())){
					centres.add(eaf);
				}
			}
		});

		System.out.println(centres.size() + " centres found");

		if (!centres.isEmpty()){

			// Pairs an event with its distance to the current centre, sortable by distance.
			class EAFScore implements Comparable<EAFScore>{
				final EventAndFeatures eaf;
				final double score;

				EAFScore(EventAndFeatures eaf, double score) {
					this.eaf = eaf;
					this.score = score;
				}

				@Override
				public int compareTo(EAFScore o) {
					return Double.compare(score, o.score); // ASCENDING - distance function ,,, smaller=better
				}
			}

			// Centre mode: for each centre, list all events within the distance threshold.
			for (EventAndFeatures centre: centres){

				List<EAFScore> scores = Lists.newArrayList();
				for (EventAndFeatures eaf: eventFeatures){
					if (eaf == centre)
						continue;

					double score = DISTANCE_FN.f(centre, eaf);
					if (score <= _conf.getClusterCentreThreshold())
						scores.add(new EAFScore(eaf, score));
				}

				Collections.sort(scores);

				System.out.println();
				System.out.println(centre.toString());
				for (EAFScore eafScore: scores){
					System.out.print(eafScore.eaf.toString());
					if (_conf.isPlot1dTiny())
						System.out.println("\t" + Util.NF.format(eafScore.score));
					else
						System.out.println("\n" + "	distance to first: " + Util.NF.format(eafScore.score));
				}
			}

		} else {
			// Clusterer mode: build the k-NN graph over all events.
			// NOTE(review): raw type — eventFeatures.toArray() yields Object[], so this
			// relies on an unchecked conversion; Array.array(eventFeatures.toArray(
			// new EventAndFeatures[0])) would be type-safe.
			Array a = Array.array(eventFeatures.toArray());
			KNNClusterer<EventAndFeatures> clusterer = new KNNClusterer<>(_conf.getClusterK(), _conf.getClusterEta(), DISTANCE_FN);
			// NOTE(review): max cluster size 200 is hard-coded here — consider making it configurable.
			Array<P2<EventAndFeatures,Set<EventAndFeatures>>> clusters = clusterer.cluster(a, 200);

			// Each instance maps to its cluster; 'done' ensures each cluster is printed once.
			Set<EventAndFeatures> done = Sets.newHashSet();
			for (P2<EventAndFeatures,Set<EventAndFeatures>> cluster: clusters){

				EventAndFeatures ef1 = cluster._1();

				if (!done.contains(ef1) && !cluster._2().isEmpty()){
					System.out.println();
					System.out.println(ef1.toString());

					for (EventAndFeatures ef2: cluster._2()){
						System.out.print(ef2.toString());
						double distance = DISTANCE_FN.f(ef1, ef2);
						if (_conf.isPlot1dTiny())
							System.out.println("\t" + Util.NF.format(distance));
						else
							System.out.println("\n" + "	distance to first: " + Util.NF.format(distance));
						done.add(ef2);
					}
				}
				done.add(ef1);
			}
		}
	}

	/**
	 * An event together with its unit-normalised band-mean feature vector.
	 * Identity (equals/hashCode/compareTo) is by event name only.
	 */
	final class EventAndFeatures implements Comparable<EventAndFeatures> {
		final Event _event;
		final Map<Double, Double> _normalisedBands;
		final EventProcessorConf _conf;

		EventAndFeatures(EventProcessorConf conf, Event event){
			_event=event;
			_conf=conf;
			Map<Double, Double> bandMeans = Util.getBandMeans(_event.getD(), conf);

			// normalise to unit vector
			double sumsquares=0d;
			for (double mean: bandMeans.values())
				sumsquares += mean*mean;
			double vectorLength = Math.sqrt(sumsquares);
			// NOTE(review): if all band means are zero, vectorLength is 0 and the division
			// below yields NaN entries — confirm this cannot happen for real events.
			_normalisedBands = Maps.newLinkedHashMap();
			for (Map.Entry<Double, Double> e: bandMeans.entrySet()){
				_normalisedBands.put(e.getKey(), e.getValue() / vectorLength);
			}
		}

		public String toString(){
			return Plot1DProcessor.formatEvent(_event, _normalisedBands, _conf, _conf.isPlot1dTiny());
		}

		@Override
		public boolean equals(Object o) {
			if (this == o) return true;
			if (o == null || getClass() != o.getClass()) return false;
			EventAndFeatures that = (EventAndFeatures) o;
			if (!_event.getName().equals(that._event.getName())) return false;
			return true;
		}

		@Override
		public int hashCode() {
			return _event.getName().hashCode();
		}

		@Override
		public int compareTo(EventAndFeatures o) {
			return _event.getName().compareTo(o._event.getName());
		}
	}

	// Euclidean distance between the two normalised band vectors; bands missing
	// from either side contribute nothing.
	static final F2<EventAndFeatures, EventAndFeatures, Double> EUCLIDIAN_DISTANCE_FN = new F2<EventAndFeatures, EventAndFeatures, Double>() {
		@Override
		public Double f(EventAndFeatures e1, EventAndFeatures e2) {
			double s=0d;
			for (Map.Entry<Double, Double> e: e1._normalisedBands.entrySet()){
				Double e2Val = e2._normalisedBands.get(e.getKey());
				if (null != e2Val){
					double diff = e.getValue() - e2Val;
					s += diff*diff;
				}
			}
			return Math.sqrt(s);
		}
	};

	// Cosine distance (1 - cos theta); vectors are already unit-normalised so the
	// dot product is the cosine directly.
	static final F2<EventAndFeatures, EventAndFeatures, Double> COSINE_DISTANCE_FN = new F2<EventAndFeatures, EventAndFeatures, Double>() {
		@Override
		public Double f(EventAndFeatures e1, EventAndFeatures e2) {
			double cosTheta=0d;
			for (Map.Entry<Double, Double> e: e1._normalisedBands.entrySet()){
				Double e2Val = e2._normalisedBands.get(e.getKey());
				if (null != e2Val)
					cosTheta += e.getValue() * e2Val;
			}
			return 1d-cosTheta;
//			double ret = (2 * Math.acos(cosTheta) / Math.PI);
//			System.out.println(cosTheta + " " + ret);
//			return ret;
		}
	};

	// Distance function used throughout this processor.
	static final F2<EventAndFeatures, EventAndFeatures, Double> DISTANCE_FN = EUCLIDIAN_DISTANCE_FN;

	/**
	 * Shared-nearest-neighbour clustering: builds an undirected graph where each
	 * instance is joined to (up to) its k nearest neighbours with distance below
	 * eta, then treats each connected component as a cluster.
	 *
	 * @param <T> instance type; compared by the supplied distance function
	 */
	public class KNNClusterer<T> {

		private final int k;
		private final double eta;
		private final F2<T, T, Double> distanceFn;

		public KNNClusterer(int k, double eta, F2<T,T,Double> distanceFn) {
			this.k = k;
			this.eta = eta;
			this.distanceFn = distanceFn;
		}

		/**
		 * @param instances all instances to cluster
		 * @param maxClusterSize cap on the number of fellow members reported per instance
		 * @return for each instance, that instance paired with its (possibly empty,
		 *         size-capped) set of fellow cluster members
		 */
		public Array<P2<T, Set<T>>> cluster(Array<T> instances, final int maxClusterSize) {

			// find the graph containing a connected subcomponent corresponding to each cluster
			UndirectedGraph<T, DefaultEdge> graph = graphify(instances);

			// extract the clusters
			final fj.data.List<Set<T>> clusters = fj.data.List.iterableList(new ConnectivityInspector<T, DefaultEdge>(graph).connectedSets());

			// format the output by assigning a (possibly empty) set of fellow members to each instance
			return instances.map( new F<T,P2<T, Set<T>>>(){
				@Override public P2<T, Set<T>> f(final T t) {
					Set<T> cluster = new HashSet<T>();
					Set<T> clusterables = clusters.find( new F<Set<T>, Boolean>(){
						@Override public Boolean f(Set<T> c) {
							return c.contains(t);
						}} )
						.orSome(Collections.<T>emptySet());

					// limit cluster sizes
					for( T instance : clusterables ) {
						if( cluster.size() == maxClusterSize ) { break; }
						if( instance != t ) {
							cluster.add(instance);
						}
					}
					return P.p(t, cluster);
				}} );
		}

		// Builds the k-NN graph: all pairwise distances sorted ascending, then for
		// each instance an edge to each of its k nearest pairs whose distance < eta.
		private UndirectedGraph<T, DefaultEdge> graphify(Array<T> instances) {

			final UndirectedGraph<T, DefaultEdge> graph = new SimpleGraph<T, DefaultEdge>(DefaultEdge.class);
			instances.foreach( new Effect<T>() {
				@Override public void e(T t) { graph.addVertex(t); }} );

			final java.util.List<P3<T,T,Double>> distances = calculateAllDistancePairs(instances);
			Collections.sort(distances, new Comparator<P3<T,T,Double>>(){
				@Override public int compare(P3<T, T, Double> o1, P3<T, T, Double> o2) {
					return o1._3().compareTo(o2._3());
				}});

			// for pairs relating to each instance, add an edge for any of the k nearest pairs if distance for that pair is < eta
			instances.foreach( new Effect<T>(){
				@Override public void e(T t) {
					Iterator<P3<T, T, Double>> it = distances.iterator();
					int count = 0;
					while( it.hasNext() && count < k ) {
						P3<T, T, Double> e = it.next();
						// deliberate reference equality
						if( t == e._1() || t == e._2() ) {
							count += 1;
							// it.remove(); // this is probably more trouble than it is worth
							// list is sorted ascending, so once a pair fails the eta test no later pair can pass
							if( e._3() < eta ) { graph.addEdge(e._1(), e._2()); }
							else { break; }
						}
					}
				}} );

			return graph;
		}

		// All unordered pairs (i < j) with their distances; O(n^2) pairs.
		private java.util.List<P3<T, T, Double>> calculateAllDistancePairs(Array<T> instances) {
			final int n = instances.length();
			// Presized to (n*(n+1))/2, slightly above the exact n*(n-1)/2 pair count.
			ArrayList<P3<T,T,Double>> distances = new ArrayList<P3<T,T,Double>>( (n*(n+1))/2 );
			for( int i = 0; i < n; i++ ) {
				T t1 = instances.get(i);
				for( int j = i + 1; j < n; j++ ) {
					T t2 = instances.get(j);
					distances.add(P.p(t1, t2, distanceFn.f(t1,t2)));
				}
			}
			return distances;
		}
	}
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.diff.tools.util;
import com.intellij.diff.tools.util.base.TextDiffViewerUtil;
import com.intellij.diff.util.DiffDividerDrawUtil;
import com.intellij.diff.util.DiffDrawUtil;
import com.intellij.diff.util.DiffUtil;
import com.intellij.diff.util.LineRange;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.FoldRegion;
import com.intellij.openapi.editor.event.DocumentAdapter;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.ex.DocumentEx;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.editor.ex.FoldingListener;
import com.intellij.openapi.editor.ex.FoldingModelEx;
import com.intellij.openapi.editor.markup.RangeHighlighter;
import com.intellij.openapi.util.BooleanGetter;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.UserDataHolder;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.Function;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.HashSet;
import gnu.trove.TIntFunction;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.awt.*;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
/**
* This class allows to add custom foldings to hide unchanged regions in diff.
* EditorSettings#isAutoCodeFoldingEnabled() should be true, to avoid collisions with language-specific foldings
* (as it's impossible to create partially overlapped folding regions)
*
* @see DiffUtil#setFoldingModelSupport(EditorEx)
*/
public class FoldingModelSupport {
public static final String PLACEHOLDER = " ";
private static final Key<FoldingCache> CACHE_KEY = Key.create("Diff.FoldingUtil.Cache");
protected final int myCount;
@NotNull protected final EditorEx[] myEditors;
@NotNull protected final List<FoldedBlock[]> myFoldings = new ArrayList<>();
private boolean myDuringSynchronize;
private final boolean[] myShouldUpdateLineNumbers;
  /**
   * @param editors    the per-side editors whose foldings this instance manages;
   *                   their number becomes {@code myCount}
   * @param disposable parent disposable bounding the lifetime of all listeners installed here
   */
  public FoldingModelSupport(@NotNull EditorEx[] editors, @NotNull Disposable disposable) {
    myEditors = editors;
    myCount = myEditors.length;
    myShouldUpdateLineNumbers = new boolean[myCount];
    MyDocumentListener documentListener = new MyDocumentListener();
    List<Document> documents = ContainerUtil.map(myEditors, EditorEx::getDocument);
    TextDiffViewerUtil.installDocumentListeners(documentListener, documents, disposable);
    for (int i = 0; i < myCount; i++) {
      if (myCount > 1) {
        // With multiple sides, mirror expand/collapse state changes across editors.
        myEditors[i].getFoldingModel().addListener(new MyFoldingListener(i), disposable);
      }
      // Suppress the gutter number of the first line of a collapsed block (see getLineConvertor).
      myEditors[i].getGutterComponentEx().setLineNumberConvertor(getLineConvertor(i));
    }
  }
//
// Init
//
/*
* Iterator returns ranges of changed lines: start1, end1, start2, end2, ...
*/
  /**
   * (Re)builds the diff foldings from the given changed-line ranges,
   * destroying any previously installed blocks first. Must run on the EDT.
   * Iterator returns ranges of changed lines: start1, end1, start2, end2, ...
   *
   * @param changedLines null (or settings.range == -1) means: remove foldings, add none
   * @param context      optional holder of the previously cached expand/collapse state
   */
  protected void install(@Nullable final Iterator<int[]> changedLines,
                         @Nullable final UserDataHolder context,
                         @NotNull final Settings settings) {
    ApplicationManager.getApplication().assertIsDispatchThread();
    // Highlighters can be dropped outside the batch folding operation.
    for (FoldedBlock folding : getFoldedBlocks()) {
      folding.destroyHighlighter();
    }
    runBatchOperation(() -> {
      for (FoldedBlock folding : getFoldedBlocks()) {
        folding.destroyFolding();
      }
      myFoldings.clear();
      if (changedLines != null && settings.range != -1) {
        FoldingBuilder builder = new FoldingBuilder(context, settings);
        builder.build(changedLines);
      }
    });
    updateLineNumbers(true);
  }
  /**
   * Builds FoldedBlocks for the unchanged regions between the changed-line
   * ranges supplied to {@link #build}, consulting ExpandSuggester (the cached
   * state) for the initial expanded/collapsed state of each range.
   */
  private class FoldingBuilder {
    @NotNull private final Settings mySettings;
    @NotNull private final ExpandSuggester myExpandSuggester;
    // Per-side document line counts, captured once for clamping in bound().
    @NotNull private final int[] myLineCount;
    public FoldingBuilder(@Nullable UserDataHolder context,
                          @NotNull Settings settings) {
      FoldingCache cache = context != null ? context.getUserData(CACHE_KEY) : null;
      myExpandSuggester = new ExpandSuggester(cache, settings.defaultExpanded);
      mySettings = settings;
      myLineCount = new int[myCount];
      for (int i = 0; i < myCount; i++) {
        myLineCount[i] = myEditors[i].getDocument().getLineCount();
      }
    }
    /**
     * Walks the changed ranges; each unchanged region (between the previous
     * change's end, 'last', and the next change's start) becomes a candidate
     * fold range. MIN_VALUE/MAX_VALUE sentinels cover the leading region
     * before the first change and the trailing region after the last one;
     * bound() clamps them to the documents.
     */
    private void build(@NotNull final Iterator<int[]> changedLines) {
      int[] starts = new int[myCount];
      int[] ends = new int[myCount];
      int[] last = new int[myCount];
      for (int i = 0; i < myCount; i++) {
        last[i] = Integer.MIN_VALUE;
      }
      while (changedLines.hasNext()) {
        int[] offsets = changedLines.next();
        for (int i = 0; i < myCount; i++) {
          starts[i] = last[i];
          ends[i] = offsets[i * 2];
          last[i] = offsets[i * 2 + 1];
        }
        addRange(starts, ends);
      }
      for (int i = 0; i < myCount; i++) {
        starts[i] = last[i];
        ends[i] = Integer.MAX_VALUE;
      }
      addRange(starts, ends);
    }
    /**
     * Creates the (possibly nested) fold ranges for one unchanged region.
     * Each successive 'number' shrinks the region by a shift from
     * getRangeShift (not visible here — presumably the context-line count per
     * nesting level; -1 terminates). All ranges produced for one region form
     * one FoldedBlock[] group sharing highlighters.
     */
    private void addRange(int[] starts, int[] ends) {
      List<FoldedBlock> result = new ArrayList<>(3);
      int[] rangeStarts = new int[myCount];
      int[] rangeEnds = new int[myCount];
      for (int number = 0; ; number++) {
        int shift = getRangeShift(mySettings.range, number);
        if (shift == -1) break;
        for (int i = 0; i < myCount; i++) {
          rangeStarts[i] = bound(starts[i] + shift, i);
          rangeEnds[i] = bound(ends[i] - shift, i);
        }
        ContainerUtil.addAllNotNull(result, createRange(rangeStarts, rangeEnds, myExpandSuggester.isExpanded(rangeStarts, rangeEnds)));
      }
      if (result.size() > 0) {
        FoldedBlock[] block = ContainerUtil.toArray(result, new FoldedBlock[result.size()]);
        for (FoldedBlock folding : block) {
          folding.installHighlighter(block);
        }
        myFoldings.add(block);
      }
    }
    /**
     * Creates one FoldedBlock covering [starts[i], ends[i]) on each side.
     * Sides hiding fewer than 2 lines get no region. Returns null when no
     * side produced a region. If any region ends up expanded while collapse
     * was requested, all sides are expanded to stay in sync
     * (runBatchFoldingOperationDoNotCollapseCaret may refuse to collapse).
     */
    @Nullable
    private FoldedBlock createRange(int[] starts, int[] ends, boolean expanded) {
      boolean hasFolding = false;
      FoldRegion[] regions = new FoldRegion[myCount];
      boolean hasExpanded = false; // do not desync on runBatchFoldingOperationDoNotCollapseCaret
      for (int i = 0; i < myCount; i++) {
        if (ends[i] - starts[i] < 2) continue;
        regions[i] = addFolding(myEditors[i], starts[i], ends[i], expanded);
        hasFolding |= regions[i] != null;
        hasExpanded |= regions[i] != null && regions[i].isExpanded();
      }
      if (hasExpanded && !expanded) {
        for (FoldRegion region : regions) {
          if (region != null) region.setExpanded(true);
        }
      }
      return hasFolding ? new FoldedBlock(regions) : null;
    }
    // Clamps a line number to [0, lineCount] for the given side.
    private int bound(int value, int index) {
      return Math.min(Math.max(value, 0), myLineCount[index]);
    }
  }
@Nullable
public static FoldRegion addFolding(@NotNull EditorEx editor, int start, int end, boolean expanded) {
DocumentEx document = editor.getDocument();
final int startOffset = document.getLineStartOffset(start);
final int endOffset = document.getLineEndOffset(end - 1);
FoldRegion value = editor.getFoldingModel().addFoldRegion(startOffset, endOffset, PLACEHOLDER);
if (value != null) value.setExpanded(expanded);
return value;
}
  /**
   * Runs the given task with myDuringSynchronize set, nested inside a batch
   * folding operation of every editor, so that our own fold listeners ignore
   * the resulting events. Each iteration wraps the previously accumulated
   * runnable, so when the outermost wrapper runs, the original task executes
   * inside all editors' batch operations at once.
   */
  private void runBatchOperation(@NotNull Runnable runnable) {
    Runnable lastRunnable = runnable;
    for (EditorEx editor : myEditors) {
      final Runnable finalRunnable = lastRunnable;
      lastRunnable = () -> {
        if (DiffUtil.isFocusedComponent(editor.getComponent())) {
          // Do not move the caret in the editor the user is currently in.
          editor.getFoldingModel().runBatchFoldingOperationDoNotCollapseCaret(finalRunnable);
        }
        else {
          editor.getFoldingModel().runBatchFoldingOperation(finalRunnable);
        }
      };
    }
    myDuringSynchronize = true;
    try {
      lastRunnable.run();
    }
    finally {
      myDuringSynchronize = false;
    }
  }
public void destroy() {
for (FoldedBlock folding : getFoldedBlocks()) {
folding.destroyHighlighter();
}
runBatchOperation(() -> {
for (FoldedBlock folding : getFoldedBlocks()) {
folding.destroyFolding();
}
myFoldings.clear();
});
}
//
// Line numbers
//
private class MyDocumentListener extends DocumentAdapter {
@Override
public void documentChanged(DocumentEvent e) {
if (StringUtil.indexOf(e.getOldFragment(), '\n') != -1 ||
StringUtil.indexOf(e.getNewFragment(), '\n') != -1) {
for (int i = 0; i < myCount; i++) {
if (myEditors[i].getDocument() == e.getDocument()) {
myShouldUpdateLineNumbers[i] = true;
}
}
}
}
}
  /**
   * Returns a gutter line-number convertor for the given side: the first line
   * of a collapsed folded block gets no number (-1); all other lines keep
   * their own number.
   */
  @NotNull
  protected TIntFunction getLineConvertor(final int index) {
    return value -> {
      // Folded blocks cache their line numbers; refresh lazily if documents changed.
      updateLineNumbers(false);
      for (FoldedBlock folding : getFoldedBlocks()) { // TODO: avoid full scan - it could slowdown painting
        int line = folding.getLine(index);
        if (line == -1) continue; // no line for this side (FoldedBlock.getLine — not shown here)
        // Early break assumes blocks are iterated in ascending line order — TODO confirm.
        if (line > value) break;
        FoldRegion region = folding.getRegion(index);
        if (line == value && region != null && !region.isExpanded()) return -1;
      }
      return value;
    };
  }
private void updateLineNumbers(boolean force) {
for (int i = 0; i < myCount; i++) {
if (!myShouldUpdateLineNumbers[i] && !force) continue;
myShouldUpdateLineNumbers[i] = false;
ApplicationManager.getApplication().assertReadAccessAllowed();
for (FoldedBlock folding : getFoldedBlocks()) {
folding.updateLineNumber(i);
}
}
}
//
// Synchronized toggling of ranges
//
  /**
   * Expands or collapses every diff folding in every editor.
   * Guarded by myDuringSynchronize so MyFoldingListener does not echo these
   * programmatic changes back as user-driven synchronization.
   */
  public void expandAll(final boolean expanded) {
    if (myDuringSynchronize) return;
    myDuringSynchronize = true;
    try {
      for (int i = 0; i < myCount; i++) {
        final int index = i;
        final FoldingModelEx model = myEditors[index].getFoldingModel();
        model.runBatchFoldingOperation(() -> {
          for (FoldedBlock folding : getFoldedBlocks()) {
            FoldRegion region = folding.getRegion(index);
            if (region != null) region.setExpanded(expanded);
          }
        });
      }
    }
    finally {
      myDuringSynchronize = false;
    }
  }
  /**
   * Propagates user-driven expand/collapse of a fold region on one side to the
   * paired regions of the same FoldedBlock on all other sides. Programmatic
   * changes are filtered out via myDuringSynchronize.
   */
  private class MyFoldingListener implements FoldingListener {
    // Side (editor index) this listener is attached to.
    private final int myIndex;
    // Regions changed since the last onFoldProcessingEnd; processed in one batch.
    @NotNull Set<FoldRegion> myModifiedRegions = new HashSet<>();
    public MyFoldingListener(int index) {
      myIndex = index;
    }
    @Override
    public void onFoldRegionStateChange(@NotNull FoldRegion region) {
      // Ignore changes we made ourselves while synchronizing.
      if (myDuringSynchronize) return;
      myModifiedRegions.add(region);
    }
    @Override
    public void onFoldProcessingEnd() {
      if (myModifiedRegions.isEmpty()) return;
      myDuringSynchronize = true;
      try {
        for (int i = 0; i < myCount; i++) {
          if (i == myIndex) continue;
          final int pairedIndex = i;
          myEditors[pairedIndex].getFoldingModel().runBatchFoldingOperation(() -> {
            for (FoldedBlock folding : getFoldedBlocks()) {
              FoldRegion region = folding.getRegion(myIndex);
              if (region == null || !region.isValid()) continue;
              if (myModifiedRegions.contains(region)) {
                // Copy this side's new expanded state to the paired side.
                FoldRegion pairedRegion = folding.getRegion(pairedIndex);
                if (pairedRegion == null || !pairedRegion.isValid()) continue;
                pairedRegion.setExpanded(region.isExpanded());
              }
            }
          });
        }
        myModifiedRegions.clear();
      }
      finally {
        myDuringSynchronize = false;
      }
    }
  }
//
// Highlighting
//
  /**
   * Paints fold separators on the divider between two sides: for each group of
   * folded blocks, reports the start lines of the first block whose regions are
   * collapsed and valid on both sides (at most one separator per group — note
   * the break after the first match).
   */
  protected class MyPaintable implements DiffDividerDrawUtil.DividerSeparatorPaintable {
    private final int myLeft;
    private final int myRight;
    public MyPaintable(int left, int right) {
      myLeft = left;
      myRight = right;
    }
    @Override
    public void process(@NotNull Handler handler) {
      for (FoldedBlock[] block : myFoldings) {
        for (FoldedBlock folding : block) {
          FoldRegion region1 = folding.getRegion(myLeft);
          FoldRegion region2 = folding.getRegion(myRight);
          if (region1 == null || !region1.isValid() || region1.isExpanded()) continue;
          if (region2 == null || !region2.isValid() || region2.isExpanded()) continue;
          int line1 = myEditors[myLeft].getDocument().getLineNumber(region1.getStartOffset());
          int line2 = myEditors[myRight].getDocument().getLineNumber(region2.getStartOffset());
          if (!handler.process(line1, line2)) return;
          break; // one separator per block group
        }
      }
    }
    public void paintOnDivider(@NotNull Graphics2D gg, @NotNull Component divider) {
      DiffDividerDrawUtil.paintSeparators(gg, divider.getWidth(), myEditors[myLeft], myEditors[myRight], this);
    }
  }
//
// Cache
//
/*
* To Cache:
* For each block of foldings (foldings for a single unchanged block in diff) we remember biggest expanded and biggest collapsed range.
*
* From Cache:
* We look into cache while building ranges, trying to find corresponding range in cached state.
* "Corresponding range" now is just smallest covering range.
*
* If document was modified since cache creation, this will lead to strange results. But this is a rare case, and we can't do anything with it.
*/
/**
 * Decides whether a newly built folded range should start expanded, based on the
 * state cached from a previous instance of this diff (see the "Cache" comment above).
 */
private class ExpandSuggester {
  @Nullable private final FoldingCache myCache;
  // Per-side forward cursor into myCache.ranges: queries are assumed to arrive in
  // ascending line order, so each cached list is only ever walked forward.
  private final int[] myIndex = new int[myCount];
  private final boolean myDefault;

  public ExpandSuggester(@Nullable FoldingCache cache, boolean defaultValue) {
    myCache = cache;
    myDefault = defaultValue;
  }

  /**
   * Returns the remembered expanded/collapsed state for the folding given by per-side
   * start/end line pairs. Falls back to the default when there is no usable cache,
   * the cache was built with a different default, or the sides disagree.
   */
  public boolean isExpanded(int[] starts, int[] ends) {
    if (myCache == null || myCache.ranges.length != myCount) return myDefault;
    if (myDefault != myCache.expandByDefault) return myDefault;
    Boolean state = null;
    for (int index = 0; index < myCount; index++) {
      Boolean sideState = getCachedExpanded(starts[index], ends[index], index);
      if (sideState == null) continue;
      if (state == null) {
        state = sideState;
        continue;
      }
      // Conflicting answers from different sides - use the default.
      if (state != sideState) return myDefault;
    }
    return state == null ? myDefault : state;
  }

  /**
   * Looks up the cached state covering [start, end) on one side, or null when no
   * cached range covers it.
   */
  @Nullable
  private Boolean getCachedExpanded(int start, int end, int index) {
    if (start == end) return null;
    //noinspection ConstantConditions
    List<FoldedRangeState> ranges = myCache.ranges[index];
    for (; myIndex[index] < ranges.size(); myIndex[index]++) {
      FoldedRangeState range = ranges.get(myIndex[index]);
      LineRange lineRange = range.getLineRange();
      // Cached range ends before ours starts - skip it permanently (cursor advances).
      if (lineRange.end <= start) continue;
      if (lineRange.contains(start, end)) {
        // Smallest covering cached state wins; collapsed is checked before expanded.
        if (range.collapsed != null && range.collapsed.contains(start, end)) return false;
        if (range.expanded != null && range.expanded.contains(start, end)) return true;
        assert false : "Invalid LineRange" + range.expanded + ", " + range.collapsed + ", " + new LineRange(start, end);
      }
      if (lineRange.start >= start) return null; // we could need current range for enclosing next-level foldings
    }
    return null;
  }
}
/**
 * Stores the current folding state into the given context so a rebuilt viewer
 * can restore it later.
 */
public void updateContext(@NotNull UserDataHolder context, @NotNull final Settings settings) {
  if (!myFoldings.isEmpty()) {
    // An empty state means nothing was built yet - do not rewrite cache by initial state.
    context.putUserData(CACHE_KEY, getFoldingCache(settings));
  }
}
/**
 * Snapshots the current fold states of all sides under a read action.
 */
@NotNull
private FoldingCache getFoldingCache(@NotNull final Settings settings) {
  return ReadAction.compute(() -> {
    List<FoldedRangeState>[] states = new List[myCount];
    for (int side = 0; side < myCount; side++) {
      states[side] = getFoldedRanges(side, settings);
    }
    return new FoldingCache(states, settings.defaultExpanded);
  });
}
/**
 * Collects, for each folding group on the given side, the biggest expanded range
 * (the first expanded region encountered) and the outermost collapsed range
 * (the first collapsed region encountered). Groups with neither are skipped.
 */
@NotNull
private List<FoldedRangeState> getFoldedRanges(int index, @NotNull Settings settings) {
  ApplicationManager.getApplication().assertReadAccessAllowed();
  List<FoldedRangeState> states = new ArrayList<>();
  DocumentEx document = myEditors[index].getDocument();
  for (FoldedBlock[] group : myFoldings) {
    LineRange expanded = null;
    LineRange collapsed = null;
    for (FoldedBlock folding : group) {
      FoldRegion region = folding.getRegion(index);
      if (region == null || !region.isValid()) continue;
      int startLine = document.getLineNumber(region.getStartOffset());
      int endLine = document.getLineNumber(region.getEndOffset()) + 1;
      if (!region.isExpanded()) {
        // First collapsed region is the outermost one - no need to look further.
        collapsed = new LineRange(startLine, endLine);
        break;
      }
      if (expanded == null) {
        expanded = new LineRange(startLine, endLine);
      }
    }
    if (expanded != null || collapsed != null) {
      states.add(new FoldedRangeState(expanded, collapsed));
    }
  }
  return states;
}
/**
 * Immutable snapshot of the fold states of a diff, kept in the user-data context
 * between viewer rebuilds: one list of {@link FoldedRangeState} per side, plus the
 * default-expanded setting it was recorded with.
 */
private static class FoldingCache {
  public final boolean expandByDefault;
  @NotNull public final List<FoldedRangeState>[] ranges;

  public FoldingCache(@NotNull List<FoldedRangeState>[] ranges, boolean expandByDefault) {
    this.expandByDefault = expandByDefault;
    this.ranges = ranges;
  }
}
/**
 * Remembered state of one folding group on one side: the biggest expanded range
 * and/or the biggest collapsed range. At least one of the two is always non-null.
 */
private static class FoldedRangeState {
  @Nullable public final LineRange expanded;
  @Nullable public final LineRange collapsed;

  public FoldedRangeState(@Nullable LineRange expanded, @Nullable LineRange collapsed) {
    assert expanded != null || collapsed != null;
    this.expanded = expanded;
    this.collapsed = collapsed;
  }

  /** The widest range recorded for this group (expanded wins when both are present). */
  @NotNull
  public LineRange getLineRange() {
    if (expanded != null) {
      return expanded;
    }
    //noinspection ConstantConditions
    return collapsed;
  }
}
//
// Impl
//
/**
 * Iterates over all FoldedBlocks, group by group, in order. The returned Iterable
 * creates a fresh iterator on each call.
 */
@NotNull
private Iterable<FoldedBlock> getFoldedBlocks() {
  return () -> new Iterator<FoldedBlock>() {
    private int myGroupIndex = 0;
    private int myBlockIndex = 0;

    @Override
    public boolean hasNext() {
      return myGroupIndex < myFoldings.size();
    }

    @Override
    public FoldedBlock next() {
      // Honor the Iterator contract: previously an exhausted iterator failed with
      // an ArrayIndexOutOfBoundsException instead of NoSuchElementException.
      if (!hasNext()) throw new java.util.NoSuchElementException();
      FoldedBlock[] group = myFoldings.get(myGroupIndex);
      FoldedBlock folding = group[myBlockIndex];
      if (group.length > myBlockIndex + 1) {
        myBlockIndex++;
      }
      else {
        // Last folding of the group - move on to the next group.
        myGroupIndex++;
        myBlockIndex = 0;
      }
      return folding;
    }

    @Override
    public void remove() {
      throw new UnsupportedOperationException();
    }
  };
}
/**
 * One synchronized folding: a FoldRegion per editor (entries may be null on sides
 * that have no region), plus the line-separator highlighters drawn for it.
 */
protected class FoldedBlock {
  @NotNull private final FoldRegion[] myRegions;
  // Cached start line of each region, refreshed via updateLineNumber();
  // -1 when the region is missing or no longer valid.
  @NotNull private final int[] myLines;
  @NotNull private final List<RangeHighlighter> myHighlighters = new ArrayList<>(myCount);

  public FoldedBlock(@NotNull FoldRegion[] regions) {
    assert regions.length == myCount;
    myRegions = regions;
    myLines = new int[myCount];
  }

  /**
   * Creates line-separator highlighters for every valid region of this block.
   * {@code block} is the whole group this folding belongs to; it is consulted by
   * the visibility condition (see getHighlighterCondition).
   */
  public void installHighlighter(@NotNull final FoldedBlock[] block) {
    assert myHighlighters.isEmpty();
    for (int i = 0; i < myCount; i++) {
      FoldRegion region = myRegions[i];
      if (region == null || !region.isValid()) continue;
      myHighlighters.addAll(DiffDrawUtil.createLineSeparatorHighlighter(myEditors[i],
                                                                        region.getStartOffset(), region.getEndOffset(),
                                                                        getHighlighterCondition(block, i)));
    }
  }

  /** Removes this block's fold regions from their editors. */
  public void destroyFolding() {
    for (int i = 0; i < myCount; i++) {
      FoldRegion region = myRegions[i];
      if (region != null) myEditors[i].getFoldingModel().removeFoldRegion(region);
    }
  }

  /** Disposes and forgets all separator highlighters created by installHighlighter(). */
  public void destroyHighlighter() {
    for (RangeHighlighter highlighter : myHighlighters) {
      highlighter.dispose();
    }
    myHighlighters.clear();
  }

  /** Recomputes the cached start line for the region on the given side. */
  public void updateLineNumber(int index) {
    FoldRegion region = myRegions[index];
    if (region == null || !region.isValid()) {
      myLines[index] = -1;
    }
    else {
      myLines[index] = myEditors[index].getDocument().getLineNumber(region.getStartOffset());
    }
  }

  @Nullable
  public FoldRegion getRegion(int index) {
    return myRegions[index];
  }

  /** Cached start line for the given side; -1 when the region is missing/invalid. */
  public int getLine(int index) {
    return myLines[index];
  }

  /**
   * Separator visibility: shown only when this region is collapsed, folding is
   * enabled, and no enclosing folding earlier in the group is already collapsed.
   * Note: {@code this} inside the lambda still refers to this FoldedBlock.
   */
  @NotNull
  private BooleanGetter getHighlighterCondition(@NotNull final FoldedBlock[] block, final int index) {
    return () -> {
      if (!myEditors[index].getFoldingModel().isFoldingEnabled()) return false;
      for (FoldedBlock folding : block) {
        FoldRegion region = folding.getRegion(index);
        boolean visible = region != null && region.isValid() && !region.isExpanded();
        if (folding == this) return visible;
        if (visible) return false; // do not paint separator, if 'parent' folding is collapsed
      }
      return false;
    };
  }
}
//
// Helpers
//
/*
* number - depth of folding insertion (from zero)
* return: number of context lines. ('-1' - end)
*/
/**
 * Number of context lines kept for the folding at nesting depth {@code number}:
 * the range doubles at each of the first three levels; deeper levels return -1 (end).
 */
private static int getRangeShift(int range, int number) {
  // range * 2^number for the supported depths 0..2, otherwise the end marker.
  return (number >= 0 && number <= 2) ? range << number : -1;
}
/**
 * Lazily maps a list to an iterator of transformed values; returns null for a
 * null input list (see the contract annotation).
 */
@Nullable
@Contract("null, _ -> null; !null, _ -> !null")
protected static <T, V> Iterator<V> map(@Nullable final List<T> list, @NotNull final Function<T, V> mapping) {
  if (list == null) return null;
  final Iterator<T> it = list.iterator();
  return new Iterator<V>() {
    @Override
    public boolean hasNext() {
      return it.hasNext();
    }

    @Override
    public V next() {
      return mapping.fun(it.next());
    }

    @Override
    public void remove() {
      // Previously a silent no-op, which violates the Iterator contract and hides
      // caller bugs; fail loudly instead (consistent with getFoldedBlocks()).
      throw new UnsupportedOperationException();
    }
  };
}
/**
 * Immutable folding settings: the number of context lines kept around changes,
 * and whether foldings start expanded by default.
 */
public static class Settings {
  public final int range;
  public final boolean defaultExpanded;

  public Settings(int range, boolean defaultExpanded) {
    this.defaultExpanded = defaultExpanded;
    this.range = range;
  }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sshd.client.session;
import java.io.IOException;
import java.net.SocketAddress;
import java.security.KeyPair;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.sshd.ClientChannel;
import org.apache.sshd.ClientSession;
import org.apache.sshd.client.ClientFactoryManager;
import org.apache.sshd.client.ScpClient;
import org.apache.sshd.client.ServerKeyVerifier;
import org.apache.sshd.client.SftpClient;
import org.apache.sshd.client.auth.deprecated.UserAuth;
import org.apache.sshd.client.auth.deprecated.UserAuthAgent;
import org.apache.sshd.client.auth.deprecated.UserAuthKeyboardInteractive;
import org.apache.sshd.client.auth.deprecated.UserAuthPassword;
import org.apache.sshd.client.auth.deprecated.UserAuthPublicKey;
import org.apache.sshd.client.channel.ChannelDirectTcpip;
import org.apache.sshd.client.channel.ChannelExec;
import org.apache.sshd.client.channel.ChannelShell;
import org.apache.sshd.client.channel.ChannelSubsystem;
import org.apache.sshd.client.future.AuthFuture;
import org.apache.sshd.client.future.DefaultAuthFuture;
import org.apache.sshd.client.scp.DefaultScpClient;
import org.apache.sshd.client.sftp.DefaultSftpClient;
import org.apache.sshd.common.KeyPairProvider;
import org.apache.sshd.common.Service;
import org.apache.sshd.common.ServiceFactory;
import org.apache.sshd.common.SessionListener;
import org.apache.sshd.common.SshConstants;
import org.apache.sshd.common.SshException;
import org.apache.sshd.common.SshdSocketAddress;
import org.apache.sshd.common.io.IoSession;
import org.apache.sshd.common.session.AbstractSession;
import org.apache.sshd.common.session.ConnectionService;
import org.apache.sshd.common.util.Buffer;
/**
 * Client-side SSH session. The constructor wires up the configured services, sends
 * the client identification string and kicks off the initial key exchange; callers
 * then use one of the auth*() methods to authenticate and the create*Channel()
 * methods (or the SCP/SFTP/port-forwarding helpers) to work over the session.
 * All protocol state transitions are serialized on the inherited {@code lock}.
 *
 * @author <a href="mailto:dev@mina.apache.org">Apache MINA SSHD Project</a>
 */
public class ClientSessionImpl extends AbstractSession implements ClientSession {

    /**
     * For clients to store their own metadata
     */
    private Map<Object, Object> metadataMap = new HashMap<Object, Object>();

    // TODO: clean service support a bit
    // Set once the first SSH_MSG_SERVICE_REQUEST has gone out, so later key
    // re-exchanges do not repeat it.
    private boolean initialServiceRequestSent;
    private ServiceFactory currentServiceFactory;
    // Optional second service (and its factory) promoted by switchToNextService(),
    // e.g. from user-auth to the connection service.
    private Service nextService;
    private ServiceFactory nextServiceFactory;
    // Outcome of the current authentication attempt; replaced on each new attempt.
    protected AuthFuture authFuture;

    public ClientSessionImpl(ClientFactoryManager client, IoSession session) throws Exception {
        super(false, client, session);
        log.info("Client session created");
        // Need to set the initial service early as calling code likes to start trying to
        // manipulate it before the connection has even been established. For instance, to
        // set the authPassword.
        List<ServiceFactory> factories = client.getServiceFactories();
        if (factories == null || factories.isEmpty() || factories.size() > 2) {
            throw new IllegalArgumentException("One or two services must be configured");
        }
        currentServiceFactory = factories.get(0);
        currentService = currentServiceFactory.create(this);
        if (factories.size() > 1) {
            nextServiceFactory = factories.get(1);
            nextService = nextServiceFactory.create(this);
        } else {
            nextServiceFactory = null;
        }
        // Start with a non-authenticated future so waitFor() has a defined state
        // before the first real authentication attempt replaces it.
        authFuture = new DefaultAuthFuture(lock);
        authFuture.setAuthed(false);
        // Kick off the protocol: identification string first, then the initial KEX.
        sendClientIdentification();
        kexState.set(KEX_STATE_INIT);
        sendKexInit();
    }

    /** Returns the known services: the current one plus the pending next one, if any. */
    protected Service[] getServices() {
        Service[] services;
        if (nextService != null) {
            services = new Service[] { currentService, nextService };
        } else if (currentService != null) {
            services = new Service[] { currentService };
        } else {
            services = new Service[0];
        }
        return services;
    }

    public ClientFactoryManager getFactoryManager() {
        return (ClientFactoryManager) factoryManager;
    }

    // Password (String) and public-key (KeyPair) identities tried in order by auth().
    private final List<Object> identities = new ArrayList<Object>();

    public void addPasswordIdentity(String password) {
        identities.add(password);
    }

    public void addPublicKeyIdentity(KeyPair key) {
        identities.add(key);
    }

    /**
     * Starts authentication with the previously registered identities. Requires the
     * username to have been set when the session was created.
     */
    public AuthFuture auth() throws IOException {
        if (username == null) {
            throw new IllegalStateException("No username specified when the session was created");
        }
        synchronized (lock) {
            return authFuture = getUserAuthService().auth(identities, nextServiceName());
        }
    }

    public AuthFuture authAgent(String user) throws IOException {
        return tryAuth(user, new UserAuthAgent(this, nextServiceName()));
    }

    public AuthFuture authPassword(String user, String password) throws IOException {
        return tryAuth(user, new UserAuthPassword(this, nextServiceName(), password));
    }

    public AuthFuture authInteractive(String user, String password) throws IOException {
        return tryAuth(user, new UserAuthKeyboardInteractive(this, nextServiceName(), password));
    }

    public AuthFuture authPublicKey(String user, KeyPair key) throws IOException {
        return tryAuth(user, new UserAuthPublicKey(this, nextServiceName(), key));
    }

    // Common path of the single-method (deprecated) auth entry points above.
    private AuthFuture tryAuth(String user, UserAuth auth) throws IOException {
        this.username = user;
        synchronized (lock) {
            return authFuture = getUserAuthService().auth(auth);
        }
    }

    // Name of the service to request once authentication succeeds.
    // NOTE(review): assumes nextServiceFactory is still set - this would NPE if
    // called after switchToNextService(); confirm callers only use it while authenticating.
    private String nextServiceName() {
        synchronized (lock) {
            return nextServiceFactory.getName();
        }
    }

    /** Promotes the pending next service to current and starts it. */
    protected void switchToNextService() throws IOException {
        synchronized (lock) {
            if (nextService == null) {
                throw new IllegalStateException("No service available");
            }
            currentServiceFactory = nextServiceFactory;
            currentService = nextService;
            nextServiceFactory = null;
            nextService = null;
            currentService.start();
        }
    }

    public ClientChannel createChannel(String type) throws IOException {
        return createChannel(type, null);
    }

    /**
     * Creates a channel of the given type; {@code subType} carries the exec command
     * or subsystem name where applicable.
     */
    public ClientChannel createChannel(String type, String subType) throws IOException {
        if (ClientChannel.CHANNEL_SHELL.equals(type)) {
            return createShellChannel();
        } else if (ClientChannel.CHANNEL_EXEC.equals(type)) {
            return createExecChannel(subType);
        } else if (ClientChannel.CHANNEL_SUBSYSTEM.equals(type)) {
            return createSubsystemChannel(subType);
        } else {
            throw new IllegalArgumentException("Unsupported channel type " + type);
        }
    }

    public ChannelShell createShellChannel() throws IOException {
        ChannelShell channel = new ChannelShell();
        getConnectionService().registerChannel(channel);
        return channel;
    }

    public ChannelExec createExecChannel(String command) throws IOException {
        ChannelExec channel = new ChannelExec(command);
        getConnectionService().registerChannel(channel);
        return channel;
    }

    public ChannelSubsystem createSubsystemChannel(String subsystem) throws IOException {
        ChannelSubsystem channel = new ChannelSubsystem(subsystem);
        getConnectionService().registerChannel(channel);
        return channel;
    }

    public ChannelDirectTcpip createDirectTcpipChannel(SshdSocketAddress local, SshdSocketAddress remote) throws IOException {
        ChannelDirectTcpip channel = new ChannelDirectTcpip(local, remote);
        getConnectionService().registerChannel(channel);
        return channel;
    }

    private ClientUserAuthService getUserAuthService() {
        return findService(ClientUserAuthService.class);
    }

    private ConnectionService getConnectionService() {
        return findService(ConnectionService.class);
    }

    // Looks a service up among the current and pending-next services.
    private <T> T findService(Class<T> clazz) {
        if (clazz.isInstance(currentService)) {
            return clazz.cast(currentService);
        }
        if (clazz.isInstance(nextService)) {
            return clazz.cast(nextService);
        }
        throw new IllegalStateException("Attempted to access unknown service " + clazz.getSimpleName());
    }

    public ScpClient createScpClient() {
        return new DefaultScpClient(this);
    }

    public SftpClient createSftpClient() throws IOException {
        return new DefaultSftpClient(this);
    }

    public SshdSocketAddress startLocalPortForwarding(SshdSocketAddress local, SshdSocketAddress remote) throws IOException {
        return getConnectionService().getTcpipForwarder().startLocalPortForwarding(local, remote);
    }

    public void stopLocalPortForwarding(SshdSocketAddress local) throws IOException {
        getConnectionService().getTcpipForwarder().stopLocalPortForwarding(local);
    }

    public SshdSocketAddress startRemotePortForwarding(SshdSocketAddress remote, SshdSocketAddress local) throws IOException {
        return getConnectionService().getTcpipForwarder().startRemotePortForwarding(remote, local);
    }

    public void stopRemotePortForwarding(SshdSocketAddress remote) throws IOException {
        getConnectionService().getTcpipForwarder().stopRemotePortForwarding(remote);
    }

    // Incoming packets are processed under the session lock, serializing them with
    // the state changes made by the methods above.
    protected void handleMessage(Buffer buffer) throws Exception {
        synchronized (lock) {
            super.handleMessage(buffer);
        }
    }

    /**
     * Blocks until one of the conditions in {@code mask} (CLOSED / AUTHED / WAIT_AUTH)
     * holds or the timeout expires ({@code timeout <= 0} waits forever). Returns the
     * bitmask of conditions that hold, possibly including TIMEOUT.
     */
    public int waitFor(int mask, long timeout) {
        long t = 0;
        synchronized (lock) {
            for (;;) {
                int cond = 0;
                if (closeFuture.isClosed()) {
                    cond |= ClientSession.CLOSED;
                }
                if (authed) { // authFuture.isSuccess()
                    cond |= AUTHED;
                }
                if (authFuture.isFailure()) {
                    cond |= WAIT_AUTH;
                }
                if ((cond & mask) != 0) {
                    return cond;
                }
                if (timeout > 0) {
                    if (t == 0) {
                        // First pass: compute the absolute deadline.
                        t = System.currentTimeMillis() + timeout;
                    } else {
                        // Later passes (spurious wakeups): recompute the remaining time.
                        timeout = t - System.currentTimeMillis();
                        if (timeout <= 0) {
                            cond |= TIMEOUT;
                            return cond;
                        }
                    }
                }
                try {
                    if (timeout > 0) {
                        lock.wait(timeout);
                    } else {
                        lock.wait();
                    }
                } catch (InterruptedException e) {
                    // Ignore
                }
            }
        }
    }

    // Reads and validates the server identification string; returns false until the
    // full line has arrived in the buffer.
    protected boolean readIdentification(Buffer buffer) throws IOException {
        serverVersion = doReadIdentification(buffer);
        if (serverVersion == null) {
            return false;
        }
        log.info("Server version string: {}", serverVersion);
        if (!(serverVersion.startsWith("SSH-2.0-") || serverVersion.startsWith("SSH-1.99-"))) {
            throw new SshException(SshConstants.SSH2_DISCONNECT_PROTOCOL_VERSION_NOT_SUPPORTED,
                                   "Unsupported protocol version: " + serverVersion);
        }
        return true;
    }

    private void sendClientIdentification() {
        clientVersion = "SSH-2.0-" + getFactoryManager().getVersion();
        sendIdentification(clientVersion);
    }

    // Sends our KEX proposal, offering ssh-rsa and ssh-dss host key algorithms.
    protected void sendKexInit() throws IOException {
        clientProposal = createProposal(KeyPairProvider.SSH_RSA + "," + KeyPairProvider.SSH_DSS);
        I_C = sendKexInit(clientProposal);
    }

    protected void receiveKexInit(Buffer buffer) throws IOException {
        serverProposal = new String[SshConstants.PROPOSAL_MAX];
        I_S = receiveKexInit(buffer, serverProposal);
    }

    @Override
    protected void checkKeys() throws SshException {
        // Delegate the trust decision on the server host key to the configured verifier.
        ServerKeyVerifier serverKeyVerifier = getFactoryManager().getServerKeyVerifier();
        SocketAddress remoteAddress = ioSession.getRemoteAddress();
        if (!serverKeyVerifier.verifyServerKey(this, remoteAddress, kex.getServerKey())) {
            throw new SshException("Server key did not validate");
        }
    }

    @Override
    protected void sendEvent(SessionListener.Event event) throws IOException {
        if (event == SessionListener.Event.KeyEstablished) {
            // Key exchange finished - the first service can now be requested.
            sendInitialServiceRequest();
        }
        // Wake up anyone blocked in waitFor().
        synchronized (lock) {
            lock.notifyAll();
        }
        super.sendEvent(event);
    }

    protected void sendInitialServiceRequest() throws IOException {
        if (initialServiceRequestSent) {
            return;
        }
        initialServiceRequestSent = true;
        log.debug("Send SSH_MSG_SERVICE_REQUEST for {}", currentServiceFactory.getName());
        Buffer request = createBuffer(SshConstants.SSH_MSG_SERVICE_REQUEST);
        request.putString(currentServiceFactory.getName());
        writePacket(request);
        // Assuming that MINA-SSHD only implements "explicit server authentication" it is permissible
        // for the client's service to start sending data before the service-accept has been received.
        // If "implicit authentication" were to ever be supported, then this would need to be
        // called after service-accept comes back. See SSH-TRANSPORT.
        currentService.start();
    }

    @Override
    public void startService(String name) throws Exception {
        // SSH_MSG_SERVICE_REQUEST handling is a server-side concern.
        throw new IllegalStateException("Starting services is not supported on the client side");
    }

    @Override
    public void resetIdleTimeout() {
        // Intentionally empty: no idle-timeout handling on the client side.
    }

    public Map<Object, Object> getMetadataMap() {
        return metadataMap;
    }
}
| |
/*
Licensed to Diennea S.r.l. under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. Diennea S.r.l. licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package majordodo.task;
import majordodo.clientfacade.SubmitTaskResult;
import majordodo.clientfacade.TaskStatusView;
import majordodo.executors.TaskExecutor;
import majordodo.network.netty.NettyBrokerLocator;
import majordodo.network.netty.NettyChannelAcceptor;
import majordodo.worker.WorkerCore;
import majordodo.worker.WorkerCoreConfiguration;
import majordodo.worker.WorkerStatusListener;
import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.FileVisitor;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.logging.ConsoleHandler;
import java.util.logging.Level;
import java.util.logging.SimpleFormatter;
import majordodo.clientfacade.AddTaskRequest;
import org.junit.After;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import org.junit.Before;
import org.junit.Test;
/**
 * Verifies that a worker which requires SSL certificate validation
 * ({@code setSslUnsecure(false)}) refuses to connect to a broker using an
 * unverifiable SSL certificate, and connects once unsecure SSL is allowed again.
 *
 * @author enrico.olivelli
 */
public class DoNotConnectUnsecureBrokerTest {

    // Temporary directory for broker data; deleted after each test.
    protected Path workDir;

    @After
    public void deleteWorkdir() throws Exception {
        if (workDir != null) {
            // Recursively delete the work directory: files first, then the directories.
            Files.walkFileTree(workDir, new FileVisitor<Path>() {

                @Override
                public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                    Files.delete(file);
                    return FileVisitResult.CONTINUE;
                }

                @Override
                public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
                    Files.delete(dir);
                    return FileVisitResult.CONTINUE;
                }

                @Override
                public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
                    return FileVisitResult.CONTINUE;
                }

                @Override
                public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException {
                    return FileVisitResult.CONTINUE;
                }
            });
        }
    }

    @Before
    public void setupLogger() throws Exception {
        Level level = Level.INFO;
        // Surface exceptions from background threads in the test output.
        Thread.setDefaultUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {

            @Override
            public void uncaughtException(Thread t, Throwable e) {
                System.err.println("uncaughtException from thread " + t.getName() + ": " + e);
                e.printStackTrace();
            }
        });
        // Replace the default JUL configuration with a single console handler.
        java.util.logging.LogManager.getLogManager().reset();
        ConsoleHandler ch = new ConsoleHandler();
        ch.setLevel(level);
        SimpleFormatter f = new SimpleFormatter();
        ch.setFormatter(f);
        java.util.logging.Logger.getLogger("").setLevel(level);
        java.util.logging.Logger.getLogger("").addHandler(ch);
    }

    // Assigns each task the group configured for its user in groupsMap (0 when unknown).
    protected TaskPropertiesMapperFunction createTaskPropertiesMapperFunction() {
        return (long taskid, String taskType, String userid) -> {
            int group1 = groupsMap.getOrDefault(userid, 0);
            return new TaskProperties(group1, null);
        };
    }

    protected Map<String, Integer> groupsMap = new HashMap<>();

    private static final String TASKTYPE_MYTYPE = "mytype";
    private static final String userId = "queue1";
    private static final int group = 12345;

    @Before
    public void before() throws Exception {
        groupsMap.clear();
        groupsMap.put(userId, group);
    }

    @Test
    public void workerWillNeverConnectTest() throws Exception {
        Path mavenTargetDir = Paths.get("target").toAbsolutePath();
        workDir = Files.createTempDirectory(mavenTargetDir, "test" + System.nanoTime());
        long taskId;
        String workerId = "abc";
        String taskParams = "param";
        final String SLOTID = "myslot";
        // startAsWritable a broker and request a task, with slot
        try (Broker broker = new Broker(new BrokerConfiguration(), new MemoryCommitLog(), new TasksHeap(1000, createTaskPropertiesMapperFunction()));) {
            broker.startAsWritable();
            // Broker endpoint is configured with SSL enabled.
            try (NettyChannelAcceptor server = new NettyChannelAcceptor(broker.getAcceptor());) {
                server.setSsl(true);
                server.start();
                try (NettyBrokerLocator locator = new NettyBrokerLocator(server.getHost(), server.getPort(), server.isSsl())) {
                    // Require certificate validation: the connection is expected to fail.
                    locator.setSslUnsecure(false);
                    CountDownLatch connectedLatch = new CountDownLatch(1);
                    CountDownLatch connectionErrorLatch = new CountDownLatch(1);
                    CountDownLatch disconnectedLatch = new CountDownLatch(1);
                    WorkerStatusListener listener = new WorkerStatusListener() {

                        @Override
                        public void connectionEvent(String event, WorkerCore core) {
                            if (event.equals(WorkerStatusListener.EVENT_CONNECTED)) {
                                connectedLatch.countDown();
                            }
                            if (event.equals(WorkerStatusListener.EVENT_CONNECTION_ERROR)) {
                                connectionErrorLatch.countDown();
                            }
                            if (event.equals(WorkerStatusListener.EVENT_DISCONNECTED)) {
                                disconnectedLatch.countDown();
                            }
                        }
                    };
                    Map<String, Integer> tags = new HashMap<>();
                    tags.put(TASKTYPE_MYTYPE, 1);
                    WorkerCoreConfiguration config = new WorkerCoreConfiguration();
                    config.setMaxPendingFinishedTaskNotifications(1);
                    config.setWorkerId(workerId);
                    config.setMaxThreadsByTaskType(tags);
                    config.setGroups(Arrays.asList(group));
                    try (WorkerCore core = new WorkerCore(config, workerId, locator, listener);) {
                        core.start();
                        // With strict SSL the worker must report a connection error...
                        assertTrue(connectionErrorLatch.await(10, TimeUnit.SECONDS));
                        // ...and succeed once unsecure SSL is allowed again.
                        locator.setSslUnsecure(true);
                        assertTrue(connectedLatch.await(10, TimeUnit.SECONDS));
                    }
                    // Closing the worker must fire the disconnection event.
                    assertTrue(disconnectedLatch.await(10, TimeUnit.SECONDS));
                }
            }
        }
    }
}
| |
/*
* Copyright 2016-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.swift;
import static com.facebook.buck.swift.SwiftLibraryDescription.SWIFT_COMPANION_FLAVOR;
import static com.facebook.buck.swift.SwiftLibraryDescription.SWIFT_COMPILE_FLAVOR;
import com.facebook.buck.cxx.CxxDescriptionEnhancer;
import com.facebook.buck.cxx.CxxHeadersDir;
import com.facebook.buck.cxx.CxxLink;
import com.facebook.buck.cxx.CxxPlatform;
import com.facebook.buck.cxx.CxxPreprocessables;
import com.facebook.buck.cxx.CxxPreprocessorDep;
import com.facebook.buck.cxx.CxxPreprocessorInput;
import com.facebook.buck.cxx.HeaderSymlinkTree;
import com.facebook.buck.cxx.HeaderVisibility;
import com.facebook.buck.cxx.ImmutableCxxPreprocessorInputCacheKey;
import com.facebook.buck.cxx.Linker;
import com.facebook.buck.cxx.LinkerMapMode;
import com.facebook.buck.cxx.NativeLinkable;
import com.facebook.buck.cxx.NativeLinkableInput;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.Flavor;
import com.facebook.buck.model.FlavorDomain;
import com.facebook.buck.parser.NoSuchBuildTargetException;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.HasRuntimeDeps;
import com.facebook.buck.rules.NoopBuildRule;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.args.FileListableLinkerInputArg;
import com.facebook.buck.rules.args.SourcePathArg;
import com.facebook.buck.rules.coercer.FrameworkPath;
import com.facebook.buck.util.MoreCollectors;
import com.facebook.buck.util.RichStream;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import java.util.Collection;
import java.util.Optional;
import java.util.regex.Pattern;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
/**
 * An action graph representation of a Swift library from the target graph, providing the
 * various interfaces to make it consumable by C/C++ native linkable rules.
 */
class SwiftLibrary
extends NoopBuildRule
implements HasRuntimeDeps, NativeLinkable, CxxPreprocessorDep {
// Memoizes the transitive CxxPreprocessorInput lookup per cache key.
private final LoadingCache<
    CxxPreprocessables.CxxPreprocessorInputCacheKey,
    ImmutableMap<BuildTarget, CxxPreprocessorInput>
  > transitiveCxxPreprocessorInputCache =
  CxxPreprocessables.getTransitiveCxxPreprocessorInputCache(this);

private final BuildRuleResolver ruleResolver;
// Deps whose linkable interface is re-exported to dependents of this library.
private final Collection<? extends BuildRule> exportedDeps;
private final ImmutableSet<FrameworkPath> frameworks;
private final ImmutableSet<FrameworkPath> libraries;
// Maps platform flavors to their Swift toolchain configuration.
private final FlavorDomain<SwiftPlatform> swiftPlatformFlavorDomain;
// When present, the library only applies to platforms whose flavor matches this regex.
private final Optional<Pattern> supportedPlatformsRegex;
private final Linkage linkage;
// Package-private: instances are created by SwiftLibraryDescription.
SwiftLibrary(
    BuildRuleParams params,
    BuildRuleResolver ruleResolver,
    Collection<? extends BuildRule> exportedDeps,
    FlavorDomain<SwiftPlatform> swiftPlatformFlavorDomain,
    ImmutableSet<FrameworkPath> frameworks,
    ImmutableSet<FrameworkPath> libraries,
    Optional<Pattern> supportedPlatformsRegex,
    Linkage linkage) {
  super(params);
  this.ruleResolver = ruleResolver;
  this.exportedDeps = exportedDeps;
  this.frameworks = frameworks;
  this.libraries = libraries;
  this.swiftPlatformFlavorDomain = swiftPlatformFlavorDomain;
  this.supportedPlatformsRegex = supportedPlatformsRegex;
  this.linkage = linkage;
}
/**
 * A platform is supported when no restriction regex is configured, or when the
 * platform's flavor name matches it.
 */
private boolean isPlatformSupported(CxxPlatform cxxPlatform) {
  if (!supportedPlatformsRegex.isPresent()) {
    return true;
  }
  String flavorName = cxxPlatform.getFlavor().toString();
  return supportedPlatformsRegex.get().matcher(flavorName).find();
}
@Override
public Iterable<NativeLinkable> getNativeLinkableDeps() {
  // TODO(bhamiltoncx, ryu2): Use pseudo targets to represent the Swift
  // runtime library's linker args here so NativeLinkables can
  // deduplicate the linker flags on the build target (which would be the same for
  // all libraries).
  // Declared deps that participate in native linking, declaration order preserved.
  ImmutableSet.Builder<NativeLinkable> linkables = ImmutableSet.builder();
  for (BuildRule rule : getDeclaredDeps()) {
    if (rule instanceof NativeLinkable) {
      linkables.add((NativeLinkable) rule);
    }
  }
  return linkables.build();
}
// Exported deps depend on the Swift runtime for a concrete platform, so the
// platform-independent variant cannot be answered - callers must use
// getNativeLinkableExportedDepsForPlatform(CxxPlatform) instead.
@Override
public Iterable<? extends NativeLinkable> getNativeLinkableExportedDeps() {
  throw new RuntimeException(
      "SwiftLibrary does not support getting linkable exported deps " +
      "without a specific platform.");
}
/**
 * Exported linkable deps for a concrete platform: the exported deps that are
 * native-linkable, plus the Swift runtime for that platform. Empty when the
 * platform is not supported.
 */
@Override
public Iterable<? extends NativeLinkable> getNativeLinkableExportedDepsForPlatform(
    CxxPlatform cxxPlatform) {
  if (!isPlatformSupported(cxxPlatform)) {
    return ImmutableList.of();
  }
  ImmutableSet.Builder<NativeLinkable> exported = ImmutableSet.builder();
  for (BuildRule rule : exportedDeps) {
    if (rule instanceof NativeLinkable) {
      exported.add((NativeLinkable) rule);
    }
  }
  // The Swift runtime is appended last, matching the original stream concat order.
  exported.add(new SwiftRuntimeNativeLinkable(
      swiftPlatformFlavorDomain.getValue(cxxPlatform.getFlavor())));
  return exported.build();
}
  /**
   * Builds the linker input contributed by this library for the given platform: the compile
   * rule's AST link args plus the configured frameworks/libraries, then either the shared
   * library output (dynamic) or the compile rule's file-list link arg (static).
   *
   * @throws IllegalStateException if the preferred linkage is an unknown enum value
   */
  @Override
  public NativeLinkableInput getNativeLinkableInput(
      CxxPlatform cxxPlatform,
      Linker.LinkableDepType type) throws NoSuchBuildTargetException {
    SwiftCompile rule = requireSwiftCompileRule(cxxPlatform.getFlavor());
    NativeLinkableInput.Builder inputBuilder = NativeLinkableInput.builder();
    inputBuilder
        .addAllArgs(rule.getAstLinkArgs())
        .addAllFrameworks(frameworks)
        .addAllLibraries(libraries);
    boolean isDynamic;
    Linkage preferredLinkage = getPreferredLinkage(cxxPlatform);
    switch (preferredLinkage) {
      case STATIC:
        isDynamic = false;
        break;
      case SHARED:
        isDynamic = true;
        break;
      case ANY:
        // ANY defers to the link style requested by the depending rule.
        isDynamic = type == Linker.LinkableDepType.SHARED;
        break;
      default:
        throw new IllegalStateException("unhandled linkage type: " + preferredLinkage);
    }
    if (isDynamic) {
      // Dynamic: link against the shared library produced by the swift link rule.
      CxxLink swiftLinkRule = requireSwiftLinkRule(cxxPlatform.getFlavor());
      inputBuilder.addArgs(
          FileListableLinkerInputArg.withSourcePathArg(
              SourcePathArg.of(swiftLinkRule.getSourcePathToOutput())));
    } else {
      // Static: pass the compile rule's object-file list to the linker.
      inputBuilder.addArgs(rule.getFileListLinkArg());
    }
    return inputBuilder.build();
  }
@Override
public ImmutableMap<String, SourcePath> getSharedLibraries(
CxxPlatform cxxPlatform) throws NoSuchBuildTargetException {
if (!isPlatformSupported(cxxPlatform)) {
return ImmutableMap.of();
}
ImmutableMap.Builder<String, SourcePath> libs = ImmutableMap.builder();
BuildRule sharedLibraryBuildRule = requireSwiftLinkRule(cxxPlatform.getFlavor());
String sharedLibrarySoname = CxxDescriptionEnhancer.getSharedLibrarySoname(
Optional.empty(),
sharedLibraryBuildRule.getBuildTarget(),
cxxPlatform);
libs.put(
sharedLibrarySoname,
sharedLibraryBuildRule.getSourcePathToOutput());
return libs.build();
}
  /**
   * Requires (creating on demand via the resolver) the {@link SwiftCompile} rule for this
   * library. The shared, companion and linker-map flavors are stripped before the swift
   * compile flavor is appended, so all variants map to the same compile rule.
   *
   * @throws RuntimeException if the resolved rule is not a {@link SwiftCompile}
   */
  SwiftCompile requireSwiftCompileRule(Flavor... flavors)
      throws NoSuchBuildTargetException {
    BuildTarget requiredBuildTarget = getBuildTarget()
        .withAppendedFlavors(flavors)
        .withoutFlavors(ImmutableSet.of(CxxDescriptionEnhancer.SHARED_FLAVOR))
        .withoutFlavors(ImmutableSet.of(SWIFT_COMPANION_FLAVOR))
        .withoutFlavors(LinkerMapMode.FLAVOR_DOMAIN.getFlavors())
        .withAppendedFlavors(SWIFT_COMPILE_FLAVOR);
    BuildRule rule = ruleResolver.requireRule(requiredBuildTarget);
    if (!(rule instanceof SwiftCompile)) {
      throw new RuntimeException(
          String.format("Could not find SwiftCompile with target %s", requiredBuildTarget));
    }
    return (SwiftCompile) rule;
  }
  /**
   * Requires the {@link CxxLink} rule that produces this library's shared library: the
   * companion flavor is dropped and the shared flavor appended before the extra flavors.
   *
   * @throws RuntimeException if the resolved rule is not a {@link CxxLink}
   */
  private CxxLink requireSwiftLinkRule(Flavor... flavors) throws NoSuchBuildTargetException {
    BuildTarget requiredBuildTarget = getBuildTarget()
        .withoutFlavors(SWIFT_COMPANION_FLAVOR)
        .withAppendedFlavors(CxxDescriptionEnhancer.SHARED_FLAVOR)
        .withAppendedFlavors(flavors);
    BuildRule rule = ruleResolver.requireRule(requiredBuildTarget);
    if (!(rule instanceof CxxLink)) {
      throw new RuntimeException(
          String.format(
              "Could not find CxxLink with target %s",
              requiredBuildTarget));
    }
    return (CxxLink) rule;
  }
@Override
public NativeLinkable.Linkage getPreferredLinkage(CxxPlatform cxxPlatform) {
// don't create dylib for swift companion target.
if (getBuildTarget().getFlavors().contains(SWIFT_COMPANION_FLAVOR)) {
return Linkage.STATIC;
} else {
return linkage;
}
}
  /** Returns the build targets of all declared and exported deps as runtime deps. */
  @Override
  public Stream<BuildTarget> getRuntimeDeps() {
    // We export all declared deps as runtime deps, to setup a transitive runtime dep chain which
    // will pull in runtime deps (e.g. other binaries) or transitive C/C++ libraries. Since the
    // `CxxLibrary` rules themselves are noop meta rules, they shouldn't add any unnecessary
    // overhead.
    return Stream
        .concat(
            getDeclaredDeps().stream(),
            StreamSupport.stream(exportedDeps.spliterator(), false))
        .map(BuildRule::getBuildTarget);
  }
  /** Returns all deps that are {@link CxxPreprocessorDep}s (the platform is not consulted). */
  @Override
  public Iterable<? extends CxxPreprocessorDep> getCxxPreprocessorDeps(CxxPlatform cxxPlatform) {
    return getDeps().stream()
        .filter(CxxPreprocessorDep.class::isInstance)
        .map(CxxPreprocessorDep.class::cast)
        .collect(MoreCollectors.toImmutableSet());
  }
  /** Swift libraries expose no exported header symlink tree. */
  @Override
  public Optional<HeaderSymlinkTree> getExportedHeaderSymlinkTree(
      CxxPlatform cxxPlatform) {
    return Optional.empty();
  }
  /**
   * Returns preprocessor input exposing the swift compile rule's output as a local headers
   * directory. Empty input for unsupported platforms.
   */
  @Override
  public CxxPreprocessorInput getCxxPreprocessorInput(
      CxxPlatform cxxPlatform,
      HeaderVisibility headerVisibility) throws NoSuchBuildTargetException {
    if (!isPlatformSupported(cxxPlatform)) {
      return CxxPreprocessorInput.EMPTY;
    }
    BuildRule rule = requireSwiftCompileRule(cxxPlatform.getFlavor());
    return CxxPreprocessorInput.builder()
        .addIncludes(
            CxxHeadersDir.of(
                CxxPreprocessables.IncludeType.LOCAL,
                rule.getSourcePathToOutput()))
        .build();
  }
  /**
   * Companion targets answer with only their own preprocessor input; all other targets are
   * served from the shared transitive input cache.
   */
  @Override
  public ImmutableMap<BuildTarget, CxxPreprocessorInput> getTransitiveCxxPreprocessorInput(
      CxxPlatform cxxPlatform,
      HeaderVisibility headerVisibility) throws NoSuchBuildTargetException {
    if (getBuildTarget().getFlavors().contains(SWIFT_COMPANION_FLAVOR)) {
      return ImmutableMap.of(
          getBuildTarget(),
          getCxxPreprocessorInput(cxxPlatform, headerVisibility));
    } else {
      return transitiveCxxPreprocessorInputCache.getUnchecked(
          ImmutableCxxPreprocessorInputCacheKey.of(cxxPlatform, headerVisibility));
    }
  }
}
| |
/*
* Copyright (c) 2005-2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.core.util;
import org.apache.axiom.om.OMElement;
import org.apache.axiom.om.impl.builder.StAXOMBuilder;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.identity.base.IdentityConstants;
import org.wso2.carbon.identity.base.IdentityRuntimeException;
import org.wso2.carbon.identity.core.model.*;
import org.wso2.carbon.utils.ServerConstants;
import org.wso2.securevault.SecretResolver;
import org.wso2.securevault.SecretResolverFactory;
import javax.xml.namespace.QName;
import javax.xml.stream.XMLStreamException;
import java.io.*;
import java.util.*;
/**
 * Singleton parser for identity.xml.
 *
 * <p>Reads the file once (from {@code configFilePath} when set via
 * {@link #getInstance(String)}, otherwise from the default identity config directory),
 * flattens element text into dot-separated configuration keys, and builds the event
 * listener, cache and cookie configuration holders.
 */
public class IdentityConfigParser {

    private static Map<String, Object> configuration = new HashMap<>();
    private static Map<IdentityEventListenerConfigKey, IdentityEventListenerConfig> eventListenerConfiguration =
            new HashMap<>();
    private static Map<IdentityCacheConfigKey, IdentityCacheConfig> identityCacheConfigurationHolder =
            new HashMap<>();
    private static Map<String, IdentityCookieConfig> identityCookieConfigurationHolder = new HashMap<>();
    public final static String IS_DISTRIBUTED_CACHE = "isDistributed";
    // Must be volatile: double-checked locking is unsafe without it because another thread
    // could otherwise observe a partially constructed instance.
    private static volatile IdentityConfigParser parser;
    private static SecretResolver secretResolver;
    // Guards lazy initialization of the singleton.
    private static final Object lock = new Object();
    private static final Log log = LogFactory.getLog(IdentityConfigParser.class);
    // Optional explicit path to identity.xml; set via getInstance(String) before first use.
    private static String configFilePath;
    private OMElement rootElement;

    private IdentityConfigParser() {
        buildConfiguration();
    }

    public static IdentityConfigParser getInstance() {
        // Double-checked locking; 'parser' is volatile so the published instance is safe.
        if (parser == null) {
            synchronized (lock) {
                if (parser == null) {
                    parser = new IdentityConfigParser();
                }
            }
        }
        return parser;
    }

    /**
     * Returns the singleton, reading the configuration from the given file path on first
     * initialization.
     *
     * NOTE(review): if the instance already exists the supplied path is ignored — callers
     * must provide the path before first use; confirm this matches caller expectations.
     *
     * @param filePath absolute path to identity.xml
     * @return the shared parser instance
     */
    public static IdentityConfigParser getInstance(String filePath) {
        configFilePath = filePath;
        return getInstance();
    }

    /** @return flattened key/value configuration read from identity.xml */
    public Map<String, Object> getConfiguration() {
        return configuration;
    }

    /** @return event listener configurations keyed by (type, name) */
    public static Map<IdentityEventListenerConfigKey, IdentityEventListenerConfig> getEventListenerConfiguration() {
        return eventListenerConfiguration;
    }

    /** @return cache configurations keyed by (cache manager name, cache name) */
    public static Map<IdentityCacheConfigKey, IdentityCacheConfig> getIdentityCacheConfigurationHolder() {
        return identityCacheConfigurationHolder;
    }

    /** @return cookie configurations keyed by cookie name */
    public static Map<String, IdentityCookieConfig> getIdentityCookieConfigurationHolder() {
        return identityCookieConfigurationHolder;
    }

    /**
     * Locates identity.xml, parses it and populates all configuration holders.
     *
     * @throws IdentityRuntimeException if the file cannot be found or parsed
     */
    private void buildConfiguration() {
        InputStream inStream = null;
        String warningMessage = "";
        try {
            if (configFilePath != null) {
                File identityConfigXml = new File(configFilePath);
                if (identityConfigXml.exists()) {
                    inStream = new FileInputStream(identityConfigXml);
                }
            } else {
                File identityConfigXml = new File(IdentityUtil.getIdentityConfigDirPath(),
                        IdentityCoreConstants.IDENTITY_CONFIG);
                if (identityConfigXml.exists()) {
                    inStream = new FileInputStream(identityConfigXml);
                }
                // Historical note: a bundle-context based fallback lookup used to live here but
                // relied on an internal class from another package (IdentityCoreServiceComponent)
                // and caused hard-to-troubleshoot CNF errors; it was removed deliberately.
            }
            if (inStream == null) {
                String message = "Identity configuration not found. Cause - " + warningMessage;
                if (log.isDebugEnabled()) {
                    log.debug(message);
                }
                throw new FileNotFoundException(message);
            }
            StAXOMBuilder builder = new StAXOMBuilder(inStream);
            rootElement = builder.getDocumentElement();
            Stack<String> nameStack = new Stack<String>();
            secretResolver = SecretResolverFactory.create(rootElement, true);
            readChildElements(rootElement, nameStack);
            buildEventListenerData();
            buildCacheConfig();
            buildCookieConfig();
        } catch (IOException | XMLStreamException e) {
            throw IdentityRuntimeException.error("Error occurred while building configuration from identity.xml", e);
        } finally {
            try {
                if (inStream != null) {
                    inStream.close();
                }
            } catch (IOException e) {
                log.error("Error closing the input stream for identity.xml", e);
            }
        }
    }

    /**
     * Populates {@link #eventListenerConfiguration} from the EventListeners element.
     * Each listener must define a non-blank type and name; its order attribute is parsed as
     * an integer (a missing/non-numeric order attribute fails fast with a runtime exception).
     */
    private void buildEventListenerData() {
        OMElement eventListeners = this.getConfigElement(IdentityConstants.EVENT_LISTENERS);
        if (eventListeners != null) {
            Iterator<OMElement> eventListener = eventListeners.getChildrenWithName(
                    new QName(IdentityCoreConstants.IDENTITY_DEFAULT_NAMESPACE, IdentityConstants.EVENT_LISTENER));
            if (eventListener != null) {
                while (eventListener.hasNext()) {
                    OMElement eventListenerElement = eventListener.next();
                    String eventListenerType = eventListenerElement.getAttributeValue(new QName(
                            IdentityConstants.EVENT_LISTENER_TYPE));
                    String eventListenerName = eventListenerElement.getAttributeValue(new QName(
                            IdentityConstants.EVENT_LISTENER_NAME));
                    int order = Integer.parseInt(eventListenerElement.getAttributeValue(new QName(
                            IdentityConstants.EVENT_LISTENER_ORDER)));
                    String enable = eventListenerElement.getAttributeValue(new QName(
                            IdentityConstants.EVENT_LISTENER_ENABLE));
                    Iterator<OMElement> propertyElements = eventListenerElement.getChildrenWithName(new QName
                            (IdentityConstants.EVENT_LISTENER_PROPERTY));
                    Properties properties = new Properties();
                    while (propertyElements.hasNext()) {
                        OMElement propertyElem = propertyElements.next();
                        String propertyName = propertyElem.getAttributeValue(new QName(
                                IdentityConstants.EVENT_LISTENER_PROPERTY_NAME));
                        String propertyValue = propertyElem.getText();
                        properties.setProperty(propertyName, propertyValue);
                    }
                    if (StringUtils.isBlank(eventListenerType) || StringUtils.isBlank(eventListenerName)) {
                        throw IdentityRuntimeException.error("eventListenerType or eventListenerName is not defined " +
                                "correctly");
                    }
                    IdentityEventListenerConfigKey configKey =
                            new IdentityEventListenerConfigKey(eventListenerType, eventListenerName);
                    IdentityEventListenerConfig identityEventListenerConfig = new IdentityEventListenerConfig(enable,
                            order, configKey, properties);
                    eventListenerConfiguration.put(configKey, identityEventListenerConfig);
                }
            }
        }
    }

    /**
     * Populates {@link #identityCacheConfigurationHolder} from the CacheConfig element.
     * Attributes enable/timeout/capacity/isDistributed are optional and applied only when
     * present and non-blank.
     */
    private void buildCacheConfig() {
        OMElement cacheConfig = this.getConfigElement(IdentityConstants.CACHE_CONFIG);
        if (cacheConfig != null) {
            Iterator<OMElement> cacheManagers = cacheConfig.getChildrenWithName(
                    new QName(IdentityCoreConstants.IDENTITY_DEFAULT_NAMESPACE, IdentityConstants.CACHE_MANAGER));
            if (cacheManagers != null) {
                while (cacheManagers.hasNext()) {
                    OMElement cacheManager = cacheManagers.next();
                    String cacheManagerName = cacheManager.getAttributeValue(new QName(
                            IdentityConstants.CACHE_MANAGER_NAME));
                    if (StringUtils.isBlank(cacheManagerName)) {
                        throw IdentityRuntimeException.error("CacheManager name not defined correctly");
                    }
                    Iterator<OMElement> caches = cacheManager.getChildrenWithName(
                            new QName(IdentityCoreConstants.IDENTITY_DEFAULT_NAMESPACE, IdentityConstants.CACHE));
                    if (caches != null) {
                        while (caches.hasNext()) {
                            OMElement cache = caches.next();
                            String cacheName = cache.getAttributeValue(new QName(IdentityConstants.CACHE_NAME));
                            if (StringUtils.isBlank(cacheName)) {
                                throw IdentityRuntimeException.error("Cache name not defined correctly");
                            }
                            IdentityCacheConfigKey identityCacheConfigKey = new IdentityCacheConfigKey(cacheManagerName,
                                    cacheName);
                            IdentityCacheConfig identityCacheConfig = new IdentityCacheConfig(identityCacheConfigKey);
                            String enable = cache.getAttributeValue(new QName(IdentityConstants.CACHE_ENABLE));
                            if (StringUtils.isNotBlank(enable)) {
                                identityCacheConfig.setEnabled(Boolean.parseBoolean(enable));
                            }
                            String timeout = cache.getAttributeValue(new QName(IdentityConstants.CACHE_TIMEOUT));
                            if (StringUtils.isNotBlank(timeout)) {
                                identityCacheConfig.setTimeout(Integer.parseInt(timeout));
                            }
                            String capacity = cache.getAttributeValue(new QName(IdentityConstants.CACHE_CAPACITY));
                            if (StringUtils.isNotBlank(capacity)) {
                                identityCacheConfig.setCapacity(Integer.parseInt(capacity));
                            }
                            String isDistributedCache = cache.getAttributeValue(new QName(IS_DISTRIBUTED_CACHE));
                            if (StringUtils.isNotBlank(isDistributedCache)) {
                                identityCacheConfig.setDistributed(Boolean.parseBoolean(isDistributedCache));
                            }
                            // Add the config to container
                            identityCacheConfigurationHolder.put(identityCacheConfigKey, identityCacheConfig);
                        }
                    }
                }
            }
        }
    }

    /**
     * Populates {@link #identityCookieConfigurationHolder} from the Cookies element. All
     * cookie attributes except the name are optional.
     */
    private void buildCookieConfig() {
        OMElement cookiesConfig = this.getConfigElement(IdentityConstants.COOKIES_CONFIG);
        if (cookiesConfig != null) {
            Iterator<OMElement> cookies = cookiesConfig.getChildrenWithName(
                    new QName(IdentityCoreConstants.IDENTITY_DEFAULT_NAMESPACE, IdentityConstants.COOKIE));
            if (cookies != null) {
                while (cookies.hasNext()) {
                    OMElement cookie = cookies.next();
                    String cookieName = cookie.getAttributeValue(new QName(IdentityConstants.COOKIE_NAME));
                    if (StringUtils.isBlank(cookieName)) {
                        throw IdentityRuntimeException.error("Cookie name not defined correctly");
                    }
                    IdentityCookieConfig cookieConfig = new IdentityCookieConfig(cookieName);
                    String domain = cookie.getAttributeValue(new QName(IdentityConstants.COOKIE_DOMAIN));
                    if (StringUtils.isNotBlank(domain)) {
                        cookieConfig.setDomain(domain);
                    }
                    String path = cookie.getAttributeValue(new QName(IdentityConstants.COOKIE_PATH));
                    if (StringUtils.isNotBlank(path)) {
                        cookieConfig.setPath(path);
                    }
                    String comment = cookie.getAttributeValue(new QName(IdentityConstants.COOKIE_COMMENT));
                    if (StringUtils.isNotBlank(comment)) {
                        cookieConfig.setComment(comment);
                    }
                    String version = cookie.getAttributeValue(new QName(IdentityConstants.COOKIE_VERSION));
                    if (StringUtils.isNotBlank(version)) {
                        cookieConfig.setVersion(Integer.valueOf(version));
                    }
                    String maxAge = cookie.getAttributeValue(new QName(IdentityConstants.COOKIE_MAX_AGE));
                    if (StringUtils.isNotBlank(maxAge)) {
                        cookieConfig.setMaxAge(Integer.valueOf(maxAge));
                    }
                    String secure = cookie.getAttributeValue(new QName(IdentityConstants.COOKIE_SECURE));
                    if (StringUtils.isNotBlank(secure)) {
                        cookieConfig.setSecure(Boolean.valueOf(secure));
                    }
                    String httpOnly = cookie.getAttributeValue(new QName(IdentityConstants.COOKIE_HTTP_ONLY));
                    if (StringUtils.isNotBlank(httpOnly)) {
                        cookieConfig.setIsHttpOnly(Boolean.valueOf(httpOnly));
                    }
                    // Add the config to container
                    identityCookieConfigurationHolder.put(cookieName, cookieConfig);
                }
            }
        }
    }

    /**
     * Recursively walks the element tree, storing the text of leaf-like elements under their
     * dot-separated path. Repeated keys with distinct values accumulate into a list.
     */
    private void readChildElements(OMElement serverConfig, Stack<String> nameStack) {
        for (Iterator<?> childElements = serverConfig.getChildElements(); childElements.hasNext(); ) {
            OMElement element = (OMElement) childElements.next();
            nameStack.push(element.getLocalName());
            if (elementHasText(element)) {
                String key = getKey(nameStack);
                Object currentObject = configuration.get(key);
                String value = replaceSystemProperty(element.getText());
                // Resolve secure-vault protected values (e.g. encrypted passwords).
                if (secretResolver != null && secretResolver.isInitialized() &&
                        secretResolver.isTokenProtected(key)) {
                    value = secretResolver.resolve(key);
                }
                if (currentObject == null) {
                    configuration.put(key, value);
                } else if (currentObject instanceof ArrayList) {
                    // Key seen more than twice: accumulate distinct values.
                    @SuppressWarnings("unchecked")
                    ArrayList<Object> list = (ArrayList<Object>) currentObject;
                    if (!list.contains(value)) {
                        list.add(value);
                        configuration.put(key, list);
                    }
                } else {
                    // Second distinct value for this key: promote to a list.
                    if (!value.equals(currentObject)) {
                        ArrayList<Object> arrayList = new ArrayList<>(2);
                        arrayList.add(currentObject);
                        arrayList.add(value);
                        configuration.put(key, arrayList);
                    }
                }
            }
            readChildElements(element, nameStack);
            nameStack.pop();
        }
    }

    /**
     * Joins the stack elements bottom-up with dots, e.g. ["A", "B"] -> "A.B". Returns the
     * empty string for an empty stack (the previous implementation threw
     * StringIndexOutOfBoundsException in that case).
     */
    private String getKey(Stack<String> nameStack) {
        StringBuilder key = new StringBuilder();
        for (int i = 0; i < nameStack.size(); i++) {
            if (i > 0) {
                key.append(".");
            }
            key.append(nameStack.elementAt(i));
        }
        return key.toString();
    }

    /** @return true when the element has non-blank text content */
    private boolean elementHasText(OMElement element) {
        String text = element.getText();
        return text != null && text.trim().length() != 0;
    }

    /**
     * Replaces {@code ${property}} references in the text with the corresponding system
     * property values. Unknown properties are left as-is; a "." carbon.home is expanded to
     * the current working directory's absolute path.
     */
    private String replaceSystemProperty(String text) {
        int indexOfStartingChars = -1;
        int indexOfClosingBrace;
        // Properties are specified as ${system.property} and are assumed to be
        // system properties.
        while (indexOfStartingChars < text.indexOf("${")
                && (indexOfStartingChars = text.indexOf("${")) != -1
                && (indexOfClosingBrace = text.indexOf("}")) != -1) { // Is a property used?
            String sysProp = text.substring(indexOfStartingChars + 2, indexOfClosingBrace);
            String propValue = System.getProperty(sysProp);
            if (propValue != null) {
                text = text.substring(0, indexOfStartingChars) + propValue
                        + text.substring(indexOfClosingBrace + 1);
            }
            if (sysProp.equals(ServerConstants.CARBON_HOME)) {
                if (System.getProperty(ServerConstants.CARBON_HOME).equals(".")) {
                    text = new File(".").getAbsolutePath() + File.separator + text;
                }
            }
        }
        return text;
    }

    /**
     * Returns the element with the provided local part
     *
     * @param localPart local part name
     * @return Corresponding OMElement
     */
    public OMElement getConfigElement(String localPart) {
        return rootElement.getFirstChildWithName(new QName(IdentityCoreConstants.IDENTITY_DEFAULT_NAMESPACE, localPart));
    }

    /**
     * Returns the QName with the identity name space
     *
     * @param localPart local part name
     * @return relevant QName
     */
    public QName getQNameWithIdentityNS(String localPart) {
        return new QName(IdentityCoreConstants.IDENTITY_DEFAULT_NAMESPACE, localPart);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.ssl;
import org.apache.drill.shaded.guava.com.google.common.base.Preconditions;
import io.netty.handler.ssl.SslContext;
import io.netty.handler.ssl.SslContextBuilder;
import io.netty.handler.ssl.SslProvider;
import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.common.exceptions.DrillException;
import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.hadoop.conf.Configuration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.TrustManagerFactory;
/**
 * Server-side SSL configuration for Drill. Resolves keystore/truststore settings from the
 * Drill config and, when ssl.useHadoopConfig is enabled, from a Hadoop SSL configuration
 * resource, and builds Netty or JDK SSL contexts for server endpoints.
 */
public class SSLConfigServer extends SSLConfig {
  private static final Logger logger = LoggerFactory.getLogger(SSLConfigServer.class);
  private final DrillConfig config;
  // Null when Hadoop-based SSL configuration is not enabled.
  private final Configuration hadoopConfig;
  private final boolean userSslEnabled;
  private final boolean httpsEnabled;
  private final String keyStoreType;
  private final String keyStorePath;
  private final String keyStorePassword;
  // Private-key password; defaults to the keystore password when not configured.
  private final String keyPassword;
  private final String trustStoreType;
  private final String trustStorePath;
  private final String trustStorePassword;
  private final String protocol;
  private final String provider;
  /**
   * Resolves all SSL settings at construction time.
   *
   * @param config the Drill configuration
   * @param hadoopConfig a Hadoop configuration to use (tests pass a mock); when null and
   *        Hadoop config usage is enabled, a fresh {@link Configuration} is created
   * @throws DrillException declared for subclass/caller compatibility
   */
  public SSLConfigServer(DrillConfig config, Configuration hadoopConfig) throws DrillException {
    this.config = config;
    Mode mode = Mode.SERVER;
    httpsEnabled =
        config.hasPath(ExecConstants.HTTP_ENABLE_SSL) && config.getBoolean(ExecConstants.HTTP_ENABLE_SSL);
    // For testing we will mock up a hadoop configuration, however for regular use, we find the actual hadoop config.
    boolean enableHadoopConfig = config.getBoolean(ExecConstants.SSL_USE_HADOOP_CONF);
    if (enableHadoopConfig) {
      if (hadoopConfig == null) {
        this.hadoopConfig = new Configuration(); // get hadoop configuration
      } else {
        this.hadoopConfig = hadoopConfig;
      }
      String hadoopSSLConfigFile =
          this.hadoopConfig.get(resolveHadoopPropertyName(HADOOP_SSL_CONF_TPL_KEY, getMode()));
      logger.debug("Using Hadoop configuration for SSL");
      logger.debug("Hadoop SSL configuration file: {}", hadoopSSLConfigFile);
      this.hadoopConfig.addResource(hadoopSSLConfigFile);
    } else {
      this.hadoopConfig = null;
    }
    userSslEnabled =
        config.hasPath(ExecConstants.USER_SSL_ENABLED) && config.getBoolean(ExecConstants.USER_SSL_ENABLED);
    // Provider that resolves each credential from the Drill/Hadoop property names passed below.
    SSLCredentialsProvider credentialsProvider = SSLCredentialsProvider.getSSLCredentialsProvider(
        this::getConfigParam,
        this::getPasswordConfigParam,
        Mode.SERVER,
        config.getBoolean(ExecConstants.SSL_USE_MAPR_CONFIG));
    trustStoreType = credentialsProvider.getTrustStoreType(
        ExecConstants.SSL_TRUSTSTORE_TYPE, resolveHadoopPropertyName(HADOOP_SSL_TRUSTSTORE_TYPE_TPL_KEY, mode));
    trustStorePath = credentialsProvider.getTrustStoreLocation(
        ExecConstants.SSL_TRUSTSTORE_PATH, resolveHadoopPropertyName(HADOOP_SSL_TRUSTSTORE_LOCATION_TPL_KEY, mode));
    trustStorePassword = credentialsProvider.getTrustStorePassword(
        ExecConstants.SSL_TRUSTSTORE_PASSWORD, resolveHadoopPropertyName(HADOOP_SSL_TRUSTSTORE_PASSWORD_TPL_KEY, mode));
    keyStoreType = credentialsProvider.getKeyStoreType(
        ExecConstants.SSL_KEYSTORE_TYPE, resolveHadoopPropertyName(HADOOP_SSL_KEYSTORE_TYPE_TPL_KEY, mode));
    keyStorePath = credentialsProvider.getKeyStoreLocation(
        ExecConstants.SSL_KEYSTORE_PATH, resolveHadoopPropertyName(HADOOP_SSL_KEYSTORE_LOCATION_TPL_KEY, mode));
    keyStorePassword = credentialsProvider.getKeyStorePassword(
        ExecConstants.SSL_KEYSTORE_PASSWORD, resolveHadoopPropertyName(HADOOP_SSL_KEYSTORE_PASSWORD_TPL_KEY, mode));
    String keyPass = credentialsProvider.getKeyPassword(
        ExecConstants.SSL_KEY_PASSWORD, resolveHadoopPropertyName(HADOOP_SSL_KEYSTORE_KEYPASSWORD_TPL_KEY, mode));
    // if no keypassword specified, use keystore password
    keyPassword = keyPass.isEmpty() ? keyStorePassword : keyPass;
    protocol = config.getString(ExecConstants.SSL_PROTOCOL);
    // If provider is OPENSSL then to debug or run this code in an IDE, you will need to enable
    // the dependency on netty-tcnative with the correct classifier for the platform you use.
    // This can be done by enabling the openssl profile.
    // If the IDE is Eclipse, it requires you to install an additional Eclipse plugin available here:
    // http://repo1.maven.org/maven2/kr/motd/maven/os-maven-plugin/1.6.1/os-maven-plugin-1.6.1.jar
    // or from your local maven repository:
    // ~/.m2/repository/kr/motd/maven/os-maven-plugin/1.6.1/os-maven-plugin-1.6.1.jar
    // Note that installing this plugin may require you to start with a new workspace
    provider = config.getString(ExecConstants.SSL_PROVIDER);
  }
  /**
   * Validates that keystore path and password are either both set or both empty when SSL is
   * enabled for the user or HTTPS endpoint.
   *
   * @throws DrillException if only one of path/password is configured
   */
  @Override
  public void validateKeyStore() throws DrillException {
    //HTTPS validates the keystore is not empty. User Server SSL context initialization also validates keystore, but
    // much more strictly. User Client context initialization does not validate keystore.
    /*If keystorePath or keystorePassword is provided in the configuration file use that*/
    if ((isUserSslEnabled() || isHttpsEnabled())) {
      if (!keyStorePath.isEmpty() || !keyStorePassword.isEmpty()) {
        if (keyStorePath.isEmpty()) {
          throw new DrillException(
              " *.ssl.keyStorePath in the configuration file is empty, but *.ssl.keyStorePassword is set");
        } else if (keyStorePassword.isEmpty()) {
          throw new DrillException(
              " *.ssl.keyStorePassword in the configuration file is empty, but *.ssl.keyStorePath is set ");
        }
      }
    }
  }
  /**
   * Builds and caches a Netty SslContext, or returns null when user SSL is disabled.
   *
   * @throws DrillException if the keystore is missing or context initialization fails
   */
  @Override
  public SslContext initNettySslContext() throws DrillException {
    final SslContext sslCtx;
    if (!userSslEnabled) {
      return null;
    }
    KeyManagerFactory kmf;
    TrustManagerFactory tmf;
    try {
      if (keyStorePath.isEmpty()) {
        throw new DrillException("No Keystore provided.");
      }
      kmf = initializeKeyManagerFactory();
      tmf = initializeTrustManagerFactory();
      sslCtx = SslContextBuilder.forServer(kmf)
          .trustManager(tmf)
          .protocols(protocol)
          .sslProvider(getProvider())
          .build(); // Will throw an exception if the key password is not correct
    } catch (Exception e) {
      // Catch any SSL initialization Exceptions here and abort.
      throw new DrillException(new StringBuilder()
          .append("SSL is enabled but cannot be initialized - ")
          .append("[ ")
          .append(e.getMessage())
          .append("]. ")
          .toString());
    }
    this.nettySslContext = sslCtx;
    return sslCtx;
  }
  /**
   * Builds and caches a JDK SSLContext, or returns null when user SSL is disabled.
   *
   * @throws DrillException if the keystore is missing or context initialization fails
   */
  @Override
  public SSLContext initJDKSSLContext() throws DrillException {
    final SSLContext sslCtx;
    if (!userSslEnabled) {
      return null;
    }
    KeyManagerFactory kmf;
    TrustManagerFactory tmf;
    try {
      if (keyStorePath.isEmpty()) {
        throw new DrillException("No Keystore provided.");
      }
      kmf = initializeKeyManagerFactory();
      tmf = initializeTrustManagerFactory();
      sslCtx = SSLContext.getInstance(protocol);
      sslCtx.init(kmf.getKeyManagers(), tmf.getTrustManagers(), null);
    } catch (Exception e) {
      // Catch any SSL initialization Exceptions here and abort.
      throw new DrillException(
          new StringBuilder().append("SSL is enabled but cannot be initialized - ")
              .append("[ ")
              .append(e.getMessage())
              .append("]. ")
              .toString());
    }
    this.jdkSSlContext = sslCtx;
    return sslCtx;
  }
  /** Creates a server-mode SSL engine without client authentication. */
  @Override
  public SSLEngine createSSLEngine(BufferAllocator allocator, String peerHost, int peerPort) {
    SSLEngine engine = super.createSSLEngine(allocator, peerHost, peerPort);
    engine.setUseClientMode(false);
    // No need for client side authentication (HTTPS like behaviour)
    engine.setNeedClientAuth(false);
    try {
      engine.setEnableSessionCreation(true);
    } catch (Exception e) {
      // Openssl implementation may throw this.
      logger.debug("Session creation not enabled. Exception: {}", e.getMessage());
    }
    return engine;
  }
  /**
   * Resolves a parameter, preferring the Hadoop property (when a Hadoop config is in use)
   * and falling back to the Drill config. Returns the trimmed value, "" when unset.
   */
  private String getConfigParam(String name, String hadoopName) {
    String value = "";
    if (hadoopConfig != null) {
      value = getHadoopConfigParam(hadoopName);
    }
    if (value.isEmpty() && config.hasPath(name)) {
      value = config.getString(name);
    }
    value = value.trim();
    return value;
  }
  /** Reads a trimmed value from the Hadoop config; requires a Hadoop config to be present. */
  private String getHadoopConfigParam(String name) {
    Preconditions.checkArgument(this.hadoopConfig != null);
    String value = hadoopConfig.get(name, "");
    value = value.trim();
    return value;
  }
  /**
   * Resolves a password, first via the superclass credential lookup, then via the regular
   * config parameter resolution.
   */
  private String getPasswordConfigParam(String name, String hadoopName) {
    String value = getPassword(hadoopName);
    if (value == null) {
      value = getConfigParam(name, hadoopName);
    }
    return value;
  }
  @Override
  public boolean isUserSslEnabled() {
    return userSslEnabled;
  }
  @Override
  public boolean isHttpsEnabled() {
    return httpsEnabled;
  }
  @Override
  public String getKeyStoreType() {
    return keyStoreType;
  }
  @Override
  public String getKeyStorePath() {
    return keyStorePath;
  }
  @Override
  public String getKeyStorePassword() {
    return keyStorePassword;
  }
  @Override
  public String getKeyPassword() {
    return keyPassword;
  }
  @Override
  public String getTrustStoreType() {
    return trustStoreType;
  }
  @Override
  public boolean hasTrustStorePath() {
    return !trustStorePath.isEmpty();
  }
  @Override
  public String getTrustStorePath() {
    return trustStorePath;
  }
  @Override
  public boolean hasTrustStorePassword() {
    return !trustStorePassword.isEmpty();
  }
  @Override
  public String getTrustStorePassword() {
    return trustStorePassword;
  }
  @Override
  public String getProtocol() {
    return protocol;
  }
  /** Maps the configured provider name to a Netty SslProvider; anything but "JDK" means OPENSSL. */
  @Override
  public SslProvider getProvider() {
    return provider.equalsIgnoreCase("JDK") ? SslProvider.JDK : SslProvider.OPENSSL;
  }
  /** No handshake timeout is configured on the server side. */
  @Override
  public int getHandshakeTimeout() {
    return 0;
  }
  @Override
  public Mode getMode() {
    return Mode.SERVER;
  }
  @Override
  public boolean disableHostVerification() {
    return false;
  }
  @Override
  public boolean disableCertificateVerification() {
    return false;
  }
  @Override
  public boolean useSystemTrustStore() {
    return false; // Client only, not supported by the server
  }
  @Override
  public boolean isSslValid() {
    return !keyStorePath.isEmpty() && !keyStorePassword.isEmpty();
  }
  @Override
  Configuration getHadoopConfig() {
    return hadoopConfig;
  }
}
| |
/*
* ARX: Powerful Data Anonymization
* Copyright 2012 - 2016 Fabian Prasser, Florian Kohlmayer and contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.deidentifier.arx.gui.view.impl.risk;
import org.deidentifier.arx.ARXPopulationModel;
import org.deidentifier.arx.ARXPopulationModel.Region;
import org.deidentifier.arx.DataHandle;
import org.deidentifier.arx.criteria.PrivacyCriterion;
import org.deidentifier.arx.gui.Controller;
import org.deidentifier.arx.gui.model.Model;
import org.deidentifier.arx.gui.model.ModelEvent;
import org.deidentifier.arx.gui.model.ModelEvent.ModelPart;
import org.deidentifier.arx.gui.resources.Resources;
import org.deidentifier.arx.gui.view.SWTUtil;
import org.deidentifier.arx.gui.view.def.IView;
import org.eclipse.jface.layout.GridDataFactory;
import org.eclipse.jface.layout.GridLayoutFactory;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.TableItem;
import org.eclipse.swt.widgets.Text;
import de.linearbits.swt.table.DynamicTable;
import de.linearbits.swt.table.DynamicTableColumn;
/**
* This view displays the population settings
*
* @author Fabian Prasser
*/
public class ViewRisksPopulationModel implements IView {
    /** Controller */
    private final Controller controller;
    /** View */
    private final Composite root;
    /** View: read-only region list (selection is display-only, see create()) */
    private DynamicTable table;
    /** View */
    private Text textSampleFraction;
    /** View */
    private Text textPopulationSize;
    /** View */
    private Button buttonUse;
    /** Model */
    private Model model;
    /** Model: true when this instance displays the output population model, false for input */
    private final boolean output;
    /**
     * Creates a new instance.
     *
     * @param parent
     * @param controller
     * @param output
     */
    public ViewRisksPopulationModel(final Composite parent,
                                    final Controller controller,
                                    final boolean output) {
        // Register for all events that can change the displayed population model
        controller.addListener(ModelPart.INPUT, this);
        controller.addListener(ModelPart.POPULATION_MODEL, this);
        controller.addListener(ModelPart.MODEL, this);
        controller.addListener(ModelPart.OUTPUT, this);
        controller.addListener(ModelPart.RESULT, this);
        this.controller = controller;
        this.output = output;
        // Create group
        root = parent;
        root.setLayout(GridLayoutFactory.swtDefaults().numColumns(2).create());
        create(root);
        reset();
    }
    /** Unregisters this view from the controller. */
    @Override
    public void dispose() {
        controller.removeListener(this);
    }
    /** Clears all widgets and disables the view until the next update. */
    @Override
    public void reset() {
        table.select(0);
        table.showSelection();
        textSampleFraction.setText(""); //$NON-NLS-1$
        textPopulationSize.setText(""); //$NON-NLS-1$
        SWTUtil.disable(root);
    }
    /**
     * Reacts to model changes: captures the model reference on MODEL events,
     * then refreshes the display for any event that may affect the population model.
     */
    @Override
    public void update(final ModelEvent event) {
        if (event.part == ModelPart.MODEL) {
           this.model = (Model) event.data;
           update();
        } else if (event.part == ModelPart.INPUT ||
                   event.part == ModelPart.POPULATION_MODEL ||
                   event.part == ModelPart.OUTPUT ||
                   event.part == ModelPart.RESULT) {
           update();
        }
    }
    /**
     * Creates the required controls.
     *
     * @param parent
     */
    private void create(final Composite parent) {
        buttonUse = new Button(parent, SWT.CHECK);
        buttonUse.setText(Resources.getMessage("ViewRisksPopulationModel.3")); //$NON-NLS-1$
        buttonUse.setLayoutData(GridDataFactory.fillDefaults().span(2, 1).grab(true, false).create());
        buttonUse.addSelectionListener(new SelectionAdapter(){
            public void widgetSelected(SelectionEvent arg0) {
                // The checkbox meaning is inverted for the input view: checking it there
                // means "do NOT use the output model" (see the mirrored logic in update()).
                model.getRiskModel().setUseOutputPopulationModelIfAvailable(output ? buttonUse.getSelection()
                                                                                  : !buttonUse.getSelection());
                controller.update(new ModelEvent(controller, ModelPart.POPULATION_MODEL, null));
            }
        });
        Label lbl1 = new Label(parent, SWT.NONE);
        lbl1.setText(Resources.getMessage("ViewRisksPopulationModel.4")); //$NON-NLS-1$
        lbl1.setLayoutData(GridDataFactory.swtDefaults().align(SWT.LEFT, SWT.TOP).create());
        table = SWTUtil.createTableDynamic(root, SWT.SINGLE | SWT.BORDER | SWT.V_SCROLL | SWT.FULL_SELECTION | SWT.READ_ONLY);
        table.setLayoutData(new GridData(GridData.FILL_BOTH));
        table.setHeaderVisible(false);
        table.setLinesVisible(true);
        DynamicTableColumn c = new DynamicTableColumn(table, SWT.LEFT);
        c.setWidth("100%"); //$NON-NLS-1$ //$NON-NLS-2$
        c.setText(""); //$NON-NLS-1$
        c.setResizable(false);
        // One row per known region; the matching row is highlighted in update()
        for (Region region : Region.values()) {
            final TableItem item = new TableItem(table, SWT.NONE);
            item.setText(region.getName());
        }
        Label lbl2 = new Label(parent, SWT.NONE);
        lbl2.setText(Resources.getMessage("ViewRisksPopulationModel.5")); //$NON-NLS-1$
        textSampleFraction = new Text(parent, SWT.BORDER | SWT.SINGLE);
        textSampleFraction.setText("0"); //$NON-NLS-1$
        textSampleFraction.setLayoutData(SWTUtil.createFillHorizontallyGridData());
        textSampleFraction.setEditable(false);
        Label lbl3 = new Label(parent, SWT.NONE);
        lbl3.setText(Resources.getMessage("ViewRisksPopulationModel.7")); //$NON-NLS-1$
        textPopulationSize = new Text(parent, SWT.BORDER | SWT.SINGLE);
        textPopulationSize.setText("0"); //$NON-NLS-1$
        textPopulationSize.setLayoutData(SWTUtil.createFillHorizontallyGridData());
        textPopulationSize.setEditable(false);
        // Swallow interactive selection: the table is a read-only indicator, the
        // highlighted row is driven solely by the model (see update()).
        table.addListener(SWT.Selection, new Listener() {
            @Override
            public void handleEvent(Event event) {
                event.detail = SWT.NONE;
                event.type = SWT.None;
                event.doit = false;
                try
                {
                    // Suppress flicker while the selection is undone
                    table.setRedraw(false);
                    table.deselectAll();
                } finally {
                    table.setRedraw(true);
                    table.getParent().setFocus();
                }
            }
        });
    }
    /**
     * Is an output model available
     * @return
     */
    private boolean isOutputPopulationModelAvailable() {
        if (model == null || model.getOutputConfig() == null) { return false; }
        // Available as soon as any privacy criterion carries a population model
        for (PrivacyCriterion c : model.getOutputConfig().getCriteria()) {
            if (c.getPopulationModel() != null) {
                return true;
            }
        }
        return false;
    }
    /**
     * Updates the view.
     *
     * @param node
     */
    private void update() {
        // Check
        if (model == null || model.getInputConfig() == null ||
            model.getInputConfig().getInput() == null) {
            return;
        }
        // Batch all widget changes into a single repaint
        root.setRedraw(false);
        SWTUtil.enable(root);
        boolean mayUseOutput = isOutputPopulationModelAvailable() && model.getRiskModel().isUseOutputPopulationModelIfAvailable();
        // Mirror of the inversion applied in the checkbox listener above
        boolean enabled = output ? mayUseOutput : !mayUseOutput;
        this.buttonUse.setSelection(enabled);
        if (output && !isOutputPopulationModelAvailable()) {
            reset();
        } else {
            ARXPopulationModel popmodel = model.getInputPopulationModel();
            if (output && isOutputPopulationModelAvailable()) {
                popmodel = model.getOutputPopulationModel();
            }
            // Highlight the row matching the model's region via background color
            // (real selection is suppressed, see the SWT.Selection listener)
            table.deselectAll();
            TableItem selected = null;
            for (TableItem item : table.getItems()) {
                if (item.getText().equals(popmodel.getRegion().getName())) {
                    item.setBackground(table.getDisplay().getSystemColor(SWT.COLOR_LIST_SELECTION));
                    selected = item;
                } else {
                    item.setBackground(table.getDisplay().getSystemColor(SWT.COLOR_LIST_BACKGROUND));
                }
            }
            if (selected != null) {
                table.showItem(selected);
            }
            table.getParent().setFocus();
            // Sampling fraction = rows in the sample / configured population size
            DataHandle handle = model.getInputConfig().getInput().getHandle();
            long population = (long)popmodel.getPopulationSize();
            double fraction = (double)handle.getNumRows() / (double)population;
            textSampleFraction.setText(SWTUtil.getPrettyString(fraction));
            textSampleFraction.setToolTipText(String.valueOf(fraction));
            textSampleFraction.setEnabled(true);
            textPopulationSize.setText(SWTUtil.getPrettyString(population));
            textPopulationSize.setToolTipText(String.valueOf(population));
            textPopulationSize.setEnabled(true);
        }
        root.setRedraw(true);
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.iotsitewise.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/iotsitewise-2019-12-02/ListAssociatedAssets" target="_top">AWS
* API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListAssociatedAssetsResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {
    /** Summaries of the assets associated with the queried asset. */
    private java.util.List<AssociatedAssetsSummary> assetSummaries;
    /** Pagination token for the next page of results; null when no further pages exist. */
    private String nextToken;
    /**
     * Returns the summaries of the associated assets.
     *
     * @return the current list of associated-asset summaries, or null if unset.
     */
    public java.util.List<AssociatedAssetsSummary> getAssetSummaries() {
        return this.assetSummaries;
    }
    /**
     * Replaces the stored summaries with a fresh copy of the given collection.
     *
     * @param assetSummaries
     *        the summaries to store; null clears the list.
     */
    public void setAssetSummaries(java.util.Collection<AssociatedAssetsSummary> assetSummaries) {
        // Defensive copy so later mutation of the caller's collection has no effect here.
        this.assetSummaries = (assetSummaries == null)
                ? null
                : new java.util.ArrayList<AssociatedAssetsSummary>(assetSummaries);
    }
    /**
     * Appends the given summaries to the existing list, creating it when absent.
     * <p>
     * <b>NOTE:</b> this APPENDS; use {@link #setAssetSummaries(java.util.Collection)} or
     * {@link #withAssetSummaries(java.util.Collection)} to replace the existing values.
     * </p>
     *
     * @param assetSummaries
     *        summaries to append.
     * @return this object, so calls can be chained.
     */
    public ListAssociatedAssetsResult withAssetSummaries(AssociatedAssetsSummary... assetSummaries) {
        if (this.assetSummaries == null) {
            this.assetSummaries = new java.util.ArrayList<AssociatedAssetsSummary>(assetSummaries.length);
        }
        java.util.Collections.addAll(this.assetSummaries, assetSummaries);
        return this;
    }
    /**
     * Replaces the stored summaries (chaining variant of the setter).
     *
     * @param assetSummaries
     *        the summaries to store; null clears the list.
     * @return this object, so calls can be chained.
     */
    public ListAssociatedAssetsResult withAssetSummaries(java.util.Collection<AssociatedAssetsSummary> assetSummaries) {
        setAssetSummaries(assetSummaries);
        return this;
    }
    /**
     * Sets the pagination token for the next set of results.
     *
     * @param nextToken
     *        the token, or null if there are no additional results.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }
    /**
     * Returns the pagination token for the next set of results.
     *
     * @return the token, or null if there are no additional results.
     */
    public String getNextToken() {
        return this.nextToken;
    }
    /**
     * Sets the pagination token (chaining variant of the setter).
     *
     * @param nextToken
     *        the token, or null if there are no additional results.
     * @return this object, so calls can be chained.
     */
    public ListAssociatedAssetsResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }
    /**
     * Returns a string representation of this object; useful for testing and debugging.
     * Only fields that are set appear in the output.
     *
     * @return a string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getAssetSummaries() != null) {
            sb.append("AssetSummaries: ").append(getAssetSummaries()).append(",");
        }
        if (getNextToken() != null) {
            sb.append("NextToken: ").append(getNextToken());
        }
        return sb.append("}").toString();
    }
    /** Two results are equal when both properties are (null-safely) equal. */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ListAssociatedAssetsResult)) {
            return false;
        }
        ListAssociatedAssetsResult that = (ListAssociatedAssetsResult) obj;
        return java.util.Objects.equals(getAssetSummaries(), that.getAssetSummaries())
                && java.util.Objects.equals(getNextToken(), that.getNextToken());
    }
    /** Hash code combining both properties (31-based, null counts as 0). */
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + java.util.Objects.hashCode(getAssetSummaries());
        result = prime * result + java.util.Objects.hashCode(getNextToken());
        return result;
    }
    /** Shallow copy via {@link Object#clone()}; this class is always Cloneable. */
    @Override
    public ListAssociatedAssetsResult clone() {
        try {
            return (ListAssociatedAssetsResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
        }
    }
}
| |
/*
* Copyright 2018 Amazon.com, Inc. or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License")
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.spinnaker.clouddriver.lambda.provider.agent;
import static com.netflix.spinnaker.cats.agent.AgentDataType.Authority.AUTHORITATIVE;
import static com.netflix.spinnaker.clouddriver.lambda.cache.Keys.Namespace.IAM_ROLE;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.identitymanagement.AmazonIdentityManagement;
import com.amazonaws.services.identitymanagement.model.ListRolesRequest;
import com.amazonaws.services.identitymanagement.model.ListRolesResult;
import com.amazonaws.services.identitymanagement.model.Role;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.netflix.spinnaker.cats.agent.AgentDataType;
import com.netflix.spinnaker.cats.agent.CacheResult;
import com.netflix.spinnaker.cats.agent.CachingAgent;
import com.netflix.spinnaker.cats.agent.DefaultCacheResult;
import com.netflix.spinnaker.cats.cache.CacheData;
import com.netflix.spinnaker.cats.cache.DefaultCacheData;
import com.netflix.spinnaker.cats.provider.ProviderCache;
import com.netflix.spinnaker.clouddriver.aws.provider.AwsProvider;
import com.netflix.spinnaker.clouddriver.aws.security.AmazonClientProvider;
import com.netflix.spinnaker.clouddriver.aws.security.NetflixAmazonCredentials;
import com.netflix.spinnaker.clouddriver.cache.CustomScheduledAgent;
import com.netflix.spinnaker.clouddriver.lambda.cache.Keys;
import com.netflix.spinnaker.clouddriver.lambda.cache.model.IamRole;
import java.io.IOException;
import java.net.URLDecoder;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Caching agent that periodically lists all IAM roles in an AWS account and mirrors them
 * into the provider cache, evicting roles that no longer exist.
 */
public class IamRoleCachingAgent implements CachingAgent, CustomScheduledAgent {
  /** How often a cache refresh is scheduled. */
  private static final long POLL_INTERVAL_MILLIS = TimeUnit.MINUTES.toMillis(30);
  /** Upper bound for a single refresh run. */
  private static final long DEFAULT_TIMEOUT_MILLIS = TimeUnit.MINUTES.toMillis(5);
  private final Logger log = LoggerFactory.getLogger(getClass());
  /** This agent is the authoritative source for the IAM_ROLE namespace. */
  private final Collection<AgentDataType> types =
      Collections.singletonList(AUTHORITATIVE.forType(IAM_ROLE.toString()));
  private final ObjectMapper objectMapper;
  private final AmazonClientProvider amazonClientProvider;
  private final NetflixAmazonCredentials account;
  private final String accountName;
  IamRoleCachingAgent(
      ObjectMapper objectMapper,
      NetflixAmazonCredentials account,
      AmazonClientProvider amazonClientProvider) {
    this.objectMapper = objectMapper;
    this.account = account;
    this.accountName = account.getName();
    this.amazonClientProvider = amazonClientProvider;
  }
  @Override
  public String getAgentType() {
    return accountName + "/" + getClass().getSimpleName();
  }
  @Override
  public String getProviderName() {
    return AwsProvider.PROVIDER_NAME;
  }
  @Override
  public Collection<AgentDataType> getProvidedDataTypes() {
    return types;
  }
  @Override
  public long getPollIntervalMillis() {
    return POLL_INTERVAL_MILLIS;
  }
  @Override
  public long getTimeoutMillis() {
    return DEFAULT_TIMEOUT_MILLIS;
  }
  /**
   * Lists the account's IAM roles and produces the fresh cache entries plus the set of
   * stale keys (previously cached for this account, no longer present) to evict.
   */
  @Override
  public CacheResult loadData(ProviderCache providerCache) {
    AmazonIdentityManagement iam =
        amazonClientProvider.getIam(account, Regions.DEFAULT_REGION.getName(), false);
    Set<IamRole> cacheableRoles = fetchIamRoles(iam, accountName);
    Map<String, Collection<CacheData>> newDataMap = generateFreshData(cacheableRoles);
    Collection<CacheData> newData = newDataMap.get(IAM_ROLE.toString());
    // Only keys that belong to this agent's account are candidates for eviction.
    Set<String> oldKeys =
        providerCache.getAll(IAM_ROLE.toString()).stream()
            .map(CacheData::getId)
            .filter(this::keyAccountFilter)
            .collect(Collectors.toSet());
    Map<String, Collection<String>> evictionsByKey = computeEvictableData(newData, oldKeys);
    logUpcomingActions(newDataMap, evictionsByKey);
    return new DefaultCacheResult(newDataMap, evictionsByKey);
  }
  /** Logs how many roles are about to be cached and, when non-zero, evicted. */
  private void logUpcomingActions(
      Map<String, Collection<CacheData>> newDataMap,
      Map<String, Collection<String>> evictionsByKey) {
    // Parameterized SLF4J logging instead of eager String.format.
    log.info(
        "Caching {} IAM roles in {} for account {}",
        newDataMap.get(IAM_ROLE.toString()).size(),
        getAgentType(),
        accountName);
    if (!evictionsByKey.get(IAM_ROLE.toString()).isEmpty()) {
      log.info(
          "Evicting {} IAM roles in {} for account {}",
          evictionsByKey.get(IAM_ROLE.toString()).size(),
          getAgentType(),
          accountName);
    }
  }
  /**
   * Computes the keys to evict: every old key that is absent from the fresh data.
   *
   * @param newData freshly generated cache entries
   * @param oldKeys keys currently present in the cache for this account
   * @return a single-entry map from the IAM_ROLE namespace to the evictable keys
   */
  private Map<String, Collection<String>> computeEvictableData(
      Collection<CacheData> newData, Collection<String> oldKeys) {
    Set<String> newKeys = newData.stream().map(CacheData::getId).collect(Collectors.toSet());
    Set<String> evictedKeys = new HashSet<>(oldKeys);
    evictedKeys.removeAll(newKeys);
    Map<String, Collection<String>> evictionsByKey = new HashMap<>();
    evictionsByKey.put(IAM_ROLE.toString(), evictedKeys);
    return evictionsByKey;
  }
  /** Converts fetched roles into keyed cache entries, all under the IAM_ROLE namespace. */
  private Map<String, Collection<CacheData>> generateFreshData(Set<IamRole> cacheableRoles) {
    Collection<CacheData> dataPoints = new HashSet<>();
    Map<String, Collection<CacheData>> newDataMap = new HashMap<>();
    for (IamRole iamRole : cacheableRoles) {
      String key = Keys.getIamRoleKey(accountName, iamRole.getName());
      Map<String, Object> attributes = convertIamRoleToAttributes(iamRole);
      CacheData data = new DefaultCacheData(key, attributes, Collections.emptyMap());
      dataPoints.add(data);
    }
    newDataMap.put(IAM_ROLE.toString(), dataPoints);
    return newDataMap;
  }
  /**
   * Fetches every IAM role in the account, following ListRoles pagination via the
   * truncation marker until all pages are consumed.
   */
  private Set<IamRole> fetchIamRoles(AmazonIdentityManagement iam, String accountName) {
    Set<IamRole> cacheableRoles = new HashSet<>();
    String marker = null;
    do {
      ListRolesRequest request = new ListRolesRequest();
      if (marker != null) {
        request.setMarker(marker);
      }
      ListRolesResult listRolesResult = iam.listRoles(request);
      List<Role> roles = listRolesResult.getRoles();
      for (Role role : roles) {
        cacheableRoles.add(
            new IamRole(
                role.getArn(),
                role.getRoleName(),
                accountName,
                getTrustedEntities(role.getAssumeRolePolicyDocument())));
      }
      // Continue only while the service reports more pages.
      marker = listRolesResult.isTruncated() ? listRolesResult.getMarker() : null;
    } while (marker != null && marker.length() != 0);
    return cacheableRoles;
  }
  /** True when the cache key parses and belongs to this agent's account. */
  private boolean keyAccountFilter(String key) {
    Map<String, String> keyParts = Keys.parse(key);
    // accountName is never null; comparing in this direction also tolerates
    // keys whose parsed map lacks an "account" entry (previously an NPE).
    return keyParts != null && accountName.equals(keyParts.get("account"));
  }
  /**
   * Extracts the trusted entities (principals allowed to sts:AssumeRole) from a role's
   * URL-encoded trust policy document. Returns an empty set when the document cannot
   * be decoded or parsed; the failure is logged, not rethrown.
   */
  private Set<IamTrustRelationship> getTrustedEntities(String urlEncodedPolicyDocument) {
    Set<IamTrustRelationship> trustedEntities = new HashSet<>();
    try {
      // Decode with an explicit charset: the no-arg URLDecoder.decode(String) is
      // deprecated because it silently uses the platform default encoding. The
      // UnsupportedEncodingException it declares is an IOException, handled below.
      String decodedPolicyDocument = URLDecoder.decode(urlEncodedPolicyDocument, "UTF-8");
      Map<String, Object> policyDocument = objectMapper.readValue(decodedPolicyDocument, Map.class);
      List<Map<String, Object>> statementItems =
          (List<Map<String, Object>>) policyDocument.get("Statement");
      for (Map<String, Object> statementItem : statementItems) {
        if ("sts:AssumeRole".equals(statementItem.get("Action"))) {
          Map<String, Object> principal = (Map<String, Object>) statementItem.get("Principal");
          for (Map.Entry<String, Object> principalEntry : principal.entrySet()) {
            Object value = principalEntry.getValue();
            // A principal type may map to either a single value or a list of values.
            if (value instanceof List) {
              for (Object o : (List<?>) value) {
                trustedEntities.add(
                    new IamTrustRelationship(principalEntry.getKey(), o.toString()));
              }
            } else {
              trustedEntities.add(
                  new IamTrustRelationship(principalEntry.getKey(), value.toString()));
            }
          }
        }
      }
    } catch (IOException e) {
      log.error(
          "Unable to extract trusted entities (policyDocument: {})", urlEncodedPolicyDocument, e);
    }
    return trustedEntities;
  }
  /** Flattens an IamRole into the attribute map stored in the cache. */
  private static Map<String, Object> convertIamRoleToAttributes(IamRole iamRole) {
    Map<String, Object> attributes = new HashMap<>();
    attributes.put("name", iamRole.getName());
    attributes.put("accountName", iamRole.getAccountName());
    attributes.put("arn", iamRole.getId());
    attributes.put("trustRelationships", iamRole.getTrustRelationships());
    return attributes;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter.launcher;
import org.apache.commons.lang3.StringUtils;
import org.apache.zeppelin.conf.ZeppelinConfiguration;
import org.apache.zeppelin.interpreter.recovery.RecoveryStorage;
import org.apache.zeppelin.interpreter.remote.RemoteInterpreterUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
/**
* Spark specific launcher.
*/
public class SparkInterpreterLauncher extends ShellScriptLauncher {
  private static final Logger LOGGER = LoggerFactory.getLogger(SparkInterpreterLauncher.class);
  public SparkInterpreterLauncher(ZeppelinConfiguration zConf, RecoveryStorage recoveryStorage) {
    super(zConf, recoveryStorage);
  }
  /**
   * Builds the environment for launching the Spark interpreter process:
   * copies env-style interpreter properties, assembles spark-submit options
   * into ZEPPELIN_SPARK_CONF, and forwards SPARK_HOME/kerberos settings.
   * NOTE(review): {@code properties} and {@code zConf} are inherited from
   * ShellScriptLauncher — confirm their population before changing this.
   */
  @Override
  protected Map<String, String> buildEnvFromProperties(InterpreterLaunchContext context) {
    Map<String, String> env = new HashMap<String, String>();
    Properties sparkProperties = new Properties();
    String sparkMaster = getSparkMaster(properties);
    for (String key : properties.stringPropertyNames()) {
      // Properties that look like env vars are exported directly;
      // spark.* properties become --conf options (shell-quoted).
      if (RemoteInterpreterUtils.isEnvString(key)) {
        env.put(key, properties.getProperty(key));
      }
      if (isSparkConf(key, properties.getProperty(key))) {
        sparkProperties.setProperty(key, toShellFormat(properties.getProperty(key)));
      }
    }
    setupPropertiesForPySpark(sparkProperties);
    setupPropertiesForSparkR(sparkProperties);
    if (isYarnMode() && getDeployMode().equals("cluster")) {
      env.put("ZEPPELIN_SPARK_YARN_CLUSTER", "true");
    }
    StringBuilder sparkConfBuilder = new StringBuilder();
    if (sparkMaster != null) {
      sparkConfBuilder.append(" --master " + sparkMaster);
    }
    if (isYarnMode() && getDeployMode().equals("cluster")) {
      // Ship the yarn-cluster log4j config with the application
      sparkConfBuilder.append(" --files " + zConf.getConfDir() + "/log4j_yarn_cluster.properties");
    }
    for (String name : sparkProperties.stringPropertyNames()) {
      sparkConfBuilder.append(" --conf " + name + "=" + sparkProperties.getProperty(name));
    }
    // Impersonation is on by default when requested; the env var can opt out with "false"
    String useProxyUserEnv = System.getenv("ZEPPELIN_IMPERSONATE_SPARK_PROXY_USER");
    if (context.getOption().isUserImpersonate() && (StringUtils.isBlank(useProxyUserEnv) ||
        !useProxyUserEnv.equals("false"))) {
      sparkConfBuilder.append(" --proxy-user " + context.getUserName());
    }
    env.put("ZEPPELIN_SPARK_CONF", sparkConfBuilder.toString());
    // set these env in the order of
    // 1. interpreter-setting
    // 2. zeppelin-env.sh
    // It is encouraged to set env in interpreter setting, but just for backward compatability,
    // we also fallback to zeppelin-env.sh if it is not specified in interpreter setting.
    for (String envName : new String[]{"SPARK_HOME", "SPARK_CONF_DIR", "HADOOP_CONF_DIR"}) {
      String envValue = getEnv(envName);
      if (envValue != null) {
        env.put(envName, envValue);
      }
    }
    String keytab = zConf.getString(ZeppelinConfiguration.ConfVars.ZEPPELIN_SERVER_KERBEROS_KEYTAB);
    String principal =
        zConf.getString(ZeppelinConfiguration.ConfVars.ZEPPELIN_SERVER_KERBEROS_PRINCIPAL);
    if (!StringUtils.isBlank(keytab) && !StringUtils.isBlank(principal)) {
      env.put("ZEPPELIN_SERVER_KERBEROS_KEYTAB", keytab);
      env.put("ZEPPELIN_SERVER_KERBEROS_PRINCIPAL", principal);
      LOGGER.info("Run Spark under secure mode with keytab: " + keytab +
          ", principal: " + principal);
    } else {
      LOGGER.info("Run Spark under non-secure mode as no keytab and principal is specified");
    }
    LOGGER.debug("buildEnvFromProperties: " + env);
    return env;
  }
  /**
   * get environmental variable in the following order
   *
   * 1. interpreter setting
   * 2. zeppelin-env.sh
   *
   */
  private String getEnv(String envName) {
    String env = properties.getProperty(envName);
    if (env == null) {
      env = System.getenv(envName);
    }
    return env;
  }
  // A property is a Spark conf when it is named spark.* and has a non-empty value.
  private boolean isSparkConf(String key, String value) {
    return !StringUtils.isEmpty(key) && key.startsWith("spark.") && !StringUtils.isEmpty(value);
  }
  // Marks the app as a Python app for YARN so the Python archives are distributed.
  private void setupPropertiesForPySpark(Properties sparkProperties) {
    if (isYarnMode()) {
      sparkProperties.setProperty("spark.yarn.isPython", "true");
    }
  }
  // Appends to a comma-separated Spark property instead of overwriting it.
  private void mergeSparkProperty(Properties sparkProperties, String propertyName,
                                  String propertyValue) {
    if (sparkProperties.containsKey(propertyName)) {
      String oldPropertyValue = sparkProperties.getProperty(propertyName);
      sparkProperties.setProperty(propertyName, oldPropertyValue + "," + propertyValue);
    } else {
      sparkProperties.setProperty(propertyName, propertyValue);
    }
  }
  /**
   * Locates sparkr.zip (under SPARK_HOME/R/lib, or Zeppelin's bundled copy in
   * local mode) and registers it as a YARN archive so SparkR works on executors.
   * Throws when SPARK_HOME is missing for a non-local master.
   */
  private void setupPropertiesForSparkR(Properties sparkProperties) {
    String sparkHome = getEnv("SPARK_HOME");
    File sparkRBasePath = null;
    if (sparkHome == null) {
      if (!getSparkMaster(properties).startsWith("local")) {
        throw new RuntimeException("SPARK_HOME is not specified in interpreter-setting" +
            " for non-local mode, if you specify it in zeppelin-env.sh, please move that into " +
            " interpreter setting");
      }
      String zeppelinHome = zConf.getString(ZeppelinConfiguration.ConfVars.ZEPPELIN_HOME);
      sparkRBasePath = new File(zeppelinHome,
          "interpreter" + File.separator + "spark" + File.separator + "R");
    } else {
      sparkRBasePath = new File(sparkHome, "R" + File.separator + "lib");
    }
    File sparkRPath = new File(sparkRBasePath, "sparkr.zip");
    if (sparkRPath.exists() && sparkRPath.isFile()) {
      mergeSparkProperty(sparkProperties, "spark.yarn.dist.archives",
          sparkRPath.getAbsolutePath() + "#sparkr");
    } else {
      LOGGER.warn("sparkr.zip is not found, SparkR may not work.");
    }
  }
  /**
   * Order to look for spark master
   * 1. master in interpreter setting
   * 2. spark.master interpreter setting
   * 3. use local[*]
   * @param properties
   * @return never null
   */
  private String getSparkMaster(Properties properties) {
    String master = properties.getProperty("master");
    if (master == null) {
      master = properties.getProperty("spark.master");
      if (master == null) {
        master = "local[*]";
      }
    }
    return master;
  }
  /**
   * Derives the spark-submit deploy mode from the master setting; legacy
   * yarn-client/yarn-cluster masters imply the mode, plain "yarn" requires
   * spark.submit.deployMode to be set explicitly.
   */
  private String getDeployMode() {
    String master = getSparkMaster(properties);
    if (master.equals("yarn-client")) {
      return "client";
    } else if (master.equals("yarn-cluster")) {
      return "cluster";
    } else if (master.startsWith("local")) {
      return "client";
    } else {
      String deployMode = properties.getProperty("spark.submit.deployMode");
      if (deployMode == null) {
        throw new RuntimeException("master is set as yarn, but spark.submit.deployMode " +
            "is not specified");
      }
      if (!deployMode.equals("client") && !deployMode.equals("cluster")) {
        throw new RuntimeException("Invalid value for spark.submit.deployMode: " + deployMode);
      }
      return deployMode;
    }
  }
  // Covers "yarn", "yarn-client" and "yarn-cluster".
  private boolean isYarnMode() {
    return getSparkMaster(properties).startsWith("yarn");
  }
  /**
   * Quotes a property value for safe interpolation into the shell command line.
   * Values containing both quote kinds cannot be quoted with this scheme.
   */
  private String toShellFormat(String value) {
    if (value.contains("'") && value.contains("\"")) {
      throw new RuntimeException("Spark property value could not contain both \" and '");
    } else if (value.contains("'")) {
      return "\"" + value + "\"";
    } else {
      return "'" + value + "'";
    }
  }
}
| |
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver15;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import com.google.common.collect.ImmutableList;
import java.util.Set;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
class OFBsnControllerConnectionsReplyVer15 implements OFBsnControllerConnectionsReply {
private static final Logger logger = LoggerFactory.getLogger(OFBsnControllerConnectionsReplyVer15.class);
// version: 1.5
final static byte WIRE_VERSION = 6;
final static int MINIMUM_LENGTH = 16;
private final static long DEFAULT_XID = 0x0L;
private final static List<OFBsnControllerConnection> DEFAULT_CONNECTIONS = ImmutableList.<OFBsnControllerConnection>of();
// OF message fields
private final long xid;
private final List<OFBsnControllerConnection> connections;
//
// Immutable default instance
final static OFBsnControllerConnectionsReplyVer15 DEFAULT = new OFBsnControllerConnectionsReplyVer15(
DEFAULT_XID, DEFAULT_CONNECTIONS
);
    // package private constructor - used by readers, builders, and factory
    // Stores the connections list as-is (no copy); generated callers pass
    // immutable lists — NOTE(review): confirm before reusing elsewhere.
    OFBsnControllerConnectionsReplyVer15(long xid, List<OFBsnControllerConnection> connections) {
        if(connections == null) {
            throw new NullPointerException("OFBsnControllerConnectionsReplyVer15: property connections cannot be null");
        }
        this.xid = xid;
        this.connections = connections;
    }
    // Accessors for OF message fields
    // This generated message is fixed to OpenFlow 1.5.
    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_15;
    }
    // BSN extension messages are carried as EXPERIMENTER messages.
    @Override
    public OFType getType() {
        return OFType.EXPERIMENTER;
    }
    @Override
    public long getXid() {
        return xid;
    }
    // Big Switch Networks experimenter ID.
    @Override
    public long getExperimenter() {
        return 0x5c16c7L;
    }
    // BSN subtype identifying the controller-connections reply.
    @Override
    public long getSubtype() {
        return 0x39L;
    }
    @Override
    public List<OFBsnControllerConnection> getConnections() {
        return connections;
    }
    // Returns a builder pre-populated from this message's field values.
    public OFBsnControllerConnectionsReply.Builder createBuilder() {
        return new BuilderWithParent(this);
    }
    // Builder that copies unset fields from an existing message: each setter
    // records both the value and a "set" flag, and build() falls back to the
    // parent message for any field whose flag is still false.
    static class BuilderWithParent implements OFBsnControllerConnectionsReply.Builder {
        final OFBsnControllerConnectionsReplyVer15 parentMessage;
        // OF message fields
        private boolean xidSet;
        private long xid;
        private boolean connectionsSet;
        private List<OFBsnControllerConnection> connections;
        BuilderWithParent(OFBsnControllerConnectionsReplyVer15 parentMessage) {
            this.parentMessage = parentMessage;
        }
    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_15;
    }
    @Override
    public OFType getType() {
        return OFType.EXPERIMENTER;
    }
    @Override
    public long getXid() {
        return xid;
    }
    @Override
    public OFBsnControllerConnectionsReply.Builder setXid(long xid) {
        this.xid = xid;
        this.xidSet = true;
        return this;
    }
    @Override
    public long getExperimenter() {
        return 0x5c16c7L;
    }
    @Override
    public long getSubtype() {
        return 0x39L;
    }
    @Override
    public List<OFBsnControllerConnection> getConnections() {
        return connections;
    }
    @Override
    public OFBsnControllerConnectionsReply.Builder setConnections(List<OFBsnControllerConnection> connections) {
        this.connections = connections;
        this.connectionsSet = true;
        return this;
    }
        // Assemble the immutable message, inheriting unset fields from the parent.
        @Override
        public OFBsnControllerConnectionsReply build() {
                long xid = this.xidSet ? this.xid : parentMessage.xid;
                List<OFBsnControllerConnection> connections = this.connectionsSet ? this.connections : parentMessage.connections;
                if(connections == null)
                    throw new NullPointerException("Property connections must not be null");
                //
                return new OFBsnControllerConnectionsReplyVer15(
                    xid,
                    connections
                );
        }
    }
/** Stand-alone builder: any field not explicitly set falls back to the class defaults (DEFAULT_XID, DEFAULT_CONNECTIONS). */
static class Builder implements OFBsnControllerConnectionsReply.Builder {
    // OF message fields ("xxxSet" flags record which fields the caller assigned explicitly)
    private boolean xidSet;
    private long xid;
    private boolean connectionsSet;
    private List<OFBsnControllerConnection> connections;
    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_15;
    }
    @Override
    public OFType getType() {
        return OFType.EXPERIMENTER;
    }
    @Override
    public long getXid() {
        return xid;
    }
    @Override
    public OFBsnControllerConnectionsReply.Builder setXid(long xid) {
        this.xid = xid;
        this.xidSet = true;
        return this;
    }
    @Override
    public long getExperimenter() {
        return 0x5c16c7L;
    }
    @Override
    public long getSubtype() {
        return 0x39L;
    }
    @Override
    public List<OFBsnControllerConnection> getConnections() {
        return connections;
    }
    @Override
    public OFBsnControllerConnectionsReply.Builder setConnections(List<OFBsnControllerConnection> connections) {
        this.connections = connections;
        this.connectionsSet = true;
        return this;
    }
    //
    /** Assembles the immutable message, substituting defaults for any unset field. */
    @Override
    public OFBsnControllerConnectionsReply build() {
        long xid = this.xidSet ? this.xid : DEFAULT_XID;
        List<OFBsnControllerConnection> connections = this.connectionsSet ? this.connections : DEFAULT_CONNECTIONS;
        if(connections == null)
            throw new NullPointerException("Property connections must not be null");
        return new OFBsnControllerConnectionsReplyVer15(
                xid,
                connections
            );
    }
}
final static Reader READER = new Reader();
/** Deserializes an OFBsnControllerConnectionsReply (OF 1.5 wire format) from a ByteBuf. */
static class Reader implements OFMessageReader<OFBsnControllerConnectionsReply> {
    @Override
    public OFBsnControllerConnectionsReply readFrom(ByteBuf bb) throws OFParseError {
        int start = bb.readerIndex();
        // fixed value property version == 6
        byte version = bb.readByte();
        if(version != (byte) 0x6)
            throw new OFParseError("Wrong version: Expected=OFVersion.OF_15(6), got="+version);
        // fixed value property type == 4
        byte type = bb.readByte();
        if(type != (byte) 0x4)
            throw new OFParseError("Wrong type: Expected=OFType.EXPERIMENTER(4), got="+type);
        int length = U16.f(bb.readShort());
        if(length < MINIMUM_LENGTH)
            throw new OFParseError("Wrong length: Expected to be >= " + MINIMUM_LENGTH + ", was: " + length);
        // Partial message: rewind the reader index and signal "try again later" with null.
        if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
            // Buffer does not have all data yet
            bb.readerIndex(start);
            return null;
        }
        if(logger.isTraceEnabled())
            logger.trace("readFrom - length={}", length);
        long xid = U32.f(bb.readInt());
        // fixed value property experimenter == 0x5c16c7L
        int experimenter = bb.readInt();
        if(experimenter != 0x5c16c7)
            throw new OFParseError("Wrong experimenter: Expected=0x5c16c7L(0x5c16c7L), got="+experimenter);
        // fixed value property subtype == 0x39L
        int subtype = bb.readInt();
        if(subtype != 0x39)
            throw new OFParseError("Wrong subtype: Expected=0x39L(0x39L), got="+subtype);
        // Whatever remains of the declared message length is the connection list.
        List<OFBsnControllerConnection> connections = ChannelUtils.readList(bb, length - (bb.readerIndex() - start), OFBsnControllerConnectionVer15.READER);
        OFBsnControllerConnectionsReplyVer15 bsnControllerConnectionsReplyVer15 = new OFBsnControllerConnectionsReplyVer15(
                xid,
                connections
            );
        if(logger.isTraceEnabled())
            logger.trace("readFrom - read={}", bsnControllerConnectionsReplyVer15);
        return bsnControllerConnectionsReplyVer15;
    }
}
/** Feeds this message's identity-relevant bytes into a hashing sink via the shared FUNNEL. */
public void putTo(PrimitiveSink sink) {
    FUNNEL.funnel(this, sink);
}
final static OFBsnControllerConnectionsReplyVer15Funnel FUNNEL = new OFBsnControllerConnectionsReplyVer15Funnel();
/** Funnel that streams the message's fields (fixed header values included, length excluded) into a PrimitiveSink. */
static class OFBsnControllerConnectionsReplyVer15Funnel implements Funnel<OFBsnControllerConnectionsReplyVer15> {
    private static final long serialVersionUID = 1L;
    @Override
    public void funnel(OFBsnControllerConnectionsReplyVer15 message, PrimitiveSink sink) {
        // fixed value property version = 6
        sink.putByte((byte) 0x6);
        // fixed value property type = 4
        sink.putByte((byte) 0x4);
        // FIXME: skip funnel of length
        sink.putLong(message.xid);
        // fixed value property experimenter = 0x5c16c7L
        sink.putInt(0x5c16c7);
        // fixed value property subtype = 0x39L
        sink.putInt(0x39);
        FunnelUtils.putList(message.connections, sink);
    }
}
/** Serializes this message to the wire format via the shared WRITER. */
public void writeTo(ByteBuf bb) {
    WRITER.write(bb, this);
}
final static Writer WRITER = new Writer();
/** Serializes an OFBsnControllerConnectionsReplyVer15 to a ByteBuf, back-patching the length field. */
static class Writer implements OFMessageWriter<OFBsnControllerConnectionsReplyVer15> {
    @Override
    public void write(ByteBuf bb, OFBsnControllerConnectionsReplyVer15 message) {
        int startIndex = bb.writerIndex();
        // fixed value property version = 6
        bb.writeByte((byte) 0x6);
        // fixed value property type = 4
        bb.writeByte((byte) 0x4);
        // length is length of variable message, will be updated at the end
        int lengthIndex = bb.writerIndex();
        bb.writeShort(U16.t(0));
        bb.writeInt(U32.t(message.xid));
        // fixed value property experimenter = 0x5c16c7L
        bb.writeInt(0x5c16c7);
        // fixed value property subtype = 0x39L
        bb.writeInt(0x39);
        ChannelUtils.writeList(bb, message.connections);
        // update length field (total bytes written since startIndex)
        int length = bb.writerIndex() - startIndex;
        bb.setShort(lengthIndex, length);
    }
}
/** Renders the message as "OFBsnControllerConnectionsReplyVer15(xid=..., connections=...)". */
@Override
public String toString() {
    return "OFBsnControllerConnectionsReplyVer15("
            + "xid=" + xid
            + ", "
            + "connections=" + connections
            + ")";
}
/** Value equality: same concrete class, same xid, and equal (or both-null) connection lists. */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
        return false;
    }
    OFBsnControllerConnectionsReplyVer15 other = (OFBsnControllerConnectionsReplyVer15) obj;
    if (xid != other.xid) {
        return false;
    }
    // Null-safe comparison of the connections list.
    return connections == null ? other.connections == null : connections.equals(other.connections);
}
/** Hash consistent with {@link #equals(Object)}: combines xid and connections. */
@Override
public int hashCode() {
    final int prime = 31;
    int result = 1;
    // Fold the 64-bit xid into 32 bits, then combine via the standard prime-multiply scheme.
    // Fix: the previous code read "result = prime * (int) (xid ^ ...)", dropping the running
    // "result" term and thereby discarding the seed from the combining chain.
    result = prime * result + (int) (xid ^ (xid >>> 32));
    result = prime * result + ((connections == null) ? 0 : connections.hashCode());
    return result;
}
}
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.mondrianinput;
import java.util.List;
import java.util.Map;
import org.apache.commons.vfs2.FileObject;
import org.pentaho.di.core.CheckResult;
import org.pentaho.di.core.CheckResultInterface;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleStepException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.vfs.KettleVFS;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.resource.ResourceDefinition;
import org.pentaho.di.resource.ResourceNamingInterface;
import org.pentaho.di.shared.SharedObjectInterface;
import org.pentaho.di.trans.DatabaseImpact;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
import org.pentaho.metastore.api.IMetaStore;
import org.w3c.dom.Node;
/*
* Created on 2-jun-2003
*
*/
/**
 * Step metadata for the Mondrian (MDX) input step. Holds the database connection, the MDX
 * query, the catalog location, an optional Mondrian role, and a flag controlling whether
 * Kettle variables inside the MDX are substituted before execution.
 */
public class MondrianInputMeta extends BaseStepMeta implements StepMetaInterface {
  private DatabaseMeta databaseMeta;
  private String sql;
  private String catalog;
  private String role;
  private boolean variableReplacementActive;

  public MondrianInputMeta() {
    super();
  }

  /**
   * @return Returns the database.
   */
  public DatabaseMeta getDatabaseMeta() {
    return databaseMeta;
  }

  /**
   * @param database
   *          The database to set.
   */
  public void setDatabaseMeta( DatabaseMeta database ) {
    this.databaseMeta = database;
  }

  /**
   * @return Returns the variableReplacementActive.
   */
  public boolean isVariableReplacementActive() {
    return variableReplacementActive;
  }

  /**
   * @param variableReplacementActive
   *          The variableReplacementActive to set.
   */
  public void setVariableReplacementActive( boolean variableReplacementActive ) {
    this.variableReplacementActive = variableReplacementActive;
  }

  /**
   * @return Returns the sql (MDX query text).
   */
  public String getSQL() {
    return sql;
  }

  /**
   * @param sql
   *          The sql (MDX query text) to set.
   */
  public void setSQL( String sql ) {
    this.sql = sql;
  }

  /** Loads this step's settings from the transformation XML node. */
  public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException {
    readData( stepnode, databases );
  }

  public Object clone() {
    MondrianInputMeta retval = (MondrianInputMeta) super.clone();
    return retval;
  }

  /** Reads connection name, MDX, catalog, role and the variables flag from the step node. */
  private void readData( Node stepnode, List<? extends SharedObjectInterface> databases ) throws KettleXMLException {
    try {
      databaseMeta = DatabaseMeta.findDatabase( databases, XMLHandler.getTagValue( stepnode, "connection" ) );
      sql = XMLHandler.getTagValue( stepnode, "sql" );
      catalog = XMLHandler.getTagValue( stepnode, "catalog" );
      role = XMLHandler.getTagValue( stepnode, "role" );
      variableReplacementActive = "Y".equals( XMLHandler.getTagValue( stepnode, "variables_active" ) );
    } catch ( Exception e ) {
      throw new KettleXMLException( "Unable to load step info from XML", e );
    }
  }

  /** Resets the step to a demo MDX query against the Foodmart-style [Sales] cube. */
  public void setDefault() {
    databaseMeta = null;
    sql =
      "select\n"
        + " {([Gender].[F], [Measures].[Unit Sales]),\n" + " ([Gender].[M], [Measures].[Store Sales]),\n"
        + " ([Gender].[F], [Measures].[Unit Sales])} on columns,\n"
        + " CrossJoin([Marital Status].Members,\n" + " [Product].Children) on rows\n"
        + "from [Sales]";
    variableReplacementActive = false;
  }

  /**
   * Determines the output row layout by running (or reusing a cached layout of) the MDX query.
   * Silently returns when no connection is configured.
   */
  public void getFields( RowMetaInterface row, String origin, RowMetaInterface[] info, StepMeta nextStep,
    VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
    if ( databaseMeta == null ) {
      return; // TODO: throw an exception here
    }
    RowMetaInterface add = null;
    try {
      String mdx = getSQL();
      if ( isVariableReplacementActive() ) {
        mdx = space.environmentSubstitute( mdx );
      }
      MondrianHelper helper = new MondrianHelper( databaseMeta, catalog, mdx, space );
      // Prefer a cached row layout; only open the (expensive) query when there is none.
      add = helper.getCachedRowMeta();
      if ( add == null ) {
        helper.openQuery();
        helper.createRectangularOutput();
        add = helper.getOutputRowMeta();
      }
    } catch ( KettleDatabaseException dbe ) {
      throw new KettleStepException( "Unable to get query result for MDX query: " + Const.CR + sql, dbe );
    }
    // Set the origin
    //
    for ( int i = 0; i < add.size(); i++ ) {
      ValueMetaInterface v = add.getValueMeta( i );
      v.setOrigin( origin );
    }
    row.addRowMeta( add );
  }

  /** Serializes this step's settings to transformation XML. */
  public String getXML() {
    StringBuilder retval = new StringBuilder();
    retval.append( "    "
      + XMLHandler.addTagValue( "connection", databaseMeta == null ? "" : databaseMeta.getName() ) );
    retval.append( "    " + XMLHandler.addTagValue( "sql", sql ) );
    retval.append( "    " + XMLHandler.addTagValue( "catalog", catalog ) );
    retval.append( "    " + XMLHandler.addTagValue( "role", role ) );
    retval.append( "    " + XMLHandler.addTagValue( "variables_active", variableReplacementActive ) );
    return retval.toString();
  }

  /** Loads this step's settings from the repository. */
  public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException {
    try {
      databaseMeta = rep.loadDatabaseMetaFromStepAttribute( id_step, "id_connection", databases );
      sql = rep.getStepAttributeString( id_step, "sql" );
      catalog = rep.getStepAttributeString( id_step, "catalog" );
      role = rep.getStepAttributeString( id_step, "role" );
      variableReplacementActive = rep.getStepAttributeBoolean( id_step, "variables_active" );
    } catch ( Exception e ) {
      throw new KettleException( "Unexpected error reading step information from the repository", e );
    }
  }

  /** Saves this step's settings (and the step-database relationship) to the repository. */
  public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException {
    try {
      rep.saveDatabaseMetaStepAttribute( id_transformation, id_step, "id_connection", databaseMeta );
      rep.saveStepAttribute( id_transformation, id_step, "sql", sql );
      rep.saveStepAttribute( id_transformation, id_step, "catalog", catalog );
      rep.saveStepAttribute( id_transformation, id_step, "role", role );
      rep.saveStepAttribute( id_transformation, id_step, "variables_active", variableReplacementActive );
      // Also, save the step-database relationship!
      if ( databaseMeta != null ) {
        rep.insertStepDatabase( id_transformation, id_step, databaseMeta.getObjectId() );
      }
    } catch ( Exception e ) {
      throw new KettleException( "Unable to save step information to the repository for id_step=" + id_step, e );
    }
  }

  /** Verifies the step configuration; currently only checks that a connection is selected. */
  public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
    RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
    Repository repository, IMetaStore metaStore ) {
    CheckResult cr;
    if ( databaseMeta != null ) {
      cr = new CheckResult( CheckResultInterface.TYPE_RESULT_OK, "Connection exists", stepMeta );
      remarks.add( cr );
      // TODO: perform lookup to see if it all works fine.
    } else {
      cr =
        new CheckResult(
          CheckResultInterface.TYPE_RESULT_ERROR, "Please select or create a connection to use", stepMeta );
      remarks.add( cr );
    }
  }

  public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
    TransMeta transMeta, Trans trans ) {
    return new MondrianInput( stepMeta, stepDataInterface, cnr, transMeta, trans );
  }

  public StepDataInterface getStepData() {
    return new MondrianData();
  }

  public void analyseImpact( List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepMeta,
    RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, Repository repository,
    IMetaStore metaStore ) throws KettleStepException {
    // you can't really analyze the database impact since it runs on a Mondrian server
  }

  public DatabaseMeta[] getUsedDatabaseConnections() {
    if ( databaseMeta != null ) {
      return new DatabaseMeta[] { databaseMeta };
    } else {
      return super.getUsedDatabaseConnections();
    }
  }

  /**
   * @return the catalog
   */
  public String getCatalog() {
    return catalog;
  }

  /**
   * @param catalog
   *          the catalog to set
   */
  public void setCatalog( String catalog ) {
    this.catalog = catalog;
  }

  public String getRole() {
    return role;
  }

  public void setRole( String role ) {
    this.role = role;
  }

  /**
   * Since the exported transformation that runs this will reside in a ZIP file, we can't reference files relatively. So
   * what this does is turn the name of files into absolute paths OR it simply includes the resource in the ZIP file.
   * For now, we'll simply turn it into an absolute path and pray that the file is on a shared drive or something like
   * that.
   *
   * @param space
   *          the variable space to use
   * @param definitions
   * @param resourceNamingInterface
   * @param repository
   *          The repository to optionally load other resources from (to be converted to XML)
   * @param metaStore
   *          the metaStore in which non-kettle metadata could reside.
   *
   * @return the filename of the exported resource
   */
  public String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
    ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ) throws KettleException {
    try {
      // The object that we're modifying here is a copy of the original!
      // So let's change the filename from relative to absolute by grabbing the file object...
      // In case the name of the file comes from previous steps, forget about this!
      //
      // Fix: only attempt the conversion when a catalog is actually configured. The previous
      // condition was inverted (Const.isEmpty) and tried to resolve an EMPTY catalog path.
      if ( !Const.isEmpty( catalog ) ) {
        // From : ${Internal.Transformation.Filename.Directory}/../foo/bar.csv
        // To : /home/matt/test/files/foo/bar.csv
        //
        FileObject fileObject = KettleVFS.getFileObject( space.environmentSubstitute( catalog ), space );
        // If the file doesn't exist, forget about this effort too!
        //
        if ( fileObject.exists() ) {
          // Convert to an absolute path...
          //
          catalog = resourceNamingInterface.nameResource( fileObject, space, true );
          return catalog;
        }
      }
      return null;
    } catch ( Exception e ) {
      throw new KettleException( e );
    }
  }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.dynamodbv2.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/dynamodb-2012-08-10/UpdateTableReplicaAutoScaling"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UpdateTableReplicaAutoScalingRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
    /**
     * <p>
     * Represents the auto scaling settings of the global secondary indexes of the replica to be updated.
     * </p>
     */
    private java.util.List<GlobalSecondaryIndexAutoScalingUpdate> globalSecondaryIndexUpdates;
    /**
     * <p>
     * The name of the global table to be updated.
     * </p>
     */
    private String tableName;
    // Auto scaling settings update for the table's provisioned write capacity.
    private AutoScalingSettingsUpdate provisionedWriteCapacityAutoScalingUpdate;
    /**
     * <p>
     * Represents the auto scaling settings of replicas of the table that will be modified.
     * </p>
     */
    private java.util.List<ReplicaAutoScalingUpdate> replicaUpdates;
    /**
     * <p>
     * Represents the auto scaling settings of the global secondary indexes of the replica to be updated.
     * </p>
     *
     * @return Represents the auto scaling settings of the global secondary indexes of the replica to be updated.
     */
    public java.util.List<GlobalSecondaryIndexAutoScalingUpdate> getGlobalSecondaryIndexUpdates() {
        return globalSecondaryIndexUpdates;
    }
    /**
     * <p>
     * Represents the auto scaling settings of the global secondary indexes of the replica to be updated.
     * </p>
     *
     * @param globalSecondaryIndexUpdates
     *        Represents the auto scaling settings of the global secondary indexes of the replica to be updated.
     */
    public void setGlobalSecondaryIndexUpdates(java.util.Collection<GlobalSecondaryIndexAutoScalingUpdate> globalSecondaryIndexUpdates) {
        if (globalSecondaryIndexUpdates == null) {
            this.globalSecondaryIndexUpdates = null;
            return;
        }
        // Defensive copy: the stored list is independent of the caller's collection.
        this.globalSecondaryIndexUpdates = new java.util.ArrayList<GlobalSecondaryIndexAutoScalingUpdate>(globalSecondaryIndexUpdates);
    }
    /**
     * <p>
     * Represents the auto scaling settings of the global secondary indexes of the replica to be updated.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setGlobalSecondaryIndexUpdates(java.util.Collection)} or
     * {@link #withGlobalSecondaryIndexUpdates(java.util.Collection)} if you want to override the existing values.
     * </p>
     *
     * @param globalSecondaryIndexUpdates
     *        Represents the auto scaling settings of the global secondary indexes of the replica to be updated.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateTableReplicaAutoScalingRequest withGlobalSecondaryIndexUpdates(GlobalSecondaryIndexAutoScalingUpdate... globalSecondaryIndexUpdates) {
        if (this.globalSecondaryIndexUpdates == null) {
            setGlobalSecondaryIndexUpdates(new java.util.ArrayList<GlobalSecondaryIndexAutoScalingUpdate>(globalSecondaryIndexUpdates.length));
        }
        for (GlobalSecondaryIndexAutoScalingUpdate ele : globalSecondaryIndexUpdates) {
            this.globalSecondaryIndexUpdates.add(ele);
        }
        return this;
    }
    /**
     * <p>
     * Represents the auto scaling settings of the global secondary indexes of the replica to be updated.
     * </p>
     *
     * @param globalSecondaryIndexUpdates
     *        Represents the auto scaling settings of the global secondary indexes of the replica to be updated.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateTableReplicaAutoScalingRequest withGlobalSecondaryIndexUpdates(
            java.util.Collection<GlobalSecondaryIndexAutoScalingUpdate> globalSecondaryIndexUpdates) {
        setGlobalSecondaryIndexUpdates(globalSecondaryIndexUpdates);
        return this;
    }
    /**
     * <p>
     * The name of the global table to be updated.
     * </p>
     *
     * @param tableName
     *        The name of the global table to be updated.
     */
    public void setTableName(String tableName) {
        this.tableName = tableName;
    }
    /**
     * <p>
     * The name of the global table to be updated.
     * </p>
     *
     * @return The name of the global table to be updated.
     */
    public String getTableName() {
        return this.tableName;
    }
    /**
     * <p>
     * The name of the global table to be updated.
     * </p>
     *
     * @param tableName
     *        The name of the global table to be updated.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateTableReplicaAutoScalingRequest withTableName(String tableName) {
        setTableName(tableName);
        return this;
    }
    /**
     * @param provisionedWriteCapacityAutoScalingUpdate
     */
    public void setProvisionedWriteCapacityAutoScalingUpdate(AutoScalingSettingsUpdate provisionedWriteCapacityAutoScalingUpdate) {
        this.provisionedWriteCapacityAutoScalingUpdate = provisionedWriteCapacityAutoScalingUpdate;
    }
    /**
     * @return
     */
    public AutoScalingSettingsUpdate getProvisionedWriteCapacityAutoScalingUpdate() {
        return this.provisionedWriteCapacityAutoScalingUpdate;
    }
    /**
     * @param provisionedWriteCapacityAutoScalingUpdate
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateTableReplicaAutoScalingRequest withProvisionedWriteCapacityAutoScalingUpdate(
            AutoScalingSettingsUpdate provisionedWriteCapacityAutoScalingUpdate) {
        setProvisionedWriteCapacityAutoScalingUpdate(provisionedWriteCapacityAutoScalingUpdate);
        return this;
    }
    /**
     * <p>
     * Represents the auto scaling settings of replicas of the table that will be modified.
     * </p>
     *
     * @return Represents the auto scaling settings of replicas of the table that will be modified.
     */
    public java.util.List<ReplicaAutoScalingUpdate> getReplicaUpdates() {
        return replicaUpdates;
    }
    /**
     * <p>
     * Represents the auto scaling settings of replicas of the table that will be modified.
     * </p>
     *
     * @param replicaUpdates
     *        Represents the auto scaling settings of replicas of the table that will be modified.
     */
    public void setReplicaUpdates(java.util.Collection<ReplicaAutoScalingUpdate> replicaUpdates) {
        if (replicaUpdates == null) {
            this.replicaUpdates = null;
            return;
        }
        // Defensive copy: the stored list is independent of the caller's collection.
        this.replicaUpdates = new java.util.ArrayList<ReplicaAutoScalingUpdate>(replicaUpdates);
    }
    /**
     * <p>
     * Represents the auto scaling settings of replicas of the table that will be modified.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setReplicaUpdates(java.util.Collection)} or {@link #withReplicaUpdates(java.util.Collection)} if you want
     * to override the existing values.
     * </p>
     *
     * @param replicaUpdates
     *        Represents the auto scaling settings of replicas of the table that will be modified.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateTableReplicaAutoScalingRequest withReplicaUpdates(ReplicaAutoScalingUpdate... replicaUpdates) {
        if (this.replicaUpdates == null) {
            setReplicaUpdates(new java.util.ArrayList<ReplicaAutoScalingUpdate>(replicaUpdates.length));
        }
        for (ReplicaAutoScalingUpdate ele : replicaUpdates) {
            this.replicaUpdates.add(ele);
        }
        return this;
    }
    /**
     * <p>
     * Represents the auto scaling settings of replicas of the table that will be modified.
     * </p>
     *
     * @param replicaUpdates
     *        Represents the auto scaling settings of replicas of the table that will be modified.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateTableReplicaAutoScalingRequest withReplicaUpdates(java.util.Collection<ReplicaAutoScalingUpdate> replicaUpdates) {
        setReplicaUpdates(replicaUpdates);
        return this;
    }
    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getGlobalSecondaryIndexUpdates() != null)
            sb.append("GlobalSecondaryIndexUpdates: ").append(getGlobalSecondaryIndexUpdates()).append(",");
        if (getTableName() != null)
            sb.append("TableName: ").append(getTableName()).append(",");
        if (getProvisionedWriteCapacityAutoScalingUpdate() != null)
            sb.append("ProvisionedWriteCapacityAutoScalingUpdate: ").append(getProvisionedWriteCapacityAutoScalingUpdate()).append(",");
        if (getReplicaUpdates() != null)
            sb.append("ReplicaUpdates: ").append(getReplicaUpdates());
        sb.append("}");
        return sb.toString();
    }
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (obj instanceof UpdateTableReplicaAutoScalingRequest == false)
            return false;
        UpdateTableReplicaAutoScalingRequest other = (UpdateTableReplicaAutoScalingRequest) obj;
        // Per field: XOR detects "exactly one side null"; equals() then compares non-null values.
        if (other.getGlobalSecondaryIndexUpdates() == null ^ this.getGlobalSecondaryIndexUpdates() == null)
            return false;
        if (other.getGlobalSecondaryIndexUpdates() != null && other.getGlobalSecondaryIndexUpdates().equals(this.getGlobalSecondaryIndexUpdates()) == false)
            return false;
        if (other.getTableName() == null ^ this.getTableName() == null)
            return false;
        if (other.getTableName() != null && other.getTableName().equals(this.getTableName()) == false)
            return false;
        if (other.getProvisionedWriteCapacityAutoScalingUpdate() == null ^ this.getProvisionedWriteCapacityAutoScalingUpdate() == null)
            return false;
        if (other.getProvisionedWriteCapacityAutoScalingUpdate() != null
                && other.getProvisionedWriteCapacityAutoScalingUpdate().equals(this.getProvisionedWriteCapacityAutoScalingUpdate()) == false)
            return false;
        if (other.getReplicaUpdates() == null ^ this.getReplicaUpdates() == null)
            return false;
        if (other.getReplicaUpdates() != null && other.getReplicaUpdates().equals(this.getReplicaUpdates()) == false)
            return false;
        return true;
    }
    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getGlobalSecondaryIndexUpdates() == null) ? 0 : getGlobalSecondaryIndexUpdates().hashCode());
        hashCode = prime * hashCode + ((getTableName() == null) ? 0 : getTableName().hashCode());
        hashCode = prime * hashCode
                + ((getProvisionedWriteCapacityAutoScalingUpdate() == null) ? 0 : getProvisionedWriteCapacityAutoScalingUpdate().hashCode());
        hashCode = prime * hashCode + ((getReplicaUpdates() == null) ? 0 : getReplicaUpdates().hashCode());
        return hashCode;
    }
    @Override
    public UpdateTableReplicaAutoScalingRequest clone() {
        // Shallow copy via Object.clone(); list fields still reference the same lists.
        return (UpdateTableReplicaAutoScalingRequest) super.clone();
    }
}
| |
/*
* Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.query.impl;
import com.hazelcast.config.IndexConfig;
import com.hazelcast.config.IndexType;
import com.hazelcast.core.TypeConverter;
import com.hazelcast.internal.monitor.impl.IndexOperationStats;
import com.hazelcast.internal.monitor.impl.PerIndexStats;
import com.hazelcast.internal.serialization.Data;
import com.hazelcast.internal.serialization.InternalSerializationService;
import com.hazelcast.map.impl.StoreAdapter;
import com.hazelcast.nio.ObjectDataInput;
import com.hazelcast.nio.ObjectDataOutput;
import com.hazelcast.query.Predicate;
import com.hazelcast.query.impl.getters.Extractors;
import com.hazelcast.query.impl.getters.MultiResult;
import com.hazelcast.query.impl.predicates.PredicateDataSerializerHook;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import java.util.Set;
import static com.hazelcast.internal.util.SetUtil.createHashSet;
import static com.hazelcast.query.impl.CompositeValue.NEGATIVE_INFINITY;
import static com.hazelcast.query.impl.TypeConverters.NULL_CONVERTER;
import static java.util.Collections.emptySet;
/**
* Provides an abstract base for indexes.
*/
public abstract class AbstractIndex implements InternalIndex {
/**
* Represents a null-like value that is equal to itself and less than any
* other value except {@link CompositeValue#NEGATIVE_INFINITY}. The latter
* is needed to establish the ordering of keys for composite indexes.
*/
public static final ComparableIdentifiedDataSerializable NULL = new NullObject();
protected final InternalSerializationService ss;
protected final Extractors extractors;
protected final IndexStore indexStore;
protected final IndexCopyBehavior copyBehavior;
private final String[] components;
private final IndexConfig config;
private final boolean ordered;
private final PerIndexStats stats;
/**
* Reference to the store if it is bound to the same partition as the index (local index), {@code null} otherwise.
*/
private final StoreAdapter partitionStoreAdapter;
private volatile TypeConverter converter;
/**
 * Creates the index base: derives the component list and ordering from the config,
 * then asks the subclass for the concrete index store.
 */
@SuppressFBWarnings("EI_EXPOSE_REP2")
public AbstractIndex(
        IndexConfig config,
        InternalSerializationService ss,
        Extractors extractors,
        IndexCopyBehavior copyBehavior,
        PerIndexStats stats,
        StoreAdapter partitionStoreAdapter
) {
    this.config = config;
    this.components = IndexUtils.getComponents(config);
    // SORTED index type implies an ordered store.
    this.ordered = config.getType() == IndexType.SORTED;
    this.ss = ss;
    this.extractors = extractors;
    this.copyBehavior = copyBehavior;
    this.partitionStoreAdapter = partitionStoreAdapter;
    this.indexStore = createIndexStore(config, stats);
    this.stats = stats;
}
protected abstract IndexStore createIndexStore(IndexConfig config, PerIndexStats stats);
// Index name as given in the IndexConfig.
@Override
public String getName() {
    return config.getName();
}
// Attribute components of this index; internal array exposed intentionally (see FB suppression).
@SuppressFBWarnings("EI_EXPOSE_REP")
@Override
public String[] getComponents() {
    return components;
}
// Configuration this index was created from.
@Override
public IndexConfig getConfig() {
    return config;
}
// True when the index type is SORTED (set once in the constructor).
@Override
public boolean isOrdered() {
    return ordered;
}
// Volatile read: the converter is assigned lazily on first putEntry and may still be null.
@Override
public TypeConverter getConverter() {
    return converter;
}
/** Store bound to the same partition as this (local) index, or {@code null} for a global index. */
public StoreAdapter getPartitionStoreAdapter() {
    return partitionStoreAdapter;
}
/**
 * Inserts or updates an entry in the index. When {@code oldValue} is null this is an insert;
 * otherwise the old attribute value is replaced by the new one. Per-operation stats are
 * recorded either way.
 */
@Override
public void putEntry(QueryableEntry entry, Object oldValue, OperationSource operationSource) {
    long timestamp = stats.makeTimestamp();
    IndexOperationStats operationStats = stats.createOperationStats();
    /*
     * At first, check if converter is not initialized, initialize it before
     * saving an entry index. Because, if entity index is saved before, that
     * thread can be blocked before executing converter setting code block,
     * another thread can query over indexes without knowing the converter
     * and this causes to class cast exceptions.
     */
    // NOTE(review): converterIsUnassignedOrTransient/obtainConverter are defined outside this
    // excerpt — presumably they lazily derive the TypeConverter from the first entry; confirm.
    if (converterIsUnassignedOrTransient(converter)) {
        converter = obtainConverter(entry);
    }
    Object newAttributeValue = extractAttributeValue(entry.getKeyData(), entry.getTargetObject(false));
    if (oldValue == null) {
        // First time this entry is indexed: plain insert.
        indexStore.insert(newAttributeValue, entry, operationStats);
        stats.onInsert(timestamp, operationStats, operationSource);
    } else {
        // Re-index: move the entry from the old attribute value to the new one.
        Object oldAttributeValue = extractAttributeValue(entry.getKeyData(), oldValue);
        indexStore.update(oldAttributeValue, newAttributeValue, entry, operationStats);
        stats.onUpdate(timestamp, operationStats, operationSource);
    }
}
@Override
public void removeEntry(Data key, Object value, OperationSource operationSource) {
long timestamp = stats.makeTimestamp();
IndexOperationStats operationStats = stats.createOperationStats();
Object attributeValue = extractAttributeValue(key, value);
indexStore.remove(attributeValue, key, value, operationStats);
stats.onRemove(timestamp, operationStats, operationSource);
}
@Override
public boolean isEvaluateOnly() {
return indexStore.isEvaluateOnly();
}
@Override
public boolean canEvaluate(Class<? extends Predicate> predicateClass) {
return indexStore.canEvaluate(predicateClass);
}
@Override
public Set<QueryableEntry> evaluate(Predicate predicate) {
assert converter != null;
return indexStore.evaluate(predicate, converter);
}
@Override
public Set<QueryableEntry> getRecords(Comparable value) {
long timestamp = stats.makeTimestamp();
if (converter == null) {
stats.onIndexHit(timestamp, 0);
return emptySet();
}
Set<QueryableEntry> result = indexStore.getRecords(convert(value));
stats.onIndexHit(timestamp, result.size());
return result;
}
@Override
public Set<QueryableEntry> getRecords(Comparable[] values) {
if (values.length == 1) {
return getRecords(values[0]);
}
long timestamp = stats.makeTimestamp();
if (converter == null || values.length == 0) {
stats.onIndexHit(timestamp, 0);
return emptySet();
}
Set<Comparable> convertedValues = createHashSet(values.length);
for (Comparable value : values) {
Comparable converted = convert(value);
convertedValues.add(canonicalizeQueryArgumentScalar(converted));
}
Set<QueryableEntry> result = indexStore.getRecords(convertedValues);
stats.onIndexHit(timestamp, result.size());
return result;
}
@Override
public Set<QueryableEntry> getRecords(Comparable from, boolean fromInclusive, Comparable to, boolean toInclusive) {
long timestamp = stats.makeTimestamp();
if (converter == null) {
stats.onIndexHit(timestamp, 0);
return emptySet();
}
Set<QueryableEntry> result = indexStore.getRecords(convert(from), fromInclusive, convert(to), toInclusive);
stats.onIndexHit(timestamp, result.size());
return result;
}
@Override
public Set<QueryableEntry> getRecords(Comparison comparison, Comparable value) {
long timestamp = stats.makeTimestamp();
if (converter == null) {
stats.onIndexHit(timestamp, 0);
return emptySet();
}
Set<QueryableEntry> result = indexStore.getRecords(comparison, convert(value));
stats.onIndexHit(timestamp, result.size());
return result;
}
@Override
public void clear() {
indexStore.clear();
converter = null;
stats.onClear();
}
@Override
public void destroy() {
stats.onClear();
}
@Override
public final Comparable canonicalizeQueryArgumentScalar(Comparable value) {
return indexStore.canonicalizeQueryArgumentScalar(value);
}
@Override
public PerIndexStats getPerIndexStats() {
return stats;
}
private Object extractAttributeValue(Data key, Object value) {
if (components.length == 1) {
return QueryableEntry.extractAttributeValue(extractors, ss, components[0], key, value, null);
} else {
Comparable[] valueComponents = new Comparable[components.length];
for (int i = 0; i < components.length; ++i) {
String attribute = components[i];
Object extractedValue = QueryableEntry.extractAttributeValue(extractors, ss, attribute, key, value, null);
if (extractedValue instanceof MultiResult) {
throw new IllegalStateException(
"Collection/array attributes are not supported by composite indexes: " + attribute);
} else if (extractedValue == null || extractedValue instanceof Comparable) {
valueComponents[i] = (Comparable) extractedValue;
} else {
throw new IllegalStateException("Unsupported non-comparable value type: " + extractedValue.getClass());
}
}
return new CompositeValue(valueComponents);
}
}
/**
* Note: the fact that the given value is of type Comparable doesn't mean
* that this value is of the same type as the one that's stored in the index,
* thus the conversion is needed.
*
* @param value to be converted from given type to the type of the
* attribute that's stored in the index
* @return converted value that may be compared with the value that's stored
* in the index
*/
private Comparable convert(Comparable value) {
return converter.convert(value);
}
private TypeConverter obtainConverter(QueryableEntry entry) {
if (components.length == 1) {
return entry.getConverter(components[0]);
} else {
CompositeConverter existingConverter = (CompositeConverter) converter;
TypeConverter[] converters = new TypeConverter[components.length];
for (int i = 0; i < components.length; ++i) {
TypeConverter existingComponentConverter = getNonTransientComponentConverter(existingConverter, i);
if (existingComponentConverter == null) {
converters[i] = entry.getConverter(components[i]);
assert converters[i] != null;
} else {
// preserve the old one to avoid downgrading
converters[i] = existingComponentConverter;
}
}
return new CompositeConverter(converters);
}
}
private static boolean converterIsUnassignedOrTransient(TypeConverter converter) {
if (converter == null) {
// unassigned
return true;
}
if (converter == NULL_CONVERTER) {
// transient
return true;
}
if (!(converter instanceof CompositeConverter)) {
return false;
}
CompositeConverter compositeConverter = (CompositeConverter) converter;
return compositeConverter.isTransient();
}
private static TypeConverter getNonTransientComponentConverter(CompositeConverter converter, int index) {
if (converter == null) {
return null;
}
TypeConverter componentConverter = converter.getComponentConverter(index);
return componentConverter == NULL_CONVERTER ? null : componentConverter;
}
private static final class NullObject implements ComparableIdentifiedDataSerializable {
@SuppressWarnings("NullableProblems")
@Override
public int compareTo(Object o) {
if (this == o) {
return 0;
}
return o == NEGATIVE_INFINITY ? +1 : -1;
}
@Override
public int hashCode() {
return 0;
}
@Override
public boolean equals(Object obj) {
return this == obj;
}
@Override
public String toString() {
return "NULL";
}
@Override
public void writeData(ObjectDataOutput out) {
// nothing to serialize
}
@Override
public void readData(ObjectDataInput in) {
// nothing to deserialize
}
@Override
public int getFactoryId() {
return PredicateDataSerializerHook.F_ID;
}
@Override
public int getClassId() {
return PredicateDataSerializerHook.NULL_OBJECT;
}
}
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.apimanagement.v2018_06_01_preview.implementation;
import retrofit2.Retrofit;
import com.google.common.reflect.TypeToken;
import com.microsoft.azure.AzureServiceFuture;
import com.microsoft.azure.CloudException;
import com.microsoft.azure.ListOperationCallback;
import com.microsoft.azure.Page;
import com.microsoft.azure.PagedList;
import com.microsoft.rest.ServiceFuture;
import com.microsoft.rest.ServiceResponse;
import java.io.IOException;
import java.util.List;
import okhttp3.ResponseBody;
import retrofit2.http.GET;
import retrofit2.http.Header;
import retrofit2.http.Headers;
import retrofit2.http.Query;
import retrofit2.http.Url;
import retrofit2.Response;
import rx.functions.Func1;
import rx.Observable;
/**
* An instance of this class provides access to all the operations defined
* in ApiManagementOperations.
*/
public class ApiManagementOperationsInner {
    /** The Retrofit service to perform REST calls. */
    private ApiManagementOperationsService service;
    /** The service client containing this operation class. */
    private ApiManagementClientImpl client;
    /**
     * Initializes an instance of ApiManagementOperationsInner.
     *
     * @param retrofit the Retrofit instance built from a Retrofit Builder.
     * @param client the instance of the service client containing this operation class.
     */
    public ApiManagementOperationsInner(Retrofit retrofit, ApiManagementClientImpl client) {
        this.service = retrofit.create(ApiManagementOperationsService.class);
        this.client = client;
    }
    /**
     * The interface defining all the services for ApiManagementOperations to be
     * used by Retrofit to perform the actual REST calls.
     */
    interface ApiManagementOperationsService {
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.apimanagement.v2018_06_01_preview.ApiManagementOperations list" })
        @GET("providers/Microsoft.ApiManagement/operations")
        Observable<Response<ResponseBody>> list(@Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.apimanagement.v2018_06_01_preview.ApiManagementOperations listNext" })
        @GET
        Observable<Response<ResponseBody>> listNext(@Url String nextUrl, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
    }
    /**
     * Lists all of the available REST API operations of the Microsoft.ApiManagement provider.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the PagedList&lt;OperationInner&gt; object if successful.
     */
    public PagedList<OperationInner> list() {
        // blocking call; subsequent pages are fetched lazily via nextPage
        ServiceResponse<Page<OperationInner>> response = listSinglePageAsync().toBlocking().single();
        return new PagedList<OperationInner>(response.body()) {
            @Override
            public Page<OperationInner> nextPage(String nextPageLink) {
                return listNextSinglePageAsync(nextPageLink).toBlocking().single().body();
            }
        };
    }
    /**
     * Lists all of the available REST API operations of the Microsoft.ApiManagement provider.
     *
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<OperationInner>> listAsync(final ListOperationCallback<OperationInner> serviceCallback) {
        return AzureServiceFuture.fromPageResponse(
            listSinglePageAsync(),
            new Func1<String, Observable<ServiceResponse<Page<OperationInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<OperationInner>>> call(String nextPageLink) {
                    return listNextSinglePageAsync(nextPageLink);
                }
            },
            serviceCallback);
    }
    /**
     * Lists all of the available REST API operations of the Microsoft.ApiManagement provider.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;OperationInner&gt; object
     */
    public Observable<Page<OperationInner>> listAsync() {
        return listWithServiceResponseAsync()
            .map(new Func1<ServiceResponse<Page<OperationInner>>, Page<OperationInner>>() {
                @Override
                public Page<OperationInner> call(ServiceResponse<Page<OperationInner>> response) {
                    return response.body();
                }
            });
    }
    /**
     * Lists all of the available REST API operations of the Microsoft.ApiManagement provider.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;OperationInner&gt; object
     */
    public Observable<ServiceResponse<Page<OperationInner>>> listWithServiceResponseAsync() {
        // emits the first page, then recursively concatenates following pages
        return listSinglePageAsync()
            .concatMap(new Func1<ServiceResponse<Page<OperationInner>>, Observable<ServiceResponse<Page<OperationInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<OperationInner>>> call(ServiceResponse<Page<OperationInner>> page) {
                    String nextPageLink = page.body().nextPageLink();
                    if (nextPageLink == null) {
                        return Observable.just(page);
                    }
                    return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextPageLink));
                }
            });
    }
    /**
     * Lists all of the available REST API operations of the Microsoft.ApiManagement provider.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the PagedList&lt;OperationInner&gt; object wrapped in {@link ServiceResponse} if successful.
     */
    public Observable<ServiceResponse<Page<OperationInner>>> listSinglePageAsync() {
        final String apiVersion = "2018-06-01-preview";
        return service.list(apiVersion, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<OperationInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<OperationInner>>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<PageImpl<OperationInner>> result = listDelegate(response);
                        return Observable.just(new ServiceResponse<Page<OperationInner>>(result.body(), result.response()));
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }
    // Deserializes a raw HTTP response into a page of operations; non-200
    // responses surface as CloudException.
    private ServiceResponse<PageImpl<OperationInner>> listDelegate(Response<ResponseBody> response) throws CloudException, IOException {
        return this.client.restClient().responseBuilderFactory().<PageImpl<OperationInner>, CloudException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<PageImpl<OperationInner>>() { }.getType())
                .registerError(CloudException.class)
                .build(response);
    }
    /**
     * Lists all of the available REST API operations of the Microsoft.ApiManagement provider.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the PagedList&lt;OperationInner&gt; object if successful.
     */
    public PagedList<OperationInner> listNext(final String nextPageLink) {
        ServiceResponse<Page<OperationInner>> response = listNextSinglePageAsync(nextPageLink).toBlocking().single();
        return new PagedList<OperationInner>(response.body()) {
            @Override
            public Page<OperationInner> nextPage(String nextPageLink) {
                return listNextSinglePageAsync(nextPageLink).toBlocking().single().body();
            }
        };
    }
    /**
     * Lists all of the available REST API operations of the Microsoft.ApiManagement provider.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @param serviceFuture the ServiceFuture object tracking the Retrofit calls
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<OperationInner>> listNextAsync(final String nextPageLink, final ServiceFuture<List<OperationInner>> serviceFuture, final ListOperationCallback<OperationInner> serviceCallback) {
        return AzureServiceFuture.fromPageResponse(
            listNextSinglePageAsync(nextPageLink),
            new Func1<String, Observable<ServiceResponse<Page<OperationInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<OperationInner>>> call(String nextPageLink) {
                    return listNextSinglePageAsync(nextPageLink);
                }
            },
            serviceCallback);
    }
    /**
     * Lists all of the available REST API operations of the Microsoft.ApiManagement provider.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;OperationInner&gt; object
     */
    public Observable<Page<OperationInner>> listNextAsync(final String nextPageLink) {
        return listNextWithServiceResponseAsync(nextPageLink)
            .map(new Func1<ServiceResponse<Page<OperationInner>>, Page<OperationInner>>() {
                @Override
                public Page<OperationInner> call(ServiceResponse<Page<OperationInner>> response) {
                    return response.body();
                }
            });
    }
    /**
     * Lists all of the available REST API operations of the Microsoft.ApiManagement provider.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;OperationInner&gt; object
     */
    public Observable<ServiceResponse<Page<OperationInner>>> listNextWithServiceResponseAsync(final String nextPageLink) {
        return listNextSinglePageAsync(nextPageLink)
            .concatMap(new Func1<ServiceResponse<Page<OperationInner>>, Observable<ServiceResponse<Page<OperationInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<OperationInner>>> call(ServiceResponse<Page<OperationInner>> page) {
                    String nextPageLink = page.body().nextPageLink();
                    if (nextPageLink == null) {
                        return Observable.just(page);
                    }
                    return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextPageLink));
                }
            });
    }
    /**
     * Lists all of the available REST API operations of the Microsoft.ApiManagement provider.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the PagedList&lt;OperationInner&gt; object wrapped in {@link ServiceResponse} if successful.
     */
    public Observable<ServiceResponse<Page<OperationInner>>> listNextSinglePageAsync(final String nextPageLink) {
        if (nextPageLink == null) {
            throw new IllegalArgumentException("Parameter nextPageLink is required and cannot be null.");
        }
        String nextUrl = String.format("%s", nextPageLink);
        return service.listNext(nextUrl, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<OperationInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<OperationInner>>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<PageImpl<OperationInner>> result = listNextDelegate(response);
                        return Observable.just(new ServiceResponse<Page<OperationInner>>(result.body(), result.response()));
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }
    // Deserializes a raw HTTP response for a follow-up page request.
    private ServiceResponse<PageImpl<OperationInner>> listNextDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<PageImpl<OperationInner>, CloudException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<PageImpl<OperationInner>>() { }.getType())
                .registerError(CloudException.class)
                .build(response);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysds.runtime.io;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.sysds.conf.ConfigurationManager;
import org.apache.sysds.hops.OptimizerUtils;
import org.apache.sysds.runtime.DMLRuntimeException;
import org.apache.sysds.runtime.data.SparseRowVector;
import org.apache.sysds.runtime.matrix.data.MatrixBlock;
import org.apache.sysds.runtime.util.CommonThreadPool;
public class ReaderTextLIBSVMParallel extends MatrixReader {
	// Format properties (delimiters). Instance-scoped (was erroneously static):
	// a static field assigned in the constructor would let concurrently created
	// readers with different properties corrupt each other's parsing.
	private final FileFormatPropertiesLIBSVM _props;
	// degree of parallelism for both read passes
	private int _numThreads = 1;
	// row offsets/lengths per input split, computed in the first read pass
	private SplitOffsetInfos _offsets = null;

	public ReaderTextLIBSVMParallel(FileFormatPropertiesLIBSVM props) {
		_numThreads = OptimizerUtils.getParallelTextReadParallelism();
		_props = props;
	}

	/**
	 * Reads a libsvm text matrix from HDFS in two parallel passes: first pass
	 * counts rows per split and allocates the output block, second pass parses
	 * and appends the rows.
	 *
	 * @param fname  input file/directory name
	 * @param rlen   expected number of rows, -1 if unknown
	 * @param clen   expected number of columns
	 * @param blen   block length (unused by the text representation)
	 * @param estnnz estimated non-zeros, -1 if unknown
	 * @return parsed matrix block
	 * @throws IOException         on HDFS or parsing errors
	 * @throws DMLRuntimeException if read dimensions are inconsistent with meta data
	 */
	@Override
	public MatrixBlock readMatrixFromHDFS(String fname, long rlen, long clen,
			int blen, long estnnz)
		throws IOException, DMLRuntimeException
	{
		// prepare file access
		JobConf job = new JobConf(ConfigurationManager.getCachedJobConf());
		Path path = new Path(fname);
		FileSystem fs = IOUtilFunctions.getFileSystem(path, job);

		FileInputFormat.addInputPath(job, path);
		TextInputFormat informat = new TextInputFormat();
		informat.configure(job);

		InputSplit[] splits = informat.getSplits(job, _numThreads);
		splits = IOUtilFunctions.sortInputSplits(splits);

		// check existence and non-empty file
		checkValidInputFile(fs, path);

		// First Read Pass (count rows/cols, determine offsets, allocate matrix block)
		MatrixBlock ret = computeLIBSVMSizeAndCreateOutputMatrixBlock(splits, path, job, rlen, clen, estnnz);
		rlen = ret.getNumRows();
		clen = ret.getNumColumns();

		// Second Read Pass (read, parse strings, append to matrix block)
		readLIBSVMMatrixFromHDFS(splits, path, job, ret, rlen, clen, blen);

		// post-processing (representation-specific, change of sparse/dense block representation)
		// - nnz explicitly maintained in parallel for the individual splits
		ret.examSparsity();

		// sanity check for parallel row count (since determined internally)
		if (rlen >= 0 && rlen != ret.getNumRows())
			throw new DMLRuntimeException("Read matrix inconsistent with given meta data: "
				+ "expected nrow="+ rlen + ", real nrow=" + ret.getNumRows());

		return ret;
	}

	@Override
	public MatrixBlock readMatrixFromInputStream(InputStream is, long rlen, long clen, int blen, long estnnz)
		throws IOException, DMLRuntimeException
	{
		//not implemented yet, fallback to sequential reader
		return new ReaderTextLIBSVM(_props).readMatrixFromInputStream(is, rlen, clen, blen, estnnz);
	}

	/**
	 * Second pass: parses all splits in parallel and appends rows into
	 * {@code dest} at the precomputed per-split offsets.
	 */
	private void readLIBSVMMatrixFromHDFS(InputSplit[] splits, Path path, JobConf job,
			MatrixBlock dest, long rlen, long clen, int blen)
		throws IOException
	{
		FileInputFormat.addInputPath(job, path);
		TextInputFormat informat = new TextInputFormat();
		informat.configure(job);

		// create read tasks for all splits
		ArrayList<LIBSVMReadTask> tasks = new ArrayList<>();
		int splitCount = 0;
		for (InputSplit split : splits) {
			tasks.add( new LIBSVMReadTask(split, _offsets, informat, job, dest, rlen, clen, splitCount++, _props) );
		}

		ExecutorService pool = CommonThreadPool.get(_numThreads);
		try {
			pool.invokeAll(tasks);
		}
		catch (InterruptedException e) {
			Thread.currentThread().interrupt(); // preserve interrupt status
			throw new IOException("Threadpool issue, while parallel read.", e);
		}
		finally {
			// always release the pool, even on failure
			pool.shutdown();
		}

		// check return codes and aggregate nnz; done outside the pool handling
		// so task failures are reported as read errors, not thread-pool issues
		long lnnz = 0;
		for (LIBSVMReadTask rt : tasks) {
			lnnz += rt.getPartialNnz();
			if (!rt.getReturnCode()) {
				Exception err = rt.getException();
				throw new IOException("Read task for libsvm input failed: "+ err.toString(), err);
			}
		}
		dest.setNonZeros(lnnz);
	}

	/**
	 * First pass: counts rows per split in parallel, derives split offsets,
	 * reconciles dimensions with meta data, and allocates the output block
	 * (sparse allocation to allow lock-free row appends).
	 */
	private MatrixBlock computeLIBSVMSizeAndCreateOutputMatrixBlock(InputSplit[] splits, Path path,
			JobConf job, long rlen, long clen, long estnnz)
		throws IOException, DMLRuntimeException
	{
		int nrow = 0;
		int ncol = (int) clen;

		FileInputFormat.addInputPath(job, path);
		TextInputFormat informat = new TextInputFormat();
		informat.configure(job);

		// count rows in parallel per split
		ArrayList<CountRowsTask> tasks = new ArrayList<>();
		for (InputSplit split : splits) {
			tasks.add(new CountRowsTask(split, informat, job));
		}
		ExecutorService pool = CommonThreadPool.get(_numThreads);
		try {
			pool.invokeAll(tasks);
		}
		catch (Exception e) {
			throw new IOException("Threadpool Error " + e.getMessage(), e);
		}
		finally {
			pool.shutdown();
		}

		// collect row counts for offset computation
		// (index counter instead of O(n) indexOf lookups per task)
		_offsets = new SplitOffsetInfos(tasks.size());
		int sid = 0;
		for (CountRowsTask rt : tasks) {
			if (!rt.getReturnCode())
				throw new IOException("Count task for libsvm input failed: "+ rt.getErrMsg());
			_offsets.setOffsetPerSplit(sid, nrow);
			_offsets.setLengthPerSplit(sid, rt.getRowCount());
			nrow = nrow + rt.getRowCount();
			sid++;
		}

		// robustness for wrong dimensions which are already compiled into the plan
		if( (rlen != -1 && nrow != rlen) || (clen != -1 && ncol != clen) ) {
			String msg = "Read matrix dimensions differ from meta data: ["+nrow+"x"+ncol+"] vs. ["+rlen+"x"+clen+"].";
			if( rlen < nrow || clen < ncol ) {
				//a) specified matrix dimensions too small
				throw new DMLRuntimeException(msg);
			}
			else {
				//b) specified matrix dimensions too large -> padding and warning
				LOG.warn(msg);
				nrow = (int) rlen;
				ncol = (int) clen;
			}
		}

		// allocate target matrix block based on given size;
		// need to allocate sparse as well since lock-free insert into target
		long estnnz2 = (estnnz < 0) ? (long)nrow * ncol : estnnz;
		return createOutputMatrixBlock(nrow, ncol, nrow, estnnz2, true, true);
	}

	/** Offset and row-count info per input split (first-pass result). */
	private static class SplitOffsetInfos {
		private int[] offsetPerSplit = null;
		private int[] lengthPerSplit = null;

		public SplitOffsetInfos(int numSplits) {
			lengthPerSplit = new int[numSplits];
			offsetPerSplit = new int[numSplits];
		}

		public int getLengthPerSplit(int split) {
			return lengthPerSplit[split];
		}

		public void setLengthPerSplit(int split, int r) {
			lengthPerSplit[split] = r;
		}

		public int getOffsetPerSplit(int split) {
			return offsetPerSplit[split];
		}

		public void setOffsetPerSplit(int split, int o) {
			offsetPerSplit[split] = o;
		}
	}

	/** Counts the number of text lines (rows) in a single input split. */
	private static class CountRowsTask implements Callable<Object>
	{
		private InputSplit _split = null;
		private TextInputFormat _informat = null;
		private JobConf _job = null;
		private boolean _rc = true;   // false on reader failure
		private String _errMsg = null;
		private int _nrows = -1;

		public CountRowsTask(InputSplit split, TextInputFormat informat, JobConf job) {
			_split = split;
			_informat = informat;
			_job = job;
			_nrows = 0;
		}

		public boolean getReturnCode() {
			return _rc;
		}

		public int getRowCount() {
			return _nrows;
		}

		public String getErrMsg() {
			return _errMsg;
		}

		@Override
		public Object call()
			throws Exception
		{
			RecordReader<LongWritable, Text> reader = _informat.getRecordReader(_split, _job, Reporter.NULL);
			LongWritable key = new LongWritable();
			Text oneLine = new Text();
			try {
				// count rows from the first row
				while (reader.next(key, oneLine)) {
					_nrows++;
				}
			}
			catch (Exception e) {
				_rc = false;
				_errMsg = "RecordReader error libsvm format. split: "+ _split.toString() + e.getMessage();
				throw new IOException(_errMsg);
			}
			finally {
				IOUtilFunctions.closeSilently(reader);
			}
			return null;
		}
	}

	/** Parses one input split and appends its rows at the split's offset. */
	private static class LIBSVMReadTask implements Callable<Object>
	{
		private InputSplit _split = null;
		private SplitOffsetInfos _splitoffsets = null;
		private TextInputFormat _informat = null;
		private JobConf _job = null;
		private MatrixBlock _dest = null;
		private long _clen = -1;
		private int _splitCount = 0;
		// format properties passed explicitly (no reliance on static state)
		private final FileFormatPropertiesLIBSVM _props;

		private boolean _rc = true;        // false on parse/read failure
		private Exception _exception = null;
		private long _nnz;                 // non-zeros appended by this task

		public LIBSVMReadTask(InputSplit split, SplitOffsetInfos offsets,
				TextInputFormat informat, JobConf job, MatrixBlock dest,
				long rlen, long clen, int splitCount, FileFormatPropertiesLIBSVM props)
		{
			_split = split;
			_splitoffsets = offsets;
			_informat = informat;
			_job = job;
			_dest = dest;
			_clen = clen;
			_rc = true;
			_splitCount = splitCount;
			_props = props;
		}

		public boolean getReturnCode() {
			return _rc;
		}

		public Exception getException() {
			return _exception;
		}

		public long getPartialNnz() {
			return _nnz;
		}

		@Override
		public Object call()
			throws Exception
		{
			long lnnz = 0;
			try
			{
				RecordReader<LongWritable, Text> reader = _informat.getRecordReader(_split, _job, Reporter.NULL);
				LongWritable key = new LongWritable();
				Text value = new Text();
				SparseRowVector vect = new SparseRowVector(1024);

				int row = _splitoffsets.getOffsetPerSplit(_splitCount);
				try {
					while (reader.next(key, value)) { // foreach line
						String rowStr = value.toString().trim();
						lnnz += ReaderTextLIBSVM
							.parseLibsvmRow(rowStr, vect, (int) _clen, _props.getDelim(), _props.getIndexDelim());
						_dest.appendRow(row, vect);
						row++;
					}
					// sanity checks (number of rows)
					if (row != (_splitoffsets.getOffsetPerSplit(_splitCount) + _splitoffsets.getLengthPerSplit(_splitCount)) ) {
						throw new IOException(
							"Incorrect number of rows (" + row + ") found in delimited file (" + (_splitoffsets
								.getOffsetPerSplit(_splitCount) + _splitoffsets
								.getLengthPerSplit(_splitCount)) + "): " + value);
					}
				}
				finally {
					IOUtilFunctions.closeSilently(reader);
				}
			}
			catch (Exception ex) {
				// central error handling (return code, message)
				_rc = false;
				_exception = ex;
			}
			//post processing
			_nnz = lnnz;
			return null;
		}
	}
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
*/
package com.intellij.xdebugger.impl.ui.tree.nodes;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.ui.ColoredTextContainer;
import com.intellij.ui.SimpleTextAttributes;
import com.intellij.util.ThreeState;
import com.intellij.xdebugger.XDebugSession;
import com.intellij.xdebugger.XSourcePosition;
import com.intellij.xdebugger.frame.*;
import com.intellij.xdebugger.frame.presentation.XValuePresentation;
import com.intellij.xdebugger.impl.XDebuggerInlayUtil;
import com.intellij.xdebugger.impl.frame.XDebugView;
import com.intellij.xdebugger.impl.frame.XValueMarkers;
import com.intellij.xdebugger.impl.frame.XValueWithInlinePresentation;
import com.intellij.xdebugger.impl.frame.XVariablesView;
import com.intellij.xdebugger.impl.ui.DebuggerUIUtil;
import com.intellij.xdebugger.impl.ui.XDebuggerUIConstants;
import com.intellij.xdebugger.impl.ui.tree.ValueMarkup;
import com.intellij.xdebugger.impl.ui.tree.XDebuggerTree;
import com.intellij.xdebugger.settings.XDebuggerSettingsManager;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.event.MouseEvent;
import java.util.Comparator;
/**
 * Tree node representing a single named {@link XValue} in the debugger variables,
 * watches or evaluation tree. The presentation is computed asynchronously: until
 * {@link XValue#computePresentation} delivers a result the node renders a
 * "collecting data" placeholder.
 *
 * @author nik
 */
public class XValueNodeImpl extends XValueContainerNode<XValue> implements XValueNode, XCompositeNode, XValueNodePresentationConfigurator.ConfigurableXValueNode, RestorableStateNode {
  /** Orders sibling nodes by natural (number-aware) comparison of their names. */
  public static final Comparator<XValueNodeImpl> COMPARATOR = (o1, o2) -> StringUtil.naturalCompare(o1.getName(), o2.getName());

  /** Maximum number of name characters rendered before truncation (see appendName()). */
  private static final int MAX_NAME_LENGTH = 100;

  private final String myName;
  /** Plain-text form of the current presentation; null until computed or while the value is being modified. */
  @Nullable
  private String myRawValue;
  /** Optional "show full value" link provider (e.g. for long strings); null when absent. */
  private XFullValueEvaluator myFullValueEvaluator;
  /** True once the value is known to have changed since the previous debugger stop; switches highlighting. */
  private boolean myChanged;
  /** Non-null once the presentation has been computed (see isComputed()). */
  private XValuePresentation myValuePresentation;

  //todo[nik] annotate 'name' with @NotNull
  public XValueNodeImpl(XDebuggerTree tree, @Nullable XDebuggerTreeNode parent, String name, @NotNull XValue value) {
    super(tree, parent, value);
    myName = name;

    // Kicks off the asynchronous presentation computation; applyPresentation()
    // is invoked (possibly synchronously) when the result is available.
    value.computePresentation(this, XValuePlace.TREE);

    // add "Collecting" message only if computation is not yet done
    if (!isComputed()) {
      if (myName != null) {
        myText.append(myName, XDebuggerUIConstants.VALUE_NAME_ATTRIBUTES);
        myText.append(XDebuggerUIConstants.EQ_TEXT, SimpleTextAttributes.REGULAR_ATTRIBUTES);
      }
      myText.append(XDebuggerUIConstants.COLLECTING_DATA_MESSAGE, XDebuggerUIConstants.COLLECTING_DATA_HIGHLIGHT_ATTRIBUTES);
    }
  }

  @Override
  public void setPresentation(@Nullable Icon icon, @NonNls @Nullable String type, @NonNls @NotNull String value, boolean hasChildren) {
    XValueNodePresentationConfigurator.setPresentation(icon, type, value, hasChildren, this);
  }

  @Override
  public void setPresentation(@Nullable Icon icon, @NonNls @Nullable String type, @NonNls @NotNull String separator,
                              @NonNls @Nullable String value, boolean hasChildren) {
    XValueNodePresentationConfigurator.setPresentation(icon, type, separator, value, hasChildren, this);
  }

  @Override
  public void setPresentation(@Nullable Icon icon, @NotNull XValuePresentation presentation, boolean hasChildren) {
    XValueNodePresentationConfigurator.setPresentation(icon, presentation, hasChildren, this);
  }

  /**
   * Applies a computed presentation to this node: stores the presentation and its
   * raw text, refreshes the rendered text, and notifies the tree. Must run on the EDT.
   */
  @Override
  public void applyPresentation(@Nullable Icon icon, @NotNull XValuePresentation valuePresentation, boolean hasChildren) {
    // extra check for obsolete nodes - tree root was changed
    // too dangerous to put this into isObsolete - it is called from anywhere, not only EDT
    if (isObsolete()) return;

    setIcon(icon);
    myValuePresentation = valuePresentation;
    myRawValue = XValuePresentationUtil.computeValueText(valuePresentation);
    if (XDebuggerSettingsManager.getInstance().getDataViewSettings().isShowValuesInline()) {
      updateInlineDebuggerData();
    }
    updateText();
    setLeaf(!hasChildren);
    fireNodeChanged();
    myTree.nodeLoaded(this, myName);
  }

  /**
   * Publishes this node's value for inline display in the editor, provided the value's
   * source position is in the same file as the current debugger position. Best-effort:
   * any failure is silently ignored.
   */
  public void updateInlineDebuggerData() {
    try {
      XDebugSession session = XDebugView.getSession(getTree());
      final XSourcePosition debuggerPosition = session == null ? null : session.getCurrentPosition();
      if (debuggerPosition == null) {
        return;
      }

      final XInlineDebuggerDataCallback callback = new XInlineDebuggerDataCallback() {
        @Override
        public void computed(XSourcePosition position) {
          if (isObsolete() || position == null) return;
          VirtualFile file = position.getFile();
          // filter out values from other files
          if (!Comparing.equal(debuggerPosition.getFile(), file)) {
            return;
          }
          final Document document = FileDocumentManager.getInstance().getDocument(file);
          if (document == null) return;

          XVariablesView.InlineVariablesInfo data = XVariablesView.InlineVariablesInfo.get(XDebugView.getSession(getTree()));
          if (data == null) {
            return;
          }

          // Fall back to the editor-gutter style display when inlay rendering is not used.
          if (!showAsInlay(file, position, debuggerPosition)) {
            data.put(file, position, XValueNodeImpl.this, document.getModificationStamp());
            myTree.updateEditor();
          }
        }
      };

      if (getValueContainer().computeInlineDebuggerData(callback) == ThreeState.UNSURE) {
        getValueContainer().computeSourcePosition(callback::computed);
      }
    }
    catch (Exception ignore) {
      // Inline data is purely cosmetic; never let it break presentation updates.
    }
  }

  /**
   * Tries to render the value as an editor inlay at the current execution line.
   * Requires the registry flag, a matching file/line, and a container that supports
   * inline presentations.
   *
   * @return true if an inlay was created (so the caller should skip the fallback display)
   */
  private boolean showAsInlay(VirtualFile file, XSourcePosition position, XSourcePosition debuggerPosition) {
    if (!Registry.is("debugger.show.values.inplace")) return false;
    if (!debuggerPosition.getFile().equals(position.getFile()) || debuggerPosition.getLine() != position.getLine()) return false;
    XValue container = getValueContainer();
    if (!(container instanceof XValueWithInlinePresentation)) return false;
    String presentation = ((XValueWithInlinePresentation)container).computeInlinePresentation();
    if (presentation == null) return false;
    XDebuggerInlayUtil.createInlay(myTree.getProject(), file, position.getOffset(), presentation);
    return true;
  }

  @Override
  public void setFullValueEvaluator(@NotNull final XFullValueEvaluator fullValueEvaluator) {
    invokeNodeUpdate(() -> {
      myFullValueEvaluator = fullValueEvaluator;
      fireNodeChanged();
    });
  }

  /** Removes the "show full value" link from this node. */
  public void clearFullValueEvaluator() {
    myFullValueEvaluator = null;
  }

  /** Rebuilds the rendered text: optional value-marker prefix, name, then the value presentation. */
  private void updateText() {
    myText.clear();
    XValueMarkers<?, ?> markers = myTree.getValueMarkers();
    if (markers != null) {
      ValueMarkup markup = markers.getMarkup(getValueContainer());
      if (markup != null) {
        myText.append("[" + markup.getText() + "] ", new SimpleTextAttributes(SimpleTextAttributes.STYLE_BOLD, markup.getColor()));
      }
    }
    appendName();
    buildText(myValuePresentation, myText);
  }

  /** Appends the (possibly truncated) node name, highlighted when the value has changed. */
  private void appendName() {
    if (!StringUtil.isEmpty(myName)) {
      SimpleTextAttributes attributes = myChanged ? XDebuggerUIConstants.CHANGED_VALUE_ATTRIBUTES : XDebuggerUIConstants.VALUE_NAME_ATTRIBUTES;
      XValuePresentationUtil.renderValue(myName, myText, attributes, MAX_NAME_LENGTH, null);
    }
  }

  public static void buildText(@NotNull XValuePresentation valuePresenter, @NotNull ColoredTextContainer text) {
    buildText(valuePresenter, text, true);
  }

  /**
   * Renders a value presentation into the given text container: optional separator,
   * optional "{type} " prefix, then the value itself.
   */
  public static void buildText(@NotNull XValuePresentation valuePresenter, @NotNull ColoredTextContainer text, boolean appendSeparator) {
    if (appendSeparator) {
      XValuePresentationUtil.appendSeparator(text, valuePresenter.getSeparator());
    }
    String type = valuePresenter.getType();
    if (type != null) {
      text.append("{" + type + "} ", XDebuggerUIConstants.TYPE_ATTRIBUTES);
    }
    valuePresenter.renderValue(new XValueTextRendererImpl(text));
  }

  /**
   * Marks this node's value as changed since the previous stop and re-renders it
   * with the "changed" highlighting. Must run on the EDT; idempotent.
   */
  @Override
  public void markChanged() {
    if (myChanged) return;

    ApplicationManager.getApplication().assertIsDispatchThread();
    myChanged = true;
    if (myName != null && myValuePresentation != null) {
      updateText();
      fireNodeChanged();
    }
  }

  @Nullable
  public XFullValueEvaluator getFullValueEvaluator() {
    return myFullValueEvaluator;
  }

  /**
   * Returns the hyperlink shown after the value (backed by the full-value evaluator),
   * or null when there is nothing to link to.
   */
  @Nullable
  @Override
  public XDebuggerTreeNodeHyperlink getLink() {
    if (myFullValueEvaluator != null) {
      return new XDebuggerTreeNodeHyperlink(myFullValueEvaluator.getLinkText()) {
        @Override
        public boolean alwaysOnScreen() {
          return true;
        }

        @Override
        public void onClick(MouseEvent event) {
          if (myFullValueEvaluator.isShowValuePopup()) {
            DebuggerUIUtil.showValuePopup(myFullValueEvaluator, event, myTree.getProject(), null);
          }
          else {
            new HeadlessValueEvaluationCallback(XValueNodeImpl.this).startFetchingValue(myFullValueEvaluator);
          }
          event.consume();
        }
      };
    }
    return null;
  }

  @Override
  @Nullable
  public String getName() {
    return myName;
  }

  @Nullable
  public XValuePresentation getValuePresentation() {
    return myValuePresentation;
  }

  @Override
  @Nullable
  public String getRawValue() {
    return myRawValue;
  }

  /** True once a presentation has been applied (the async computation completed). */
  @Override
  public boolean isComputed() {
    return myValuePresentation != null;
  }

  /**
   * Switches the node into "modifying value" display mode while a new value is being set.
   * Must run on the EDT. Note: assumes a presentation was already computed
   * (myValuePresentation is dereferenced).
   */
  public void setValueModificationStarted() {
    ApplicationManager.getApplication().assertIsDispatchThread();
    myRawValue = null;
    myText.clear();
    appendName();
    XValuePresentationUtil.appendSeparator(myText, myValuePresentation.getSeparator());
    myText.append(XDebuggerUIConstants.MODIFYING_VALUE_MESSAGE, XDebuggerUIConstants.MODIFYING_VALUE_HIGHLIGHT_ATTRIBUTES);
    setLeaf(true);
    fireNodeStructureChanged();
  }

  @Override
  public String toString() {
    return getName();
  }
}
| |
/*
* Copyright (C) 2004-2009 Jive Software. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.openfire.session;
import java.net.UnknownHostException;
import java.security.cert.Certificate;
import java.util.*;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import javax.net.ssl.SSLSession;
import org.jivesoftware.openfire.Connection;
import org.jivesoftware.openfire.SessionManager;
import org.jivesoftware.openfire.StreamID;
import org.jivesoftware.openfire.XMPPServer;
import org.jivesoftware.openfire.auth.UnauthorizedException;
import org.jivesoftware.openfire.interceptor.InterceptorManager;
import org.jivesoftware.openfire.interceptor.PacketRejectedException;
import org.jivesoftware.openfire.net.SocketConnection;
import org.jivesoftware.openfire.net.TLSStreamHandler;
import org.jivesoftware.openfire.streammanagement.StreamManager;
import org.jivesoftware.util.LocaleUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xmpp.packet.*;
/**
* The session represents a connection between the server and a client (c2s) or
* another server (s2s) as well as a connection with a component. Authentication and
* user accounts are associated with c2s connections while s2s has an optional authentication
* association but no single user.<p>
*
* Obtain object managers from the session in order to access server resources.
*
* @author Gaston Dombiak
*/
public abstract class LocalSession implements Session {

    private static final Logger Log = LoggerFactory.getLogger(LocalSession.class);

    /**
     * The utf-8 charset for decoding and encoding Jabber packet streams.
     */
    protected static String CHARSET = "UTF-8";

    /**
     * The Address this session is authenticated as.
     */
    private JID address;

    /**
     * The stream id for this session (random and unique).
     */
    private final StreamID streamID;

    /**
     * The current session status.
     */
    protected int status = STATUS_CONNECTED;

    /**
     * The connection that this session represents. Null while the session is detached.
     */
    protected Connection conn;

    protected SessionManager sessionManager;

    private final String serverName;

    private final long startDate = System.currentTimeMillis();

    private long lastActiveDate;
    private final AtomicLong clientPacketCount = new AtomicLong(0);
    private final AtomicLong serverPacketCount = new AtomicLong(0);

    /**
     * Session temporary data.
     *
     * All data stored in this <code>Map</code> disappears when the session finishes.
     */
    private final Map<String, Object> sessionData = new HashMap<>();

    /**
     * Software Version (XEP-0092) data as obtained from the peer on this connection.
     */
    private final Map<String, String> softwareVersionData = new HashMap<>();

    /**
     * XEP-0198 Stream Manager
     */
    protected final StreamManager streamManager;

    /**
     * A lock to protect the connection changes.
     */
    private final Lock lock = new ReentrantLock();

    private final Locale language;

    /**
     * Creates a session with an underlying connection and permission protection.
     *
     * @param serverName domain of the XMPP server where the new session belongs.
     * @param connection The connection we are proxying.
     * @param streamID unique identifier for this session.
     * @param language The language to use for this session.
     */
    public LocalSession(String serverName, Connection connection, StreamID streamID, Locale language) {
        if (connection == null) {
            throw new IllegalArgumentException("connection must not be null");
        }
        conn = connection;
        this.streamID = streamID;
        this.serverName = serverName;
        String id = streamID.getID();
        this.address = new JID(null, serverName, id, true);
        this.sessionManager = SessionManager.getInstance();
        this.streamManager = new StreamManager(this);
        this.language = language;
    }

    /**
     * Returns true if the session is detached (that is, if the underlying connection
     * has been closed while the session instance itself has not been closed).
     *
     * @return true if session detached
     */
    public boolean isDetached() {
        return this.sessionManager.isDetached(this);
    }

    /**
     * Set the session to detached mode, indicating that the underlying connection
     * has been closed.
     */
    public void setDetached() {
        lock.lock();
        try {
            Log.debug("Setting session with address {} and streamID {} in detached mode.", this.address, this.streamID );
            this.sessionManager.addDetached(this);
            this.conn = null;
        } finally {
            lock.unlock();
        }
    }

    /**
     * Removes and returns the underlying connection of this session, closing the
     * session in the process (the caller takes over ownership of the connection).
     *
     * Note: conn is nulled before close() is invoked, so the base close() is a no-op
     * here; subclass overrides may still perform additional cleanup.
     *
     * @return the connection that was associated with this session (may be null).
     */
    Connection releaseConnection()
    {
        lock.lock();
        try {
            Log.debug("Releasing connection from session with address {} and streamID {}.", this.address, this.streamID );
            final Connection result = conn;
            this.conn = null;
            this.close();
            return result;
        } finally {
            lock.unlock();
        }
    }

    /**
     * Reattach the (existing) session to the connection provided by a new session (a session that will be replaced
     * by the older, pre-existing session). The connection must already be initialized as a running XML Stream, normally
     * by having run through XEP-0198 resumption.
     *
     * @param connectionProvider Session from which to obtain the connection from.
     * @param h the sequence number of the last handled stanza sent over the former stream
     */
    public void reattach(LocalSession connectionProvider, long h) {
        lock.lock();
        try {
            Log.debug("Reattaching session with address {} and streamID {} using connection from session with address {} and streamID {}.", this.address, this.streamID, connectionProvider.getAddress(), connectionProvider.getStreamID());
            // Discard our stale connection (if any) before adopting the new one.
            if (this.conn != null && !this.conn.isClosed())
            {
                this.conn.close();
            }
            this.conn = connectionProvider.releaseConnection();
            this.conn.reinit(this);
        } finally {
            lock.unlock();
        }
        this.status = STATUS_AUTHENTICATED;
        this.sessionManager.removeDetached(this);
        this.streamManager.onResume(new JID(null, this.serverName, null, true), h);
        // The provider session has been cannibalized; remove it from the session manager.
        this.sessionManager.removeSession((LocalClientSession)connectionProvider);
    }

    /**
     * Obtain the address of the user. The address is used by services like the core
     * server packet router to determine if a packet should be sent to the handler.
     * Handlers that are working on behalf of the server should use the generic server
     * hostname address (e.g. server.com).
     *
     * @return the address of the packet handler.
     */
    @Override
    public JID getAddress() {
        return address;
    }

    /**
     * Sets the new address of this session. The address is used by services like the core
     * server packet router to determine if a packet should be sent to the handler.
     * Handlers that are working on behalf of the server should use the generic server
     * hostname address (e.g. server.com).
     *
     * @param address the new address of this session.
     */
    public void setAddress(JID address){
        this.address = address;
    }

    /**
     * Returns the connection associated with this Session.
     *
     * @return The connection for this session, or null (with an error logged) when detached.
     */
    public Connection getConnection() {
        Connection connection = conn;
        if (connection == null)
        {
            Log.error("Attempt to read connection of detached session with address {} and streamID {}: ", this.address, this.streamID, new IllegalStateException());
        }
        return connection;
    }

    /**
     * Obtain the current status of this session.
     *
     * @return The status code for this session
     */
    @Override
    public int getStatus() {
        return status;
    }

    /**
     * Set the new status of this session. Setting a status may trigger
     * certain events to occur (setting a closed status will close this
     * session).
     *
     * Note: a CLOSED status is suppressed while XEP-0198 stream resumption is
     * enabled, so the session can survive the loss of its connection.
     *
     * @param status The new status code for this session
     */
    public void setStatus(int status) {
        if (status == STATUS_CLOSED && this.streamManager.getResume()) {
            Log.debug( "Suppressing close for session with address {} and streamID {}.", this.address, this.streamID );
            return;
        }
        this.status = status;
    }

    /**
     * Obtain the stream ID associated with this session. Stream ID's are generated by the server
     * and should be unique and random.
     *
     * @return This session's assigned stream ID
     */
    @Override
    public StreamID getStreamID() {
        return streamID;
    }

    /**
     * Obtain the name of the server this session belongs to.
     *
     * @return the server name.
     */
    @Override
    public String getServerName() {
        return serverName;
    }

    /**
     * Obtain the date the session was created.
     *
     * @return the session's creation date.
     */
    @Override
    public Date getCreationDate() {
        return new Date(startDate);
    }

    /**
     * Obtain the time the session last had activity.
     *
     * @return The last time the session received activity.
     */
    @Override
    public Date getLastActiveDate() {
        return new Date(lastActiveDate);
    }

    /**
     * Increments the number of packets sent from the client to the server.
     */
    public void incrementClientPacketCount() {
        clientPacketCount.incrementAndGet();
        lastActiveDate = System.currentTimeMillis();
        streamManager.incrementServerProcessedStanzas();
    }

    /**
     * Increments the number of packets sent from the server to the client.
     */
    public void incrementServerPacketCount() {
        serverPacketCount.incrementAndGet();
        lastActiveDate = System.currentTimeMillis();
    }

    /**
     * Obtain the number of packets sent from the client to the server.
     *
     * @return The number of packets sent from the client to the server.
     */
    @Override
    public long getNumClientPackets() {
        return clientPacketCount.get();
    }

    /**
     * Obtain the number of packets sent from the server to the client.
     *
     * @return The number of packets sent from the server to the client.
     */
    @Override
    public long getNumServerPackets() {
        return serverPacketCount.get();
    }

    /**
     * Saves given session data. Data are saved to temporary storage only and are accessible during
     * this session life only and only from this session instance.
     *
     * @param key a <code>String</code> value of stored data key ID.
     * @param value a <code>Object</code> value of data stored in session.
     * @return the previous value associated with {@code key}, or
     *         {@code null} if there was no mapping for {@code key}.
     *         (A {@code null} return can also indicate that the map
     *         previously associated {@code null} with {@code key}.)
     * @see #getSessionData(String)
     */
    public Object setSessionData(String key, Object value) {
        synchronized (sessionData) {
            return sessionData.put(key, value);
        }
    }

    /**
     * Retrieves session data. This method gives access to temporary session data only. You can
     * retrieve earlier saved data giving key ID to receive needed value. Please see
     * {@link #setSessionData(String, Object)} description for more details.
     *
     * @param key a <code>String</code> value of stored data ID.
     * @return a <code>Object</code> value of data for given key.
     * @see #setSessionData(String, Object)
     */
    public Object getSessionData(String key) {
        synchronized (sessionData) {
            return sessionData.get(key);
        }
    }

    /**
     * Removes session data. Please see {@link #setSessionData(String, Object)} description
     * for more details.
     *
     * @param key a <code>String</code> value of stored data ID.
     * @return the previous value associated with {@code key}, or
     *         {@code null} if there was no mapping for {@code key}.
     * @see #setSessionData(String, Object)
     */
    public Object removeSessionData(String key) {
        synchronized (sessionData) {
            return sessionData.remove(key);
        }
    }

    /**
     * Get XEP-0198 Stream manager for session
     * @return The StreamManager for the session.
     */
    public StreamManager getStreamManager() {
        return streamManager;
    }

    /**
     * Delivers the packet to the entity behind this session, running packet
     * interceptors before and after delivery. When delivery is blocked (see
     * {@link #canProcess(Packet)}) an XEP-0016-style error is returned where the
     * specification requires one.
     *
     * @param packet the stanza to process.
     */
    @Override
    public void process(Packet packet) {
        // Check that the requested packet can be processed
        if (canProcess(packet)) {
            // Perform the actual processing of the packet. This usually implies sending
            // the packet to the entity
            try {
                // Invoke the interceptors before we send the packet
                InterceptorManager.getInstance().invokeInterceptors(packet, this, false, false);
                deliver(packet);
                // Invoke the interceptors after we have sent the packet
                InterceptorManager.getInstance().invokeInterceptors(packet, this, false, true);
            }
            catch (PacketRejectedException e) {
                // An interceptor rejected the packet so do nothing
            }
            catch (Exception e) {
                Log.error(LocaleUtils.getLocalizedString("admin.error"), e);
            }
        } else {
            // http://xmpp.org/extensions/xep-0016.html#protocol-error
            if (packet instanceof Message) {
                // For message stanzas, the server SHOULD return an error, which SHOULD be <service-unavailable/>.
                if (((Message)packet).getType() == Message.Type.error){
                    Log.debug("Avoid generating an error in response to a stanza that itself is an error (to avoid the chance of entering an endless back-and-forth of exchanging errors). Suppress sending an {} error in response to: {}", PacketError.Condition.service_unavailable, packet);
                    return;
                }
                Message message = (Message) packet;
                Message result = message.createCopy();
                result.setTo(message.getFrom());
                result.setFrom(message.getTo());
                result.setError(PacketError.Condition.service_unavailable);
                XMPPServer.getInstance().getPacketRouter().route(result);
            } else if (packet instanceof IQ) {
                // For IQ stanzas of type "get" or "set", the server MUST return an error, which SHOULD be <service-unavailable/>.
                // IQ stanzas of other types MUST be silently dropped by the server.
                IQ iq = (IQ) packet;
                if (iq.getType() == IQ.Type.get || iq.getType() == IQ.Type.set) {
                    IQ result = IQ.createResultIQ(iq);
                    result.setError(PacketError.Condition.service_unavailable);
                    XMPPServer.getInstance().getPacketRouter().route(result);
                }
            }
        }
    }

    /**
     * Returns true if the specified packet can be delivered to the entity. Subclasses will use different
     * criteria to determine whether processing is allowed or not. For instance, client sessions will use
     * privacy lists while outgoing server sessions will always allow this action.
     *
     * @param packet the packet to analyze if it must be blocked.
     * @return true if the specified packet must be blocked.
     */
    abstract boolean canProcess(Packet packet);

    abstract void deliver(Packet packet) throws UnauthorizedException;

    /**
     * Writes raw text to the underlying connection. Silently dropped (with a debug
     * log entry) when the session is detached.
     *
     * @param text the raw text to deliver.
     */
    @Override
    public void deliverRawText(String text) {
        Connection connection = conn;
        if (connection == null )
        {
            Log.debug( "Unable to deliver raw text in session with address {} and streamID {}, as its connection is null. Dropping: {}", this.address, this.streamID, text );
            return;
        }
        connection.deliverRawText(text);
    }

    /**
     * Returns a text with the available stream features. Each subclass may return different
     * values depending whether the session has been authenticated or not.
     *
     * @return a text with the available stream features or {@code null} to add nothing.
     */
    public abstract String getAvailableStreamFeatures();

    @Override
    public void close() {
        Optional.ofNullable(conn)
                .ifPresent(Connection::close);
    }

    @Override
    public boolean validate() {
        return Optional.ofNullable(conn)
                .map(Connection::validate)
                .orElse(Boolean.FALSE);
    }

    @Override
    public boolean isSecure() {
        return Optional.ofNullable(conn)
                .map(Connection::isSecure)
                .orElse(Boolean.FALSE);
    }

    @Override
    public Certificate[] getPeerCertificates() {
        return Optional.ofNullable(conn)
                .map(Connection::getPeerCertificates)
                .orElse(new Certificate[0]);
    }

    @Override
    public boolean isClosed() {
        return Optional.ofNullable(conn)
                .map(Connection::isClosed)
                .orElse(Boolean.TRUE);
    }

    @Override
    public String getHostAddress() throws UnknownHostException {
        Connection connection = conn;
        if (connection == null) {
            throw new UnknownHostException("Detached session");
        }
        return connection.getHostAddress();
    }

    @Override
    public String getHostName() throws UnknownHostException {
        Connection connection = conn;
        if (connection == null) {
            throw new UnknownHostException("Detached session");
        }
        return connection.getHostName();
    }

    @Override
    public String toString()
    {
        return this.getClass().getSimpleName() +"{" +
            "address=" + getAddress() +
            ", streamID=" + getStreamID() +
            ", status=" + getStatus() +
            (getStatus() == STATUS_AUTHENTICATED ? " (authenticated)" : "" ) +
            (getStatus() == STATUS_CONNECTED ? " (connected)" : "" ) +
            (getStatus() == STATUS_CLOSED ? " (closed)" : "" ) +
            ", isSecure=" + isSecure() +
            ", isDetached=" + isDetached() +
            ", serverName='" + getServerName() + '\'' +
            '}';
    }

    /**
     * Decodes a version string of the form "major.minor[.extra]" into a two-element
     * array {major, minor}. A version without a minor component (e.g. "1") yields a
     * minor version of 0 instead of throwing an ArrayIndexOutOfBoundsException.
     *
     * @param version the version string to decode.
     * @return a two-element int array: {major, minor}.
     * @throws NumberFormatException if a present component is not a number.
     */
    protected static int[] decodeVersion(String version) {
        final int[] answer = new int[] {0, 0};
        final String[] versionString = version.split("\\.");
        answer[0] = Integer.parseInt(versionString[0]);
        if (versionString.length > 1) {
            answer[1] = Integer.parseInt(versionString[1]);
        }
        return answer;
    }

    /**
     * Returns true if the other peer of this session presented a self-signed certificate. When
     * using self-signed certificate for server-2-server sessions then SASL EXTERNAL will not be
     * used and instead server-dialback will be preferred for verifying the identity of the remote
     * server.
     *
     * @return true if the other peer of this session presented a self-signed certificate.
     */
    public boolean isUsingSelfSignedCertificate() {
        return Optional.ofNullable(conn)
                .map(Connection::isUsingSelfSignedCertificate)
                .orElse(Boolean.FALSE);
    }

    /**
     * Returns a String representing the Cipher Suite Name, or "NONE".
     * @return String
     */
    @Override
    public String getCipherSuiteName() {
        SocketConnection s = (SocketConnection)getConnection();
        if (s != null) {
            TLSStreamHandler t = s.getTLSStreamHandler();
            if (t != null) {
                SSLSession ssl = t.getSSLSession();
                if (ssl != null) {
                    return ssl.getCipherSuite();
                }
            }
        }
        return "NONE";
    }

    @Override
    public final Locale getLanguage() {
        return language;
    }

    /**
     * Retrieves Software Version data. This method gives access to temporary Software Version data only.
     * @return a Map collection value of data .
     */
    @Override
    public Map<String, String> getSoftwareVersion() {
        return softwareVersionData;
    }

    /**
     * Saves given session data. Data is saved to temporary storage only and is accessible during
     * this session life only and only from this session instance.
     * @param key a <code>String</code> value of stored data key ID.
     * @param value a <code>String</code> value of data stored in session.
     */
    public void setSoftwareVersionData(String key, String value) {
        softwareVersionData.put(key, value);
    }
}
| |
package crazypants.enderzoo.entity;
import java.util.Collections;
import java.util.List;
import java.util.UUID;
import javax.annotation.Nullable;
import crazypants.enderzoo.EnderZoo;
import crazypants.enderzoo.config.Config;
import crazypants.enderzoo.vec.VecUtil;
import net.minecraft.block.state.IBlockState;
import net.minecraft.entity.Entity;
import net.minecraft.entity.SharedMonsterAttributes;
import net.minecraft.entity.ai.EntityAIAttackMelee;
import net.minecraft.entity.ai.EntityAIHurtByTarget;
import net.minecraft.entity.ai.EntityAILookIdle;
import net.minecraft.entity.ai.EntityAINearestAttackableTarget;
import net.minecraft.entity.ai.EntityAISwimming;
import net.minecraft.entity.ai.EntityAIWander;
import net.minecraft.entity.ai.EntityAIWatchClosest;
import net.minecraft.entity.ai.attributes.AttributeModifier;
import net.minecraft.entity.ai.attributes.IAttributeInstance;
import net.minecraft.entity.monster.EntityCreeper;
import net.minecraft.entity.monster.EntityMob;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.init.Blocks;
import net.minecraft.init.Items;
import net.minecraft.init.SoundEvents;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.network.datasync.DataParameter;
import net.minecraft.network.datasync.DataSerializers;
import net.minecraft.network.datasync.EntityDataManager;
import net.minecraft.util.DamageSource;
import net.minecraft.util.EntityDamageSource;
import net.minecraft.util.EntityDamageSourceIndirect;
import net.minecraft.util.EnumParticleTypes;
import net.minecraft.util.ResourceLocation;
import net.minecraft.util.SoundCategory;
import net.minecraft.util.SoundEvent;
import net.minecraft.util.math.AxisAlignedBB;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.MathHelper;
import net.minecraft.util.math.Vec3d;
import net.minecraft.world.World;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.event.entity.living.EnderTeleportEvent;
public class EntityEnderminy extends EntityMob implements IEnderZooMob {
public static final String NAME = "enderminy";
public static final int EGG_BG_COL = 0x27624D;
public static final int EGG_FG_COL = 0x212121;
private static final int MAX_RND_TP_DISTANCE = 32;
// private static final int SCREAMING_INDEX = 30;
private static final DataParameter<Boolean> SCREAMING_INDEX = EntityDataManager.<Boolean>createKey(EntityEnderminy.class, DataSerializers.BOOLEAN);
private static final UUID attackingSpeedBoostModifierUUID = UUID.fromString("020E0DFB-87AE-4653-9556-831010E291B0");
private static final AttributeModifier attackingSpeedBoostModifier = (new AttributeModifier(attackingSpeedBoostModifierUUID, "Attacking speed boost",
6.2, 0)).setSaved(false);
private boolean isAggressive;
private boolean attackIfLookingAtPlayer = Config.enderminyAttacksPlayerOnSight;
private boolean attackCreepers = Config.enderminyAttacksCreepers;
private boolean groupAgroEnabled = Config.enderminyGroupAgro;
/**
 * Creates an enderminy: half the width and a quarter the height of a vanilla
 * enderman, with config-driven optional aggression behaviors.
 *
 * @param world the world the entity is spawned into
 */
public EntityEnderminy(World world) {
    super(world);
    // Vanilla enderman is 0.6 x 2.9; this mob is scaled down from that.
    setSize(0.6F * 0.5F, 2.9F * 0.25F);
    stepHeight = 1.0F;
    // AI tasks: lower numbers have higher priority; registration order breaks
    // ties between tasks that share a priority value.
    tasks.addTask(0, new EntityAISwimming(this));
    tasks.addTask(2, new EntityAIAttackMelee(this, 1.0D, false));
    tasks.addTask(7, new EntityAIWander(this, 1.0D));
    tasks.addTask(8, new EntityAIWatchClosest(this, EntityPlayer.class, 8.0F));
    tasks.addTask(8, new EntityAILookIdle(this));
    // Retaliate when hurt; optional config-driven target selection below.
    targetTasks.addTask(1, new EntityAIHurtByTarget(this, false, new Class[0]));
    if(attackIfLookingAtPlayer) {
        targetTasks.addTask(2, new AIFindPlayer());
    }
    if(attackCreepers) {
        targetTasks.addTask(2, new EntityAINearestAttackableTarget<EntityCreeper>(this, EntityCreeper.class, true, true));
    }
}
/**
 * Permits spawning regardless of light level when enabled in the config;
 * otherwise defers to the vanilla hostile-mob darkness check.
 *
 * @return true if the light level at the spawn position is acceptable
 */
@Override
protected boolean isValidLightLevel() {
    // `cond ? true : x` is just `cond || x`; short-circuit preserves the
    // original behavior of skipping the super call when the config flag is set.
    return Config.enderminySpawnInLitAreas || super.isValidLightLevel();
}
/**
 * Registers base attribute values: vanilla defaults first, then the custom
 * movement speed, then the config-driven values from MobInfo (health, damage, ...).
 */
@Override
protected void applyEntityAttributes() {
    super.applyEntityAttributes();
    getEntityAttribute(SharedMonsterAttributes.MOVEMENT_SPEED).setBaseValue(0.3);
    // MobInfo applies configurable attribute overrides last so they win.
    MobInfo.ENDERMINY.applyAttributes(this);
}
/**
 * Registers the synced data watcher entries. SCREAMING_INDEX carries the
 * "screaming" state to clients for rendering; it starts out false.
 */
@Override
protected void entityInit() {
    super.entityInit();
    dataManager.register(SCREAMING_INDEX, Boolean.FALSE);
}
/**
 * Spawn check: optionally requires a grass block directly underfoot (config),
 * always requires the position to be above the configured minimum Y, and then
 * defers to the vanilla mob spawn rules.
 *
 * @return true if this entity may spawn at its current position
 */
@Override
public boolean getCanSpawnHere() {
    if(Config.enderminySpawnOnlyOnGrass) {
        final int blockX = MathHelper.floor(posX);
        final int blockY = MathHelper.floor(getEntityBoundingBox().minY);
        final int blockZ = MathHelper.floor(posZ);
        // Must be standing on grass when the config restriction is active.
        if(world.getBlockState(VecUtil.bpos(blockX, blockY - 1, blockZ)).getBlock() != Blocks.GRASS) {
            return false;
        }
    }
    return posY > Config.enderminyMinSpawnY && super.getCanSpawnHere();
}
/**
 * Decides whether this mob should become hostile toward the given player.
 * Inverse of the vanilla enderman rule: the trigger is THIS mob looking at the
 * player, not the player looking at the mob. A pumpkin helmet grants immunity.
 *
 * @param player the player to evaluate
 * @return true when this mob's gaze is aligned with the player's eye position
 */
private boolean shouldAttackPlayer(EntityPlayer player) {
    // Armor slot indices: 3 = helmet, 2 = chestpiece, 1 = legs, 0 = boots.
    ItemStack helmet = player.inventory.armorInventory.get(3);
    if(helmet != null && helmet.getItem() == Item.getItemFromBlock(Blocks.PUMPKIN)) {
        // Wearing a pumpkin prevents aggression, as with vanilla endermen.
        return false;
    }
    // Vector from the player's eyes to this entity's mid-body point.
    Vec3d towardPlayerEyes = new Vec3d(
        posX - player.posX,
        getEntityBoundingBox().minY + height / 2.0F - (player.posY + player.getEyeHeight()),
        posZ - player.posZ);
    final double distance = towardPlayerEyes.lengthVector();
    towardPlayerEyes = towardPlayerEyes.normalize();
    //NB: inverse of normal enderman, attack when this guy looks at the player instead of the other
    //way around
    final Vec3d gaze = getLook(1.0F).normalize();
    final double alignment = -gaze.dotProduct(towardPlayerEyes);
    // Alignment threshold tightens with distance (same formula as vanilla).
    return alignment > 1.0D - 0.025D / distance;
}
/**
 * Per-tick living update. On the client, spawns a couple of portal particles
 * around the body each tick; on both sides, clears the jumping flag before the
 * vanilla update runs (this mob never jumps on its own).
 *
 * Added the missing {@code @Override} annotation for consistency with the other
 * overridden methods in this class (EntityLivingBase declares onLivingUpdate).
 */
@Override
public void onLivingUpdate() {
    if(this.world.isRemote) {
        for (int i = 0; i < 2; ++i) {
            this.world.spawnParticle(EnumParticleTypes.PORTAL, this.posX + (this.rand.nextDouble() - 0.5D) * (double) this.width,
                this.posY + this.rand.nextDouble() * (double) this.height - 0.25D, this.posZ + (this.rand.nextDouble() - 0.5D) * (double) this.width,
                (this.rand.nextDouble() - 0.5D) * 2.0D, -this.rand.nextDouble(), (this.rand.nextDouble() - 0.5D) * 2.0D, new int[0]);
        }
    }
    isJumping = false;
    super.onLivingUpdate();
}
/**
 * Server-side AI tick: like vanilla endermen this mob takes drowning damage when
 * wet, and has a ~1%-per-tick chance to stop screaming once no longer aggressive.
 *
 * Added the missing {@code @Override} annotation for consistency with the other
 * overridden methods in this class (EntityMob declares updateAITasks).
 */
@Override
protected void updateAITasks() {
    if(isWet()) {
        attackEntityFrom(DamageSource.DROWN, 1.0F);
    }
    if(isScreaming() && !isAggressive && rand.nextInt(100) == 0) {
        setScreaming(false);
    }
    super.updateAITasks();
}
/**
 * Attempts a single random teleport: up to +/- distance/2 blocks horizontally
 * and roughly +/- distance/2 vertically from the current position.
 *
 * @param distance the horizontal spread and vertical range of the jump
 * @return true if the teleport succeeded
 */
protected boolean teleportRandomly(int distance) {
    final double targetX = posX + (rand.nextDouble() - 0.5D) * distance;
    final double targetY = posY + rand.nextInt(distance + 1) - distance / 2;
    final double targetZ = posZ + (rand.nextDouble() - 0.5D) * distance;
    return teleportTo(targetX, targetY, targetZ);
}
/**
 * Attempts a single random teleport using the default maximum range.
 *
 * @return true if the teleport succeeded
 */
protected boolean teleportRandomly() {
    return teleportRandomly(MAX_RND_TP_DISTANCE);
}
/**
 * Teleports away from the given entity: picks a point about 16 blocks in the
 * direction opposite the line from the entity's eyes to this mob, with +/- 4
 * blocks of horizontal jitter and up to +/- 8 blocks of vertical jitter.
 * (Renamed the obfuscated decompiler parameter {@code p_70816_1_}.)
 *
 * @param target the entity to flee from
 * @return true if the teleport succeeded
 */
protected boolean teleportToEntity(Entity target) {
    Vec3d away = new Vec3d(posX - target.posX, getEntityBoundingBox().minY + height / 2.0F - target.posY
        + target.getEyeHeight(), posZ - target.posZ);
    away = away.normalize();
    // Distance to project along the away-vector.
    final double range = 16.0D;
    double destX = posX + (rand.nextDouble() - 0.5D) * 8.0D - away.x * range;
    double destY = posY + (rand.nextInt(16) - 8) - away.y * range;
    double destZ = posZ + (rand.nextDouble() - 0.5D) * 8.0D - away.z * range;
    return teleportTo(destX, destY, destZ);
}
/**
 * Attempts to teleport to the given coordinates, posting a cancellable Forge
 * EnderTeleportEvent first. The destination is dropped straight down to the
 * nearest block that blocks movement; the teleport is rolled back when no
 * ground is found, the spot collides with blocks, or it contains liquid.
 *
 * @return true if the teleport actually happened.
 */
protected boolean teleportTo(double x, double y, double z) {
    EnderTeleportEvent event = new EnderTeleportEvent(this, x, y, z, 0);
    if(MinecraftForge.EVENT_BUS.post(event)) {
        // Another mod cancelled the teleport.
        return false;
    }
    // Remember the current position so we can roll back on failure.
    double d3 = posX;
    double d4 = posY;
    double d5 = posZ;
    // Event handlers may have altered the destination.
    posX = event.getTargetX();
    posY = event.getTargetY();
    posZ = event.getTargetZ();
    int xInt = MathHelper.floor(posX);
    int yInt = MathHelper.floor(posY);
    int zInt = MathHelper.floor(posZ);
    boolean flag = false;
    if(world.isBlockLoaded(new BlockPos(xInt, yInt, zInt))) {
        // Walk downwards until the block below blocks movement (solid ground).
        boolean foundGround = false;
        while (!foundGround && yInt > 0) {
            IBlockState bs = world.getBlockState(new BlockPos(xInt, yInt - 1, zInt));
            if(bs.getMaterial().blocksMovement()) {
                foundGround = true;
            } else {
                --posY;
                --yInt;
            }
        }
        if(foundGround) {
            setPosition(posX, posY, posZ);
            // Only accept destinations that are collision free and liquid free.
            if(world.getCollisionBoxes(this, getEntityBoundingBox()).isEmpty() && !world.containsAnyLiquid(getEntityBoundingBox())) {
                flag = true;
            }
        }
    }
    if(!flag) {
        // Destination rejected: restore the original position.
        setPosition(d3, d4, d5);
        return false;
    }
    // Spawn a trail of portal particles between the old and new positions.
    short short1 = 128;
    for (int l = 0; l < short1; ++l) {
        double d6 = l / (short1 - 1.0D);
        float f = (rand.nextFloat() - 0.5F) * 0.2F;
        float f1 = (rand.nextFloat() - 0.5F) * 0.2F;
        float f2 = (rand.nextFloat() - 0.5F) * 0.2F;
        double d7 = d3 + (posX - d3) * d6 + (rand.nextDouble() - 0.5D) * width * 2.0D;
        double d8 = d4 + (posY - d4) * d6 + rand.nextDouble() * height;
        double d9 = d5 + (posZ - d5) * d6 + (rand.nextDouble() - 0.5D) * width * 2.0D;
        world.spawnParticle(EnumParticleTypes.PORTAL, d7, d8, d9, f, f1, f2);
    }
    // Teleport sound at both the departure point and on the entity itself.
    world.playSound(d3, d4, d5, SoundEvents.ENTITY_ENDERMEN_TELEPORT, SoundCategory.NEUTRAL, 1.0F, 1.0F, false);
    playSound(SoundEvents.ENTITY_ENDERMEN_TELEPORT, 1.0F, 1.0F);
    return true;
}
@Override
protected SoundEvent getAmbientSound() {
    // Use the angry enderman voice while screaming, the idle one otherwise.
    if (isScreaming()) {
        return SoundEvents.ENTITY_ENDERMEN_SCREAM;
    }
    return SoundEvents.ENTITY_ENDERMEN_AMBIENT;
}
@Override
protected SoundEvent getHurtSound(DamageSource source) {
    // Reuse the vanilla enderman hurt sound regardless of damage source.
    return SoundEvents.ENTITY_ENDERMEN_HURT;
}
@Override
protected SoundEvent getDeathSound() {
    // Reuse the vanilla enderman death sound.
    return SoundEvents.ENTITY_ENDERMEN_DEATH;
}
@Override
protected Item getDropItem() {
    // Drops ender pearls like a regular enderman; see dropFewItems for counts.
    return Items.ENDER_PEARL;
}
@Override
@Nullable
protected ResourceLocation getLootTable() {
    // Deliberately no loot table: drops are produced in dropFewItems()
    // via getDropItem() instead.
    return null; // use getDropItem() instead
}
/**
 * Drops 0..(1 + looting) pearls; each dropped pearl additionally has a 50%
 * chance of being accompanied by an ender fragment.
 */
@Override
protected void dropFewItems(boolean hitByPlayer, int looting) {
    Item drop = getDropItem();
    if (drop == null) {
        return;
    }
    // Looting raises the upper bound on the number of drops.
    int dropCount = rand.nextInt(2 + looting);
    for (int n = 0; n < dropCount; ++n) {
        if (rand.nextFloat() <= 0.5) {
            dropItem(EnderZoo.itemEnderFragment, 1);
        }
        dropItem(drop, 1);
    }
}
/**
 * Called when the entity is attacked.
 *
 * Indirect damage (arrows and the like) is dodged by teleporting away;
 * surviving a direct player attack has a 1-in-3 chance of a retaliatory
 * teleport that also aggros nearby enderminies.
 */
@Override
public boolean attackEntityFrom(DamageSource damageSource, float p_70097_2_) {
    if(isEntityInvulnerable(damageSource)) {
        return false;
    }
    // Any hit makes the enderminy scream (synced via the data manager).
    setScreaming(true);
    if(damageSource instanceof EntityDamageSourceIndirect) {
        // Projectile/indirect damage: try up to 64 random teleports and take
        // no damage when one succeeds.
        isAggressive = false;
        for (int i = 0; i < 64; ++i) {
            if(teleportRandomly()) {
                return true;
            }
        }
        return super.attackEntityFrom(damageSource, p_70097_2_);
    }
    boolean res = super.attackEntityFrom(damageSource, p_70097_2_);
    if(damageSource instanceof EntityDamageSource && damageSource.getTrueSource() instanceof EntityPlayer &&
        getHealth() > 0
        //&& !ItemDarkSteelSword.isEquippedAndPowered((EntityPlayer) damageSource.getEntity(), 1)) {
    ) {
        // Survived a direct player hit: occasionally blink away, target the
        // attacker, and rally the group.
        isAggressive = true;
        if(rand.nextInt(3) == 0) {
            for (int i = 0; i < 64; ++i) {
                if(teleportRandomly(16)) {
                    setAttackTarget((EntityPlayer) damageSource.getTrueSource());
                    doGroupArgo();
                    return true;
                }
            }
        }
    }
    if(res) {
        doGroupArgo();
    }
    return res;
}
/**
 * Group aggression: makes idle enderminies within 16 blocks join the attack
 * on our current player target. No-op unless group aggro is enabled and the
 * current target is a player.
 */
private void doGroupArgo() {
    if (!groupAgroEnabled || !(getAttackTarget() instanceof EntityPlayer)) {
        return;
    }
    final int range = 16;
    AxisAlignedBB searchBox = new AxisAlignedBB(
        posX - range, posY - range, posZ - range,
        posX + range, posY + range, posZ + range);
    List<EntityEnderminy> neighbours = world.getEntitiesWithinAABB(EntityEnderminy.class, searchBox);
    if (neighbours == null) {
        return;
    }
    for (EntityEnderminy neighbour : neighbours) {
        // Only recruit neighbours that are not already fighting something.
        if (neighbour.getAttackTarget() == null) { //&& neighbour.canEntityBeSeen(this)
            neighbour.setAttackTarget(getAttackTarget());
        }
    }
}
/** @return the screaming flag, read from the synced entity data manager. */
public boolean isScreaming() {
    return dataManager.get(SCREAMING_INDEX);
}
/** Stores the screaming flag in the synced entity data manager. */
public void setScreaming(boolean p_70819_1_) {
    dataManager.set(SCREAMING_INDEX, Boolean.valueOf(p_70819_1_));
}
// private final class ClosestEntityComparator implements Comparator<EntityCreeper> {
//
// Vec3 pos = new Vec3(0, 0, 0);
//
// @Override
// public int compare(EntityCreeper o1, EntityCreeper o2) {
// pos = new Vec3(posX, posY, posZ);
// double d1 = distanceSquared(o1.posX, o1.posY, o1.posZ, pos);
// double d2 = distanceSquared(o2.posX, o2.posY, o2.posZ, pos);
// return Double.compare(d1, d2);
// }
// }
//
// public double distanceSquared(double x, double y, double z, Vec3 v2) {
// double dx, dy, dz;
// dx = x - v2.xCoord;
// dy = y - v2.yCoord;
// dz = z - v2.zCoord;
// return (dx * dx + dy * dy + dz * dz);
// }
/**
 * Targeting AI: picks the nearest player, stares for a few ticks, then
 * engages; while fighting it teleports away when the target is too close and
 * towards the target when it is too far.
 */
class AIFindPlayer extends EntityAINearestAttackableTarget<EntityPlayer> {
    // Player being stared at before the attack actually starts.
    private EntityPlayer targetPlayer;
    // Ticks remaining in the stare phase.
    private int stareTimer;
    // Tick counter used to pace teleports towards a distant target.
    private int teleportDelay;
    private EntityEnderminy enderminy = EntityEnderminy.this;

    public AIFindPlayer() {
        super(EntityEnderminy.this, EntityPlayer.class, true);
    }

    /**
     * Returns whether the EntityAIBase should begin execution.
     */
    public boolean shouldExecute() {
        double d0 = getTargetDistance();
        // Horizontal search range follows the follow-range attribute; the
        // vertical expansion is a fixed 4 blocks.
        List<EntityPlayer> list = taskOwner.world.getEntitiesWithinAABB(EntityPlayer.class, taskOwner.getEntityBoundingBox().expand(d0, 4.0D, d0), targetEntitySelector);
        // Sort by distance so index 0 is the closest candidate.
        Collections.sort(list, this.sorter);
        if(list.isEmpty()) {
            return false;
        } else {
            targetPlayer = (EntityPlayer) list.get(0);
            return true;
        }
    }

    /**
     * Execute a one shot task or start executing a continuous task
     */
    public void startExecuting() {
        // Stare for 5 ticks before committing to the attack.
        stareTimer = 5;
        teleportDelay = 0;
    }

    /**
     * Resets the task
     */
    public void resetTask() {
        targetPlayer = null;
        enderminy.setScreaming(false);
        // Remove the combat speed boost applied when the attack began.
        IAttributeInstance iattributeinstance = enderminy.getEntityAttribute(SharedMonsterAttributes.MOVEMENT_SPEED);
        iattributeinstance.removeModifier(EntityEnderminy.attackingSpeedBoostModifier);
        super.resetTask();
    }

    /**
     * Returns whether an in-progress EntityAIBase should continue executing
     */
    @Override
    public boolean shouldContinueExecuting() {
        if(targetPlayer != null) {
            // Still in the stare phase: continue only while the stare holds.
            if(!enderminy.shouldAttackPlayer(targetPlayer)) {
                return false;
            } else {
                enderminy.isAggressive = true;
                enderminy.faceEntity(targetPlayer, 10.0F, 10.0F);
                return true;
            }
        } else {
            return super.shouldContinueExecuting();
        }
    }

    /**
     * Updates the task
     */
    public void updateTask() {
        if(targetPlayer != null) {
            if(--stareTimer <= 0) {
                // Stare finished: promote the stared-at player to the real
                // target, scream, and apply the attack speed boost.
                targetEntity = targetPlayer;
                targetPlayer = null;
                super.startExecuting();
                enderminy.playSound(SoundEvents.ENTITY_ENDERMEN_STARE, 1.0F, 1.0F);
                enderminy.setScreaming(true);
                IAttributeInstance iattributeinstance = enderminy.getEntityAttribute(SharedMonsterAttributes.MOVEMENT_SPEED);
                iattributeinstance.applyModifier(EntityEnderminy.attackingSpeedBoostModifier);
            }
        } else {
            if(targetEntity != null) {
                if(targetEntity instanceof EntityPlayer && enderminy.shouldAttackPlayer((EntityPlayer) this.targetEntity)) {
                    // Target is staring back: blink away when it is too close.
                    if(targetEntity.getDistanceSqToEntity(enderminy) < 16.0D) {
                        enderminy.teleportRandomly();
                    }
                    teleportDelay = 0;
                } else if(targetEntity.getDistanceSqToEntity(enderminy) > 256.0D && this.teleportDelay++ >= 30 && enderminy.teleportToEntity(targetEntity)) {
                    // Target more than 16 blocks away for ~30 ticks: jump to it.
                    teleportDelay = 0;
                }
            }
            super.updateTask();
        }
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.console.demo.service;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.cache.CacheAtomicityMode;
import org.apache.ignite.cache.QueryEntity;
import org.apache.ignite.cache.QueryIndex;
import org.apache.ignite.cache.QueryIndexType;
import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction;
import org.apache.ignite.cache.query.annotations.QuerySqlFunction;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.console.demo.AgentDemoUtils;
import org.apache.ignite.console.demo.model.Car;
import org.apache.ignite.console.demo.model.Country;
import org.apache.ignite.console.demo.model.Department;
import org.apache.ignite.console.demo.model.Employee;
import org.apache.ignite.console.demo.model.Parking;
import org.apache.ignite.resources.IgniteInstanceResource;
import org.apache.ignite.services.Service;
import org.apache.ignite.services.ServiceContext;
import org.apache.ignite.transactions.Transaction;
import static org.apache.ignite.transactions.TransactionConcurrency.PESSIMISTIC;
import static org.apache.ignite.transactions.TransactionIsolation.REPEATABLE_READ;
/**
* Demo service. Create and populate caches. Run demo load on caches.
*/
public class DemoCachesLoadService implements Service {
/** Ignite instance. */
@IgniteInstanceResource
private Ignite ignite;
/** Thread pool to execute cache load operations. */
private ScheduledExecutorService cachePool;
/** */
private static final String COUNTRY_CACHE_NAME = "CountryCache";
/** */
private static final String DEPARTMENT_CACHE_NAME = "DepartmentCache";
/** */
private static final String EMPLOYEE_CACHE_NAME = "EmployeeCache";
/** */
private static final String PARKING_CACHE_NAME = "ParkingCache";
/** */
public static final String CAR_CACHE_NAME = "CarCache";
/** */
static final Set<String> DEMO_CACHES = new HashSet<>(Arrays.asList(COUNTRY_CACHE_NAME,
DEPARTMENT_CACHE_NAME, EMPLOYEE_CACHE_NAME, PARKING_CACHE_NAME, CAR_CACHE_NAME));
/** Countries count. */
private static final int CNTR_CNT = 10;
/** Departments count */
private static final int DEP_CNT = 100;
/** Employees count. */
private static final int EMPL_CNT = 1000;
/** Countries count. */
private static final int CAR_CNT = 100;
/** Departments count */
private static final int PARK_CNT = 10;
/** */
private static final Random rnd = new Random();
/** Maximum count read/write key. */
private final int cnt;
/** Time range in milliseconds. */
private final long range;
/**
* @param cnt Maximum count read/write key.
*/
public DemoCachesLoadService(int cnt) {
this.cnt = cnt;
range = new java.util.Date().getTime();
}
/** {@inheritDoc} */
@Override public void cancel(ServiceContext ctx) {
if (cachePool != null)
cachePool.shutdownNow();
}
/** {@inheritDoc} */
@Override public void init(ServiceContext ctx) throws Exception {
ignite.getOrCreateCaches(Arrays.asList(
cacheCountry(), cacheDepartment(), cacheEmployee(), cacheCar(), cacheParking()
));
populateCacheEmployee();
populateCacheCar();
cachePool = AgentDemoUtils.newScheduledThreadPool(2, "demo-sql-load-cache-tasks");
}
/** {@inheritDoc} */
@Override public void execute(ServiceContext ctx) throws Exception {
cachePool.scheduleWithFixedDelay(new Runnable() {
@Override public void run() {
try {
IgniteCache<Integer, Employee> cacheEmployee = ignite.cache(EMPLOYEE_CACHE_NAME);
if (cacheEmployee != null)
try (Transaction tx = ignite.transactions().txStart(PESSIMISTIC, REPEATABLE_READ)) {
for (int i = 0, n = 1; i < cnt; i++, n++) {
Integer id = rnd.nextInt(EMPL_CNT);
Integer depId = rnd.nextInt(DEP_CNT);
double r = rnd.nextDouble();
cacheEmployee.put(id, new Employee(id, depId, depId, "First name employee #" + n,
"Last name employee #" + n, "Email employee #" + n, "Phone number employee #" + n,
new java.sql.Date((long)(r * range)), "Job employee #" + n,
500 + AgentDemoUtils.round(r * 2000, 2)));
if (rnd.nextBoolean())
cacheEmployee.remove(rnd.nextInt(EMPL_CNT));
cacheEmployee.get(rnd.nextInt(EMPL_CNT));
}
if (rnd.nextInt(100) > 20)
tx.commit();
}
}
catch (Throwable e) {
if (!e.getMessage().contains("cache is stopped"))
ignite.log().error("Cache write task execution error", e);
}
}
}, 10, 3, TimeUnit.SECONDS);
cachePool.scheduleWithFixedDelay(new Runnable() {
@Override public void run() {
try {
IgniteCache<Integer, Car> cache = ignite.cache(CAR_CACHE_NAME);
if (cache != null)
for (int i = 0; i < cnt; i++) {
Integer carId = rnd.nextInt(CAR_CNT);
cache.put(carId, new Car(carId, rnd.nextInt(PARK_CNT), "Car #" + (i + 1)));
if (rnd.nextBoolean())
cache.remove(rnd.nextInt(CAR_CNT));
}
}
catch (IllegalStateException ignored) {
// No-op.
}
catch (Throwable e) {
if (!e.getMessage().contains("cache is stopped"))
ignite.log().error("Cache write task execution error", e);
}
}
}, 10, 3, TimeUnit.SECONDS);
}
/**
* Create base cache configuration.
*
* @param name cache name.
* @return Cache configuration with basic properties set.
*/
private static CacheConfiguration cacheConfiguration(String name) {
CacheConfiguration ccfg = new CacheConfiguration<>(name);
ccfg.setAffinity(new RendezvousAffinityFunction(false, 32));
ccfg.setQueryDetailMetricsSize(10);
ccfg.setStatisticsEnabled(true);
ccfg.setSqlFunctionClasses(SQLFunctions.class);
ccfg.setDataRegionName("demo");
return ccfg;
}
/**
* Configure cacheCountry.
*/
private static CacheConfiguration cacheCountry() {
CacheConfiguration ccfg = cacheConfiguration(COUNTRY_CACHE_NAME);
// Configure cacheCountry types.
Collection<QueryEntity> qryEntities = new ArrayList<>();
// COUNTRY.
QueryEntity type = new QueryEntity();
qryEntities.add(type);
type.setKeyType(Integer.class.getName());
type.setValueType(Country.class.getName());
// Query fields for COUNTRY.
LinkedHashMap<String, String> qryFlds = new LinkedHashMap<>();
qryFlds.put("id", "java.lang.Integer");
qryFlds.put("name", "java.lang.String");
qryFlds.put("population", "java.lang.Integer");
type.setFields(qryFlds);
ccfg.setQueryEntities(qryEntities);
return ccfg;
}
/**
* Configure cacheEmployee.
*/
private static CacheConfiguration cacheDepartment() {
CacheConfiguration ccfg = cacheConfiguration(DEPARTMENT_CACHE_NAME);
// Configure cacheDepartment types.
Collection<QueryEntity> qryEntities = new ArrayList<>();
// DEPARTMENT.
QueryEntity type = new QueryEntity();
qryEntities.add(type);
type.setKeyType(Integer.class.getName());
type.setValueType(Department.class.getName());
// Query fields for DEPARTMENT.
LinkedHashMap<String, String> qryFlds = new LinkedHashMap<>();
qryFlds.put("id", "java.lang.Integer");
qryFlds.put("countryId", "java.lang.Integer");
qryFlds.put("name", "java.lang.String");
type.setFields(qryFlds);
// Indexes for DEPARTMENT.
ArrayList<QueryIndex> indexes = new ArrayList<>();
indexes.add(new QueryIndex("countryId", QueryIndexType.SORTED, false, "DEP_COUNTRY"));
type.setIndexes(indexes);
ccfg.setQueryEntities(qryEntities);
return ccfg;
}
/**
* Configure cacheEmployee.
*/
private static CacheConfiguration cacheEmployee() {
CacheConfiguration ccfg = cacheConfiguration(EMPLOYEE_CACHE_NAME);
ccfg.setAtomicityMode(CacheAtomicityMode.TRANSACTIONAL);
ccfg.setBackups(1);
// Configure cacheEmployee types.
Collection<QueryEntity> qryEntities = new ArrayList<>();
// EMPLOYEE.
QueryEntity type = new QueryEntity();
qryEntities.add(type);
type.setKeyType(Integer.class.getName());
type.setValueType(Employee.class.getName());
// Query fields for EMPLOYEE.
LinkedHashMap<String, String> qryFlds = new LinkedHashMap<>();
qryFlds.put("id", "java.lang.Integer");
qryFlds.put("departmentId", "java.lang.Integer");
qryFlds.put("managerId", "java.lang.Integer");
qryFlds.put("firstName", "java.lang.String");
qryFlds.put("lastName", "java.lang.String");
qryFlds.put("email", "java.lang.String");
qryFlds.put("phoneNumber", "java.lang.String");
qryFlds.put("hireDate", "java.sql.Date");
qryFlds.put("job", "java.lang.String");
qryFlds.put("salary", "java.lang.Double");
type.setFields(qryFlds);
// Indexes for EMPLOYEE.
Collection<QueryIndex> indexes = new ArrayList<>();
indexes.add(new QueryIndex("departmentId", QueryIndexType.SORTED, false, "EMP_DEPARTMENT"));
indexes.add(new QueryIndex("managerId", QueryIndexType.SORTED, false, "EMP_MANAGER"));
QueryIndex idx = new QueryIndex();
idx.setName("EMP_NAMES");
idx.setIndexType(QueryIndexType.SORTED);
LinkedHashMap<String, Boolean> indFlds = new LinkedHashMap<>();
indFlds.put("firstName", Boolean.FALSE);
indFlds.put("lastName", Boolean.FALSE);
idx.setFields(indFlds);
indexes.add(idx);
indexes.add(new QueryIndex("salary", QueryIndexType.SORTED, false, "EMP_SALARY"));
type.setIndexes(indexes);
ccfg.setQueryEntities(qryEntities);
return ccfg;
}
/**
* Configure cacheEmployee.
*/
private static CacheConfiguration cacheParking() {
CacheConfiguration ccfg = cacheConfiguration(PARKING_CACHE_NAME);
// Configure cacheParking types.
Collection<QueryEntity> qryEntities = new ArrayList<>();
// PARKING.
QueryEntity type = new QueryEntity();
qryEntities.add(type);
type.setKeyType(Integer.class.getName());
type.setValueType(Parking.class.getName());
// Query fields for PARKING.
LinkedHashMap<String, String> qryFlds = new LinkedHashMap<>();
qryFlds.put("id", "java.lang.Integer");
qryFlds.put("name", "java.lang.String");
qryFlds.put("capacity", "java.lang.Integer");
type.setFields(qryFlds);
ccfg.setQueryEntities(qryEntities);
return ccfg;
}
/**
* Configure cacheEmployee.
*/
private static CacheConfiguration cacheCar() {
CacheConfiguration ccfg = cacheConfiguration(CAR_CACHE_NAME);
// Configure cacheCar types.
Collection<QueryEntity> qryEntities = new ArrayList<>();
// CAR.
QueryEntity type = new QueryEntity();
qryEntities.add(type);
type.setKeyType(Integer.class.getName());
type.setValueType(Car.class.getName());
// Query fields for CAR.
LinkedHashMap<String, String> qryFlds = new LinkedHashMap<>();
qryFlds.put("id", "java.lang.Integer");
qryFlds.put("parkingId", "java.lang.Integer");
qryFlds.put("name", "java.lang.String");
type.setFields(qryFlds);
// Indexes for CAR.
ArrayList<QueryIndex> indexes = new ArrayList<>();
indexes.add(new QueryIndex("parkingId", QueryIndexType.SORTED, false, "CAR_PARKING"));
type.setIndexes(indexes);
ccfg.setQueryEntities(qryEntities);
return ccfg;
}
/** */
private void populateCacheEmployee() {
if (ignite.log().isDebugEnabled())
ignite.log().debug("DEMO: Start employees population with data...");
IgniteCache<Integer, Country> cacheCountry = ignite.cache(COUNTRY_CACHE_NAME);
for (int i = 0, n = 1; i < CNTR_CNT; i++, n++)
cacheCountry.put(i, new Country(i, "Country #" + n, n * 10000000));
IgniteCache<Integer, Department> cacheDepartment = ignite.cache(DEPARTMENT_CACHE_NAME);
IgniteCache<Integer, Employee> cacheEmployee = ignite.cache(EMPLOYEE_CACHE_NAME);
for (int i = 0, n = 1; i < DEP_CNT; i++, n++) {
cacheDepartment.put(i, new Department(n, rnd.nextInt(CNTR_CNT), "Department #" + n));
double r = rnd.nextDouble();
cacheEmployee.put(i, new Employee(i, rnd.nextInt(DEP_CNT), null, "First name manager #" + n,
"Last name manager #" + n, "Email manager #" + n, "Phone number manager #" + n,
new java.sql.Date((long)(r * range)), "Job manager #" + n, 1000 + AgentDemoUtils.round(r * 4000, 2)));
}
for (int i = 0, n = 1; i < EMPL_CNT; i++, n++) {
Integer depId = rnd.nextInt(DEP_CNT);
double r = rnd.nextDouble();
cacheEmployee.put(i, new Employee(i, depId, depId, "First name employee #" + n,
"Last name employee #" + n, "Email employee #" + n, "Phone number employee #" + n,
new java.sql.Date((long)(r * range)), "Job employee #" + n, 500 + AgentDemoUtils.round(r * 2000, 2)));
}
if (ignite.log().isDebugEnabled())
ignite.log().debug("DEMO: Finished employees population.");
}
/** */
private void populateCacheCar() {
if (ignite.log().isDebugEnabled())
ignite.log().debug("DEMO: Start cars population...");
IgniteCache<Integer, Parking> cacheParking = ignite.cache(PARKING_CACHE_NAME);
for (int i = 0, n = 1; i < PARK_CNT; i++, n++)
cacheParking.put(i, new Parking(i, "Parking #" + n, n * 10));
IgniteCache<Integer, Car> cacheCar = ignite.cache(CAR_CACHE_NAME);
for (int i = 0, n = 1; i < CAR_CNT; i++, n++)
cacheCar.put(i, new Car(i, rnd.nextInt(PARK_CNT), "Car #" + n));
if (ignite.log().isDebugEnabled())
ignite.log().debug("DEMO: Finished cars population.");
}
/**
* Utility class with custom SQL functions.
*/
public static class SQLFunctions {
/**
* Sleep function to simulate long running queries.
*
* @param x Time to sleep.
* @return Return specified argument.
*/
@QuerySqlFunction
public static long sleep(long x) {
if (x >= 0)
try {
Thread.sleep(x);
}
catch (InterruptedException ignored) {
// No-op.
}
return x;
}
}
}
| |
package communitycommons;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URL;
import java.net.URLConnection;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.commons.fileupload.util.LimitedInputStream;
import org.apache.commons.io.IOUtils;
import org.apache.pdfbox.Overlay;
import org.apache.pdfbox.exceptions.COSVisitorException;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.PDPage;
import org.apache.pdfbox.util.PDFMergerUtility;
import system.proxies.FileDocument;
import system.proxies.Language;
import com.google.common.collect.ImmutableMap;
import com.mendix.core.Core;
import com.mendix.core.CoreException;
import com.mendix.core.conf.RuntimeVersion;
import com.mendix.core.objectmanagement.member.MendixBoolean;
import com.mendix.integration.WebserviceException;
import com.mendix.logging.ILogNode;
import com.mendix.systemwideinterfaces.core.IContext;
import com.mendix.systemwideinterfaces.core.IMendixObject;
import com.mendix.systemwideinterfaces.core.ISession;
import com.mendix.systemwideinterfaces.core.IUser;
public class Misc
{
/**
 * Callback for {@link Misc#iterateMap}: override {@link #hit} to process each
 * entry; call {@link #remove()} to drop the current entry or {@link #exit()}
 * to stop iterating. A single instance must not be used concurrently, which
 * is enforced by the synchronized runOn and the start flag.
 */
public abstract static class IterateCallback<T1, T2>
{
    boolean start = false;
    boolean stop = false;
    private Iterator<Map.Entry<T1, T2>> mapIter;

    /** Invoked once per map entry. */
    public abstract void hit(T1 key, T2 value) throws Exception;

    /** Stops the iteration after the current entry. */
    public void exit() {
        stop = true;
    }

    /** Removes the current entry from the underlying map. */
    public void remove() {
        mapIter.remove();
    }

    synchronized void runOn(Map<T1, T2> map) throws Exception {
        if (start)
            throw new IllegalMonitorStateException();
        start = true;
        try {
            // Iterate over entries instead of keySet()+get(): avoids a second
            // lookup per entry and is safe for access-ordered LinkedHashMaps,
            // where get() during keySet iteration would throw a
            // ConcurrentModificationException.
            this.mapIter = map.entrySet().iterator();
            while ( mapIter.hasNext() )
            {
                Map.Entry<T1, T2> entry = mapIter.next();
                hit(entry.getKey(), entry.getValue());
                if (stop)
                    break;
            }
        }
        finally {
            //reset state to allow reuse, even when exceptions occur
            mapIter = null;
            stop = false;
            start = false;
        }
    }
}
/**
 * Iterates over a map with support for removing entries during iteration,
 * which a plain for-each loop does not allow.
 * In the callback use this.remove() to drop the current entry, this.exit() to
 * break out of the loop, or return; to continue with the next entry.
 * @throws Exception
 */
public static <A, B> void iterateMap(Map<A, B> map, IterateCallback<A, B> callback) throws Exception {
    //http://marxsoftware.blogspot.com/2008/04/removing-entry-from-java-map-during.html
    if (map == null)
        throw new IllegalArgumentException();
    if (callback == null)
        throw new IllegalArgumentException();
    callback.runOn(map);
}
/** @return the root URL of this Mendix application, as configured in the runtime. */
public static String getApplicationURL()
{
    return Core.getConfiguration().getApplicationRootUrl();
}
/** @return the version string of the running Mendix runtime. */
public static String getRuntimeVersion()
{
    // The runtime exposes its version through a singleton.
    return RuntimeVersion.getInstance().toString();
}
/** Throws a UserThrownException with the given message (microflow helper). */
public static void throwException(String message) throws UserThrownException
{
    throw new UserThrownException(message);
}
/** Throws a client-fault WebserviceException with the given fault string (microflow helper). */
public static void throwWebserviceException(String faultstring) throws WebserviceException {
    throw new WebserviceException(WebserviceException.clientFaultCode, faultstring);
}
/**
 * Retrieves the given URL and returns the response body as a UTF-8 string.
 * When postdata is non-null it is sent as a form-encoded request body.
 *
 * Fixes over the previous version: the response is decoded explicitly as
 * UTF-8 instead of the platform default charset, and both streams are closed
 * even when an exception occurs.
 *
 * @param url      the URL to fetch.
 * @param postdata optional body to send (UTF-8, form encoded); may be null.
 * @return the response body.
 * @throws Exception on connection or I/O failure.
 */
public static String retrieveURL(String url, String postdata) throws Exception
{
    // doOutput is always enabled, matching the previous behaviour (for HTTP
    // this forces a POST even when there is no post data).
    URLConnection conn = new URL(url).openConnection();
    conn.setDoInput(true);
    conn.setDoOutput(true);
    conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");

    // Write the (optional) body; closing completes the request.
    try (OutputStream out = conn.getOutputStream()) {
        if (postdata != null)
            out.write(postdata.getBytes(StandardCharsets.UTF_8));
    }

    // Read the full response and decode it as UTF-8.
    try (InputStream in = conn.getInputStream()) {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        byte[] chunk = new byte[4096];
        int read;
        while ((read = in.read(chunk)) != -1)
            buffer.write(chunk, 0, read);
        return buffer.toString("UTF-8");
    }
}
/**
 * Copies the file contents of one FileDocument into another.
 *
 * Fixes over the previous version: on a copy failure it printed the stack
 * trace and still returned true (reporting success); now the exception
 * propagates (the method already declares throws Exception), and the source
 * stream is closed via try-with-resources.
 *
 * @param context the Mendix context to use.
 * @param toClone source FileDocument object.
 * @param target  target FileDocument object.
 * @return false when the source has no contents, true when the copy succeeded.
 * @throws Exception when no source/target is provided or storing fails.
 */
public static Boolean duplicateFileDocument(IContext context, IMendixObject toClone, IMendixObject target) throws Exception
{
    if (toClone == null || target == null)
        throw new Exception("No file to clone or to clone into provided");

    MendixBoolean hasContents = (MendixBoolean) toClone.getMember(context, FileDocument.MemberNames.HasContents.toString());
    if (!hasContents.getValue(context))
        return false;

    try (InputStream inputStream = Core.getFileDocumentContent(context, toClone)) {
        Core.storeFileDocumentContent(context, target,
            (String) toClone.getValue(context, system.proxies.FileDocument.MemberNames.Name.toString()),
            inputStream);
        return true;
    }
}
/**
 * Copies the image contents of one Image document into another, regenerating
 * the thumbnail at the requested size.
 *
 * Fixes over the previous version: on failure it printed the stack trace and
 * still returned true (reporting success); now the exception propagates (the
 * method already declares throws Exception), and the source stream is closed
 * via try-with-resources.
 *
 * @param context     the Mendix context to use.
 * @param toClone     source image object.
 * @param target      target image object.
 * @param thumbWidth  thumbnail width in pixels.
 * @param thumbHeight thumbnail height in pixels.
 * @return false when the source has no contents, true when the copy succeeded.
 * @throws Exception when no source/target is provided or storing fails.
 */
public static Boolean duplicateImage(IContext context, IMendixObject toClone, IMendixObject target, int thumbWidth, int thumbHeight) throws Exception
{
    if (toClone == null || target == null)
        throw new Exception("No file to clone or to clone into provided");

    MendixBoolean hasContents = (MendixBoolean) toClone.getMember(context, FileDocument.MemberNames.HasContents.toString());
    if (!hasContents.getValue(context))
        return false;

    try (InputStream inputStream = Core.getImage(context, toClone, false)) {
        Core.storeImageDocumentContent(context, target, inputStream, thumbWidth, thumbHeight);
        return true;
    }
}
/**
 * Downloads the contents of a URL into a FileDocument, refusing downloads
 * larger than 200 MB.
 *
 * @param context    the Mendix context to use.
 * @param url        the URL to download.
 * @param __document the FileDocument to store the download into.
 * @param filename   the file name to record on the document.
 * @return true on success (always; failures throw).
 * @throws Exception when arguments are missing, the connection fails, or the
 *         remote file exceeds the size limit.
 */
public static Boolean storeURLToFileDocument(IContext context, String url, IMendixObject __document, String filename) throws Exception
{
    if (__document == null || url == null || filename == null)
        throw new Exception("No document, filename or URL provided");

    final int MAX_REMOTE_FILESIZE = 1024 * 1024 * 200; //maxium of 200 MB
    URL imageUrl = new URL(url);
    URLConnection connection = imageUrl.openConnection();
    //we connect in 20 seconds or not at all
    connection.setConnectTimeout(20000);
    connection.setReadTimeout(20000);
    connection.connect();

    //check on forehand the size of the remote file, we don't want to kill the server by providing a 3 terabyte image.
    if (connection.getContentLength() > MAX_REMOTE_FILESIZE) { //maximum of 200 mb
        throw new IllegalArgumentException("MxID: importing image, wrong filesize of remote url: " + connection.getContentLength()+ " (max: " + String.valueOf(MAX_REMOTE_FILESIZE)+ ")");
    } else if (connection.getContentLength() < 0) {
        // connection has not specified content length, wrap stream in a LimitedInputStream
        // so the limit is still enforced while reading.
        LimitedInputStream limitStream = new LimitedInputStream(connection.getInputStream(), MAX_REMOTE_FILESIZE) {
            @Override
            protected void raiseError(long pSizeMax, long pCount) throws IOException {
                throw new IllegalArgumentException("MxID: importing image, wrong filesize of remote url (max: " + String.valueOf(MAX_REMOTE_FILESIZE)+ ")");
            }
        };
        Core.storeFileDocumentContent(context, __document, filename, limitStream);
    } else {
        // connection has specified correct content length, read the stream normally
        //NB; stream is closed by the core
        Core.storeFileDocumentContent(context, __document, filename, connection.getInputStream());
    }

    return true;
}
/**
 * Determines the size in bytes of a FileDocument's contents by streaming
 * through them.
 *
 * Fixes over the previous version: the caught IOException is now passed to
 * the logger (the stack trace was being discarded), the log message's quote
 * is properly closed, and the stream is closed via try-with-resources.
 *
 * @param context  the Mendix context; when null, 0 is returned.
 * @param document the FileDocument whose contents to measure.
 * @return the content size in bytes, or 0 when unavailable.
 */
public static Long getFileSize(IContext context, IMendixObject document)
{
    final int BUFFER_SIZE = 4096;
    long size = 0;

    if (context != null) {
        byte[] buffer = new byte[BUFFER_SIZE];
        try (InputStream inputStream = Core.getFileDocumentContent(context, document)) {
            int i;
            while ((i = inputStream.read(buffer)) != -1)
                size += i;
        } catch (IOException e) {
            Core.getLogger("FileUtil").error(
                "Couldn't determine filesize of FileDocument '" + document.getId() + "'", e);
        }
    }

    return size;
}
/**
 * Blocks the current thread for the given number of milliseconds
 * (microflow helper around Thread.sleep).
 */
public static void delay(long delaytime) throws InterruptedException
{
    Thread.sleep(delaytime);
}
/**
 * Returns a context that runs as the given user, reusing the current context
 * when it already belongs to that user; otherwise a (possibly fresh) session
 * for the user is obtained and a new context is created from it.
 *
 * @param sudoContext when true, the returned context has sudo rights.
 */
public static IContext getContextFor(IContext context, String username, boolean sudoContext) {
    if (username == null || username.isEmpty()) {
        throw new RuntimeException("Assertion: No username provided");
    }
    // Session does not have a user when it's a scheduled event, so guard
    // against null before comparing names.
    IUser currentUser = context.getSession().getUser();
    if (currentUser != null && username.equals(currentUser.getName())) {
        // Already running as the requested user.
        return context;
    }
    ISession session = getSessionFor(context, username);
    IContext userContext = session.createContext();
    return sudoContext ? userContext.getSudoContext() : userContext;
}
/**
 * Returns an active session for the given user, creating a fresh one inside
 * its own (sudo) transaction when the user has no active session.
 *
 * @throws RuntimeException when session initialization fails; the transaction
 *         is rolled back in that case.
 */
private static ISession getSessionFor(IContext context, String username) {
    ISession session = Core.getActiveSession(username);
    if (session == null) {
        // No active session: create one under a sudo context in a transaction
        // so any persisted session state is committed or rolled back atomically.
        IContext newContext = context.getSession().createContext().getSudoContext();
        newContext.startTransaction();
        try {
            session = initializeSessionForUser(newContext, username);
        } catch (CoreException e) {
            newContext.rollbackTransAction();
            throw new RuntimeException("Failed to initialize session for user: " + username + ": " + e.getMessage(), e);
        } finally {
            newContext.endTransaction();
        }
    }
    return session;
}
/**
 * Looks up the named user and initializes a new session for it.
 *
 * @throws RuntimeException when the user does not exist.
 */
private static ISession initializeSessionForUser(IContext context, String username) throws CoreException {
    IUser user = Core.getUser(context, username);
    if (user == null)
        throw new RuntimeException("Assertion: user with username '" + username + "' does not exist");
    // Second argument is the current session to replace; there is none here.
    return Core.initializeSession(user, null);
}
public static Object executeMicroflowAsUser(IContext context,
String microflowName, String username, Boolean sudoContext, Object... args) throws Exception
{
if (context == null)
throw new Exception("Assertion: No context provided");
if (microflowName == null || microflowName.isEmpty())
throw new Exception("Assertion: No context provided");
if (!Core.getMicroflowNames().contains(microflowName))
throw new Exception("Assertion: microflow not found: " + microflowName);
if (args.length % 2 != 0)
throw new Exception("Assertion: odd number of dynamic arguments provided, please name every argument: " + args.length);
Map<String, Object> params = new LinkedHashMap<String, Object>();
for(int i = 0; i < args.length; i+= 2) if (args[i] != null)
params.put(args[i].toString(), args[i + 1]);
IContext c = getContextFor(context, username, sudoContext);
return Core.execute(c, microflowName, params);
}
//MWE: based on: http://download.oracle.com/javase/6/docs/api/java/util/concurrent/Executor.html
/**
 * Serial executor: runs submitted tasks one at a time on a single
 * low-priority background thread. Used to queue microflow executions.
 */
static class MFSerialExecutor {
    private static final ILogNode LOG = Core.getLogger("communitycommons");

    // Eagerly created singleton.
    private static MFSerialExecutor _instance = new MFSerialExecutor();

    // Monotonically increasing task number; the difference with the current
    // value is reported as the queue backlog when a task completes.
    private final AtomicLong tasknr = new AtomicLong();
    private final ExecutorService executor;

    public static MFSerialExecutor instance() {
        return _instance;
    }

    private MFSerialExecutor() {
        executor = Executors.newSingleThreadExecutor(new ThreadFactory() {
            //Default thread factory takes care of setting the proper thread context
            private final ThreadFactory defaultFactory = Executors.defaultThreadFactory();

            @Override
            public Thread newThread(Runnable runnable) {
                Thread t = defaultFactory.newThread(runnable);
                // Background work should not compete with request threads.
                t.setPriority(Thread.MIN_PRIORITY);
                t.setName("CommunityCommons background pool executor thread");
                return t;
            }
        });
    }

    /**
     * Queues a task for serial execution and returns immediately.
     *
     * @throws NullPointerException when command is null.
     */
    public void execute(final Runnable command)
    {
        if (command == null) {
            throw new NullPointerException("command");
        }
        final long currenttasknr = tasknr.incrementAndGet();
        LOG.info("[RunMicroflowAsyncInQueue] Scheduling task #" + currenttasknr);
        executor.submit(new Runnable() {
            @Override
            public void run() {
                LOG.info("[RunMicroflowAsyncInQueue] Running task #" + currenttasknr);
                try {
                    command.run();
                } catch(RuntimeException e) {
                    LOG.error("[RunMicroflowAsyncInQueue] Execution of task #" + currenttasknr + " failed: " + e.getMessage(), e);
                    throw e; //single thread executor will continue, even if an exception is thrown.
                }
                LOG.info("[RunMicroflowAsyncInQueue] Completed task #" + currenttasknr + ". Tasks left: " + (tasknr.get() - currenttasknr));
            }
        });
    }
}
/**
 * Schedules the given microflow for asynchronous execution on the shared serial
 * background executor (see MFSerialExecutor); queued microflows run one at a time,
 * in FIFO order.
 *
 * @param microflowName fully qualified name of the microflow to run
 * @return always true: the microflow is only queued here, it has not run yet
 */
public static Boolean runMicroflowAsyncInQueue(final String microflowName)
{
    MFSerialExecutor.instance().execute(new Runnable() {
        @Override
        public void run()
        {
            try
            {
                // NOTE(review): runs in a fresh system context, not the caller's context.
                Future<Object> future = Core.executeAsync(Core.createSystemContext(), microflowName, true, new HashMap<String,Object>()); //MWE: somehow, it only works with system context... well thats OK for now.
                // Block this worker thread until the microflow completes, so tasks stay serialized.
                future.get();
            }
            catch (Exception e)
            {
                throw new RuntimeException("Failed to run Async: "+ microflowName + ": " + e.getMessage(), e);
            }
        }
    });
    return true;
}
/**
 * Schedules the given microflow for asynchronous execution on the shared serial
 * executor, optionally passing a single object parameter. The microflow runs in a
 * fresh system context, not in the caller's context.
 *
 * The parameter object is retained in the caller's session for the duration of the
 * background run (so it stays available across the async boundary) and released
 * again afterwards, even when the microflow fails.
 *
 * @param context the invoking context; only used to obtain the session for retain/release
 * @param microflowName fully qualified name of the microflow to run
 * @param paramObject optional single object argument for the microflow; may be null
 * @return always true: the microflow is only queued here, it has not run yet
 */
public static Boolean runMicroflowInBackground(final IContext context, final String microflowName,
        final IMendixObject paramObject)
{
    final ISession session = context.getSession();
    if (paramObject != null)
        session.retain(paramObject);
    MFSerialExecutor.instance().execute(new Runnable() {
        @Override
        public void run()
        {
            try
            {
                IContext c = Core.createSystemContext();
                // Note: the two branches call different executeAsync overloads
                // (single-object argument vs. named-parameter map).
                if (paramObject != null) {
                    Core.executeAsync(c, microflowName, true, paramObject).get(); //MWE: somehow, it only works with system context... well thats OK for now.
                }
                else
                    Core.executeAsync(c, microflowName, true, new HashMap<String,Object>()).get(); //MWE: somehow, it only works with system context... well thats OK for now.
            }
            catch (Exception e)
            {
                throw new RuntimeException("Failed to run Async: "+ microflowName + ": " + e.getMessage(), e);
            }
            finally {
                // Always undo the retain above, regardless of success or failure.
                if (paramObject != null)
                    session.release(paramObject.getId());
            }
        }
    });
    return true;
}
/**
 * Callback invoked for every object retrieved during a batch run
 * (see executeInBatches / executeInBatchesHelper).
 */
private interface IBatchItemHandler
{
    // Processes a single batch item; any thrown exception aborts the whole batch run.
    void exec(IContext context, IMendixObject obj) throws Exception;
}
/**
 * Shared, mutable progress holder for a batch run.
 *
 * State values: 0 = still running, -1 = failed, 1 = finished successfully.
 */
private static class BatchState {
    // Current run state; see class javadoc for the meaning of the values.
    private int state = 0;
    // Per-item callback applied to every retrieved object.
    private final IBatchItemHandler callback;

    public BatchState(IBatchItemHandler callback) {
        this.callback = callback;
    }

    public void setState(int newState) {
        this.state = newState;
    }

    public int getState() {
        return state;
    }

    /** Hands a single retrieved object to the configured callback. */
    public void handle(IContext context, IMendixObject obj) throws Exception {
        callback.exec(context, obj);
    }
}
/**
 * Executes the given microflow once for every object matching the XPath constraint,
 * processing the objects in batches of batchsize.
 *
 * @param xpath XPath constraint selecting the objects to process
 * @param microflow fully qualified name of the microflow, invoked per object
 * @param batchsize number of objects retrieved and processed per batch
 * @param waitUntilFinished when true, block until the whole run has finished or failed
 * @param asc iterate over the objects by ascending (true) or descending (false) id
 * @return true when scheduled (or, when waiting, finished successfully)
 * @throws CoreException when the initial count query fails
 * @throws InterruptedException when interrupted while waiting for completion
 */
public static Boolean executeMicroflowInBatches(String xpath, final String microflow, int batchsize, boolean waitUntilFinished, boolean asc) throws CoreException, InterruptedException {
    // Fix: the opening quote around the microflow name was never closed in the log message.
    Core.getLogger("communitycommons").info("[ExecuteInBatches] Starting microflow batch '" + microflow + "'...");
    return executeInBatches(xpath, new BatchState(new IBatchItemHandler() {
        @Override
        public void exec(IContext context, IMendixObject obj) throws Exception
        {
            Core.executeAsync(context, microflow, true, obj).get();
        }
    }), batchsize, waitUntilFinished, asc);
}
/**
 * Recommits (Core.commit) every object matching the XPath constraint, processing
 * the objects in batches of batchsize.
 *
 * @param xpath XPath constraint selecting the objects to recommit
 * @param batchsize number of objects committed per batch
 * @param waitUntilFinished when true, block until the whole run has finished or failed
 * @param asc iterate over the objects by ascending (true) or descending (false) id
 * @return true when scheduled (or, when waiting, finished successfully)
 * @throws CoreException when the initial count query fails
 * @throws InterruptedException when interrupted while waiting for completion
 */
public static Boolean recommitInBatches(String xpath, int batchsize,
        boolean waitUntilFinished, Boolean asc) throws CoreException, InterruptedException
{
    Core.getLogger("communitycommons").info("[ExecuteInBatches] Starting recommit batch...");
    return executeInBatches(xpath, new BatchState(new IBatchItemHandler() {
        @Override
        public void exec(IContext context, IMendixObject obj) throws Exception
        {
            Core.commit(context, obj);
        }
    }), batchsize, waitUntilFinished, asc);
}
/**
 * Runs the given batch state's callback over all objects matching the XPath, in
 * batches of batchsize objects. Batches are scheduled on the serial background
 * executor; this method only blocks when waitUntilFinished is true, in which case
 * it polls the shared state every five seconds.
 *
 * @param xpathRaw XPath constraint, with or without the leading '//'
 * @param batchState shared state holding the per-object callback and the run result
 * @param batchsize number of objects retrieved and processed per batch
 * @param waitUntilFinished when true, block until the run has finished or failed
 * @param asc iterate over the objects by ascending (true) or descending (false) id
 * @return true when scheduled (or, when waiting, finished successfully); false on failure
 * @throws CoreException when the initial count query fails
 * @throws InterruptedException when interrupted while waiting for completion
 */
public static Boolean executeInBatches(String xpathRaw, BatchState batchState, int batchsize, boolean waitUntilFinished, boolean asc) throws CoreException, InterruptedException
{
    String xpath = xpathRaw.startsWith("//") ? xpathRaw : "//" + xpathRaw;

    long count = Core.retrieveXPathQueryAggregate(Core.createSystemContext(), "count(" + xpath + ")");
    // Fix: use double arithmetic; float only has ~24 bits of mantissa, so very large
    // counts were rounded before the division. The result is only used for logging.
    int loop = (int) Math.ceil((double) count / batchsize);

    Core.getLogger("communitycommons").info(
            "[ExecuteInBatches] Starting batch on ~ " + count + " objects divided over ~ " + loop + " batches. "
            + (waitUntilFinished ? "Waiting until the batch has finished..." : "")
    );

    executeInBatchesHelper(xpath, batchsize, 0, batchState, count, asc);

    if (waitUntilFinished) {
        // Poll the shared state until the helper marks the run as done (1) or failed (-1).
        while (batchState.getState() == 0) {
            Thread.sleep(5000);
        }
        if (batchState.getState() == 1) {
            Core.getLogger("communitycommons").debug("[ExecuteInBatches] Successfully finished batch");
            return true;
        }
        Core.getLogger("communitycommons").error("[ExecuteInBatches] Failed to finish batch. Please check the application log for more details.");
        return false;
    }
    return true;
}
/**
 * Schedules one batch on the serial executor: retrieves up to batchsize objects
 * whose id lies beyond 'last' (in the configured direction), feeds each to the
 * batch callback, and re-schedules itself for the next batch until no objects
 * remain. Always runs in a fresh system context. On any failure the shared state
 * is set to -1 and no further batches are scheduled.
 */
static void executeInBatchesHelper(final String xpath, final int batchsize, final long last, final BatchState batchState, final long count, final boolean asc) {
    MFSerialExecutor.instance().execute(new Runnable() {
        @Override
        public void run()
        {
            try
            {
                // Brief pause between batches to reduce load on the runtime.
                Thread.sleep(200);
                IContext c = Core.createSystemContext();
                // Page by id (ordered asc/desc) rather than by offset, so objects
                // changed or deleted by the callback do not shift the window.
                List<IMendixObject> objects = Core.retrieveXPathQuery(c, xpath + (last > 0 ? "[id " + (asc ? "> " : "< ") + last + "]" : ""), batchsize, 0, ImmutableMap.of("id", asc ? "asc" : "desc"));

                //no new objects found :)
                if (objects.size() == 0) {
                    Core.getLogger("communitycommons").info("[ExecuteInBatches] Succesfully finished batch on ~" + count + " objects.");
                    batchState.setState(1);
                }
                else {
                    //process objects
                    for(IMendixObject obj: objects)
                        batchState.handle(c, obj);

                    //invoke next batch, continuing after the last id seen in this one
                    executeInBatchesHelper(xpath, batchsize, objects.get(objects.size() - 1).getId().toLong(), batchState, count, asc);
                }
            }
            catch (Exception e)
            {
                batchState.setState(-1);
                throw new RuntimeException("[ExecuteInBatches] Failed to run in batch: " + e.getMessage(), e);
            }
        }
    });
}
/**
 * Null-safe equality test: true when both arguments are null, false when exactly
 * one of them is null, and the result of {@code left.equals(right)} otherwise.
 *
 * Equivalent to {@code java.util.Objects.equals()}, which only became available
 * in Java 7.
 *
 * @param left the first object; may be null
 * @param right the second object; may be null
 * @return whether the two arguments are both null or equal to each other
 */
public static boolean objectsAreEqual(Object left, Object right) {
    if (left == null || right == null) {
        // Both null -> the references are identical -> true; one null -> false.
        return left == right;
    }
    return left.equals(right);
}
/**
 * Looks up the Language entity whose Code matches the runtime's configured
 * default language code.
 *
 * @param context the context used to query the Language entities
 * @return the matching Language object
 * @throws CoreException when the retrieval fails
 */
public static Language getDefaultLanguage(IContext context) throws CoreException {
    String languageCode = Core.getDefaultLanguage().getCode();
    List<Language> languageList = Language.load(context, "[Code = '" + languageCode + "']");
    // No matching entity means the app's Language data is out of sync with the runtime config.
    if (languageList == null || languageList.isEmpty()) {
        throw new RuntimeException("No language found for default language constant value " + languageCode);
    }
    return languageList.get(0);
}
/**
 * Merges the given file documents into a single PDF, in list order, and stores the
 * result as the content of mergedDocument.
 *
 * Note: the input list is cleared after a successful merge (existing behavior that
 * callers may rely on).
 *
 * @param context the context used to read and store file document contents
 * @param documents the source PDF documents, merged in list order
 * @param mergedDocument the file document that receives the merged PDF content
 * @return always true; failures are reported by throwing a RuntimeException
 */
public static boolean mergePDF(IContext context, List<FileDocument> documents, IMendixObject mergedDocument) {
    PDFMergerUtility mergePdf = new PDFMergerUtility();
    // Register every source document's content stream with the merger, in list order.
    // (Idiom fix: replaced the index loop and dead 'int i = 0' with a for-each.)
    for (FileDocument file : documents) {
        InputStream content = Core.getFileDocumentContent(context, file.getMendixObject());
        mergePdf.addSource(content);
    }
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    mergePdf.setDestinationStream(out);
    try {
        mergePdf.mergeDocuments();
    } catch (COSVisitorException e) {
        // Fix: the original message ran the text straight into e.getMessage().
        throw new RuntimeException("Failed to merge documents: " + e.getMessage(), e);
    } catch (IOException e) {
        throw new RuntimeException("Failed to merge documents: " + e.getMessage(), e);
    }
    Core.storeFileDocumentContent(context, mergedDocument, new ByteArrayInputStream(out.toByteArray()));
    out.reset();
    documents.clear();
    return true;
}
/**
 * Overlay a generated PDF document with another PDF (containing the company stationary for example).
 *
 * The overlay is first adjusted to have exactly as many pages as the generated
 * document: its last page is duplicated when it is too short, and trailing pages
 * are removed when it is too long. The overlaid result replaces the content of
 * the generated document.
 *
 * @param context the context used to read and store file document contents
 * @param generatedDocumentMendixObject The document to overlay
 * @param overlayMendixObject The document containing the overlay
 * @return always true
 * @throws IOException when loading, overlaying or saving the PDFs fails
 * @throws COSVisitorException when saving the resulting PDF fails
 */
public static boolean overlayPdf(IContext context, IMendixObject generatedDocumentMendixObject, IMendixObject overlayMendixObject) throws IOException, COSVisitorException {
    ILogNode logger = Core.getLogger("OverlayPdf");
    logger.trace("Overlay PDF start, retrieve overlay PDF");
    PDDocument overlayDoc = PDDocument.load(Core.getFileDocumentContent(context, overlayMendixObject));
    int overlayPageCount = overlayDoc.getNumberOfPages();
    PDPage lastOverlayPage = (PDPage) overlayDoc.getDocumentCatalog().getAllPages().get(overlayPageCount - 1);

    logger.trace("Retrieve generated document");
    PDDocument offerteDoc = PDDocument.load(Core.getFileDocumentContent(context, generatedDocumentMendixObject));
    int pageCount = offerteDoc.getNumberOfPages();

    if (logger.isTraceEnabled()) {
        logger.trace("Number of pages in overlay: " + overlayPageCount + ", in generated document: " + pageCount);
    }

    if (pageCount > overlayPageCount) {
        logger.trace("Duplicate last overlay page to match number of pages");
        for (int i = overlayPageCount; i < pageCount; i++) {
            overlayDoc.importPage(lastOverlayPage);
        }
    } else if (overlayPageCount > pageCount) {
        logger.trace("Delete unnecessary pages from the overlay to match number of pages");
        // Fix: removePage shifts all subsequent pages down one index, so the original
        // loop over increasing indices (pageCount..overlayPageCount-1) skipped every
        // other page and could index past the end. Removing at the fixed index
        // pageCount until the size matches deletes exactly the trailing pages.
        while (overlayDoc.getNumberOfPages() > pageCount) {
            overlayDoc.removePage(pageCount);
        }
    }

    logger.trace("Perform overlay");
    Overlay overlay = new Overlay();
    overlay.overlay(offerteDoc, overlayDoc);

    logger.trace("Save result in output stream");
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    overlayDoc.save(baos);

    logger.trace("Duplicate result in input stream");
    InputStream overlayedContent = new ByteArrayInputStream(baos.toByteArray());

    logger.trace("Store result in original document");
    Core.storeFileDocumentContent(context, generatedDocumentMendixObject, overlayedContent);

    logger.trace("Close PDFs");
    overlayDoc.close();
    offerteDoc.close();

    logger.trace("Overlay PDF end");
    return true;
}
}
| |
/*
* Copyright 2014 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License, version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package io.netty.handler.codec.http2;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.http2.StreamByteDistributor.Writer;
import io.netty.util.BooleanSupplier;
import io.netty.util.internal.UnstableApi;
import io.netty.util.internal.logging.InternalLogger;
import io.netty.util.internal.logging.InternalLoggerFactory;
import java.util.ArrayDeque;
import java.util.Deque;
import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_WINDOW_SIZE;
import static io.netty.handler.codec.http2.Http2Error.FLOW_CONTROL_ERROR;
import static io.netty.handler.codec.http2.Http2Error.INTERNAL_ERROR;
import static io.netty.handler.codec.http2.Http2Exception.streamError;
import static io.netty.handler.codec.http2.Http2Stream.State.HALF_CLOSED_LOCAL;
import static io.netty.util.internal.ObjectUtil.checkNotNull;
import static java.lang.Math.max;
import static java.lang.Math.min;
/**
 * Basic implementation of {@link Http2RemoteFlowController}.
 * <p>
 * This class is <strong>NOT</strong> thread safe. The assumption is all methods must be invoked from a single thread.
 * Typically this thread is the event loop thread for the {@link ChannelHandlerContext} managed by this class.
 */
@UnstableApi
public class DefaultHttp2RemoteFlowController implements Http2RemoteFlowController {
    private static final InternalLogger logger =
            InternalLoggerFactory.getInstance(DefaultHttp2RemoteFlowController.class);
    private static final int MIN_WRITABLE_CHUNK = 32 * 1024;
    private final Http2Connection connection;
    private final Http2Connection.PropertyKey stateKey;
    private final StreamByteDistributor streamByteDistributor;
    private final FlowState connectionState;
    private int initialWindowSize = DEFAULT_WINDOW_SIZE;
    private WritabilityMonitor monitor;
    private ChannelHandlerContext ctx;

    public DefaultHttp2RemoteFlowController(Http2Connection connection) {
        this(connection, (Listener) null);
    }

    public DefaultHttp2RemoteFlowController(Http2Connection connection,
                                            StreamByteDistributor streamByteDistributor) {
        this(connection, streamByteDistributor, null);
    }

    public DefaultHttp2RemoteFlowController(Http2Connection connection, final Listener listener) {
        this(connection, new WeightedFairQueueByteDistributor(connection), listener);
    }

    public DefaultHttp2RemoteFlowController(Http2Connection connection,
                                            StreamByteDistributor streamByteDistributor,
                                            final Listener listener) {
        this.connection = checkNotNull(connection, "connection");
        // Fix: the null-check message previously said "streamWriteDistributor",
        // which does not match the actual parameter name.
        this.streamByteDistributor = checkNotNull(streamByteDistributor, "streamByteDistributor");

        // Add a flow state for the connection.
        stateKey = connection.newKey();
        connectionState = new FlowState(connection.connectionStream());
        connection.connectionStream().setProperty(stateKey, connectionState);

        // Monitor may depend upon connectionState, and so initialize after connectionState
        listener(listener);
        monitor.windowSize(connectionState, initialWindowSize);

        // Register for notification of new streams.
        connection.addListener(new Http2ConnectionAdapter() {
            @Override
            public void onStreamAdded(Http2Stream stream) {
                // If the stream state is not open then the stream is not yet eligible for flow controlled frames and
                // only requires the ReducedFlowState. Otherwise the full amount of memory is required.
                stream.setProperty(stateKey, new FlowState(stream));
            }

            @Override
            public void onStreamActive(Http2Stream stream) {
                // If the object was previously created, but later activated then we have to ensure the proper
                // initialWindowSize is used.
                monitor.windowSize(state(stream), initialWindowSize);
            }

            @Override
            public void onStreamClosed(Http2Stream stream) {
                // Any pending frames can never be written, cancel and
                // write errors for any pending frames.
                state(stream).cancel();
            }

            @Override
            public void onStreamHalfClosed(Http2Stream stream) {
                if (HALF_CLOSED_LOCAL.equals(stream.state())) {
                    /**
                     * When this method is called there should not be any
                     * pending frames left if the API is used correctly. However,
                     * it is possible that an erroneous application can sneak
                     * in a frame even after having already written a frame with the
                     * END_STREAM flag set, as the stream state might not transition
                     * immediately to HALF_CLOSED_LOCAL / CLOSED due to flow control
                     * delaying the write.
                     *
                     * This is to cancel any such illegal writes.
                     */
                    state(stream).cancel();
                }
            }
        });
    }

    /**
     * {@inheritDoc}
     * <p>
     * Any queued {@link FlowControlled} objects will be sent.
     */
    @Override
    public void channelHandlerContext(ChannelHandlerContext ctx) throws Http2Exception {
        this.ctx = checkNotNull(ctx, "ctx");

        // Writing the pending bytes will not check writability change and instead a writability change notification
        // to be provided by an explicit call.
        channelWritabilityChanged();

        // Don't worry about cleaning up queued frames here if ctx is null. It is expected that all streams will be
        // closed and the queue cleanup will occur when the stream state transitions occur.

        // If any frames have been queued up, we should send them now that we have a channel context.
        if (isChannelWritable()) {
            writePendingBytes();
        }
    }

    @Override
    public ChannelHandlerContext channelHandlerContext() {
        return ctx;
    }

    @Override
    public void initialWindowSize(int newWindowSize) throws Http2Exception {
        assert ctx == null || ctx.executor().inEventLoop();
        monitor.initialWindowSize(newWindowSize);
    }

    @Override
    public int initialWindowSize() {
        return initialWindowSize;
    }

    @Override
    public int windowSize(Http2Stream stream) {
        return state(stream).windowSize();
    }

    @Override
    public boolean isWritable(Http2Stream stream) {
        return monitor.isWritable(state(stream));
    }

    @Override
    public void channelWritabilityChanged() throws Http2Exception {
        monitor.channelWritabilityChange();
    }

    private boolean isChannelWritable() {
        return ctx != null && isChannelWritable0();
    }

    private boolean isChannelWritable0() {
        return ctx.channel().isWritable();
    }

    @Override
    public void listener(Listener listener) {
        monitor = listener == null ? new WritabilityMonitor() : new ListenerWritabilityMonitor(listener);
    }

    @Override
    public void incrementWindowSize(Http2Stream stream, int delta) throws Http2Exception {
        assert ctx == null || ctx.executor().inEventLoop();
        monitor.incrementWindowSize(state(stream), delta);
    }

    @Override
    public void addFlowControlled(Http2Stream stream, FlowControlled frame) {
        // The context can be null assuming the frame will be queued and send later when the context is set.
        assert ctx == null || ctx.executor().inEventLoop();
        checkNotNull(frame, "frame");
        try {
            monitor.enqueueFrame(state(stream), frame);
        } catch (Throwable t) {
            frame.error(ctx, t);
        }
    }

    @Override
    public boolean hasFlowControlled(Http2Stream stream) {
        return state(stream).hasFrame();
    }

    private FlowState state(Http2Stream stream) {
        return (FlowState) stream.getProperty(stateKey);
    }

    /**
     * Returns the flow control window for the entire connection.
     */
    private int connectionWindowSize() {
        return connectionState.windowSize();
    }

    private int minUsableChannelBytes() {
        // The current allocation algorithm values "fairness" and doesn't give any consideration to "goodput". It
        // is possible that 1 byte will be allocated to many streams. In an effort to try to make "goodput"
        // reasonable with the current allocation algorithm we have this "cheap" check up front to ensure there is
        // an "adequate" amount of connection window before allocation is attempted. This is not foolproof as if the
        // number of streams is >= this minimal number then we may still have the issue, but the idea is to narrow the
        // circumstances in which this can happen without rewriting the allocation algorithm.
        return max(ctx.channel().config().getWriteBufferLowWaterMark(), MIN_WRITABLE_CHUNK);
    }

    private int maxUsableChannelBytes() {
        // If the channel isWritable, allow at least minUsableChannelBytes.
        int channelWritableBytes = (int) min(Integer.MAX_VALUE, ctx.channel().bytesBeforeUnwritable());
        int useableBytes = channelWritableBytes > 0 ? max(channelWritableBytes, minUsableChannelBytes()) : 0;

        // Clip the usable bytes by the connection window.
        return min(connectionState.windowSize(), useableBytes);
    }

    /**
     * The amount of bytes that can be supported by underlying {@link io.netty.channel.Channel} without
     * queuing "too-much".
     */
    private int writableBytes() {
        return min(connectionWindowSize(), maxUsableChannelBytes());
    }

    @Override
    public void writePendingBytes() throws Http2Exception {
        monitor.writePendingBytes();
    }

    /**
     * The remote flow control state for a single stream.
     */
    private final class FlowState implements StreamByteDistributor.StreamState {
        private final Http2Stream stream;
        private final Deque<FlowControlled> pendingWriteQueue;
        private int window;
        private int pendingBytes;
        private boolean markedWritable;

        /**
         * Set to true while a frame is being written, false otherwise.
         */
        private boolean writing;
        /**
         * Set to true if cancel() was called.
         */
        private boolean cancelled;
        private BooleanSupplier isWritableSupplier = new BooleanSupplier() {
            @Override
            public boolean get() throws Exception {
                return windowSize() - pendingBytes() > 0;
            }
        };

        FlowState(Http2Stream stream) {
            this.stream = stream;
            pendingWriteQueue = new ArrayDeque<FlowControlled>(2);
        }

        /**
         * Determine if the stream associated with this object is writable.
         * @return {@code true} if the stream associated with this object is writable.
         */
        boolean isWritable() {
            try {
                return isWritableSupplier.get();
            } catch (Throwable cause) {
                throw new Error("isWritableSupplier should never throw!", cause);
            }
        }

        /**
         * The stream this state is associated with.
         */
        @Override
        public Http2Stream stream() {
            return stream;
        }

        /**
         * Returns the parameter from the last call to {@link #markedWritability(boolean)}.
         */
        boolean markedWritability() {
            return markedWritable;
        }

        /**
         * Save the state of writability.
         */
        void markedWritability(boolean isWritable) {
            this.markedWritable = isWritable;
        }

        @Override
        public int windowSize() {
            return window;
        }

        /**
         * Reset the window size for this stream.
         */
        void windowSize(int initialWindowSize) {
            window = initialWindowSize;
        }

        /**
         * Write the allocated bytes for this stream.
         * @return the number of bytes written for a stream or {@code -1} if no write occurred.
         */
        int writeAllocatedBytes(int allocated) {
            final int initialAllocated = allocated;
            int writtenBytes;
            // In case an exception is thrown we want to remember it and pass it to cancel(Throwable).
            Throwable cause = null;
            FlowControlled frame;
            try {
                assert !writing;
                writing = true;

                // Write the remainder of frames that we are allowed to
                boolean writeOccurred = false;
                while (!cancelled && (frame = peek()) != null) {
                    int maxBytes = min(allocated, writableWindow());
                    if (maxBytes <= 0 && frame.size() > 0) {
                        // The frame still has data, but the amount of allocated bytes has been exhausted.
                        // Don't write needless empty frames.
                        break;
                    }
                    writeOccurred = true;
                    int initialFrameSize = frame.size();
                    try {
                        frame.write(ctx, max(0, maxBytes));
                        if (frame.size() == 0) {
                            // This frame has been fully written, remove this frame and notify it.
                            // Since we remove this frame first, we're guaranteed that its error
                            // method will not be called when we call cancel.
                            pendingWriteQueue.remove();
                            frame.writeComplete();
                        }
                    } finally {
                        // Decrement allocated by how much was actually written.
                        allocated -= initialFrameSize - frame.size();
                    }
                }

                if (!writeOccurred) {
                    // Either there was no frame, or the amount of allocated bytes has been exhausted.
                    return -1;
                }
            } catch (Throwable t) {
                // Mark the state as cancelled, we'll clear the pending queue via cancel() below.
                cancelled = true;
                cause = t;
            } finally {
                writing = false;
                // Make sure we always decrement the flow control windows
                // by the bytes written.
                writtenBytes = initialAllocated - allocated;

                decrementPendingBytes(writtenBytes, false);
                decrementFlowControlWindow(writtenBytes);

                // If a cancellation occurred while writing, call cancel again to
                // clear and error all of the pending writes.
                if (cancelled) {
                    cancel(cause);
                }
            }
            return writtenBytes;
        }

        /**
         * Increments the flow control window for this stream by the given delta and returns the new value.
         */
        int incrementStreamWindow(int delta) throws Http2Exception {
            if (delta > 0 && Integer.MAX_VALUE - delta < window) {
                throw streamError(stream.id(), FLOW_CONTROL_ERROR,
                        "Window size overflow for stream: %d", stream.id());
            }
            window += delta;

            streamByteDistributor.updateStreamableBytes(this);
            return window;
        }

        /**
         * Returns the maximum writable window (minimum of the stream and connection windows).
         */
        private int writableWindow() {
            return min(window, connectionWindowSize());
        }

        @Override
        public int pendingBytes() {
            return pendingBytes;
        }

        /**
         * Adds the {@code frame} to the pending queue and increments the pending byte count.
         */
        void enqueueFrame(FlowControlled frame) {
            FlowControlled last = pendingWriteQueue.peekLast();
            if (last == null) {
                enqueueFrameWithoutMerge(frame);
                return;
            }

            int lastSize = last.size();
            if (last.merge(ctx, frame)) {
                incrementPendingBytes(last.size() - lastSize, true);
                return;
            }
            enqueueFrameWithoutMerge(frame);
        }

        private void enqueueFrameWithoutMerge(FlowControlled frame) {
            pendingWriteQueue.offer(frame);
            // This must be called after adding to the queue in order so that hasFrame() is
            // updated before updating the stream state.
            incrementPendingBytes(frame.size(), true);
        }

        @Override
        public boolean hasFrame() {
            return !pendingWriteQueue.isEmpty();
        }

        /**
         * Returns the the head of the pending queue, or {@code null} if empty.
         */
        private FlowControlled peek() {
            return pendingWriteQueue.peek();
        }

        /**
         * Any operations that may be pending are cleared and the status of these operations is failed.
         */
        void cancel() {
            cancel(null);
        }

        /**
         * Clears the pending queue and writes errors for each remaining frame.
         * @param cause the {@link Throwable} that caused this method to be invoked.
         */
        private void cancel(Throwable cause) {
            cancelled = true;
            // Ensure that the queue can't be modified while we are writing.
            if (writing) {
                return;
            }

            for (;;) {
                FlowControlled frame = pendingWriteQueue.poll();
                if (frame == null) {
                    break;
                }
                writeError(frame, streamError(stream.id(), INTERNAL_ERROR, cause,
                        "Stream closed before write could take place"));
            }

            streamByteDistributor.updateStreamableBytes(this);

            isWritableSupplier = BooleanSupplier.FALSE_SUPPLIER;
            monitor.stateCancelled(this);
        }

        /**
         * Increments the number of pending bytes for this node and optionally updates the
         * {@link StreamByteDistributor}.
         */
        private void incrementPendingBytes(int numBytes, boolean updateStreamableBytes) {
            pendingBytes += numBytes;
            monitor.incrementPendingBytes(numBytes);
            if (updateStreamableBytes) {
                streamByteDistributor.updateStreamableBytes(this);
            }
        }

        /**
         * If this frame is in the pending queue, decrements the number of pending bytes for the stream.
         */
        private void decrementPendingBytes(int bytes, boolean updateStreamableBytes) {
            incrementPendingBytes(-bytes, updateStreamableBytes);
        }

        /**
         * Decrement the per stream and connection flow control window by {@code bytes}.
         */
        private void decrementFlowControlWindow(int bytes) {
            try {
                int negativeBytes = -bytes;
                connectionState.incrementStreamWindow(negativeBytes);
                incrementStreamWindow(negativeBytes);
            } catch (Http2Exception e) {
                // Should never get here since we're decrementing.
                throw new IllegalStateException("Invalid window state when writing frame: " + e.getMessage(), e);
            }
        }

        /**
         * Discards this {@link FlowControlled}, writing an error. If this frame is in the pending queue,
         * the unwritten bytes are removed from this branch of the priority tree.
         */
        private void writeError(FlowControlled frame, Http2Exception cause) {
            assert ctx != null;
            decrementPendingBytes(frame.size(), true);
            frame.error(ctx, cause);
        }
    }

    /**
     * Abstract class which provides common functionality for writability monitor implementations.
     */
    private class WritabilityMonitor {
        private long totalPendingBytes;

        private final Writer writer = new StreamByteDistributor.Writer() {
            @Override
            public void write(Http2Stream stream, int numBytes) {
                state(stream).writeAllocatedBytes(numBytes);
            }
        };

        /**
         * Called when the writability of the underlying channel changes.
         * @throws Http2Exception If a write occurs and an exception happens in the write operation.
         */
        void channelWritabilityChange() throws Http2Exception { }

        /**
         * Called when the state is cancelled.
         * @param state the state that was cancelled.
         */
        void stateCancelled(FlowState state) { }

        /**
         * Set the initial window size for {@code state}.
         * @param state the state to change the initial window size for.
         * @param initialWindowSize the size of the window in bytes.
         */
        void windowSize(FlowState state, int initialWindowSize) {
            state.windowSize(initialWindowSize);
        }

        /**
         * Increment the window size for a particular stream.
         * @param state the state associated with the stream whose window is being incremented.
         * @param delta The amount to increment by.
         * @throws Http2Exception If this operation overflows the window for {@code state}.
         */
        void incrementWindowSize(FlowState state, int delta) throws Http2Exception {
            state.incrementStreamWindow(delta);
        }

        /**
         * Add a frame to be sent via flow control.
         * @param state The state associated with the stream which the {@code frame} is associated with.
         * @param frame the frame to enqueue.
         * @throws Http2Exception If a writability error occurs.
         */
        void enqueueFrame(FlowState state, FlowControlled frame) throws Http2Exception {
            state.enqueueFrame(frame);
        }

        /**
         * Increment the total amount of pending bytes for all streams. When any stream's pending bytes changes
         * method should be called.
         * @param delta The amount to increment by.
         */
        final void incrementPendingBytes(int delta) {
            totalPendingBytes += delta;

            // Notification of writability change should be delayed until the end of the top level event.
            // This is to ensure the flow controller is in a more consistent state before calling external
            // listener methods.
        }

        /**
         * Determine if the stream associated with {@code state} is writable.
         * @param state The state which is associated with the stream to test writability for.
         * @return {@code true} if {@link FlowState#stream()} is writable. {@code false} otherwise.
         */
        final boolean isWritable(FlowState state) {
            return isWritableConnection() && state.isWritable();
        }

        final void writePendingBytes() throws Http2Exception {
            int bytesToWrite = writableBytes();

            // Make sure we always write at least once, regardless if we have bytesToWrite or not.
            // This ensures that zero-length frames will always be written.
            for (;;) {
                if (!streamByteDistributor.distribute(bytesToWrite, writer) ||
                        (bytesToWrite = writableBytes()) <= 0 ||
                        !isChannelWritable0()) {
                    break;
                }
            }
        }

        void initialWindowSize(int newWindowSize) throws Http2Exception {
            if (newWindowSize < 0) {
                throw new IllegalArgumentException("Invalid initial window size: " + newWindowSize);
            }

            final int delta = newWindowSize - initialWindowSize;
            initialWindowSize = newWindowSize;
            connection.forEachActiveStream(new Http2StreamVisitor() {
                @Override
                public boolean visit(Http2Stream stream) throws Http2Exception {
                    state(stream).incrementStreamWindow(delta);
                    return true;
                }
            });

            if (delta > 0 && isChannelWritable()) {
                // The window size increased, send any pending frames for all streams.
                writePendingBytes();
            }
        }

        final boolean isWritableConnection() {
            return connectionState.windowSize() - totalPendingBytes > 0 && isChannelWritable();
        }
    }

    /**
     * Writability of a {@code stream} is calculated using the following:
     * <pre>
     * Connection Window - Total Queued Bytes > 0 &&
     * Stream Window - Bytes Queued for Stream > 0 &&
     * isChannelWritable()
     * </pre>
     */
    private final class ListenerWritabilityMonitor extends WritabilityMonitor {
        private final Listener listener;
        private final Http2StreamVisitor checkStreamWritabilityVisitor = new Http2StreamVisitor() {
            @Override
            public boolean visit(Http2Stream stream) throws Http2Exception {
                FlowState state = state(stream);
                if (isWritable(state) != state.markedWritability()) {
                    notifyWritabilityChanged(state);
                }
                return true;
            }
        };

        ListenerWritabilityMonitor(Listener listener) {
            this.listener = listener;
        }

        @Override
        void windowSize(FlowState state, int initialWindowSize) {
            super.windowSize(state, initialWindowSize);
            try {
                checkStateWritability(state);
            } catch (Http2Exception e) {
                throw new RuntimeException("Caught unexpected exception from window", e);
            }
        }

        @Override
        void incrementWindowSize(FlowState state, int delta) throws Http2Exception {
            super.incrementWindowSize(state, delta);
            checkStateWritability(state);
        }

        @Override
        void initialWindowSize(int newWindowSize) throws Http2Exception {
            super.initialWindowSize(newWindowSize);
            if (isWritableConnection()) {
                // If the write operation does not occur we still need to check all streams because they
                // may have transitioned from writable to not writable.
                checkAllWritabilityChanged();
            }
        }

        @Override
        void enqueueFrame(FlowState state, FlowControlled frame) throws Http2Exception {
            super.enqueueFrame(state, frame);
            checkConnectionThenStreamWritabilityChanged(state);
        }

        @Override
        void stateCancelled(FlowState state) {
            try {
                checkConnectionThenStreamWritabilityChanged(state);
            } catch (Http2Exception e) {
                throw new RuntimeException("Caught unexpected exception from checkAllWritabilityChanged", e);
            }
        }

        @Override
        void channelWritabilityChange() throws Http2Exception {
            if (connectionState.markedWritability() != isChannelWritable()) {
                checkAllWritabilityChanged();
            }
        }

        private void checkStateWritability(FlowState state) throws Http2Exception {
            if (isWritable(state) != state.markedWritability()) {
                if (state == connectionState) {
                    checkAllWritabilityChanged();
                } else {
                    notifyWritabilityChanged(state);
                }
            }
        }

        private void notifyWritabilityChanged(FlowState state) {
            state.markedWritability(!state.markedWritability());
            try {
                listener.writabilityChanged(state.stream);
            } catch (Throwable cause) {
                logger.error("Caught Throwable from listener.writabilityChanged", cause);
            }
        }

        private void checkConnectionThenStreamWritabilityChanged(FlowState state) throws Http2Exception {
            // It is possible that the connection window and/or the individual stream writability could change.
            if (isWritableConnection() != connectionState.markedWritability()) {
                checkAllWritabilityChanged();
            } else if (isWritable(state) != state.markedWritability()) {
                notifyWritabilityChanged(state);
            }
        }

        private void checkAllWritabilityChanged() throws Http2Exception {
            // Make sure we mark that we have notified as a result of this change.
            connectionState.markedWritability(isWritableConnection());
            connection.forEachActiveStream(checkStreamWritabilityVisitor);
        }
    }
}
| |
package com.hazelcast.simulator.coordinator.tasks;
import com.hazelcast.simulator.agent.workerprocess.WorkerProcessSettings;
import com.hazelcast.simulator.common.SimulatorProperties;
import com.hazelcast.simulator.common.TestCase;
import com.hazelcast.simulator.common.TestPhase;
import com.hazelcast.simulator.common.WorkerType;
import com.hazelcast.simulator.coordinator.CoordinatorParameters;
import com.hazelcast.simulator.coordinator.FailureCollector;
import com.hazelcast.simulator.coordinator.PerformanceStatsCollector;
import com.hazelcast.simulator.coordinator.RemoteClient;
import com.hazelcast.simulator.coordinator.TestPhaseListeners;
import com.hazelcast.simulator.coordinator.TestSuite;
import com.hazelcast.simulator.protocol.connector.Connector;
import com.hazelcast.simulator.protocol.core.Response;
import com.hazelcast.simulator.protocol.core.ResponseType;
import com.hazelcast.simulator.protocol.core.SimulatorAddress;
import com.hazelcast.simulator.protocol.operation.CreateTestOperation;
import com.hazelcast.simulator.protocol.operation.FailureOperation;
import com.hazelcast.simulator.protocol.operation.SimulatorOperation;
import com.hazelcast.simulator.protocol.operation.StartTestOperation;
import com.hazelcast.simulator.protocol.operation.StartTestPhaseOperation;
import com.hazelcast.simulator.protocol.operation.StopTestOperation;
import com.hazelcast.simulator.protocol.registry.ComponentRegistry;
import com.hazelcast.simulator.protocol.registry.TargetType;
import com.hazelcast.simulator.protocol.registry.TestData;
import com.hazelcast.simulator.protocol.registry.WorkerQuery;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import static com.hazelcast.simulator.TestEnvironmentUtils.setupFakeEnvironment;
import static com.hazelcast.simulator.TestEnvironmentUtils.tearDownFakeEnvironment;
import static com.hazelcast.simulator.common.FailureType.WORKER_EXCEPTION;
import static com.hazelcast.simulator.common.FailureType.WORKER_NORMAL_EXIT;
import static com.hazelcast.simulator.common.TestPhase.RUN;
import static com.hazelcast.simulator.coordinator.tasks.RunTestSuiteTask.getTestPhaseSyncMap;
import static com.hazelcast.simulator.protocol.core.AddressLevel.WORKER;
import static com.hazelcast.simulator.utils.CommonUtils.await;
import static com.hazelcast.simulator.utils.CommonUtils.sleepMillis;
import static com.hazelcast.simulator.utils.FileUtils.deleteQuiet;
import static com.hazelcast.simulator.utils.TestUtils.createTmpDirectory;
import static java.util.Arrays.asList;
import static java.util.Collections.singletonList;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.atLeast;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@code RunTestSuiteTask}: each test wires a small {@code TestSuite} to a mocked
 * {@code RemoteClient}, runs the task, and verifies which {@code SimulatorOperation}s were sent
 * to the workers.
 */
public class RunTestSuiteTaskTest {
// Generates unique test-case names so repeated runs never collide in the registry.
private static final AtomicLong ID_GENERATOR = new AtomicLong();
// TestPhaseCompleter (a daemon thread) waits on this latch before reporting a normal worker
// exit; setting it to null skips that reporting path entirely.
private CountDownLatch finishWorkerLatch = new CountDownLatch(1);
private File outputDirectory;
private TestSuite testSuite;
private FailureOperation criticalFailureOperation;
private SimulatorProperties simulatorProperties;
private ComponentRegistry componentRegistry;
private FailureCollector failureCollector;
private RemoteClient remoteClient;
// Per-test knobs consumed by createRunTestSuiteTask().
private boolean parallel = false;
private boolean verifyEnabled = true;
private int monitorPerformanceMonitorIntervalSeconds = 0;
@BeforeClass
public static void prepareEnvironment() {
setupFakeEnvironment();
}
@AfterClass
public static void resetEnvironment() {
tearDownFakeEnvironment();
}
// Builds a two-test suite plus a mocked RemoteClient whose connector always answers SUCCESS.
@Before
public void before() {
testSuite = new TestSuite();
testSuite.addTest(new TestCase("CoordinatorTest" + ID_GENERATOR.incrementAndGet()));
testSuite.addTest(new TestCase("CoordinatorTest" + ID_GENERATOR.incrementAndGet()));
outputDirectory = createTmpDirectory();
SimulatorAddress address = new SimulatorAddress(WORKER, 1, 1, 0);
criticalFailureOperation = new FailureOperation("expected critical failure", WORKER_EXCEPTION, address, "127.0.0.1",
"127.0.0.1:5701", "workerId", "CoordinatorTest1", "stacktrace");
simulatorProperties = new SimulatorProperties();
Response response = new Response(1, SimulatorAddress.COORDINATOR, address, ResponseType.SUCCESS);
Connector connector = mock(Connector.class);
when(connector.invoke(any(SimulatorAddress.class), any(SimulatorOperation.class))).thenReturn(response);
remoteClient = mock(RemoteClient.class);
when(remoteClient.getConnector()).thenReturn(connector);
}
@After
public void cleanUp() {
deleteQuiet(outputDirectory);
}
@Test
public void runParallel_waitForTestCase_and_duration() {
testSuite.setDurationSeconds(3);
parallel = true;
RunTestSuiteTask task = createRunTestSuiteTask();
task.run();
verifyRemoteClient();
}
@Test
public void runParallel_waitForTestCase_noVerify() {
testSuite.setDurationSeconds(0);
parallel = true;
verifyEnabled = false;
RunTestSuiteTask task = createRunTestSuiteTask();
task.run();
verifyRemoteClient();
}
@Test
public void runParallel_performanceMonitorEnabled() {
testSuite.setDurationSeconds(4);
parallel = true;
monitorPerformanceMonitorIntervalSeconds = 10;
RunTestSuiteTask task = createRunTestSuiteTask();
task.run();
verifyRemoteClient();
}
@Test
public void runParallel_withTargetCount() {
testSuite.setDurationSeconds(0);
parallel = true;
verifyEnabled = false;
RunTestSuiteTask task = createRunTestSuiteTask(1);
task.run();
verifyRemoteClient();
}
@Test
public void runParallel_withWarmup() {
testSuite.setDurationSeconds(1);
parallel = true;
verifyEnabled = false;
RunTestSuiteTask task = createRunTestSuiteTask();
task.run();
verifyRemoteClient();
}
@Test
public void runParallel_withWarmup_waitForTestCase() {
testSuite.setDurationSeconds(0);
parallel = true;
verifyEnabled = false;
RunTestSuiteTask task = createRunTestSuiteTask();
task.run();
verifyRemoteClient();
}
@Test
public void runSequential_withSingleTest() {
TestCase testCase = new TestCase("CoordinatorTest" + ID_GENERATOR.incrementAndGet());
testSuite = new TestSuite();
testSuite.addTest(testCase);
testSuite.setDurationSeconds(1);
RunTestSuiteTask task = createRunTestSuiteTask();
task.run();
verifyRemoteClient();
}
@Test
public void runParallel_withSingleTest() {
TestCase testCase = new TestCase("CoordinatorTest" + ID_GENERATOR.incrementAndGet());
testSuite = new TestSuite();
testSuite.addTest(testCase);
testSuite.setDurationSeconds(1);
parallel = true;
RunTestSuiteTask task = createRunTestSuiteTask();
task.run();
verifyRemoteClient();
}
// The following failure tests only check that run() completes; no operation verification.
@Test
public void runSequential_hasCriticalFailures() {
testSuite.setDurationSeconds(4);
parallel = false;
RunTestSuiteTask task = createRunTestSuiteTask();
failureCollector.notify(criticalFailureOperation);
task.run();
}
@Test
public void runParallel_hasCriticalFailures() {
testSuite.setDurationSeconds(4);
testSuite.setFailFast(false);
parallel = true;
RunTestSuiteTask task = createRunTestSuiteTask();
failureCollector.notify(criticalFailureOperation);
task.run();
}
@Test
public void runSequential_hasCriticalFailures_withFailFast() {
testSuite.setDurationSeconds(1);
testSuite.setFailFast(true);
RunTestSuiteTask task = createRunTestSuiteTask();
failureCollector.notify(criticalFailureOperation);
task.run();
}
@Test
public void runParallel_hasCriticalFailures_withFailFast() {
testSuite.setDurationSeconds(1);
testSuite.setFailFast(true);
parallel = true;
RunTestSuiteTask task = createRunTestSuiteTask();
failureCollector.notify(criticalFailureOperation);
task.run();
}
// Exceptions thrown by the remote client must propagate out of run().
@Test(expected = IllegalStateException.class)
public void runSequential_withException() {
doThrow(new IllegalStateException("expected")).when(remoteClient).invokeOnAllWorkers(any(SimulatorOperation.class));
testSuite.setDurationSeconds(1);
parallel = false;
RunTestSuiteTask task = createRunTestSuiteTask();
task.run();
}
@Test(expected = IllegalStateException.class)
public void runParallel_withException() {
doThrow(new IllegalStateException("expected")).when(remoteClient).invokeOnAllWorkers(any(SimulatorOperation.class));
testSuite.setDurationSeconds(1);
parallel = true;
RunTestSuiteTask task = createRunTestSuiteTask();
task.run();
}
// finishWorkerLatch == null means the completer never reports WORKER_NORMAL_EXIT, so the
// task has to rely on the (shortened) shutdown timeout.
@Test
public void runSequential_withWorkerNotShuttingDown() {
simulatorProperties.set("WAIT_FOR_WORKER_SHUTDOWN_TIMEOUT_SECONDS", "1");
testSuite.setDurationSeconds(1);
finishWorkerLatch = null;
RunTestSuiteTask task = createRunTestSuiteTask();
task.run();
verifyRemoteClient();
}
private RunTestSuiteTask createRunTestSuiteTask() {
return createRunTestSuiteTask(0);
}
/**
 * Builds the task under test from the current knob fields (parallel, verifyEnabled, ...) and
 * starts a TestPhaseCompleter that simulates workers finishing every phase.
 *
 * @param targetCount maximum number of target workers; 0 means unlimited
 */
private RunTestSuiteTask createRunTestSuiteTask(int targetCount) {
WorkerProcessSettings workerProcessSettings = mock(WorkerProcessSettings.class);
when(workerProcessSettings.getWorkerIndex()).thenReturn(1);
when(workerProcessSettings.getWorkerType()).thenReturn(WorkerType.MEMBER);
componentRegistry = new ComponentRegistry();
componentRegistry.addAgent("127.0.0.1", "127.0.0.1");
componentRegistry.addWorkers(componentRegistry.getFirstAgent().getAddress(), singletonList(workerProcessSettings));
failureCollector = new FailureCollector(outputDirectory, componentRegistry);
PerformanceStatsCollector performanceStatsCollector = new PerformanceStatsCollector();
TestPhaseListeners testPhaseListeners = new TestPhaseListeners();
CoordinatorParameters coordinatorParameters = new CoordinatorParameters()
.setPerformanceMonitorIntervalSeconds(monitorPerformanceMonitorIntervalSeconds)
.setSimulatorProperties(simulatorProperties);
WorkerQuery query = new WorkerQuery().setTargetType(TargetType.ALL);
if (targetCount > 0) {
query.setMaxCount(targetCount);
}
testSuite.setVerifyEnabled(verifyEnabled)
.setParallel(parallel)
.setWorkerQuery(query);
RunTestSuiteTask task = new RunTestSuiteTask(testSuite, coordinatorParameters, componentRegistry, failureCollector,
testPhaseListeners, remoteClient, performanceStatsCollector);
new TestPhaseCompleter(componentRegistry, testPhaseListeners, failureCollector).start();
return task;
}
/**
 * Captures every operation the task sent through the mocked RemoteClient and checks that each
 * expected test phase was started exactly once per test, and each test was stopped once.
 */
private void verifyRemoteClient() {
int testCount = testSuite.size();
List<TestPhase> expectedTestPhases = getExpectedTestPhases();
// remainingPhaseCount tracks the remaining number of expected calls per phase; by the end
// every counter should have been decremented to 0.
Map<TestPhase, AtomicInteger> remainingPhaseCount = new HashMap<TestPhase, AtomicInteger>();
for (TestPhase testPhase : expectedTestPhases) {
remainingPhaseCount.put(testPhase, new AtomicInteger(testCount));
}
// we check if the create calls have been made
verify(remoteClient, times(testCount)).invokeOnAllWorkers(any(CreateTestOperation.class));
// now we suck up all 'invokeOnTestOnAllWorkers'
ArgumentCaptor<SimulatorOperation> allTestOperations = ArgumentCaptor.forClass(SimulatorOperation.class);
verify(remoteClient, atLeast(0)).invokeOnTestOnAllWorkers(any(SimulatorAddress.class), allTestOperations.capture());
// now we suck up all 'invokeOnTestOnFirstWorker'
ArgumentCaptor<SimulatorOperation> firstTestOperations = ArgumentCaptor.forClass(SimulatorOperation.class);
verify(remoteClient, atLeast(0)).invokeOnTestOnFirstWorker(any(SimulatorAddress.class), firstTestOperations.capture());
int actualStopTestCount = 0;
// classify every operation that was broadcast to all workers
for (SimulatorOperation operation : allTestOperations.getAllValues()) {
if (operation instanceof StartTestPhaseOperation) {
remainingPhaseCount.get(((StartTestPhaseOperation) operation).getTestPhase()).decrementAndGet();
} else if (operation instanceof StartTestOperation) {
// a StartTestOperation implicitly represents the RUN phase
TestPhase phase = RUN;
remainingPhaseCount.get(phase).decrementAndGet();
} else if (operation instanceof StopTestOperation) {
actualStopTestCount++;
} else {
fail("Unrecognized operation: " + operation);
}
}
// global phases are sent to the first worker only
for (SimulatorOperation operation : firstTestOperations.getAllValues()) {
if (operation instanceof StartTestPhaseOperation) {
remainingPhaseCount.get(((StartTestPhaseOperation) operation).getTestPhase()).decrementAndGet();
} else {
fail("Unrecognized operation: " + operation);
}
}
int expectedStopCount = testCount;
assertEquals("actualStopTestCount incorrect", expectedStopCount, actualStopTestCount);
for (Map.Entry<TestPhase, AtomicInteger> entry : remainingPhaseCount.entrySet()) {
TestPhase phase = entry.getKey();
int value = entry.getValue().get();
assertEquals("Number of remaining occurrences for phase: " + phase + " incorrect", 0, value);
}
}
private List<TestPhase> getExpectedTestPhases() {
// per default we expected all test phases to be called
List<TestPhase> expectedTestPhases = new ArrayList<TestPhase>(asList(TestPhase.values()));
if (!verifyEnabled) {
// exclude verify test phases
expectedTestPhases.remove(TestPhase.GLOBAL_VERIFY);
expectedTestPhases.remove(TestPhase.LOCAL_VERIFY);
}
return expectedTestPhases;
}
/**
 * Daemon thread that plays the role of the workers: it reports completion of every test phase
 * for every registered test, then (optionally) reports a normal worker exit.
 */
private class TestPhaseCompleter extends Thread {
private final ComponentRegistry componentRegistry;
private final TestPhaseListeners testPhaseListeners;
private final FailureCollector failureCollector;
private TestPhaseCompleter(ComponentRegistry componentRegistry, TestPhaseListeners testPhaseListeners,
FailureCollector failureCollector) {
super("TestPhaseCompleter");
this.componentRegistry = componentRegistry;
this.testPhaseListeners = testPhaseListeners;
this.failureCollector = failureCollector;
// daemon: must not keep the JVM alive if a test leaves the latch un-counted-down
setDaemon(true);
}
@Override
public void run() {
SimulatorAddress workerAddress = new SimulatorAddress(WORKER, 1, 1, 0);
for (TestPhase testPhase : TestPhase.values()) {
sleepMillis(100);
for (TestData testData : componentRegistry.getTests()) {
testPhaseListeners.onCompletion(testData.getTestIndex(), testPhase, workerAddress);
}
}
if (finishWorkerLatch != null) {
await(finishWorkerLatch);
FailureOperation operation = new FailureOperation("Worker finished", WORKER_NORMAL_EXIT, workerAddress, "127.0.0.1",
"127.0.0.1:5701", "workerId", "testId", "stacktrace");
failureCollector.notify(operation);
}
}
}
// With parallel == true every phase up to and including the sync phase gets a latch sized to
// the test count; phases after the sync phase get a latch that is already at 0.
@Test
public void testGetTestPhaseSyncMap() {
Map<TestPhase, CountDownLatch> testPhaseSyncMap = getTestPhaseSyncMap(5, true, TestPhase.RUN);
assertEquals(5, testPhaseSyncMap.get(TestPhase.SETUP).getCount());
assertEquals(5, testPhaseSyncMap.get(TestPhase.LOCAL_PREPARE).getCount());
assertEquals(5, testPhaseSyncMap.get(TestPhase.GLOBAL_PREPARE).getCount());
assertEquals(5, testPhaseSyncMap.get(TestPhase.RUN).getCount());
assertEquals(0, testPhaseSyncMap.get(TestPhase.GLOBAL_VERIFY).getCount());
assertEquals(0, testPhaseSyncMap.get(TestPhase.LOCAL_VERIFY).getCount());
assertEquals(0, testPhaseSyncMap.get(TestPhase.GLOBAL_TEARDOWN).getCount());
assertEquals(0, testPhaseSyncMap.get(TestPhase.LOCAL_TEARDOWN).getCount());
}
@Test
@SuppressWarnings("all")
public void testGetTestPhaseSyncMap_notParallel() {
Map<TestPhase, CountDownLatch> testPhaseSyncMap = getTestPhaseSyncMap(5, false, TestPhase.RUN);
assertNull(testPhaseSyncMap);
}
}
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2018 by Hitachi Vantara : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.big.data.kettle.plugins.formats.avro.output;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.function.Function;
import org.apache.commons.vfs2.FileObject;
import org.pentaho.big.data.kettle.plugins.formats.avro.AvroTypeConverter;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleFileException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.injection.Injection;
import org.pentaho.di.core.injection.InjectionDeep;
import org.pentaho.di.core.util.StringUtil;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.vfs.AliasedFileObject;
import org.pentaho.di.core.vfs.KettleVFS;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
import org.pentaho.di.workarounds.ResolvableResource;
import org.pentaho.metastore.api.IMetaStore;
import org.w3c.dom.Node;
/**
* Avro output meta step without Hadoop-dependent classes. Required for read meta in the spark native code.
*
 * @author Alexander Buloichik &lt;Alexander_Buloichik@epam.com&gt;
*/
public abstract class AvroOutputMetaBase extends BaseStepMeta implements StepMetaInterface, ResolvableResource {
// Used for looking up localized messages via BaseMessages.
private static final Class<?> PKG = AvroOutputMetaBase.class;
// Target output file name; may contain Kettle variables (resolved in resolve()/constructOutputFilename()).
@Injection( name = "FILENAME" ) private String filename;
// Field mappings from Pentaho stream fields to Avro output fields.
@InjectionDeep
private List<AvroOutputField> outputFields = new ArrayList<AvroOutputField>();
// When true, a yyyyMMdd date stamp is appended to the output file name.
@Injection( name = "OPTIONS_DATE_IN_FILE_NAME" )
protected boolean dateInFileName = false;
// When true, an HHmmss time stamp is appended to the output file name.
@Injection( name = "OPTIONS_TIME_IN_FILE_NAME" )
protected boolean timeInFileName = false;
// Custom SimpleDateFormat pattern for the file-name stamp; overrides the two flags above when set.
@Injection( name = "OPTIONS_DATE_FORMAT" )
protected String dateTimeFormat = "";
// Stored as a CompressionType name or a Kettle variable expression (see setCompressionType()).
@Injection( name = "OPTIONS_COMPRESSION" ) protected String compressionType;
// Optional external Avro schema file and schema metadata.
@Injection( name = "SCHEMA_FILENAME" ) protected String schemaFilename;
@Injection( name = "SCHEMA_NAMESPACE" ) protected String namespace;
@Injection( name = "SCHEMA_RECORD_NAME" ) protected String recordName;
@Injection( name = "SCHEMA_DOC_VALUE" ) protected String docValue;
// Whether an existing output file may be overwritten; defaults to true when absent (see readData()).
@Injection( name = "OVERRIDE_OUTPUT" )
protected boolean overrideOutput;
@Override
public void setDefault() {
// No defaults to initialize: field initializers above already provide them.
}
/** @return whether an existing output file may be overwritten */
public boolean isOverrideOutput() {
return overrideOutput;
}
public void setOverrideOutput( boolean overrideOutput ) {
this.overrideOutput = overrideOutput;
}
/** @return the output file name, possibly containing Kettle variables */
public String getFilename() {
return filename;
}
public void setFilename( String filename ) {
this.filename = filename;
}
/** @return the configured Pentaho-to-Avro field mappings */
public List<AvroOutputField> getOutputFields() {
return outputFields;
}
public void setOutputFields( List<AvroOutputField> outputFields ) {
this.outputFields = outputFields;
}
/** @return whether a date stamp is appended to the output file name */
public boolean isDateInFileName() {
return dateInFileName;
}
public void setDateInFileName( boolean dateInFileName ) {
this.dateInFileName = dateInFileName;
}
/** @return whether a time stamp is appended to the output file name */
public boolean isTimeInFileName() {
return timeInFileName;
}
public void setTimeInFileName( boolean timeInFileName ) {
this.timeInFileName = timeInFileName;
}
/** @return the custom date/time pattern for the file-name stamp, or an empty string */
public String getDateTimeFormat() {
return dateTimeFormat;
}
public void setDateTimeFormat( String dateTimeFormat ) {
this.dateTimeFormat = dateTimeFormat;
}
/**
 * Loads the step settings from the given XML node; delegates to {@link #readData}.
 */
@Override
public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException {
readData( stepnode, metaStore );
}
/**
 * Parses this step's settings from the given XML step node.
 *
 * @param stepnode  the XML node that holds the step configuration
 * @param metastore the metastore (not used while reading)
 * @throws KettleXMLException if the configuration cannot be read
 */
private void readData( Node stepnode, IMetaStore metastore ) throws KettleXMLException {
  try {
    filename = XMLHandler.getTagValue( stepnode, "filename" );
    // Since we had override set to true in the previous release by default, we need to ensure that if the flag is
    // missing in the transformation xml, we set the override flag to true
    String override = XMLHandler.getTagValue( stepnode, FieldNames.OVERRIDE_OUTPUT );
    if ( override != null && override.length() > 0 ) {
      // reuse the value already read above instead of querying the XML node a second time
      overrideOutput = "Y".equalsIgnoreCase( override );
    } else {
      overrideOutput = true;
    }
    Node fields = XMLHandler.getSubNode( stepnode, "fields" );
    int nrfields = XMLHandler.countNodes( fields, "field" );
    List<AvroOutputField> avroOutputFields = new ArrayList<>();
    for ( int i = 0; i < nrfields; i++ ) {
      Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i );
      AvroOutputField outputField = new AvroOutputField();
      outputField.setFormatFieldName( XMLHandler.getTagValue( fnode, "path" ) );
      outputField.setPentahoFieldName( XMLHandler.getTagValue( fnode, "name" ) );
      outputField.setFormatType( AvroTypeConverter.convertToAvroType( XMLHandler.getTagValue( fnode, "type" ) ) );
      outputField.setPrecision( XMLHandler.getTagValue( fnode, "precision" ) );
      outputField.setScale( XMLHandler.getTagValue( fnode, "scale" ) );
      outputField.setAllowNull( XMLHandler.getTagValue( fnode, "nullable" ) );
      outputField.setDefaultValue( XMLHandler.getTagValue( fnode, "default" ) );
      avroOutputFields.add( outputField );
    }
    this.outputFields = avroOutputFields;
    compressionType = XMLHandler.getTagValue( stepnode, FieldNames.COMPRESSION );
    dateTimeFormat = XMLHandler.getTagValue( stepnode, FieldNames.DATE_FORMAT );
    dateInFileName = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, FieldNames.DATE_IN_FILE_NAME ) );
    timeInFileName = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, FieldNames.TIME_IN_FILE_NAME ) );
    schemaFilename = XMLHandler.getTagValue( stepnode, FieldNames.SCHEMA_FILENAME );
    namespace = XMLHandler.getTagValue( stepnode, FieldNames.NAMESPACE );
    docValue = XMLHandler.getTagValue( stepnode, FieldNames.DOC_VALUE );
    recordName = XMLHandler.getTagValue( stepnode, FieldNames.RECORD_NAME );
  } catch ( Exception e ) {
    throw new KettleXMLException( "Unable to load step info from XML", e );
  }
}
/**
 * Serializes this step's settings to the XML fragment that Kettle stores inside the
 * transformation file. Fields without a Pentaho name are skipped.
 *
 * @return the XML fragment representing this step's configuration
 */
@Override
public String getXML() {
  // StringBuilder: this buffer is purely method-local, so no synchronization is needed.
  StringBuilder retval = new StringBuilder( 800 );
  final String INDENT = " ";
  retval.append( INDENT ).append( XMLHandler.addTagValue( "filename", filename ) );
  retval.append( INDENT ).append( XMLHandler.addTagValue( FieldNames.OVERRIDE_OUTPUT, overrideOutput ) );
  retval.append( " <fields>" ).append( Const.CR );
  for ( int i = 0; i < outputFields.size(); i++ ) {
    AvroOutputField field = outputFields.get( i );
    // only persist fields that are actually mapped to a Pentaho stream field
    if ( field.getPentahoFieldName() != null && field.getPentahoFieldName().length() != 0 ) {
      retval.append( " <field>" ).append( Const.CR );
      retval.append( " " ).append( XMLHandler.addTagValue( "path", field.getFormatFieldName() ) );
      retval.append( " " ).append( XMLHandler.addTagValue( "name", field.getPentahoFieldName() ) );
      retval.append( " " ).append( XMLHandler.addTagValue( "type", field.getAvroType().getId() ) );
      retval.append( " " ).append( XMLHandler.addTagValue( "precision", field.getPrecision() ) );
      retval.append( " " ).append( XMLHandler.addTagValue( "scale", field.getScale() ) );
      retval.append( " " ).append( XMLHandler.addTagValue( "nullable", field.getAllowNull() ) );
      retval.append( " " ).append( XMLHandler.addTagValue( "default", field.getDefaultValue() ) );
      retval.append( " </field>" ).append( Const.CR );
    }
  }
  retval.append( " </fields>" ).append( Const.CR );
  retval.append( INDENT ).append( XMLHandler.addTagValue( FieldNames.COMPRESSION, compressionType ) );
  retval.append( INDENT ).append( XMLHandler.addTagValue( FieldNames.DATE_FORMAT, dateTimeFormat ) );
  retval.append( INDENT ).append( XMLHandler.addTagValue( FieldNames.DATE_IN_FILE_NAME, dateInFileName ) );
  retval.append( INDENT ).append( XMLHandler.addTagValue( FieldNames.TIME_IN_FILE_NAME, timeInFileName ) );
  retval.append( INDENT ).append( XMLHandler.addTagValue( FieldNames.SCHEMA_FILENAME, schemaFilename ) );
  retval.append( INDENT ).append( XMLHandler.addTagValue( FieldNames.NAMESPACE, namespace ) );
  retval.append( INDENT ).append( XMLHandler.addTagValue( FieldNames.DOC_VALUE, docValue ) );
  retval.append( INDENT ).append( XMLHandler.addTagValue( FieldNames.RECORD_NAME, recordName ) );
  return retval.toString();
}
/**
 * Reads this step's settings from the repository.
 *
 * @throws KettleException if the attributes cannot be read
 */
@Override
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases )
throws KettleException {
try {
filename = rep.getStepAttributeString( id_step, "filename" );
// Since we had override set to true in the previous release by default, we need to ensure that if the flag is
// missing in the transformation xml, we set the override flag to true
String override = rep.getStepAttributeString( id_step, FieldNames.OVERRIDE_OUTPUT );
if ( override != null && override.length() > 0 ) {
overrideOutput = rep.getStepAttributeBoolean( id_step, FieldNames.OVERRIDE_OUTPUT );
} else {
overrideOutput = true;
}
// using the "type" column to get the number of field rows because "type" is guaranteed not to be null.
int nrfields = rep.countNrStepAttributes( id_step, "type" );
List<AvroOutputField> avroOutputFields = new ArrayList<>();
for ( int i = 0; i < nrfields; i++ ) {
AvroOutputField outputField = new AvroOutputField();
outputField.setFormatFieldName( rep.getStepAttributeString( id_step, i, "path" ) );
outputField.setPentahoFieldName( rep.getStepAttributeString( id_step, i, "name" ) );
outputField.setFormatType( AvroTypeConverter.convertToAvroType( rep.getStepAttributeString( id_step, i, "type" ) ) );
outputField.setPrecision( rep.getStepAttributeString( id_step, i, "precision" ) );
outputField.setScale( rep.getStepAttributeString( id_step, i, "scale" ) );
outputField.setAllowNull( rep.getStepAttributeString( id_step, i, "nullable" ) );
outputField.setDefaultValue( rep.getStepAttributeString( id_step, i, "default" ) );
avroOutputFields.add( outputField );
}
this.outputFields = avroOutputFields;
compressionType = rep.getStepAttributeString( id_step, FieldNames.COMPRESSION );
dateTimeFormat = rep.getStepAttributeString( id_step, FieldNames.DATE_FORMAT );
dateInFileName = rep.getStepAttributeBoolean( id_step, FieldNames.DATE_IN_FILE_NAME );
timeInFileName = rep.getStepAttributeBoolean( id_step, FieldNames.TIME_IN_FILE_NAME );
schemaFilename = rep.getStepAttributeString( id_step, FieldNames.SCHEMA_FILENAME );
namespace = rep.getStepAttributeString( id_step, FieldNames.NAMESPACE );
docValue = rep.getStepAttributeString( id_step, FieldNames.DOC_VALUE );
recordName = rep.getStepAttributeString( id_step, FieldNames.RECORD_NAME );
} catch ( Exception e ) {
throw new KettleException( "Unexpected error reading step information from the repository", e );
}
}
/**
 * Saves this step's settings to the repository.
 *
 * @throws KettleException if the attributes cannot be saved
 */
@Override
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step )
throws KettleException {
try {
rep.saveStepAttribute( id_transformation, id_step, "filename", filename );
rep.saveStepAttribute( id_transformation, id_step, FieldNames.OVERRIDE_OUTPUT, overrideOutput );
for ( int i = 0; i < outputFields.size(); i++ ) {
AvroOutputField field = outputFields.get( i );
rep.saveStepAttribute( id_transformation, id_step, i, "path", field.getFormatFieldName() );
rep.saveStepAttribute( id_transformation, id_step, i, "name", field.getPentahoFieldName() );
rep.saveStepAttribute( id_transformation, id_step, i, "type", field.getAvroType().getId() );
rep.saveStepAttribute( id_transformation, id_step, i, "precision", field.getPrecision() );
rep.saveStepAttribute( id_transformation, id_step, i, "scale", field.getScale() );
rep.saveStepAttribute( id_transformation, id_step, i, "nullable", Boolean.toString( field.getAllowNull() ) );
rep.saveStepAttribute( id_transformation, id_step, i, "default", field.getDefaultValue() );
}
super.saveRep( rep, metaStore, id_transformation, id_step );
rep.saveStepAttribute( id_transformation, id_step, FieldNames.COMPRESSION, compressionType );
rep.saveStepAttribute( id_transformation, id_step, FieldNames.DATE_FORMAT, dateTimeFormat );
rep.saveStepAttribute( id_transformation, id_step, FieldNames.DATE_IN_FILE_NAME, dateInFileName );
rep.saveStepAttribute( id_transformation, id_step, FieldNames.TIME_IN_FILE_NAME, timeInFileName );
rep.saveStepAttribute( id_transformation, id_step, FieldNames.SCHEMA_FILENAME, schemaFilename );
rep.saveStepAttribute( id_transformation, id_step, FieldNames.NAMESPACE, namespace );
rep.saveStepAttribute( id_transformation, id_step, FieldNames.DOC_VALUE, docValue );
rep.saveStepAttribute( id_transformation, id_step, FieldNames.RECORD_NAME, recordName );
} catch ( Exception e ) {
throw new KettleException( "Unable to save step information to the repository for id_step=" + id_step, e );
}
}
/**
 * Resolves aliased VFS URIs in the output and schema file names back to their original URIs.
 * Kettle variables are substituted before the file object is inspected.
 */
@Override
public void resolve() {
  filename = resolveAliasedName( filename );
  schemaFilename = resolveAliasedName( schemaFilename );
}

/**
 * Resolves a single (possibly variable-containing) file name: when it points at an aliased
 * VFS file, the original URI string is returned; otherwise the name is returned unchanged.
 *
 * @param fileName the configured file name; may be null or empty
 * @return the resolved file name, or the input when no aliasing applies
 */
private String resolveAliasedName( String fileName ) {
  if ( fileName == null || fileName.isEmpty() ) {
    return fileName;
  }
  try {
    String realFileName = getParentStepMeta().getParentTransMeta().environmentSubstitute( fileName );
    FileObject fileObject = KettleVFS.getFileObject( realFileName );
    if ( AliasedFileObject.isAliasedFile( fileObject ) ) {
      return ( (AliasedFileObject) fileObject ).getOriginalURIString();
    }
  } catch ( KettleFileException e ) {
    // resolve() cannot declare checked exceptions; wrap and rethrow with the cause preserved
    throw new RuntimeException( e );
  }
  return fileName;
}
/** @return the external Avro schema file name, possibly containing Kettle variables */
public String getSchemaFilename() {
return schemaFilename;
}
public void setSchemaFilename( String schemaFilename ) {
this.schemaFilename = schemaFilename;
}
/** @return the Avro schema namespace */
public String getNamespace() {
return namespace;
}
public void setNamespace( String namespace ) {
this.namespace = namespace;
}
/** @return the Avro record name */
public String getRecordName() {
return recordName;
}
public void setRecordName( String recordName ) {
this.recordName = recordName;
}
/** @return the Avro schema doc value */
public String getDocValue() {
return docValue;
}
public void setDocValue( String docValue ) {
this.docValue = docValue;
}
/**
 * Returns the compression type as stored: a Kettle variable expression verbatim, otherwise
 * the display name of the resolved {@link CompressionType}.
 */
public String getCompressionType() {
  return StringUtil.isVariable( compressionType ) ? compressionType : getCompressionType( null ).toString();
}

/**
 * Stores the compression type. Variable expressions are kept verbatim; any other value is
 * normalized to a known {@link CompressionType} constant name, defaulting to NONE.
 */
public void setCompressionType( String value ) {
  compressionType = StringUtil.isVariable( value ) ? value : parseFromToString( value, CompressionType.values(), CompressionType.NONE ).name();
}

/**
 * Resolves the configured compression type against the given variable space.
 *
 * @param vspace variable space used for substitution; may be null (no substitution)
 * @return the resolved compression type, NONE when unset or unparseable
 */
public CompressionType getCompressionType( VariableSpace vspace ) {
  // method reference is clearer than the equivalent lambda str -> findCompressionType( str )
  return parseReplace( compressionType, vspace, this::findCompressionType, CompressionType.NONE );
}
/**
 * Maps a string to a {@link CompressionType}: first by exact enum constant name, then by the
 * localized display name; unknown or null values fall back to NONE.
 */
private CompressionType findCompressionType( String str ) {
  try {
    return CompressionType.valueOf( str );
  } catch ( IllegalArgumentException | NullPointerException e ) {
    // Not an enum constant name (or null); try matching the localized display name instead.
    // Narrowed from catch ( Throwable ): Enum.valueOf only throws these two.
    return parseFromToString( str, CompressionType.values(), CompressionType.NONE );
  }
}
// Returns the localized display names of all compression codecs (for UI combos).
public String[] getCompressionTypes() {
  return getStrings( CompressionType.values() );
}
// Supported Avro compression codecs. name() is what gets persisted in the step
// metadata (see setCompressionType), while toString() yields the localized
// label shown in the UI.
public static enum CompressionType {
  NONE( getMsg( "AvroOutput.CompressionType.NONE" ) ),
  DEFLATE( getMsg( "AvroOutput.CompressionType.DEFLATE" ) ),
  SNAPPY( getMsg( "AvroOutput.CompressionType.SNAPPY" ) );
  // Localized display name.
  private final String name;
  private CompressionType( String name ) {
    this.name = name;
  }
  @Override
  public String toString() {
    return name;
  }
}
/** Maps each element to its {@code toString()} representation, preserving order. */
protected static <T> String[] getStrings( T[] objects ) {
  final String[] names = new String[ objects.length ];
  for ( int index = 0; index < objects.length; index++ ) {
    names[ index ] = objects[ index ].toString();
  }
  return names;
}
/**
 * Finds the candidate whose {@code toString()} matches {@code str}
 * case-insensitively; {@code defaultValue} when {@code str} is empty or no
 * candidate matches.
 */
protected static <T> T parseFromToString( String str, T[] values, T defaultValue ) {
  if ( Utils.isEmpty( str ) ) {
    return defaultValue;
  }
  for ( T candidate : values ) {
    if ( str.equalsIgnoreCase( candidate.toString() ) ) {
      return candidate;
    }
  }
  return defaultValue;
}
/**
 * Substitutes variables in {@code value} (when a space is provided) and feeds
 * the result to {@code parser}; empty input or any parse failure yields
 * {@code defaultValue}.
 */
private <T> T parseReplace( String value, VariableSpace vspace, Function<String, T> parser, T defaultValue ) {
  final String resolved = vspace == null ? value : vspace.environmentSubstitute( value );
  if ( Utils.isEmpty( resolved ) ) {
    return defaultValue;
  }
  try {
    return parser.apply( resolved );
  } catch ( Exception e ) {
    // ignored: deliberately fall through to the default on any parse failure
  }
  return defaultValue;
}
/**
 * Builds the output file name by inserting an optional date/time fragment
 * between the base name and the extension.
 * NOTE(review): lastIndexOf('.') may match a dot in a directory component —
 * confirm callers always pass a bare file name.
 */
public String constructOutputFilename( String file ) {
  int endIndex = file.lastIndexOf( "." );
  String name = endIndex > 0 ? file.substring( 0, endIndex ) : file;
  String extension = endIndex <= 0 ? "" : file.substring( endIndex );
  // Use a single timestamp so the date and time fragments cannot disagree
  // (the original called new Date() separately for each fragment, which could
  // straddle a second/day boundary).
  final Date now = new Date();
  if ( dateTimeFormat != null && !dateTimeFormat.isEmpty() ) {
    // Custom pattern wins over the legacy date/time flags.
    String dateTimeFormatPattern = getParentStepMeta().getParentTransMeta().environmentSubstitute( dateTimeFormat );
    name += new SimpleDateFormat( dateTimeFormatPattern ).format( now );
  } else {
    if ( dateInFileName ) {
      name += '_' + new SimpleDateFormat( "yyyyMMdd" ).format( now );
    }
    if ( timeInFileName ) {
      name += '_' + new SimpleDateFormat( "HHmmss" ).format( now );
    }
  }
  return name + extension;
}
// Shortcut for looking up a localized message in this step's message bundle.
private static String getMsg( String key ) {
  return BaseMessages.getString( PKG, key );
}
// Attribute names used when (de)serializing this step's metadata.
protected static class FieldNames {
  public static final String COMPRESSION = "compression";
  public static final String SCHEMA_FILENAME = "schemaFilename";
  public static final String OVERRIDE_OUTPUT = "overrideOutput";
  public static final String RECORD_NAME = "recordName";
  public static final String DOC_VALUE = "docValue";
  public static final String NAMESPACE = "namespace";
  public static final String DATE_IN_FILE_NAME = "dateInFileName";
  public static final String TIME_IN_FILE_NAME = "timeInFileName";
  public static final String DATE_FORMAT = "dateTimeFormat";
}
}
| |
/*
*
* * Copyright 2014 Orient Technologies LTD (info(at)orientechnologies.com)
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
* * For more information: http://www.orientechnologies.com
*
*/
package com.orientechnologies.orient.core.db.tool;
import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.lang.reflect.InvocationTargetException;
import java.text.ParseException;
import java.util.AbstractList;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.zip.GZIPInputStream;
import com.orientechnologies.common.listener.OProgressListener;
import com.orientechnologies.common.log.OLogManager;
import com.orientechnologies.common.serialization.types.OBinarySerializer;
import com.orientechnologies.orient.core.command.OCommandOutputListener;
import com.orientechnologies.orient.core.db.ODatabase.STATUS;
import com.orientechnologies.orient.core.db.ODatabaseDocumentInternal;
import com.orientechnologies.orient.core.db.document.ODocumentFieldVisitor;
import com.orientechnologies.orient.core.db.document.ODocumentFieldWalker;
import com.orientechnologies.orient.core.db.record.OClassTrigger;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.db.record.ORecordLazyMultiValue;
import com.orientechnologies.orient.core.db.record.ridbag.ORidBag;
import com.orientechnologies.orient.core.exception.OConfigurationException;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.id.ORecordId;
import com.orientechnologies.orient.core.index.OIndex;
import com.orientechnologies.orient.core.index.OIndexDefinition;
import com.orientechnologies.orient.core.index.OIndexManagerProxy;
import com.orientechnologies.orient.core.index.ORuntimeKeyIndexDefinition;
import com.orientechnologies.orient.core.index.OSimpleKeyIndexDefinition;
import com.orientechnologies.orient.core.index.hashindex.local.OMurmurHash3HashFunction;
import com.orientechnologies.orient.core.intent.OIntentMassiveInsert;
import com.orientechnologies.orient.core.metadata.OMetadataDefault;
import com.orientechnologies.orient.core.metadata.function.OFunction;
import com.orientechnologies.orient.core.metadata.schema.OClass;
import com.orientechnologies.orient.core.metadata.schema.OClassImpl;
import com.orientechnologies.orient.core.metadata.schema.OPropertyImpl;
import com.orientechnologies.orient.core.metadata.schema.OSchema;
import com.orientechnologies.orient.core.metadata.schema.OType;
import com.orientechnologies.orient.core.metadata.security.ORole;
import com.orientechnologies.orient.core.metadata.security.OSecurityShared;
import com.orientechnologies.orient.core.metadata.security.OUser;
import com.orientechnologies.orient.core.record.ORecord;
import com.orientechnologies.orient.core.record.ORecordInternal;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.record.impl.ODocumentInternal;
import com.orientechnologies.orient.core.serialization.serializer.OJSONReader;
import com.orientechnologies.orient.core.serialization.serializer.OStringSerializerHelper;
import com.orientechnologies.orient.core.serialization.serializer.binary.impl.OLinkSerializer;
import com.orientechnologies.orient.core.serialization.serializer.record.string.ORecordSerializerJSON;
import com.orientechnologies.orient.core.sql.OCommandSQL;
import com.orientechnologies.orient.core.storage.OPhysicalPosition;
import com.orientechnologies.orient.core.storage.OStorage;
import com.orientechnologies.orient.core.storage.impl.local.paginated.OLocalPaginatedStorage;
import com.orientechnologies.orient.core.type.tree.OMVRBTreeRIDSet;
import com.orientechnologies.orient.core.type.tree.provider.OMVRBTreeRIDProvider;
import com.orientechnologies.orient.core.version.OVersionFactory;
/**
* Import data from a file into a database.
*
* @author Luca Garulli (l.garulli--at--orientechnologies.com)
*/
public class ODatabaseImport extends ODatabaseImpExpAbstract {
// Name of the temporary index mapping exported RIDs -> newly created RIDs.
public static final String EXPORT_IMPORT_MAP_NAME = "___exportImportRIDMap";
// Progress-dump interval used while importing records, in milliseconds.
public static final int IMPORT_RECORD_DUMP_LAP_EVERY_MS = 5000;
// Properties whose linked class is resolved after the whole schema is read.
private Map<OPropertyImpl, String> linkedClasses = new HashMap<OPropertyImpl, String>();
// Classes whose super-class is resolved after the whole schema is read.
private Map<OClass, String> superClasses = new HashMap<OClass, String>();
private OJSONReader jsonReader;
private ORecord record;
private boolean schemaImported = false;
// Version of the exporter that produced the file; -1 until "info" is parsed.
private int exporterVersion = -1;
private ORID schemaRecordId;
private ORID indexMgrRecordId;
// When true, the RID-mapping index is dropped at the end of the import.
private boolean deleteRIDMapping = true;
// Old RID -> new RID lookup used during link migration and manual indexes.
private OIndex<OIdentifiable> exportImportHashTable;
private boolean preserveClusterIDs = true;
private boolean migrateLinks = true;
// When false, default non-security classes are dropped before importing.
private boolean merge = false;
private boolean rebuildIndexes = true;
// Automatic indexes collected at start, rebuilt after the import completes.
private Set<String> indexesToRebuild = new HashSet<String>();
// Converts a single field value during link migration; returns the original
// instance when nothing changed.
private interface ValuesConverter<T> {
T convert(T value);
}
private static final class ConvertersFactory {
  public static final ConvertersFactory INSTANCE = new ConvertersFactory();

  /**
   * Picks the converter matching the runtime type of {@code value}; returns
   * {@code null} when the value cannot contain links to rewrite. Collection
   * types are tested before the plain-link case.
   */
  public ValuesConverter getConverter(Object value) {
    if (value instanceof Map) {
      return MapConverter.INSTANCE;
    }
    if (value instanceof List) {
      return ListConverter.INSTANCE;
    }
    if (value instanceof Set) {
      return SetConverter.INSTANCE;
    }
    if (value instanceof ORidBag) {
      return RidBagConverter.INSTANCE;
    }
    if (value instanceof OIdentifiable) {
      return LinkConverter.INSTANCE;
    }
    return null;
  }
}
// Field visitor that rewrites pre-import RIDs to their post-import values.
private static final class LinksRewriter implements ODocumentFieldVisitor {
  /**
   * Converts the links contained in one field value. While a lazy multi-value
   * is processed, record auto-conversion is disabled so the rewrite does not
   * trigger record loads; the previous flag is always restored.
   */
  @Override
  public Object visitField(OType type, OType linkedType, Object value) {
    boolean oldAutoConvertValue = false;
    if (value instanceof ORecordLazyMultiValue) {
      ORecordLazyMultiValue multiValue = (ORecordLazyMultiValue) value;
      oldAutoConvertValue = multiValue.isAutoConvertToRecord();
      multiValue.setAutoConvertToRecord(false);
    }
    try {
      final ValuesConverter valuesConverter = ConvertersFactory.INSTANCE.getConverter(value);
      if (valuesConverter == null)
        return value;
      return valuesConverter.convert(value);
    } finally {
      // BUGFIX: the original returned early when no converter matched and left
      // auto-convert disabled on the multi-value; the finally block now always
      // restores the caller's flag.
      if (value instanceof ORecordLazyMultiValue)
        ((ORecordLazyMultiValue) value).setAutoConvertToRecord(oldAutoConvertValue);
    }
  }

  // Always descend: every nested value may contain links.
  @Override
  public boolean goFurther(OType type, OType linkedType, Object value, Object newValue) {
    return true;
  }

  @Override
  public boolean goDeeper(OType type, OType linkedType, Object value) {
    return true;
  }

  // Converted values are written back into the document.
  @Override
  public boolean updateMode() {
    return true;
  }
}
// Shared element-conversion logic for the collection converters below.
private static abstract class AbstractCollectionConverter<T> implements ValuesConverter<T> {
  interface ResultCallback {
    void add(Object item);
  }

  /**
   * Converts one collection element, pushing the (possibly rewritten) value
   * into {@code result}. Returns {@code true} when this or any previous
   * element was changed, so callers can decide whether to keep the rebuilt
   * collection or the original one.
   */
  @SuppressWarnings("unchecked")
  protected boolean convertSingleValue(final Object item, ResultCallback result, boolean updated) {
    if (item == null) {
      // BUGFIX: the original returned false here, which both dropped the null
      // element from the rebuilt collection and reset the accumulated
      // "updated" flag, discarding conversions of earlier elements.
      result.add(null);
      return updated;
    }

    if (item instanceof OIdentifiable) {
      final ValuesConverter<OIdentifiable> converter = (ValuesConverter<OIdentifiable>) ConvertersFactory.INSTANCE
          .getConverter(item);

      final OIdentifiable newValue = converter.convert((OIdentifiable) item);
      result.add(newValue);

      if (!newValue.equals(item))
        updated = true;
    } else {
      // BUGFIX: the original called getConverter(item.getClass()); a Class
      // object never matches the instanceof checks in ConvertersFactory, so
      // nested collections were never converted.
      final ValuesConverter valuesConverter = ConvertersFactory.INSTANCE.getConverter(item);
      if (valuesConverter == null)
        result.add(item);
      else {
        final Object newValue = valuesConverter.convert(item);
        if (newValue != item)
          updated = true;
        result.add(newValue);
      }
    }

    return updated;
  }
}
private static final class SetConverter extends AbstractCollectionConverter<Set> {
  public static final SetConverter INSTANCE = new SetConverter();

  /** Rewrites links inside a set; returns the original set when nothing changed. */
  @Override
  public Set convert(Set value) {
    // Preserve the concrete flavour: lazy RID sets stay lazy (auto-convert off
    // while rebuilding), everything else becomes a plain HashSet.
    final Set rebuilt;
    if (value instanceof OMVRBTreeRIDSet) {
      final OMVRBTreeRIDSet lazySet = new OMVRBTreeRIDSet();
      lazySet.setAutoConvertToRecord(false);
      rebuilt = lazySet;
    } else {
      rebuilt = new HashSet();
    }

    final ResultCallback collector = new ResultCallback() {
      @Override
      public void add(Object item) {
        rebuilt.add(item);
      }
    };

    boolean changed = false;
    for (final Object element : value)
      changed = convertSingleValue(element, collector, changed);

    return changed ? rebuilt : value;
  }
}
private static final class ListConverter extends AbstractCollectionConverter<List> {
  public static final ListConverter INSTANCE = new ListConverter();

  /** Rewrites links inside a list; returns the original list when nothing changed. */
  @Override
  public List convert(List value) {
    final List rebuilt = new ArrayList();
    final ResultCallback collector = new ResultCallback() {
      @Override
      public void add(Object item) {
        rebuilt.add(item);
      }
    };

    boolean changed = false;
    for (final Object element : value)
      changed = convertSingleValue(element, collector, changed);

    return changed ? rebuilt : value;
  }
}
private static final class RidBagConverter extends AbstractCollectionConverter<ORidBag> {
  public static final RidBagConverter INSTANCE = new RidBagConverter();

  /** Rewrites links inside a RID bag; returns the original bag when nothing changed. */
  @Override
  public ORidBag convert(ORidBag value) {
    final ORidBag rebuilt = new ORidBag();
    final ResultCallback collector = new ResultCallback() {
      @Override
      public void add(Object item) {
        rebuilt.add((OIdentifiable) item);
      }
    };

    boolean changed = false;
    for (final OIdentifiable link : value)
      changed = convertSingleValue(link, collector, changed);

    return changed ? rebuilt : value;
  }
}
private static final class MapConverter extends AbstractCollectionConverter<Map> {
  public static final MapConverter INSTANCE = new MapConverter();

  /**
   * Rewrites link values of a map (keys are left untouched); returns the
   * original map when nothing changed.
   */
  @Override
  public Map convert(Map value) {
    final HashMap rebuilt = new HashMap();

    // Callback remembers the current key so converted values land under it.
    final class MapResultCallback implements ResultCallback {
      private Object key;

      @Override
      public void add(Object item) {
        rebuilt.put(key, item);
      }

      public void setKey(Object key) {
        this.key = key;
      }
    }

    final MapResultCallback collector = new MapResultCallback();
    boolean changed = false;
    for (Map.Entry entry : (Iterable<Map.Entry>) value.entrySet()) {
      collector.setKey(entry.getKey());
      changed = convertSingleValue(entry.getValue(), collector, changed);
    }

    return changed ? rebuilt : value;
  }
}
// Maps a single link to its post-import identity via the export/import table.
private static final class LinkConverter implements ValuesConverter<OIdentifiable> {
public static final LinkConverter INSTANCE = new LinkConverter();
// NOTE(review): must be injected via setExportImportHashTable() before
// convert() sees a persistent RID, otherwise convert() throws NPE — confirm
// callers always set it first.
private OIndex<OIdentifiable> exportImportHashTable;
@Override
public OIdentifiable convert(OIdentifiable value) {
final ORID rid = value.getIdentity();
// Non-persistent (new) RIDs cannot appear in the mapping; pass through.
if (!rid.isPersistent())
return value;
// No mapping entry means the RID was unchanged by the import.
final OIdentifiable newRid = exportImportHashTable.get(rid);
if (newRid == null)
return value;
return newRid.getIdentity();
}
public void setExportImportHashTable(OIndex<OIdentifiable> exportImportHashTable) {
this.exportImportHashTable = exportImportHashTable;
}
}
/**
 * Opens an import from a file. The file is probed as GZIP first; on failure
 * the stream is reset (mark/reset on the buffered stream keeps the probed
 * bytes available) and read as plain text.
 */
public ODatabaseImport(final ODatabaseDocumentInternal database, final String iFileName, final OCommandOutputListener iListener)
    throws IOException {
  super(database, iFileName, iListener);

  InputStream inStream;
  final BufferedInputStream bf = new BufferedInputStream(new FileInputStream(fileName));
  bf.mark(1024);
  try {
    inStream = new GZIPInputStream(bf, 16384); // 16KB buffer
  } catch (Exception e) {
    // Not a GZIP file: rewind to the marked position and read it as-is.
    bf.reset();
    inStream = bf;
  }

  // NOTE: the original also built an OMurmurHash3HashFunction here, but the
  // instance was never used; removed as dead code.
  jsonReader = new OJSONReader(new InputStreamReader(inStream));
  database.declareIntent(new OIntentMassiveInsert());
}
/**
 * Opens an import from an already-open stream; the pseudo file name
 * "streaming" is used only for log messages.
 * NOTE(review): the reader uses the platform default charset — confirm the
 * exports being consumed were written with the same charset.
 */
public ODatabaseImport(final ODatabaseDocumentInternal database, final InputStream iStream, final OCommandOutputListener iListener)
    throws IOException {
  super(database, "streaming", iListener);
  jsonReader = new OJSONReader(new InputStreamReader(iStream));
  // Hint the storage that a massive insert is coming; released in close().
  database.declareIntent(new OIntentMassiveInsert());
}
// Narrows the return type so option calls can be chained on ODatabaseImport.
@Override
public ODatabaseImport setOptions(String iOptions) {
  super.setOptions(iOptions);
  return this;
}
/**
 * Runs the whole import: parses the export JSON section by section (info,
 * clusters, schema, records, indexes, manual indexes), then rebuilds the
 * stale automatic indexes and drops the temporary RID map.
 *
 * @return this, for chaining
 * @throws ODatabaseExportException wrapping any failure, with the parse
 *         position reported to the listener
 */
public ODatabaseImport importDatabase() {
  try {
    listener.onMessage("\nStarted import of database '" + database.getURL() + "' from " + fileName + "...");
    long time = System.currentTimeMillis();
    jsonReader.readNext(OJSONReader.BEGIN_OBJECT);
    // Records are written verbatim from the export: disable MVCC/validation.
    database.setMVCC(false);
    database.setValidationEnabled(false);
    database.setStatus(STATUS.IMPORTING);
    // Remember the automatic indexes so they can be rebuilt at the end.
    for (OIndex<?> index : database.getMetadata().getIndexManager().getIndexes()) {
      if (index.isAutomatic())
        indexesToRebuild.add(index.getName().toLowerCase());
    }
    if (!merge)
      removeDefaultNonSecurityClasses();
    // Dispatch on the top-level sections; the file's order drives the import.
    String tag;
    while (jsonReader.hasNext() && jsonReader.lastChar() != '}') {
      tag = jsonReader.readString(OJSONReader.FIELD_ASSIGNMENT);
      if (tag.equals("info"))
        importInfo();
      else if (tag.equals("clusters"))
        importClusters();
      else if (tag.equals("schema"))
        importSchema();
      else if (tag.equals("records"))
        importRecords();
      else if (tag.equals("indexes"))
        importIndexes();
      else if (tag.equals("manualIndexes"))
        importManualIndexes();
    }
    if (rebuildIndexes)
      rebuildIndexes();
    // Flush to disk and restore the regular database status.
    database.getStorage().synch();
    database.setStatus(STATUS.OPEN);
    if (isDeleteRIDMapping())
      removeExportImportRIDsMap();
    listener.onMessage("\n\nDatabase import completed in " + ((System.currentTimeMillis() - time)) + " ms");
  } catch (Exception e) {
    // Report the parse position to help locate the corrupted section.
    final StringWriter writer = new StringWriter();
    writer.append("Error on database import happened just before line " + jsonReader.getLineNumber() + ", column "
        + jsonReader.getColumnNumber() + "\n");
    final PrintWriter printWriter = new PrintWriter(writer);
    e.printStackTrace(printWriter);
    printWriter.flush();
    listener.onMessage(writer.toString());
    try {
      writer.close();
    } catch (IOException e1) {
      throw new ODatabaseExportException("Error on importing database '" + database.getName() + "' from file: " + fileName, e1);
    }
    throw new ODatabaseExportException("Error on importing database '" + database.getName() + "' from file: " + fileName, e);
  } finally {
    close();
  }
  return this;
}
/**
 * Rebuilds every automatic index collected before the import started,
 * skipping the ones that no longer exist in the imported database.
 */
public void rebuildIndexes() {
  database.getMetadata().getIndexManager().reload();
  final OIndexManagerProxy manager = database.getMetadata().getIndexManager();

  listener.onMessage("\nRebuild of stale indexes...");
  for (final String staleIndexName : indexesToRebuild) {
    if (manager.getIndex(staleIndexName) == null) {
      listener.onMessage("\nIndex " + staleIndexName + " is skipped because it is absent in imported DB.");
      continue;
    }

    listener.onMessage("\nStart rebuild index " + staleIndexName);
    database.command(new OCommandSQL("rebuild index " + staleIndexName)).execute();
    listener.onMessage("\nRebuild of index " + staleIndexName + " is completed.");
  }
  listener.onMessage("\nStale indexes were rebuilt...");
}
/** Drops the temporary RID-mapping index, if one was created. Returns this. */
public ODatabaseImport removeExportImportRIDsMap() {
  listener.onMessage("\nDeleting RID Mapping table...");

  if (exportImportHashTable != null) {
    database.command(new OCommandSQL("drop index " + EXPORT_IMPORT_MAP_NAME));
    exportImportHashTable = null;
  }

  listener.onMessage("OK\n");
  return this;
}
// Releases the massive-insert intent declared by the constructors.
public void close() {
  database.declareIntent(null);
}
/** @return whether links are rewritten to the new RIDs after import. */
public boolean isMigrateLinks() {
  return this.migrateLinks;
}
/** Enables or disables link migration after import. */
public void setMigrateLinks(final boolean migrateLinks) {
  this.migrateLinks = migrateLinks;
}
/** @return whether stale automatic indexes are rebuilt after import. */
public boolean isRebuildIndexes() {
  return this.rebuildIndexes;
}
/** Enables or disables rebuilding stale automatic indexes after import. */
public void setRebuildIndexes(final boolean rebuildIndexes) {
  this.rebuildIndexes = rebuildIndexes;
}
/** @return whether exported cluster IDs are preserved on import. */
public boolean isPreserveClusterIDs() {
  return this.preserveClusterIDs;
}
/** Enables or disables preserving the exported cluster IDs. */
public void setPreserveClusterIDs(final boolean preserveClusterIDs) {
  this.preserveClusterIDs = preserveClusterIDs;
}
/** @return whether the import merges into the existing database content. */
public boolean isMerge() {
  return this.merge;
}
/** Enables or disables merge mode. */
public void setMerge(final boolean merge) {
  this.merge = merge;
}
/** @return whether the temporary RID-mapping index is dropped after import. */
public boolean isDeleteRIDMapping() {
  return this.deleteRIDMapping;
}
/** Enables or disables dropping the temporary RID-mapping index after import. */
public void setDeleteRIDMapping(final boolean deleteRIDMapping) {
  this.deleteRIDMapping = deleteRIDMapping;
}
/**
 * Parses import-specific command-line flags; anything unrecognized is
 * delegated to the base class.
 */
@Override
protected void parseSetting(final String option, final List<String> items) {
  if (option.equalsIgnoreCase("-deleteRIDMapping")) {
    deleteRIDMapping = Boolean.parseBoolean(items.get(0));
  } else if (option.equalsIgnoreCase("-preserveClusterIDs")) {
    preserveClusterIDs = Boolean.parseBoolean(items.get(0));
  } else if (option.equalsIgnoreCase("-merge")) {
    merge = Boolean.parseBoolean(items.get(0));
  } else if (option.equalsIgnoreCase("-migrateLinks")) {
    migrateLinks = Boolean.parseBoolean(items.get(0));
  } else if (option.equalsIgnoreCase("-rebuildIndexes")) {
    rebuildIndexes = Boolean.parseBoolean(items.get(0));
  } else {
    super.parseSetting(option, items);
  }
}
// Drops the pre-v4 default clusters/classes so the imported layout matches the
// v4+ cluster numbering. Called only for exports with exporterVersion <= 4.
// The statement order here matters: clusters are shifted back before the
// manual-index cluster is recreated at the end of the import.
protected void removeDefaultClusters() {
  listener.onMessage("\nWARN: Exported database does not support manual index separation."
      + " Manual index cluster will be dropped.");
  // In v4 new cluster for manual indexes has been implemented. To keep database consistent we should shift back
  // all clusters and recreate cluster for manual indexes in the end.
  database.dropCluster(OMetadataDefault.CLUSTER_MANUAL_INDEX_NAME, true);
  final OSchema schema = database.getMetadata().getSchema();
  // Drop every built-in class so it can be recreated from the export.
  if (schema.existsClass(OUser.CLASS_NAME))
    schema.dropClass(OUser.CLASS_NAME);
  if (schema.existsClass(ORole.CLASS_NAME))
    schema.dropClass(ORole.CLASS_NAME);
  if (schema.existsClass(OSecurityShared.RESTRICTED_CLASSNAME))
    schema.dropClass(OSecurityShared.RESTRICTED_CLASSNAME);
  if (schema.existsClass(OFunction.CLASS_NAME))
    schema.dropClass(OFunction.CLASS_NAME);
  if (schema.existsClass(OMVRBTreeRIDProvider.PERSISTENT_CLASS_NAME))
    schema.dropClass(OMVRBTreeRIDProvider.PERSISTENT_CLASS_NAME);
  if (schema.existsClass(OClassTrigger.CLASSNAME))
    schema.dropClass(OClassTrigger.CLASSNAME);
  schema.save();
  // Recreate the default cluster from scratch so it gets a fresh id.
  database.dropCluster(OStorage.CLUSTER_DEFAULT_NAME, true);
  database.getStorage().setDefaultClusterId(database.addCluster(OStorage.CLUSTER_DEFAULT_NAME));
  // Starting from v4 schema has been moved to internal cluster.
  // Create a stub at #2:0 to prevent cluster position shifting.
  new ODocument().save(OStorage.CLUSTER_DEFAULT_NAME);
  database.getMetadata().getSecurity().create();
}
// Non-merge mode: drops every class except the security ones (ORole, OUser,
// OIdentity) so the import starts from a clean schema.
private void removeDefaultNonSecurityClasses() {
  listener.onMessage("\nNon merge mode (-merge=false): removing all default non security classes");
  OSchema schema = database.getMetadata().getSchema();
  Collection<OClass> classes = schema.getClasses();
  // Collect the droppable classes keyed by name.
  final Map<String, OClass> classesToDrop = new HashMap<String, OClass>();
  for (OClass dbClass : classes) {
    String className = dbClass.getName();
    if (!className.equalsIgnoreCase(ORole.CLASS_NAME) && !className.equalsIgnoreCase(OUser.CLASS_NAME)
        && !className.equalsIgnoreCase(OSecurityShared.IDENTITY_CLASSNAME)) {
      classesToDrop.put(className, dbClass);
    }
  }
  // Fixpoint: repeatedly drop only the classes nobody else inherits from, so
  // subclasses always go before their super-classes.
  // NOTE(review): a super-class cycle among the remaining classes would loop
  // forever here — presumably the schema forbids cycles; confirm.
  int removedClasses = 0;
  while (!classesToDrop.isEmpty()) {
    final AbstractList<String> classesReadyToDrop = new ArrayList<String>();
    for (String className : classesToDrop.keySet()) {
      boolean isSuperClass = false;
      for (OClass dbClass : classesToDrop.values()) {
        OClass parentClass = dbClass.getSuperClass();
        if (parentClass != null) {
          if (className.equalsIgnoreCase(parentClass.getName())) {
            isSuperClass = true;
            break;
          }
        }
      }
      if (!isSuperClass) {
        classesReadyToDrop.add(className);
      }
    }
    for (String className : classesReadyToDrop) {
      schema.dropClass(className);
      classesToDrop.remove(className);
      removedClasses++;
      listener.onMessage("\n- Class " + className + " was removed.");
    }
  }
  schema.save();
  schema.reload();
  listener.onMessage("\nRemoved " + removedClasses + " classes.");
}
// Parses the "info" section: exporter version plus the schema/index-manager
// record ids. Unknown fields are skipped; missing record ids fall back to the
// current storage configuration.
private void importInfo() throws IOException, ParseException {
  listener.onMessage("\nImporting database info...");
  jsonReader.readNext(OJSONReader.BEGIN_OBJECT);
  while (jsonReader.lastChar() != '}') {
    final String fieldName = jsonReader.readString(OJSONReader.FIELD_ASSIGNMENT);
    if (fieldName.equals("exporter-version"))
      exporterVersion = jsonReader.readInteger(OJSONReader.NEXT_IN_OBJECT);
    else if (fieldName.equals("schemaRecordId"))
      schemaRecordId = new ORecordId(jsonReader.readString(OJSONReader.NEXT_IN_OBJECT));
    else if (fieldName.equals("indexMgrRecordId"))
      indexMgrRecordId = new ORecordId(jsonReader.readString(OJSONReader.NEXT_IN_OBJECT));
    else
      // Skip any field this importer does not understand.
      jsonReader.readNext(OJSONReader.NEXT_IN_OBJECT);
  }
  jsonReader.readNext(OJSONReader.COMMA_SEPARATOR);
  // Default to the target database's own record ids when the export lacks them.
  if (schemaRecordId == null)
    schemaRecordId = new ORecordId(database.getStorage().getConfiguration().schemaRecordId);
  if (indexMgrRecordId == null)
    indexMgrRecordId = new ORecordId(database.getStorage().getConfiguration().indexMgrRecordId);
  listener.onMessage("OK");
}
/**
 * Imports the "manualIndexes" section. Each entry's exported RID is remapped
 * through the export/import hash table (when available) before being put into
 * the index. Entries of the internal RID-map index itself are skipped.
 */
private void importManualIndexes() throws IOException, ParseException {
  listener.onMessage("\nImporting manual index entries...");

  ODocument doc = new ODocument();

  OIndexManagerProxy indexManager = database.getMetadata().getIndexManager();
  // FORCE RELOADING
  indexManager.reload();

  int n = 0;
  do {
    jsonReader.readNext(OJSONReader.BEGIN_OBJECT);

    jsonReader.readString(OJSONReader.FIELD_ASSIGNMENT);
    final String indexName = jsonReader.readString(OJSONReader.NEXT_IN_ARRAY);

    if (indexName == null || indexName.length() == 0)
      return;

    listener.onMessage("\n- Index '" + indexName + "'...");

    final OIndex<?> index = indexManager.getIndex(indexName);

    long tot = 0;

    jsonReader.readNext(OJSONReader.BEGIN_COLLECTION);

    do {
      final String value = jsonReader.readString(OJSONReader.NEXT_IN_ARRAY).trim();
      // BUGFIX: also require index != null — the original dereferenced a
      // missing index here and NPEd before reaching the "ERR" report below.
      if (index != null && !value.isEmpty() && !indexName.equalsIgnoreCase(EXPORT_IMPORT_MAP_NAME)) {
        doc = (ODocument) ORecordSerializerJSON.INSTANCE.fromString(value, doc, null);
        doc.setLazyLoad(false);

        final OIdentifiable oldRid = doc.<OIdentifiable> field("rid");
        final OIdentifiable newRid;
        // NOTE(review): assumes every entry carries a non-null "binary" field — confirm.
        if (!doc.<Boolean> field("binary")) {
          if (exportImportHashTable != null)
            newRid = exportImportHashTable.get(oldRid);
          else
            newRid = oldRid;

          index.put(doc.field("key"), newRid != null ? newRid.getIdentity() : oldRid.getIdentity());
        } else {
          // Binary keys are deserialized with the index's own serializer.
          ORuntimeKeyIndexDefinition<?> runtimeKeyIndexDefinition = (ORuntimeKeyIndexDefinition<?>) index.getDefinition();
          OBinarySerializer<?> binarySerializer = runtimeKeyIndexDefinition.getSerializer();

          if (exportImportHashTable != null)
            newRid = exportImportHashTable.get(doc.<OIdentifiable> field("rid")).getIdentity();
          else
            newRid = doc.<OIdentifiable> field("rid");

          index.put(binarySerializer.deserialize(doc.<byte[]> field("key"), 0), newRid != null ? newRid : oldRid);
        }
        tot++;
      }
    } while (jsonReader.lastChar() == ',');

    if (index != null) {
      listener.onMessage("OK (" + tot + " entries)");
      n++;
    } else
      listener.onMessage("ERR, the index wasn't found in configuration");

    jsonReader.readNext(OJSONReader.END_OBJECT);
    jsonReader.readNext(OJSONReader.NEXT_IN_ARRAY);
  } while (jsonReader.lastChar() == ',');

  listener.onMessage("\nDone. Imported " + String.format("%,d", n) + " indexes.");
  jsonReader.readNext(OJSONReader.NEXT_IN_OBJECT);
}
/**
 * Imports the "schema" section: classes, their cluster assignments, their
 * properties and the inheritance tree. Super-classes and linked classes are
 * resolved in a second pass because they may reference classes defined later
 * in the file. Failures are logged and reported, not rethrown.
 */
private void importSchema() throws IOException, ParseException {
  listener.onMessage("\nImporting database schema...");

  jsonReader.readNext(OJSONReader.BEGIN_OBJECT);
  @SuppressWarnings("unused")
  int schemaVersion = jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT).checkContent("\"version\"")
      .readNumber(OJSONReader.ANY_NUMBER, true);
  jsonReader.readNext(OJSONReader.COMMA_SEPARATOR);
  jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT);
  // This can be removed after the M1 expires: the section is parsed but ignored.
  if (jsonReader.getValue().equals("\"globalProperties\"")) {
    jsonReader.readNext(OJSONReader.BEGIN_COLLECTION);
    do {
      jsonReader.readNext(OJSONReader.BEGIN_OBJECT);
      jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT).checkContent("\"name\"");
      String name = jsonReader.readString(OJSONReader.NEXT_IN_OBJECT);
      jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT).checkContent("\"global-id\"");
      String id = jsonReader.readString(OJSONReader.NEXT_IN_OBJECT);
      jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT).checkContent("\"type\"");
      String type = jsonReader.readString(OJSONReader.NEXT_IN_OBJECT);
      // getDatabase().getMetadata().getSchema().createGlobalProperty(name, OType.valueOf(type), Integer.valueOf(id));
      jsonReader.readNext(OJSONReader.NEXT_IN_ARRAY);
    } while (jsonReader.lastChar() == ',');
    jsonReader.readNext(OJSONReader.COMMA_SEPARATOR);
    jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT);
  }

  jsonReader.checkContent("\"classes\"").readNext(OJSONReader.BEGIN_COLLECTION);

  long classImported = 0;

  try {
    do {
      jsonReader.readNext(OJSONReader.BEGIN_OBJECT);

      final String className = jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT).checkContent("\"name\"")
          .readString(OJSONReader.COMMA_SEPARATOR);

      String next = jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT).getValue();
      if (next.equals("\"id\"")) {
        // @COMPATIBILITY 1.0rc4 IGNORE THE ID
        next = jsonReader.readString(OJSONReader.COMMA_SEPARATOR);
        next = jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT).getValue();
      }

      final int classDefClusterId;
      if (jsonReader.isContent("\"default-cluster-id\"")) {
        next = jsonReader.readString(OJSONReader.NEXT_IN_OBJECT);
        classDefClusterId = Integer.parseInt(next);
      } else
        classDefClusterId = database.getDefaultClusterId();

      String classClusterIds = jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT).checkContent("\"cluster-ids\"")
          .readString(OJSONReader.END_COLLECTION, true).trim();

      jsonReader.readNext(OJSONReader.NEXT_IN_OBJECT);

      // Reuse an existing class when present (merge mode), otherwise create it.
      OClassImpl cls = (OClassImpl) database.getMetadata().getSchema().getClass(className);
      if (cls != null) {
        if (cls.getDefaultClusterId() != classDefClusterId)
          cls.setDefaultClusterId(classDefClusterId);
      } else
        cls = (OClassImpl) database.getMetadata().getSchema().createClass(className, classDefClusterId);

      if (classClusterIds != null) {
        // REMOVE BRACES
        classClusterIds = classClusterIds.substring(1, classClusterIds.length() - 1);

        // ASSIGN OTHER CLUSTER IDS
        for (int i : OStringSerializerHelper.splitIntArray(classClusterIds)) {
          if (i != -1)
            cls.addClusterId(i);
        }
      }

      String value;
      while (jsonReader.lastChar() == ',') {
        jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT);
        value = jsonReader.getValue();

        if (value.equals("\"strictMode\"")) {
          cls.setStrictMode(jsonReader.readBoolean(OJSONReader.NEXT_IN_OBJECT));
        } else if (value.equals("\"abstract\"")) {
          cls.setAbstract(jsonReader.readBoolean(OJSONReader.NEXT_IN_OBJECT));
        } else if (value.equals("\"oversize\"")) {
          final String oversize = jsonReader.readString(OJSONReader.NEXT_IN_OBJECT);
          cls.setOverSize(Float.parseFloat(oversize));
          // NOTE: the original had a second "strictMode" branch here; it was
          // unreachable (shadowed by the first check above) and has been removed.
        } else if (value.equals("\"short-name\"")) {
          final String shortName = jsonReader.readString(OJSONReader.NEXT_IN_OBJECT);
          cls.setShortName(shortName);
        } else if (value.equals("\"super-class\"")) {
          // Deferred: the super-class may not have been imported yet.
          final String classSuper = jsonReader.readString(OJSONReader.NEXT_IN_OBJECT);
          superClasses.put(cls, classSuper);
        } else if (value.equals("\"properties\"")) {
          // GET PROPERTIES
          jsonReader.readNext(OJSONReader.BEGIN_COLLECTION);

          while (jsonReader.lastChar() != ']') {
            importProperty(cls);

            if (jsonReader.lastChar() == '}')
              jsonReader.readNext(OJSONReader.NEXT_IN_ARRAY);
          }
          jsonReader.readNext(OJSONReader.END_OBJECT);
        } else if (value.equals("\"cluster-selection\"")) {
          // @SINCE 1.7
          cls.setClusterSelection(jsonReader.readString(OJSONReader.NEXT_IN_OBJECT));
        }
      }

      classImported++;

      jsonReader.readNext(OJSONReader.NEXT_IN_ARRAY);
    } while (jsonReader.lastChar() == ',');

    // REBUILD ALL THE INHERITANCE
    for (Map.Entry<OClass, String> entry : superClasses.entrySet())
      entry.getKey().setSuperClass(database.getMetadata().getSchema().getClass(entry.getValue()));

    // SET ALL THE LINKED CLASSES
    for (Map.Entry<OPropertyImpl, String> entry : linkedClasses.entrySet()) {
      entry.getKey().setLinkedClass(database.getMetadata().getSchema().getClass(entry.getValue()));
    }

    database.getMetadata().getSchema().save();

    if (exporterVersion < 11) {
      // Drop the legacy ORole.rules property carried by old exports.
      // NOTE(review): assumes ORole exists in the imported schema — confirm for v<11 exports.
      OClass role = database.getMetadata().getSchema().getClass("ORole");
      role.dropProperty("rules");
    }

    listener.onMessage("OK (" + classImported + " classes)");
    schemaImported = true;
    jsonReader.readNext(OJSONReader.END_OBJECT);
    jsonReader.readNext(OJSONReader.COMMA_SEPARATOR);
  } catch (Exception e) {
    OLogManager.instance().error(this, "Error on importing schema", e);
    listener.onMessage("ERROR (" + classImported + " entries): " + e);
  }
}
/**
 * Imports one property definition of {@code iClass}. Linked classes are only
 * recorded in {@code linkedClasses} and resolved after the whole schema has
 * been read (they may reference classes defined later in the file).
 */
private void importProperty(final OClass iClass) throws IOException, ParseException {
  jsonReader.readNext(OJSONReader.NEXT_OBJ_IN_ARRAY);

  if (jsonReader.lastChar() == ']')
    return;

  final String propName = jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT).checkContent("\"name\"")
      .readString(OJSONReader.COMMA_SEPARATOR);

  String next = jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT).getValue();
  if (next.equals("\"id\"")) {
    // @COMPATIBILITY 1.0rc4 IGNORE THE ID
    next = jsonReader.readString(OJSONReader.COMMA_SEPARATOR);
    next = jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT).getValue();
  }
  next = jsonReader.checkContent("\"type\"").readString(OJSONReader.NEXT_IN_OBJECT);

  final OType type = OType.valueOf(next);

  String attrib;
  String value = null;

  String min = null;
  String max = null;
  String linkedClass = null;
  OType linkedType = null;
  boolean mandatory = false;
  boolean readonly = false;
  boolean notNull = false;
  String collate = null;
  Map<String, String> customFields = null;

  // Collect the optional attributes until the property object closes.
  while (jsonReader.lastChar() == ',') {
    jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT);

    attrib = jsonReader.getValue();
    if (!attrib.equals("\"customFields\""))
      value = jsonReader.readString(OJSONReader.NEXT_IN_OBJECT);

    if (attrib.equals("\"min\""))
      min = value;
    else if (attrib.equals("\"max\""))
      max = value;
    else if (attrib.equals("\"linked-class\""))
      linkedClass = value;
    else if (attrib.equals("\"mandatory\""))
      mandatory = Boolean.parseBoolean(value);
    else if (attrib.equals("\"readonly\""))
      readonly = Boolean.parseBoolean(value);
    else if (attrib.equals("\"not-null\""))
      notNull = Boolean.parseBoolean(value);
    else if (attrib.equals("\"linked-type\""))
      linkedType = OType.valueOf(value);
    else if (attrib.equals("\"collate\""))
      collate = value;
    else if (attrib.equals("\"customFields\""))
      customFields = importCustomFields();
  }

  OPropertyImpl prop = (OPropertyImpl) iClass.getProperty(propName);
  if (prop == null) {
    // CREATE IT
    prop = (OPropertyImpl) iClass.createProperty(propName, type);
  }
  prop.setMandatory(mandatory);
  prop.setReadonly(readonly);
  prop.setNotNull(notNull);

  if (min != null)
    prop.setMin(min);
  if (max != null)
    prop.setMax(max);
  if (linkedClass != null)
    linkedClasses.put(prop, linkedClass);
  if (linkedType != null)
    prop.setLinkedType(linkedType);
  if (collate != null)
    // BUGFIX: the original passed "value" (the most recently parsed attribute),
    // which only equals the collate name when "collate" happened to be the last
    // attribute in the export.
    prop.setCollate(collate);

  if (customFields != null) {
    for (Map.Entry<String, String> entry : customFields.entrySet()) {
      prop.setCustom(entry.getKey(), entry.getValue());
    }
  }
}
/**
 * Reads the nested "customFields" JSON object and returns its entries as a map.
 *
 * @return key/value pairs of the custom fields (possibly empty, never null)
 */
private Map<String, String> importCustomFields() throws ParseException, IOException {
  final Map<String, String> customFields = new HashMap<String, String>();

  jsonReader.readNext(OJSONReader.BEGIN_OBJECT);

  // consume "name": "value" pairs until the closing brace of the object
  while (jsonReader.lastChar() != '}') {
    final String fieldName = jsonReader.readString(OJSONReader.FIELD_ASSIGNMENT);
    final String fieldValue = jsonReader.readString(OJSONReader.NEXT_IN_OBJECT);
    customFields.put(fieldName, fieldValue);
  }

  // advance past the closing brace of the customFields object
  jsonReader.readString(OJSONReader.NEXT_IN_OBJECT);

  return customFields;
}
/**
 * Imports all cluster definitions from the export stream, creating or migrating
 * clusters in the target database as needed, and rebuilds indexes whose clusters
 * were truncated during the process.
 *
 * @return the number of clusters imported
 * @throws ParseException when the JSON does not match the expected export layout
 * @throws IOException    on read errors from the underlying stream
 */
private long importClusters() throws ParseException, IOException {
  listener.onMessage("\nImporting clusters...");

  long total = 0;

  jsonReader.readNext(OJSONReader.BEGIN_COLLECTION);

  // temporarily disable full checkpoints per cluster creation for paginated
  // storage; the previous setting is restored at the end of this method
  boolean makeFullCheckPointAfterClusterCreation = false;
  if (database.getStorage() instanceof OLocalPaginatedStorage) {
    makeFullCheckPointAfterClusterCreation = ((OLocalPaginatedStorage) database.getStorage())
        .isMakeFullCheckPointAfterClusterCreate();

    ((OLocalPaginatedStorage) database.getStorage()).disableFullCheckPointAfterClusterCreate();
  }

  // old export formats (<= 4) carry their own default clusters; drop ours and
  // recreate the manual-index cluster afterwards
  boolean recreateManualIndex = false;
  if (exporterVersion <= 4) {
    removeDefaultClusters();
    recreateManualIndex = true;
  }

  // names of indexes that must be rebuilt because their clusters were truncated
  final Set<String> indexesToRebuild = new HashSet<String>();

  @SuppressWarnings("unused")
  ORecordId rid = null;

  while (jsonReader.lastChar() != ']') {
    jsonReader.readNext(OJSONReader.BEGIN_OBJECT);

    String name = jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT).checkContent("\"name\"")
        .readString(OJSONReader.COMMA_SEPARATOR);

    if (name.length() == 0)
      name = null;

    if (name != null)
      // CHECK IF THE CLUSTER IS INCLUDED
      if (includeClusters != null) {
        if (!includeClusters.contains(name)) {
          // skip the rest of this cluster object
          jsonReader.readNext(OJSONReader.NEXT_IN_ARRAY);
          continue;
        }
      } else if (excludeClusters != null) {
        if (excludeClusters.contains(name)) {
          jsonReader.readNext(OJSONReader.NEXT_IN_ARRAY);
          continue;
        }
      }

    int id;
    if (exporterVersion < 9) {
      id = jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT).checkContent("\"id\"").readInteger(OJSONReader.COMMA_SEPARATOR);
      // pre-v9 exports carry a "type" field here; it is read but not used
      String type = jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT).checkContent("\"type\"")
          .readString(OJSONReader.NEXT_IN_OBJECT);
    } else
      id = jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT).checkContent("\"id\"").readInteger(OJSONReader.NEXT_IN_OBJECT);

    // optional "type" and "rid" fields, present only when another field follows
    String type;
    if (jsonReader.lastChar() == ',')
      type = jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT).checkContent("\"type\"").readString(OJSONReader.NEXT_IN_OBJECT);
    else
      type = "PHYSICAL";

    if (jsonReader.lastChar() == ',') {
      rid = new ORecordId(jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT).checkContent("\"rid\"")
          .readString(OJSONReader.NEXT_IN_OBJECT));
    } else
      rid = null;

    listener.onMessage("\n- Creating cluster " + (name != null ? "'" + name + "'" : "NULL") + "...");

    // -1 means the cluster does not exist in the target database yet
    int clusterId = name != null ? database.getClusterIdByName(name) : -1;
    if (clusterId == -1) {
      // CREATE IT
      if (!preserveClusterIDs)
        clusterId = database.addCluster(name);
      else {
        clusterId = database.addCluster(name, id, null);
        assert clusterId == id;
      }
    }

    if (clusterId != id) {
      if (!preserveClusterIDs) {
        // NOTE(review): the checks below use clusterId - 1 while the drop/create
        // path uses clusterId itself — confirm this off-by-one is intentional
        if (database.countClusterElements(clusterId - 1) == 0) {
          listener.onMessage("Found previous version: migrating old clusters...");
          database.dropCluster(name, true);
          database.addCluster("temp_" + clusterId, null);
          clusterId = database.addCluster(name);
        } else
          throw new OConfigurationException("Imported cluster '" + name + "' has id=" + clusterId
              + " different from the original: " + id + ". To continue the import drop the cluster '"
              + database.getClusterNameById(clusterId - 1) + "' that has " + database.countClusterElements(clusterId - 1)
              + " records");
      } else {
        // IDs must be preserved: recreate the cluster under the original id
        database.dropCluster(clusterId, false);
        database.addCluster(name, id, null);
      }
    }

    // truncate user clusters (not the internal/index/manual-index ones) unless
    // merging, and remember which indexes cover them so they can be rebuilt
    if (name != null
        && !(name.equalsIgnoreCase(OMetadataDefault.CLUSTER_MANUAL_INDEX_NAME)
            || name.equalsIgnoreCase(OMetadataDefault.CLUSTER_INTERNAL_NAME) || name
            .equalsIgnoreCase(OMetadataDefault.CLUSTER_INDEX_NAME))) {

      if (!merge)
        database.command(new OCommandSQL("truncate cluster " + name)).execute();

      for (OIndex existingIndex : database.getMetadata().getIndexManager().getIndexes()) {
        if (existingIndex.getClusters().contains(name)) {
          indexesToRebuild.add(existingIndex.getName());
        }
      }
    }

    listener.onMessage("OK, assigned id=" + clusterId);

    total++;

    jsonReader.readNext(OJSONReader.NEXT_IN_ARRAY);
  }
  jsonReader.readNext(OJSONReader.COMMA_SEPARATOR);

  listener.onMessage("\nRebuilding indexes of truncated clusters ...");

  for (final String indexName : indexesToRebuild)
    database.getMetadata().getIndexManager().getIndex(indexName).rebuild(new OProgressListener() {
      private long last = 0;

      @Override
      public void onBegin(Object iTask, long iTotal, Object metadata) {
        listener.onMessage("\n- Cluster content was updated: rebuilding index '" + indexName + "'...");
      }

      @Override
      public boolean onProgress(Object iTask, long iCounter, float iPercent) {
        // report progress at most once per second
        final long now = System.currentTimeMillis();
        if (last == 0)
          last = now;
        else if (now - last > 1000) {
          listener.onMessage(String.format("\nIndex '%s' is rebuilding (%.2f/100)", indexName, iPercent));
          last = now;
        }
        return true;
      }

      @Override
      public void onCompletition(Object iTask, boolean iSucceed) {
        listener.onMessage(" Index " + indexName + " was successfully rebuilt.");
      }
    });
  listener.onMessage("\nDone " + indexesToRebuild.size() + " indexes were rebuilt.");

  if (recreateManualIndex) {
    database.addCluster(OMetadataDefault.CLUSTER_MANUAL_INDEX_NAME);
    database.getMetadata().getIndexManager().create();

    listener.onMessage("\nManual index cluster was recreated.");
  }

  listener.onMessage("\nDone. Imported " + total + " clusters");

  // ensure the index-manager record exists; create and register it when missing
  if (database.load(new ORecordId(database.getStorage().getConfiguration().indexMgrRecordId)) == null) {
    ODocument indexDocument = new ODocument();
    indexDocument.save(OMetadataDefault.CLUSTER_INTERNAL_NAME);

    database.getStorage().getConfiguration().indexMgrRecordId = indexDocument.getIdentity().toString();
    database.getStorage().getConfiguration().update();
  }

  // restore the checkpoint behaviour saved at the top of this method
  if (database.getStorage() instanceof OLocalPaginatedStorage && makeFullCheckPointAfterClusterCreation)
    ((OLocalPaginatedStorage) database.getStorage()).enableFullCheckPointAfterClusterCreate();

  return total;
}
/**
 * Imports all records from the export stream, logging throughput once per lap,
 * and optionally migrates links afterwards.
 *
 * @return the number of records actually imported
 * @throws Exception propagated from record deserialization/save
 */
@SuppressWarnings("unchecked")
private long importRecords() throws Exception {
  // (re)create the dictionary index that maps old RIDs to new RIDs
  database.getMetadata().getIndexManager().dropIndex(EXPORT_IMPORT_MAP_NAME);
  exportImportHashTable = (OIndex<OIdentifiable>) database
      .getMetadata()
      .getIndexManager()
      .createIndex(EXPORT_IMPORT_MAP_NAME, OClass.INDEX_TYPE.DICTIONARY_HASH_INDEX.toString(),
          new OSimpleKeyIndexDefinition(OType.LINK), null, null, null);

  jsonReader.readNext(OJSONReader.BEGIN_COLLECTION);

  long totalRecords = 0;

  listener.onMessage("\n\nImporting records...");

  ORID rid;
  ORID lastRid = new ORecordId();
  final long begin = System.currentTimeMillis();
  // per-lap counters, reset each time a progress message is emitted
  long lastLapRecords = 0;
  long last = begin;
  Set<String> involvedClusters = new HashSet<String>();

  while (jsonReader.lastChar() != ']') {
    rid = importRecord();

    if (rid != null) {
      ++lastLapRecords;
      ++totalRecords;

      if (rid.getClusterId() != lastRid.getClusterId())
        involvedClusters.add(database.getClusterNameById(rid.getClusterId()));

      final long now = System.currentTimeMillis();
      if (now - last > IMPORT_RECORD_DUMP_LAP_EVERY_MS) {
        final List<String> sortedClusters = new ArrayList<String>(involvedClusters);
        Collections.sort(sortedClusters);

        listener.onMessage(String.format(
            "\n- Imported %,d records into clusters: %s. Total records imported so far: %,d (%,.2f/sec)", lastLapRecords,
            sortedClusters, totalRecords, (float) lastLapRecords * 1000 / (float) IMPORT_RECORD_DUMP_LAP_EVERY_MS));

        // RESET LAP COUNTERS
        last = now;
        lastLapRecords = 0;
        involvedClusters.clear();
      }

      lastRid = rid;
    }

    record = null;
  }

  if (migrateLinks)
    migrateLinksInImportedDocuments();

  listener.onMessage(String.format("\n\nDone. Imported %,d records in %,.2f secs\n", totalRecords,
      ((float) (System.currentTimeMillis() - begin)) / 1000));

  jsonReader.readNext(OJSONReader.COMMA_SEPARATOR);

  // FIX: previously returned a separate `total` counter that was never
  // incremented, so the method always returned 0
  return totalRecords;
}
/**
 * Imports the next record from the JSON stream into the database.
 *
 * @return the identity of the imported record, or null when the record was
 *         skipped (schema record, excluded cluster, internal/index records)
 * @throws Exception rethrown from deserialization or save, after logging the
 *                   source line/column for diagnostics
 */
private ORID importRecord() throws Exception {
  String value = jsonReader.readString(OJSONReader.END_OBJECT, true);

  // JUMP EMPTY RECORDS
  while (!value.isEmpty() && value.charAt(0) != '{') {
    value = value.substring(1);
  }

  record = null;
  try {
    record = ORecordSerializerJSON.INSTANCE.fromString(value, record, null);

    if (schemaImported && record.getIdentity().equals(schemaRecordId)) {
      // JUMP THE SCHEMA
      return null;
    }

    // CHECK IF THE CLUSTER IS INCLUDED
    if (includeClusters != null) {
      if (!includeClusters.contains(database.getClusterNameById(record.getIdentity().getClusterId()))) {
        // NOTE(review): this skip path advances the reader here AND again in the
        // finally block, unlike the excludeClusters path below — confirm intended
        jsonReader.readNext(OJSONReader.NEXT_IN_ARRAY);
        return null;
      }
    } else if (excludeClusters != null) {
      if (excludeClusters.contains(database.getClusterNameById(record.getIdentity().getClusterId())))
        return null;
    }

    if (record.getIdentity().getClusterId() == 0 && record.getIdentity().getClusterPosition() == 1)
      // JUMP INTERNAL RECORDS
      return null;

    if (exporterVersion >= 3) {
      int oridsId = database.getClusterIdByName(OMVRBTreeRIDProvider.PERSISTENT_CLASS_NAME);
      int indexId = database.getClusterIdByName(OMetadataDefault.CLUSTER_INDEX_NAME);

      if (record.getIdentity().getClusterId() == indexId || record.getIdentity().getClusterId() == oridsId)
        // JUMP INDEX RECORDS
        return null;
    }

    final int manualIndexCluster = database.getClusterIdByName(OMetadataDefault.CLUSTER_MANUAL_INDEX_NAME);
    final int internalCluster = database.getClusterIdByName(OMetadataDefault.CLUSTER_INTERNAL_NAME);
    final int indexCluster = database.getClusterIdByName(OMetadataDefault.CLUSTER_INDEX_NAME);

    if (exporterVersion >= 4) {
      if (record.getIdentity().getClusterId() == manualIndexCluster)
        // JUMP INDEX RECORDS
        return null;
    }

    if (record.getIdentity().equals(indexMgrRecordId))
      return null;

    final ORID rid = record.getIdentity();

    final int clusterId = rid.getClusterId();

    if ((clusterId != manualIndexCluster && clusterId != internalCluster && clusterId != indexCluster)) {
      // reset version/identity so the record is saved as a brand new one
      record.getRecordVersion().copyFrom(OVersionFactory.instance().createVersion());
      record.setDirty();
      ORecordInternal.setIdentity(record, new ORecordId());

      if (!preserveRids && record instanceof ODocument && ODocumentInternal.getImmutableSchemaClass(((ODocument) record)) != null)
        record.save();
      else
        record.save(database.getClusterNameById(clusterId));

      if (!rid.equals(record.getIdentity()))
        // SAVE IT ONLY IF DIFFERENT
        exportImportHashTable.put(rid, record.getIdentity());

      // NOTE(review): looks like leftover debug code — re-deserializes the source
      // JSON only when the record happened to land at the hard-coded RID #37:8;
      // verify whether this can be removed
      if (record.getIdentity().equals(new ORecordId(37, 8))) {
        record = ORecordSerializerJSON.INSTANCE.fromString(value, record, null);
      }
    }

  } catch (Exception t) {
    if (record != null)
      OLogManager.instance().error(
          this,
          "Error importing record " + record.getIdentity() + ". Source line " + jsonReader.getLineNumber() + ", column "
              + jsonReader.getColumnNumber());
    else
      OLogManager.instance().error(this,
          "Error importing record. Source line " + jsonReader.getLineNumber() + ", column " + jsonReader.getColumnNumber());

    throw t;
  } finally {
    jsonReader.readNext(OJSONReader.NEXT_IN_ARRAY);
  }

  // NOTE(review): if deserialization returned null without throwing, this would
  // NPE — confirm fromString never returns null for a non-empty value
  return record.getIdentity();
}
/**
 * Imports all index definitions from the export stream and recreates them in the
 * target database. The internal export/import mapping index is never recreated.
 *
 * @throws IOException    on read errors from the underlying stream
 * @throws ParseException when the JSON does not match the expected export layout
 */
private void importIndexes() throws IOException, ParseException {
  listener.onMessage("\n\nImporting indexes ...");

  final OIndexManagerProxy indexManager = database.getMetadata().getIndexManager();
  indexManager.reload();

  jsonReader.readNext(OJSONReader.BEGIN_COLLECTION);

  int n = 0;
  while (jsonReader.lastChar() != ']') {
    jsonReader.readNext(OJSONReader.BEGIN_OBJECT);

    // fields collected for the current index object
    String blueprintsIndexClass = null;
    String indexName = null;
    String indexType = null;
    Set<String> clustersToIndex = new HashSet<String>();
    OIndexDefinition indexDefinition = null;
    ODocument metadata = null;

    // scan the attributes of this index object in whatever order they appear
    while (jsonReader.lastChar() != '}') {
      final String fieldName = jsonReader.readString(OJSONReader.FIELD_ASSIGNMENT);
      if (fieldName.equals("name")) {
        indexName = jsonReader.readString(OJSONReader.NEXT_IN_OBJECT);
      } else if (fieldName.equals("type")) {
        indexType = jsonReader.readString(OJSONReader.NEXT_IN_OBJECT);
      } else if (fieldName.equals("clustersToIndex")) {
        clustersToIndex = importClustersToIndex();
      } else if (fieldName.equals("definition")) {
        indexDefinition = importIndexDefinition();
        jsonReader.readNext(OJSONReader.NEXT_IN_OBJECT);
      } else if (fieldName.equals("metadata")) {
        final String jsonMetadata = jsonReader.readString(OJSONReader.END_OBJECT, true);
        metadata = new ODocument().fromJSON(jsonMetadata);
        jsonReader.readNext(OJSONReader.NEXT_IN_OBJECT);
      } else if (fieldName.equals("blueprintsIndexClass")) {
        blueprintsIndexClass = jsonReader.readString(OJSONReader.NEXT_IN_OBJECT);
      }
    }

    if (indexName == null) {
      throw new IllegalArgumentException("Index name is missing");
    }

    jsonReader.readNext(OJSONReader.NEXT_IN_ARRAY);

    // drop automatically created indexes
    if (!indexName.equalsIgnoreCase(EXPORT_IMPORT_MAP_NAME)) {
      listener.onMessage("\n- Index '" + indexName + "'...");

      indexManager.dropIndex(indexName);
      indexesToRebuild.remove(indexName.toLowerCase());

      // resolve cluster names to ids for the index creation call
      final int[] clusterIdsToIndex = new int[clustersToIndex.size()];
      int pos = 0;
      for (final String clusterName : clustersToIndex) {
        clusterIdsToIndex[pos++] = database.getClusterIdByName(clusterName);
      }

      final OIndex index = indexManager.createIndex(indexName, indexType, indexDefinition, clusterIdsToIndex, null, metadata);
      if (blueprintsIndexClass != null) {
        final ODocument configuration = index.getConfiguration();
        configuration.field("blueprintsIndexClass", blueprintsIndexClass);
        indexManager.save();
      }
      n++;
      listener.onMessage("OK");
    }
  }

  listener.onMessage("\nDone. Created " + n + " indexes.");
  jsonReader.readNext(OJSONReader.NEXT_IN_OBJECT);
}
/**
 * Reads the "clustersToIndex" JSON array and returns the cluster names it lists.
 *
 * @return the cluster names to index (possibly empty, never null)
 */
private Set<String> importClustersToIndex() throws IOException, ParseException {
  final Set<String> names = new HashSet<String>();

  jsonReader.readNext(OJSONReader.BEGIN_COLLECTION);

  // consume one cluster name per array element until the closing bracket
  while (jsonReader.lastChar() != ']') {
    names.add(jsonReader.readString(OJSONReader.NEXT_IN_ARRAY));
  }

  // advance past the end of the array
  jsonReader.readString(OJSONReader.NEXT_IN_OBJECT);

  return names;
}
/**
 * Reads an index "definition" object from the stream and materializes it by
 * reflectively instantiating the serialized definition class.
 *
 * @return the deserialized index definition
 * @throws IOException when the definition class cannot be loaded or instantiated
 */
private OIndexDefinition importIndexDefinition() throws IOException, ParseException {
  jsonReader.readString(OJSONReader.BEGIN_OBJECT);
  jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT);

  final String definitionClassName = jsonReader.readString(OJSONReader.NEXT_IN_OBJECT);

  jsonReader.readNext(OJSONReader.FIELD_ASSIGNMENT);
  final String serializedDefinition = jsonReader.readString(OJSONReader.END_OBJECT, true);

  final ODocument definitionDocument = (ODocument) ORecordSerializerJSON.INSTANCE.fromString(serializedDefinition, null, null);

  final OIndexDefinition definition;
  try {
    // the concrete definition class is recorded in the export; instantiate it
    // via its no-arg constructor and let it restore its state from the document
    final Class<?> definitionClass = Class.forName(definitionClassName);
    definition = (OIndexDefinition) definitionClass.getDeclaredConstructor().newInstance();
    definition.fromStream(definitionDocument);
  } catch (final ClassNotFoundException ex) {
    throw new IOException("Error during deserialization of index definition", ex);
  } catch (final NoSuchMethodException ex) {
    throw new IOException("Error during deserialization of index definition", ex);
  } catch (final InvocationTargetException ex) {
    throw new IOException("Error during deserialization of index definition", ex);
  } catch (final InstantiationException ex) {
    throw new IOException("Error during deserialization of index definition", ex);
  } catch (final IllegalAccessException ex) {
    throw new IOException("Error during deserialization of index definition", ex);
  }

  jsonReader.readNext(OJSONReader.NEXT_IN_OBJECT);

  return definition;
}
/**
 * Walks every document in every user cluster and rewrites its links so they
 * point at the new RIDs assigned during import (using the export/import map).
 * Internal, index and manual-index clusters are skipped.
 *
 * @throws IOException on read errors while loading records
 */
private void migrateLinksInImportedDocuments() throws IOException {
  listener.onMessage("\n\nStarted migration of links (-migrateLinks=true). Links are going to be updated according to new RIDs:");

  final long begin = System.currentTimeMillis();
  // lap bookkeeping for the periodic progress message
  long last = begin;
  long documentsLastLap = 0;

  long totalDocuments = 0;

  Collection<String> clusterNames = database.getClusterNames();
  for (String clusterName : clusterNames) {
    // skip system clusters — only user data needs link migration
    if (OMetadataDefault.CLUSTER_INDEX_NAME.equals(clusterName) || OMetadataDefault.CLUSTER_INTERNAL_NAME.equals(clusterName)
        || OMetadataDefault.CLUSTER_MANUAL_INDEX_NAME.equals(clusterName))
      continue;

    long documents = 0;
    // prefix for the per-cluster completion message; becomes "\n---" once a
    // progress message has been printed so the output stays aligned
    String prefix = "";

    listener.onMessage("\n- Cluster " + clusterName + "...");

    final int clusterId = database.getClusterIdByName(clusterName);
    final long clusterRecords = database.countClusterElements(clusterId);
    OStorage storage = database.getStorage();

    // iterate physical positions in batches, starting from position 0
    OPhysicalPosition[] positions = storage.ceilingPhysicalPositions(clusterId, new OPhysicalPosition(0));
    while (positions.length > 0) {
      for (OPhysicalPosition position : positions) {
        ORecord record = database.load(new ORecordId(clusterId, position.clusterPosition));
        if (record instanceof ODocument) {
          ODocument document = (ODocument) record;
          rewriteLinksInDocument(document);

          documents++;
          documentsLastLap++;
          totalDocuments++;

          final long now = System.currentTimeMillis();
          if (now - last > IMPORT_RECORD_DUMP_LAP_EVERY_MS) {
            listener.onMessage(String.format("\n--- Migrated %,d of %,d records (%,.2f/sec)", documents, clusterRecords,
                (float) documentsLastLap * 1000 / (float) IMPORT_RECORD_DUMP_LAP_EVERY_MS));

            // RESET LAP COUNTERS
            last = now;
            documentsLastLap = 0;
            prefix = "\n---";
          }
        }
      }

      // advance to the next batch after the last position processed
      positions = storage.higherPhysicalPositions(clusterId, positions[positions.length - 1]);
    }

    listener.onMessage(String.format("%s Completed migration of %,d records in current cluster", prefix, documents));
  }

  // NOTE(review): this counts migrated documents, not individual links — the
  // message wording is slightly misleading
  listener.onMessage(String.format("\nTotal links updated: %,d", totalDocuments));
}
/**
 * Rewrites every link field of the given document so it points at the new RIDs
 * recorded during import, then persists the document.
 *
 * @param document the imported document whose links must be remapped
 */
private void rewriteLinksInDocument(ODocument document) {
  // make the shared converter aware of the old-RID -> new-RID mapping
  LinkConverter.INSTANCE.setExportImportHashTable(exportImportHashTable);

  final ODocumentFieldWalker walker = new ODocumentFieldWalker();
  walker.walkDocument(document, new LinksRewriter());

  document.save();
}
}
| |
package com.ggstudios.views;
import android.content.Context;
import android.graphics.Rect;
import android.os.Bundle;
import android.support.v4.view.ViewPager;
import android.support.v4.view.ViewPager.OnPageChangeListener;
import android.util.AttributeSet;
import android.view.Gravity;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import com.ggstudios.lolcraft.R;
/**
 * A horizontal strip of tappable tab titles that mirrors and controls a
 * {@link ViewPager}. Each tab's width is proportional to the measured width of
 * its title text; the currently selected tab is highlighted.
 */
public class TabIndicator extends ViewGroup implements OnPageChangeListener {
    private ViewPager pager;
    private TabAdapter adapter;
    private Context context;
    private OnPageChangeListener onPageChangeListener;

    private int unselectedColor;
    private int selectedColor;

    public TabIndicator(Context context) {
        super(context);
        initialize();
    }

    public TabIndicator(Context context, AttributeSet attrs) {
        super(context, attrs);
        initialize();
    }

    public TabIndicator(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        initialize();
    }

    @Override
    protected void onFinishInflate() {
        // FIX: initialization moved into the constructors so programmatically
        // created instances (which never receive onFinishInflate) are also set
        // up; previously context/colors were only assigned here, causing an NPE
        // in setAdapter for non-XML usage. Also now calls super as required.
        super.onFinishInflate();
    }

    /** One-time setup of the cached context and the tab highlight colors. */
    private void initialize() {
        context = getContext();
        unselectedColor = context.getResources().getColor(R.color.lightgrey50);
        selectedColor = context.getResources().getColor(R.color.white80);
    }

    /**
     * Binds this indicator to the given pager, building one clickable
     * {@link TextView} per tab and computing each tab's width ratio from its
     * title's text bounds.
     *
     * @param pager the pager whose adapter must implement {@link TabAdapter}
     */
    public void setAdapter(final ViewPager pager) {
        this.pager = pager;
        this.adapter = (TabAdapter) pager.getAdapter();

        pager.setOnPageChangeListener(this);

        removeAllViews();

        if (adapter != null) {
            Rect bounds = new Rect();
            float totalW = 0f;

            for (int i = 0; i < adapter.getCount(); i++) {
                TabItem item = adapter.getTab(i);

                TextView tv = new TextView(context);
                tv.setText(item.tabName);
                tv.setBackgroundColor(unselectedColor);
                tv.setGravity(Gravity.CENTER);
                addView(tv);

                final int index = i;
                tv.setOnClickListener(new OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        pager.setCurrentItem(index);
                    }
                });

                // measure the title text to weight this tab's width
                tv.getPaint().getTextBounds(item.tabName, 0, item.tabName.length(), bounds);
                item.stringW = bounds.width();
                totalW += item.stringW;
            }

            for (int i = 0; i < adapter.getCount(); i++) {
                TabItem item = adapter.getTab(i);
                item.ratioW = item.stringW / totalW;
            }

            // first page is selected initially
            getChildAt(0).setBackgroundColor(selectedColor);
            requestLayout();
        }
    }

    @Override
    public void onPageScrollStateChanged(int state) {
        if (onPageChangeListener != null)
            onPageChangeListener.onPageScrollStateChanged(state);
    }

    @Override
    public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) {
        if (onPageChangeListener != null)
            onPageChangeListener.onPageScrolled(position, positionOffset, positionOffsetPixels);
    }

    @Override
    public void onPageSelected(int position) {
        if (onPageChangeListener != null)
            onPageChangeListener.onPageSelected(position);

        // FIX: guard against callbacks arriving before/without a bound adapter
        if (adapter == null)
            return;

        for (int i = 0; i < adapter.getCount(); i++) {
            getChildAt(i).setBackgroundColor(unselectedColor);
        }
        getChildAt(position).setBackgroundColor(selectedColor);
        invalidate();
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        int widthSize = MeasureSpec.getSize(widthMeasureSpec);

        measureTabs(widthMeasureSpec, heightMeasureSpec);

        // the strip is as tall as its first tab (all tabs share one height)
        int height = 0;
        final View v = getChildAt(0);
        if (v != null) {
            height = v.getMeasuredHeight();
        }

        setMeasuredDimension(
                resolveSize(getPaddingLeft() + widthSize + getPaddingRight(),
                        widthMeasureSpec),
                resolveSize(height + getPaddingTop()
                        + getPaddingBottom(), heightMeasureSpec));
    }

    /**
     * Measures each tab view at its proportional share of the available width
     * and at the full available height.
     *
     * @param widthMeasureSpec  the parent's horizontal measure spec
     * @param heightMeasureSpec the parent's vertical measure spec
     */
    private void measureTabs(int widthMeasureSpec, int heightMeasureSpec) {
        if (adapter == null) {
            return;
        }

        int widthSize = MeasureSpec.getSize(widthMeasureSpec);
        int heightSize = MeasureSpec.getSize(heightMeasureSpec);
        final int count = adapter.getCount();

        for (int i = 0; i < count; i++) {
            TabItem item = adapter.getTab(i);
            LayoutParams layoutParams = (LayoutParams) getChildAt(i)
                    .getLayoutParams();

            final int widthSpec = MeasureSpec.makeMeasureSpec((int) (widthSize * item.ratioW),
                    MeasureSpec.EXACTLY);
            final int heightSpec = MeasureSpec.makeMeasureSpec(
                    heightSize, MeasureSpec.EXACTLY);
            layoutParams.height = heightSize;
            getChildAt(i).measure(widthSpec, heightSpec);
        }
    }

    @Override
    protected void onLayout(boolean changed, int l, int t, int r, int b) {
        if (adapter == null) {
            return;
        }

        final int count = adapter.getCount();
        int x = 0;
        for (int i = 0; i < count; i++) {
            View v = getChildAt(i);

            if (i + 1 == count) {
                // last tab absorbs any rounding slack up to the full width
                v.layout(x, this.getPaddingTop(), getMeasuredWidth(),
                        this.getPaddingTop() + v.getMeasuredHeight());
            } else {
                v.layout(x, this.getPaddingTop(), x
                        + v.getMeasuredWidth(),
                        this.getPaddingTop() + v.getMeasuredHeight());
            }

            x += v.getMeasuredWidth();
        }
    }

    /** Registers a listener that receives the pager callbacks forwarded by this view. */
    public void setOnPageChangeListener(OnPageChangeListener listener) {
        onPageChangeListener = listener;
    }

    public interface TabAdapter {
        /**
         * Return the number of tabs. Needs to be aligned with the number of
         * items in your {@link PagerAdapter}.
         *
         * @return the tab count
         */
        int getCount();

        /**
         * Return the {@link TabItem} describing the tab at the given position.
         *
         * @param position the position of the tab
         * @return the tab descriptor
         */
        TabItem getTab(int position);
    }

    /** Descriptor for a single tab: title, target fragment class and arguments. */
    public static class TabItem {
        private String className;
        private Bundle args;
        private String tabName;

        // filled in by TabIndicator while measuring the title text
        private int stringW;
        private float ratioW;

        public TabItem(String tabName, String className) {
            this.tabName = tabName;
            this.className = className;
        }

        public TabItem(String tabName, String className, Bundle args) {
            this.tabName = tabName;
            this.className = className;
            this.args = args;
        }

        public Bundle getArguments() {
            return args;
        }

        public String getClassName() {
            return className;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.maven.packaging;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringWriter;
import java.io.Writer;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import org.apache.camel.tooling.model.BaseOptionModel;
import org.apache.camel.tooling.util.ReflectionHelper;
import org.apache.camel.tooling.util.Strings;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
import org.apache.maven.artifact.resolver.filter.ExcludesArtifactFilter;
import org.apache.maven.artifact.versioning.InvalidVersionSpecificationException;
import org.apache.maven.artifact.versioning.VersionRange;
import org.apache.maven.model.Dependency;
import org.apache.maven.model.Exclusion;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.annotations.Component;
import org.apache.maven.plugins.annotations.Parameter;
import org.jboss.jandex.AnnotationInstance;
import org.jboss.jandex.AnnotationTarget;
import org.jboss.jandex.AnnotationValue;
import org.jboss.jandex.ClassInfo;
import org.jboss.jandex.DotName;
import org.jboss.jandex.Index;
import org.jboss.jandex.IndexReader;
import static org.apache.camel.tooling.util.ReflectionHelper.doWithMethods;
import static org.apache.camel.tooling.util.Strings.between;
/**
* Abstract class for configurer generator.
*/
public abstract class AbstractGenerateConfigurerMojo extends AbstractGeneratorMojo {
public static final DotName CONFIGURER = DotName.createSimple("org.apache.camel.spi.Configurer");
/**
* Whether to discover configurer classes from classpath by scanning for @Configurer annotations. This requires
* using jandex-maven-plugin.
*/
@Parameter(defaultValue = "true")
protected boolean discoverClasses = true;
@Component
private ArtifactFactory artifactFactory;
private DynamicClassLoader projectClassLoader;
/**
 * Minimal option model used by the configurer generator: captures only the
 * option name, its Java type (rendered as source text) and the getter name.
 */
public static class ConfigurerOption extends BaseOptionModel {

    /**
     * @param name   the option name
     * @param type   the option's type; arrays are rendered with trailing {@code []}
     * @param getter the name of the getter method for this option
     */
    public ConfigurerOption(String name, Class<?> type, String getter) {
        // we just use name, type
        setName(name);
        if (byte[].class == type) {
            // special for byte array
            setJavaType("byte[]");
        } else if (long[].class == type) {
            // special for long array
            setJavaType("long[]");
        } else if (type.isArray()) {
            // special for array: JVM name is "[Lcom.Foo;" -> render as "com.Foo[]"
            String arrType = between(type.getName(), "[L", ";") + "[]";
            setJavaType(arrType);
        } else {
            setJavaType(type.getName());
        }
        setGetterMethod(getter);
    }
}
// Default no-arg constructor required for Maven plugin instantiation.
public AbstractGenerateConfigurerMojo() {
}
/**
 * Generates configurer source files and META-INF metadata for all classes
 * annotated with {@code @Configurer} (when discovery is enabled) plus any
 * explicitly listed classes. Classes are partitioned by their bootstrap and
 * extended flags and each partition is generated with matching options.
 *
 * @param sourcesOutputDir   where generated java sources go (defaults to src/generated/java)
 * @param resourcesOutputDir where generated resources go (defaults to src/generated/resources)
 * @param classes            additional fully qualified class names to process (may use "source=target" form)
 * @param testClasspathOnly  whether to resolve classes against the test output directory only
 * @throws MojoExecutionException on classpath setup, index reading or generation failures
 */
protected void doExecute(File sourcesOutputDir, File resourcesOutputDir, List<String> classes, boolean testClasspathOnly)
        throws MojoExecutionException {
    if ("pom".equals(project.getPackaging())) {
        return;
    }

    if (sourcesOutputDir == null) {
        sourcesOutputDir = new File(project.getBasedir(), "src/generated/java");
    }
    if (resourcesOutputDir == null) {
        resourcesOutputDir = new File(project.getBasedir(), "src/generated/resources");
    }

    List<URL> urls = new ArrayList<>();
    // need to include project compile dependencies (code similar to camel-maven-plugin)
    addRelevantProjectDependenciesToClasspath(urls, testClasspathOnly);
    projectClassLoader = DynamicClassLoader.createDynamicClassLoaderFromUrls(urls);

    // classes partitioned by their @Configurer bootstrap/extended flags
    Set<String> set = new LinkedHashSet<>();
    Set<String> extendedSet = new LinkedHashSet<>();
    Set<String> bootstrapSet = new LinkedHashSet<>();
    Set<String> bootstrapAndExtendedSet = new LinkedHashSet<>();

    if (discoverClasses) {
        Index index = readJandexIndex();
        // discover all top-level classes annotated with @Configurer that opted in
        List<AnnotationInstance> annotations = index.getAnnotations(CONFIGURER);
        annotations.stream()
                .filter(annotation -> annotation.target().kind() == AnnotationTarget.Kind.CLASS)
                .filter(annotation -> annotation.target().asClass().nestingType() == ClassInfo.NestingType.TOP_LEVEL)
                .filter(annotation -> asBooleanDefaultTrue(annotation, "generateConfigurer"))
                .forEach(annotation -> {
                    String currentClass = annotation.target().asClass().name().toString();
                    classify(currentClass,
                            asBooleanDefaultFalse(annotation, "bootstrap"),
                            asBooleanDefaultFalse(annotation, "extended"),
                            set, bootstrapSet, extendedSet, bootstrapAndExtendedSet);
                });
    }

    // additional classes
    if (classes != null && !classes.isEmpty()) {
        Index index = readJandexIndex();
        for (String clazz : classes) {
            ClassInfo ci = index.getClassByName(DotName.createSimple(clazz));
            AnnotationInstance ai = ci != null ? ci.classAnnotation(CONFIGURER) : null;
            if (ai != null) {
                classify(clazz,
                        asBooleanDefaultFalse(ai, "bootstrap"),
                        asBooleanDefaultFalse(ai, "extended"),
                        set, bootstrapSet, extendedSet, bootstrapAndExtendedSet);
            } else {
                // no annotation metadata: treat as a plain configurer
                set.add(clazz);
            }
        }
    }

    generateConfigurersForSet(set, sourcesOutputDir, resourcesOutputDir, false, false);
    generateConfigurersForSet(bootstrapSet, sourcesOutputDir, resourcesOutputDir, false, true);
    generateConfigurersForSet(extendedSet, sourcesOutputDir, resourcesOutputDir, true, false);
    generateConfigurersForSet(bootstrapAndExtendedSet, sourcesOutputDir, resourcesOutputDir, true, true);
}

/**
 * Reads the jandex index produced by jandex-maven-plugin from the build output directory.
 */
private Index readJandexIndex() throws MojoExecutionException {
    Path output = Paths.get(project.getBuild().getOutputDirectory());
    try (InputStream is = Files.newInputStream(output.resolve("META-INF/jandex.idx"))) {
        return new IndexReader(is).read();
    } catch (IOException e) {
        throw new MojoExecutionException("IOException: " + e.getMessage(), e);
    }
}

/**
 * Places the class into the partition matching its bootstrap/extended flags.
 */
private static void classify(String clazz, boolean bootstrap, boolean extended,
                             Set<String> set, Set<String> bootstrapSet, Set<String> extendedSet,
                             Set<String> bootstrapAndExtendedSet) {
    if (bootstrap && extended) {
        bootstrapAndExtendedSet.add(clazz);
    } else if (bootstrap) {
        bootstrapSet.add(clazz);
    } else if (extended) {
        extendedSet.add(clazz);
    } else {
        set.add(clazz);
    }
}

/**
 * Generates configurer sources and META-INF metadata for every entry of the given set.
 * An entry may take the form "sourceFqn=targetFqn" to generate under a different target name.
 */
private void generateConfigurersForSet(Set<String> fqns, File sourcesOutputDir, File resourcesOutputDir,
                                       boolean extended, boolean bootstrap)
        throws MojoExecutionException {
    for (String fqn : fqns) {
        try {
            String targetFqn = fqn;
            int pos = fqn.indexOf('=');
            if (pos != -1) {
                targetFqn = fqn.substring(pos + 1);
                fqn = fqn.substring(0, pos);
            }
            List<ConfigurerOption> options = processClass(fqn);
            generateConfigurer(fqn, targetFqn, options, sourcesOutputDir, extended, bootstrap);
            generateMetaInfConfigurer(fqn, targetFqn, resourcesOutputDir);
        } catch (Exception e) {
            throw new MojoExecutionException("Error processing class: " + fqn, e);
        }
    }
}
/**
* Add any relevant project dependencies to the classpath. Takes includeProjectDependencies into consideration.
*
* @param path classpath of {@link URL} objects
*/
private void addRelevantProjectDependenciesToClasspath(List<URL> path, boolean testClasspathOnly)
throws MojoExecutionException {
try {
getLog().debug("Project Dependencies will be included.");
if (testClasspathOnly) {
URL testClasses = new File(project.getBuild().getTestOutputDirectory()).toURI().toURL();
getLog().debug("Adding to classpath : " + testClasses);
path.add(testClasses);
} else {
URL mainClasses = new File(project.getBuild().getOutputDirectory()).toURI().toURL();
getLog().debug("Adding to classpath : " + mainClasses);
path.add(mainClasses);
}
Set<Artifact> dependencies = project.getArtifacts();
// system scope dependencies are not returned by maven 2.0. See
// MEXEC-17
dependencies.addAll(getAllNonTestScopedDependencies());
Iterator<Artifact> iter = dependencies.iterator();
while (iter.hasNext()) {
Artifact classPathElement = iter.next();
getLog().debug("Adding project dependency artifact: " + classPathElement.getArtifactId()
+ " to classpath");
File file = classPathElement.getFile();
if (file != null) {
path.add(file.toURI().toURL());
}
}
} catch (MalformedURLException e) {
throw new MojoExecutionException("Error during setting up classpath", e);
}
}
private Collection<Artifact> getAllNonTestScopedDependencies() throws MojoExecutionException {
List<Artifact> answer = new ArrayList<>();
for (Artifact artifact : getAllDependencies()) {
// do not add test artifacts
if (!artifact.getScope().equals(Artifact.SCOPE_TEST)) {
answer.add(artifact);
}
}
return answer;
}
// generic method to retrieve all the transitive dependencies
private Collection<Artifact> getAllDependencies() throws MojoExecutionException {
List<Artifact> artifacts = new ArrayList<>();
for (Iterator<?> dependencies = project.getDependencies().iterator(); dependencies.hasNext();) {
Dependency dependency = (Dependency) dependencies.next();
String groupId = dependency.getGroupId();
String artifactId = dependency.getArtifactId();
VersionRange versionRange;
try {
versionRange = VersionRange.createFromVersionSpec(dependency.getVersion());
} catch (InvalidVersionSpecificationException e) {
throw new MojoExecutionException("unable to parse version", e);
}
String type = dependency.getType();
if (type == null) {
type = "jar";
}
String classifier = dependency.getClassifier();
boolean optional = dependency.isOptional();
String scope = dependency.getScope();
if (scope == null) {
scope = Artifact.SCOPE_COMPILE;
}
if (this.artifactFactory != null) {
Artifact art = this.artifactFactory.createDependencyArtifact(groupId, artifactId, versionRange,
type, classifier, scope, null, optional);
if (scope.equalsIgnoreCase(Artifact.SCOPE_SYSTEM)) {
art.setFile(new File(dependency.getSystemPath()));
}
List<String> exclusions = new ArrayList<>();
for (Exclusion exclusion : dependency.getExclusions()) {
exclusions.add(exclusion.getGroupId() + ":" + exclusion.getArtifactId());
}
ArtifactFilter newFilter = new ExcludesArtifactFilter(exclusions);
art.setDependencyFilter(newFilter);
artifacts.add(art);
}
}
return artifacts;
}
private List<ConfigurerOption> processClass(String fqn) throws ClassNotFoundException {
List<ConfigurerOption> answer = new ArrayList<>();
// filter out duplicates by using a names set that has already added
Set<String> names = new HashSet<>();
Class clazz = projectClassLoader.loadClass(fqn);
// find all public setters
doWithMethods(clazz, m -> {
boolean setter = m.getName().length() >= 4 && m.getName().startsWith("set")
&& Character.isUpperCase(m.getName().charAt(3));
setter &= Modifier.isPublic(m.getModifiers()) && m.getParameterCount() == 1;
setter &= filterSetter(m);
if (setter) {
String getter = "get" + Character.toUpperCase(m.getName().charAt(3)) + m.getName().substring(4);
Class type = m.getParameterTypes()[0];
if (boolean.class == type || Boolean.class == type) {
try {
String isGetter = "is" + getter.substring(3);
clazz.getMethod(isGetter, null);
getter = isGetter;
} catch (Exception e) {
// ignore as its then assumed to be get
}
}
ConfigurerOption option = null;
String t = Character.toUpperCase(m.getName().charAt(3)) + m.getName().substring(3 + 1);
if (names.add(t)) {
option = new ConfigurerOption(t, type, getter);
answer.add(option);
} else {
boolean replace = false;
// try to find out what the real type is of the correspondent field so we chose among the clash
Field field = ReflectionHelper.findField(clazz, Character.toLowerCase(t.charAt(0)) + t.substring(1));
if (field != null && field.getType().equals(type)) {
// this is the correct type for the new option
replace = true;
}
if (replace) {
answer.removeIf(o -> o.getName().equals(t));
option = new ConfigurerOption(t, type, getter);
answer.add(option);
}
}
if (option != null) {
String desc = type.isArray() ? type.getComponentType().getName() : m.toGenericString();
if (desc.contains("<") && desc.contains(">")) {
desc = Strings.between(desc, "<", ">");
// if it has additional nested types, then we only want the outer type
int pos = desc.indexOf('<');
if (pos != -1) {
desc = desc.substring(0, pos);
}
// if its a map then it has a key/value, so we only want the last part
pos = desc.indexOf(',');
if (pos != -1) {
desc = desc.substring(pos + 1);
}
desc = desc.replace('$', '.');
desc = desc.trim();
// skip if the type is generic or a wildcard
if (!desc.isEmpty() && desc.indexOf('?') == -1 && !desc.contains(" extends ")) {
option.setNestedType(desc);
}
}
}
}
});
return answer;
}
private boolean filterSetter(Method setter) {
// special for some
if ("setBindingMode".equals(setter.getName())) {
// we only want the string setter
return setter.getParameterTypes()[0] == String.class;
} else if ("setHostNameResolver".equals(setter.getName())) {
// we only want the string setter
return setter.getParameterTypes()[0] == String.class;
}
return true;
}
private void generateConfigurer(
String fqn, String targetFqn, List<ConfigurerOption> options, File outputDir, boolean extended, boolean bootstrap)
throws IOException {
int pos = targetFqn.lastIndexOf('.');
String pn = targetFqn.substring(0, pos);
String cn = targetFqn.substring(pos + 1) + "Configurer";
String en = fqn;
String pfqn = fqn;
String psn = "org.apache.camel.support.component.PropertyConfigurerSupport";
StringWriter sw = new StringWriter();
PropertyConfigurerGenerator.generatePropertyConfigurer(pn, cn, en, pfqn, psn,
false, false, extended, bootstrap, options, null, sw);
String source = sw.toString();
String fileName = pn.replace('.', '/') + "/" + cn + ".java";
outputDir.mkdirs();
boolean updated = updateResource(buildContext, outputDir.toPath().resolve(fileName), source);
if (updated) {
getLog().info("Updated " + fileName);
}
}
private void generateMetaInfConfigurer(String fqn, String targetFqn, File resourcesOutputDir) {
int pos = targetFqn.lastIndexOf('.');
String pn = targetFqn.substring(0, pos);
String en = targetFqn.substring(pos + 1);
try (Writer w = new StringWriter()) {
w.append("# " + GENERATED_MSG + "\n");
w.append("class=").append(pn).append(".").append(en).append("Configurer").append("\n");
String fileName = "META-INF/services/org/apache/camel/configurer/" + fqn;
boolean updated = updateResource(buildContext, resourcesOutputDir.toPath().resolve(fileName), w.toString());
if (updated) {
getLog().info("Updated " + fileName);
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
private static boolean asBooleanDefaultTrue(AnnotationInstance ai, String name) {
AnnotationValue av = ai.value(name);
return av == null || av.asBoolean();
}
private static boolean asBooleanDefaultFalse(AnnotationInstance ai, String name) {
AnnotationValue av = ai.value(name);
return av != null && av.asBoolean();
}
}
| |
/**
* Licensed to Jasig under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Jasig licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jasig.portlet.calendar.mvc.controller;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.annotation.Resource;
import javax.portlet.ActionRequest;
import javax.portlet.ActionResponse;
import javax.portlet.PortletPreferences;
import javax.portlet.PortletRequest;
import javax.portlet.PortletSession;
import javax.portlet.RenderRequest;
import javax.portlet.RenderResponse;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jasig.portlet.calendar.CalendarConfiguration;
import org.jasig.portlet.calendar.PredefinedCalendarConfiguration;
import org.jasig.portlet.calendar.PredefinedCalendarDefinition;
import org.jasig.portlet.calendar.UserDefinedCalendarConfiguration;
import org.jasig.portlet.calendar.dao.CalendarStore;
import org.jasig.portlet.calendar.mvc.CalendarPreferencesCommand;
import org.jasig.portlet.calendar.mvc.IViewSelector;
import org.jasig.portlet.calendar.service.SessionSetupInitializationService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Required;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.portlet.ModelAndView;
import org.springframework.web.portlet.bind.annotation.ActionMapping;
/**
 * EditCalendarSubscriptionsController provides the main edit page for the calendars
 * portlet. The page allows users to view, add, delete and edit all available
 * calendars.
 *
 * @author Jen Bourey
 */
@Controller
@RequestMapping("EDIT")
public class EditCalendarSubscriptionsController {

    private static final String FORM_NAME = "calendarPreferencesCommand";

    protected final Log log = LogFactory.getLog(this.getClass());

    /**
     * Default render request: show the subscription editing view.
     */
    @RequestMapping
    public ModelAndView viewEditOptions(RenderRequest request, RenderResponse response) {
        return viewSubscriptions(request, response);
    }

    /**
     * Default action mapping; intentionally does nothing.
     */
    @ActionMapping
    public void defaultAction(ActionRequest request) {
        // default action mapping
    }

    /**
     * Builds the model for the subscription editing view: the user's own calendars,
     * the predefined calendars available to them, and the predefined calendars they
     * have not yet configured.
     */
    @RequestMapping(params = "action=editSubscriptions")
    public ModelAndView viewSubscriptions(RenderRequest request,
            RenderResponse response) {
        Map<String, Object> model = new HashMap<String, Object>();
        PortletSession session = request.getPortletSession();
        // get user information
        String subscribeId = (String) session.getAttribute(SessionSetupInitializationService.USERNAME_KEY);
        model.put("guest", "guest".equalsIgnoreCase(subscribeId));
        // See if the timezone is a read-only preference, or not. If so, we
        // do not want them to be able to try and edit that value.
        PortletPreferences prefs = request.getPreferences();
        model.put("timezoneReadOnly", prefs.isReadOnly("timezone"));
        // add the user-defined calendars to the model
        List<UserDefinedCalendarConfiguration> mycalendars = calendarStore.getUserDefinedCalendarConfigurations(subscribeId, false);
        model.put("mycalendars", mycalendars);
        // add the predefined calendars to the model
        List<PredefinedCalendarConfiguration> calendars = calendarStore.getPredefinedCalendarConfigurations(subscribeId, false);
        model.put("calendars", calendars);
        // get the user's role listings
        @SuppressWarnings("unchecked")
        Set<String> userRoles = (Set<String>) session.getAttribute("userRoles");
        // get a list of predefined calendars the user doesn't
        // currently have configured
        List<PredefinedCalendarDefinition> definitions = calendarStore.getHiddenPredefinedCalendarDefinitions(subscribeId, userRoles);
        model.put("hiddencalendars", definitions);
        model.put("predefinedEditActions", predefinedEditActions);
        // return the edit view
        String view = viewSelector.getEditViewName(request);
        return new ModelAndView(view, "model", model);
    }

    /**
     * Deletes a user-defined calendar subscription and removes it from the
     * session's hidden calendars map.
     */
    @ActionMapping(params = "action=deleteUserCalendar")
    public void removeSubscription(ActionRequest request,
            ActionResponse response, @RequestParam("configurationId") Long id) {
        CalendarConfiguration config = calendarStore.getCalendarConfiguration(id);
        calendarStore.deleteCalendarConfiguration(config);
        // remove the calendar from the hidden calendars list
        removeFromHiddenCalendars(request, config);
        response.setRenderParameter("action", "editSubscriptions");
    }

    /**
     * Marks a calendar as displayed and removes it from the session's hidden
     * calendars map.
     */
    @ActionMapping(params = "action=showCalendar")
    public void showCalendar(ActionRequest request,
            ActionResponse response, @RequestParam("configurationId") Long id) {
        setCalendarDisplayed(request, id, true);
        response.setRenderParameter("action", "editSubscriptions");
    }

    /**
     * Marks a calendar as not displayed and removes it from the session's hidden
     * calendars map.
     */
    @ActionMapping(params = "action=hideCalendar")
    public void hideCalendar(ActionRequest request,
            ActionResponse response, @RequestParam("configurationId") Long id) {
        setCalendarDisplayed(request, id, false);
        response.setRenderParameter("action", "editSubscriptions");
    }

    /**
     * Loads the configuration, updates its displayed flag, stores it, and removes
     * it from the session's hidden calendars map. Shared by the show and hide actions.
     */
    private void setCalendarDisplayed(ActionRequest request, Long id, boolean displayed) {
        CalendarConfiguration config = calendarStore.getCalendarConfiguration(id);
        config.setDisplayed(displayed);
        calendarStore.storeCalendarConfiguration(config);
        // remove the calendar from the hidden calendars list
        removeFromHiddenCalendars(request, config);
    }

    /**
     * Removes the given configuration from the "hiddenCalendars" session map.
     * Guards against the attribute being absent (the previous code would throw
     * a NullPointerException in that case).
     */
    private void removeFromHiddenCalendars(ActionRequest request, CalendarConfiguration config) {
        PortletSession session = request.getPortletSession();
        @SuppressWarnings("unchecked")
        Map<Long, String> hidden = (Map<Long, String>) session.getAttribute("hiddenCalendars");
        if (hidden != null) {
            hidden.remove(config.getId());
        }
    }

    /**
     * Subscribes the current user to a shared (predefined) calendar.
     */
    @ActionMapping(params = "action=addSharedCalendar")
    public void addSharedCalendar(ActionRequest request,
            ActionResponse response, @RequestParam("definitionId") Long id) {
        PortletSession session = request.getPortletSession();
        String subscribeId = (String) session.getAttribute(SessionSetupInitializationService.USERNAME_KEY);
        PredefinedCalendarDefinition definition = (PredefinedCalendarDefinition) calendarStore.getCalendarDefinition(id);
        log.debug("definition to save " + definition.toString());
        PredefinedCalendarConfiguration config = new PredefinedCalendarConfiguration();
        config.setSubscribeId(subscribeId);
        config.setCalendarDefinition(definition);
        calendarStore.storeCalendarConfiguration(config);
        response.setRenderParameter("action", "editSubscriptions");
    }

    /**
     * Process the preferences update request.
     *
     * @param request the action request
     * @param response the action response
     * @param form the submitted preferences form
     * @throws Exception if storing the preferences fails
     */
    @ActionMapping(params = "action=editPreferences")
    public void updatePreferences(ActionRequest request,
            ActionResponse response, @ModelAttribute(FORM_NAME) CalendarPreferencesCommand form)
            throws Exception {
        PortletPreferences prefs = request.getPreferences();
        // If the timezone preference is read only don't try to change it.
        // Pluto will throw an exception if you do.
        if (!prefs.isReadOnly("timezone")) {
            prefs.setValue("timezone", form.getTimezone());
            prefs.store();
            PortletSession session = request.getPortletSession();
            session.setAttribute("timezone", form.getTimezone());
        }
        // send the user back to the main edit page
        response.setRenderParameter("action", "editSubscriptions");
        // provide feedback indicating the preferences were saved successfully
        response.setRenderParameter("preferencesSaved", "true");
    }

    /**
     * Return the list of available time zone IDs.
     *
     * @return the configured time zone IDs
     */
    @ModelAttribute("timezones")
    public List<String> getTimeZones() {
        return this.timeZones;
    }

    /**
     * Return a pre-populated preferences form for the current user.
     *
     * @param request the portlet request
     * @return the preferences form backing object
     * @throws Exception if the preferences cannot be read
     */
    @ModelAttribute(FORM_NAME)
    public CalendarPreferencesCommand getForm(PortletRequest request) throws Exception {
        CalendarPreferencesCommand form = new CalendarPreferencesCommand();
        PortletPreferences prefs = request.getPreferences();
        form.setTimezone(prefs.getValue("timezone", "America/New_York"));
        return form;
    }

    private Map<String, String> predefinedEditActions = new HashMap<String, String>();

    @Required
    @Resource(name="predefinedEditActions")
    public void setPredefinedEditActions(Map<String, String> predefinedEditActions) {
        this.predefinedEditActions = predefinedEditActions;
    }

    private List<String> timeZones = null;

    /**
     * Set the list of time zone IDs that should be presented as options for
     * user time zones.
     *
     * @param timeZones the available time zone IDs
     */
    @Required
    @Resource(name="timeZones")
    public void setTimeZones(List<String> timeZones) {
        this.timeZones = timeZones;
    }

    private CalendarStore calendarStore;

    @Required
    @Resource(name="calendarStore")
    public void setCalendarStore(CalendarStore calendarStore) {
        this.calendarStore = calendarStore;
    }

    private IViewSelector viewSelector;

    @Autowired(required = true)
    public void setViewSelector(IViewSelector viewSelector) {
        this.viewSelector = viewSelector;
    }
}
| |
package org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.policies;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import org.eclipse.core.runtime.IAdaptable;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EStructuralFeature;
import org.eclipse.gef.EditPart;
import org.eclipse.gef.commands.Command;
import org.eclipse.gmf.runtime.diagram.core.util.ViewUtil;
import org.eclipse.gmf.runtime.diagram.ui.commands.DeferredLayoutCommand;
import org.eclipse.gmf.runtime.diagram.ui.commands.ICommandProxy;
import org.eclipse.gmf.runtime.diagram.ui.commands.SetViewMutabilityCommand;
import org.eclipse.gmf.runtime.diagram.ui.editpolicies.CanonicalEditPolicy;
import org.eclipse.gmf.runtime.diagram.ui.requests.CreateViewRequest;
import org.eclipse.gmf.runtime.emf.core.util.EObjectAdapter;
import org.eclipse.gmf.runtime.notation.Node;
import org.eclipse.gmf.runtime.notation.View;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.APIResourceEndpointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.AddressEndPointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.AddressingEndpointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.AggregateMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.BAMMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.BeanMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.BuilderMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CacheMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CallMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CallTemplateMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CalloutMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ClassMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CloneMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CloudConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CloudConnectorOperationEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.CommandMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ConditionalRouterMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DBLookupMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DBReportMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DataMapperMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DefaultEndPointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.DropMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EJBMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EnqueueMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EnrichMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EntitlementMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EventMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FailoverEndPointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FaultMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.FilterMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.HTTPEndpointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.HeaderMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.IterateMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.LoadBalanceEndPointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.LogMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.LoopBackMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.NamedEndpointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.OAuthMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.PayloadFactoryMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.PropertyMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RMSequenceMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RecipientListEndPointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RespondMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RouterMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.RuleMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ScriptMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SendMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SequenceEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SmooksMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SpringMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.StoreMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.SwitchMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.TemplateEndpointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ThrottleMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.TransactionMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.URLRewriteMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.ValidateMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.WSDLEndPointEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.XQueryMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.XSLTMediatorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbDiagramUpdater;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbNodeDescriptor;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbVisualIDRegistry;
/**
* @generated
*/
public class MediatorFlowMediatorFlowCompartment13CanonicalEditPolicy extends CanonicalEditPolicy {
    /**
     * @generated
     */
    protected void refreshOnActivate() {
        // Need to activate editpart children before invoking the canonical refresh for EditParts to add event listeners
        List<?> c = getHost().getChildren();
        for (int i = 0; i < c.size(); i++) {
            ((EditPart) c.get(i)).activate();
        }
        super.refreshOnActivate();
    }

    /**
     * @generated
     */
    protected EStructuralFeature getFeatureToSynchronize() {
        // canonical refresh reacts to changes of the mediator flow's children containment feature
        return EsbPackage.eINSTANCE.getMediatorFlow_Children();
    }

    /**
     * @generated
     */
    @SuppressWarnings("rawtypes")
    protected List getSemanticChildrenList() {
        View viewObject = (View) getHost().getModel();
        LinkedList<EObject> result = new LinkedList<EObject>();
        // delegate to the generated diagram updater to compute this compartment's semantic children
        List<EsbNodeDescriptor> childDescriptors = EsbDiagramUpdater
                .getMediatorFlowMediatorFlowCompartment_7036SemanticChildren(viewObject);
        for (EsbNodeDescriptor d : childDescriptors) {
            result.add(d.getModelElement());
        }
        return result;
    }

    /**
     * @generated
     */
    protected boolean isOrphaned(Collection<EObject> semanticChildren, final View view) {
        // a view is orphaned when it is one of ours but its semantic element is no longer a child
        return isMyDiagramElement(view) && !semanticChildren.contains(view.getElement());
    }

    /**
     * @generated
     */
    private boolean isMyDiagramElement(View view) {
        int visualID = EsbVisualIDRegistry.getVisualID(view);
        // every mediator, endpoint and connector visual ID this compartment can contain
        switch (visualID) {
        case DropMediatorEditPart.VISUAL_ID:
        case PropertyMediatorEditPart.VISUAL_ID:
        case ThrottleMediatorEditPart.VISUAL_ID:
        case FilterMediatorEditPart.VISUAL_ID:
        case LogMediatorEditPart.VISUAL_ID:
        case EnrichMediatorEditPart.VISUAL_ID:
        case XSLTMediatorEditPart.VISUAL_ID:
        case SwitchMediatorEditPart.VISUAL_ID:
        case SequenceEditPart.VISUAL_ID:
        case EventMediatorEditPart.VISUAL_ID:
        case EntitlementMediatorEditPart.VISUAL_ID:
        case ClassMediatorEditPart.VISUAL_ID:
        case SpringMediatorEditPart.VISUAL_ID:
        case ScriptMediatorEditPart.VISUAL_ID:
        case FaultMediatorEditPart.VISUAL_ID:
        case XQueryMediatorEditPart.VISUAL_ID:
        case CommandMediatorEditPart.VISUAL_ID:
        case DBLookupMediatorEditPart.VISUAL_ID:
        case DBReportMediatorEditPart.VISUAL_ID:
        case SmooksMediatorEditPart.VISUAL_ID:
        case SendMediatorEditPart.VISUAL_ID:
        case HeaderMediatorEditPart.VISUAL_ID:
        case CloneMediatorEditPart.VISUAL_ID:
        case CacheMediatorEditPart.VISUAL_ID:
        case IterateMediatorEditPart.VISUAL_ID:
        case CalloutMediatorEditPart.VISUAL_ID:
        case TransactionMediatorEditPart.VISUAL_ID:
        case RMSequenceMediatorEditPart.VISUAL_ID:
        case RuleMediatorEditPart.VISUAL_ID:
        case OAuthMediatorEditPart.VISUAL_ID:
        case AggregateMediatorEditPart.VISUAL_ID:
        case StoreMediatorEditPart.VISUAL_ID:
        case BuilderMediatorEditPart.VISUAL_ID:
        case CallTemplateMediatorEditPart.VISUAL_ID:
        case PayloadFactoryMediatorEditPart.VISUAL_ID:
        case EnqueueMediatorEditPart.VISUAL_ID:
        case URLRewriteMediatorEditPart.VISUAL_ID:
        case ValidateMediatorEditPart.VISUAL_ID:
        case RouterMediatorEditPart.VISUAL_ID:
        case ConditionalRouterMediatorEditPart.VISUAL_ID:
        case BAMMediatorEditPart.VISUAL_ID:
        case BeanMediatorEditPart.VISUAL_ID:
        case EJBMediatorEditPart.VISUAL_ID:
        case DefaultEndPointEditPart.VISUAL_ID:
        case AddressEndPointEditPart.VISUAL_ID:
        case FailoverEndPointEditPart.VISUAL_ID:
        case RecipientListEndPointEditPart.VISUAL_ID:
        case WSDLEndPointEditPart.VISUAL_ID:
        case NamedEndpointEditPart.VISUAL_ID:
        case LoadBalanceEndPointEditPart.VISUAL_ID:
        case APIResourceEndpointEditPart.VISUAL_ID:
        case AddressingEndpointEditPart.VISUAL_ID:
        case HTTPEndpointEditPart.VISUAL_ID:
        case TemplateEndpointEditPart.VISUAL_ID:
        case CloudConnectorEditPart.VISUAL_ID:
        case CloudConnectorOperationEditPart.VISUAL_ID:
        case LoopBackMediatorEditPart.VISUAL_ID:
        case RespondMediatorEditPart.VISUAL_ID:
        case CallMediatorEditPart.VISUAL_ID:
        case DataMapperMediatorEditPart.VISUAL_ID:
            return true;
        }
        return false;
    }

    /**
     * @generated
     */
    protected void refreshSemantic() {
        // nothing to synchronize when the semantic element is unresolved
        if (resolveSemanticElement() == null) {
            return;
        }
        LinkedList<IAdaptable> createdViews = new LinkedList<IAdaptable>();
        List<EsbNodeDescriptor> childDescriptors = EsbDiagramUpdater
                .getMediatorFlowMediatorFlowCompartment_7036SemanticChildren((View) getHost()
                        .getModel());
        LinkedList<View> orphaned = new LinkedList<View>();
        // we care to check only views we recognize as ours
        LinkedList<View> knownViewChildren = new LinkedList<View>();
        for (View v : getViewChildren()) {
            if (isMyDiagramElement(v)) {
                knownViewChildren.add(v);
            }
        }
        // alternative to #cleanCanonicalSemanticChildren(getViewChildren(), semanticChildren)
        //
        // iteration happens over list of desired semantic elements, trying to find best matching View, while original CEP
        // iterates views, potentially losing view (size/bounds) information - i.e. if there are few views to reference same EObject, only last one
        // to answer isOrphaned == true will be used for the domain element representation, see #cleanCanonicalSemanticChildren()
        for (Iterator<EsbNodeDescriptor> descriptorsIterator = childDescriptors.iterator(); descriptorsIterator
                .hasNext();) {
            EsbNodeDescriptor next = descriptorsIterator.next();
            String hint = EsbVisualIDRegistry.getType(next.getVisualID());
            LinkedList<View> perfectMatch = new LinkedList<View>(); // both semanticElement and hint match that of NodeDescriptor
            for (View childView : getViewChildren()) {
                EObject semanticElement = childView.getElement();
                if (next.getModelElement().equals(semanticElement)) {
                    if (hint.equals(childView.getType())) {
                        perfectMatch.add(childView);
                        // actually, can stop iteration over view children here, but
                        // may want to use not the first view but last one as a 'real' match (the way original CEP does
                        // with its trick with viewToSemanticMap inside #cleanCanonicalSemanticChildren
                    }
                }
            }
            if (perfectMatch.size() > 0) {
                descriptorsIterator.remove(); // precise match found no need to create anything for the NodeDescriptor
                // use only one view (first or last?), keep rest as orphaned for further consideration
                knownViewChildren.remove(perfectMatch.getFirst());
            }
        }
        // those left in knownViewChildren are subject to removal - they are our diagram elements we didn't find match to,
        // or those we have potential matches to, and thus need to be recreated, preserving size/location information.
        orphaned.addAll(knownViewChildren);
        //
        // build a view-creation descriptor for every semantic child that had no matching view
        ArrayList<CreateViewRequest.ViewDescriptor> viewDescriptors = new ArrayList<CreateViewRequest.ViewDescriptor>(
                childDescriptors.size());
        for (EsbNodeDescriptor next : childDescriptors) {
            String hint = EsbVisualIDRegistry.getType(next.getVisualID());
            IAdaptable elementAdapter = new CanonicalElementAdapter(next.getModelElement(), hint);
            CreateViewRequest.ViewDescriptor descriptor = new CreateViewRequest.ViewDescriptor(
                    elementAdapter, Node.class, hint, ViewUtil.APPEND, false, host()
                            .getDiagramPreferencesHint());
            viewDescriptors.add(descriptor);
        }
        // delete orphaned views before creating replacements
        boolean changed = deleteViews(orphaned.iterator());
        //
        CreateViewRequest request = getCreateViewRequest(viewDescriptors);
        Command cmd = getCreateViewCommand(request);
        if (cmd != null && cmd.canExecute()) {
            // the notation view must be mutable for the create command to apply
            SetViewMutabilityCommand.makeMutable(new EObjectAdapter(host().getNotationView()))
                    .execute();
            executeCommand(cmd);
            @SuppressWarnings("unchecked")
            List<IAdaptable> nl = (List<IAdaptable>) request.getNewObject();
            createdViews.addAll(nl);
        }
        if (changed || createdViews.size() > 0) {
            postProcessRefreshSemantic(createdViews);
        }
        if (createdViews.size() > 1) {
            // perform a layout of the container
            DeferredLayoutCommand layoutCmd = new DeferredLayoutCommand(host().getEditingDomain(),
                    createdViews, host());
            executeCommand(new ICommandProxy(layoutCmd));
        }
        makeViewsImmutable(createdViews);
    }
}
| |
/**
* OLAT - Online Learning and Training<br>
* http://www.olat.org
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Copyright (c) 2009 frentix GmbH<br>
* http://www.frentix.com<br>
* <p>
*/
package org.olat.presentation.course.editor;
import org.olat.lms.course.nodes.CourseNode;
import org.olat.presentation.framework.core.UserRequest;
import org.olat.presentation.framework.core.components.form.flexible.FormItemContainer;
import org.olat.presentation.framework.core.components.form.flexible.elements.RichTextElement;
import org.olat.presentation.framework.core.components.form.flexible.elements.SingleSelection;
import org.olat.presentation.framework.core.components.form.flexible.elements.TextElement;
import org.olat.presentation.framework.core.components.form.flexible.impl.FormBasicController;
import org.olat.presentation.framework.core.components.form.flexible.impl.FormLayoutContainer;
import org.olat.presentation.framework.core.control.Controller;
import org.olat.presentation.framework.core.control.WindowControl;
import org.olat.system.commons.Formatter;
import org.olat.system.commons.StringHelper;
import org.olat.system.event.Event;
/**
* Provides a FlexiForm that lets the user configure details for a course node.
*
* @author twuersch
*/
/**
 * FlexiForm controller for editing the generic settings of a course node: the short
 * (menu) title, the display title, the learning objectives and the display mode for
 * title/description/content.
 *
 * @author twuersch
 */
public class NodeConfigFormController extends FormBasicController {
    /**
     * Maximum length of a course's short title.
     */
    public final static int SHORT_TITLE_MAX_LENGTH = 25;
    /** Keys for the display-options dropdown, in the order they are presented. */
    private final static String[] displayOptionsKeys = new String[] { CourseNode.DISPLAY_OPTS_TITLE_DESCRIPTION_CONTENT, CourseNode.DISPLAY_OPTS_TITLE_CONTENT,
            CourseNode.DISPLAY_OPTS_CONTENT };
    /** Initial short (menu) title read from the course node, truncated to the maximum length. */
    private final String menuTitle;
    /** Initial display title read from the course node. */
    private final String displayTitle;
    /** Initial learning objectives text read from the course node. */
    private final String learningObjectives;
    /** Initially selected display option read from the course node. */
    private final String displayOption;
    /** Input element for this course's short title (mandatory). */
    private TextElement shortTitle;
    /** Input element for this course's title. */
    private TextElement title;
    /** Rich text input element for the description of this course's objectives. */
    private RichTextElement objectives;
    /** Dropdown selecting how title/description/content are displayed. */
    private SingleSelection displayOptions;
    /** Decides whether to show a <i>cancel</i> button. */
    private final boolean withCancel;

    /**
     * Initializes this controller and builds the form.
     *
     * @param ureq the user request
     * @param wControl the window control
     * @param courseNode the course node this controller will access
     * @param withCancel whether to show a <i>cancel</i> button
     */
    public NodeConfigFormController(final UserRequest ureq, final WindowControl wControl, final CourseNode courseNode, final boolean withCancel) {
        super(ureq, wControl, FormBasicController.LAYOUT_DEFAULT);
        this.withCancel = withCancel;
        menuTitle = Formatter.truncate(courseNode.getShortTitle(), SHORT_TITLE_MAX_LENGTH);
        displayTitle = courseNode.getLongTitle();
        learningObjectives = courseNode.getLearningObjectives();
        displayOption = courseNode.getDisplayOption();
        initForm(ureq);
    }

    @Override
    protected void doDispose() {
        // nothing to clean up
    }

    @Override
    protected void formOK(final UserRequest ureq) {
        fireEvent(ureq, Event.DONE_EVENT);
    }

    @Override
    protected void formNOK(final UserRequest ureq) {
        fireEvent(ureq, Event.FAILED_EVENT);
    }

    @Override
    protected void formCancelled(final UserRequest ureq) {
        fireEvent(ureq, Event.CANCELLED_EVENT);
    }

    @Override
    protected void initForm(final FormItemContainer formLayout, final Controller listener, final UserRequest ureq) {
        // short title (mandatory)
        shortTitle = uifactory.addTextElement("nodeConfigForm.menutitle", "nodeConfigForm.menutitle", SHORT_TITLE_MAX_LENGTH, (menuTitle == null ? "" : menuTitle),
                formLayout);
        shortTitle.setMandatory(true);
        // display title
        title = uifactory.addTextElement("nodeConfigForm.displaytitle", "nodeConfigForm.displaytitle", 255, (displayTitle == null ? "" : displayTitle), formLayout);
        // learning objectives rich text editor
        objectives = uifactory.addRichTextElementForStringData("nodeConfigForm.learningobjectives", "nodeConfigForm.learningobjectives", (learningObjectives == null ? ""
                : learningObjectives), 10, -1, false, false, null, null, formLayout, ureq.getUserSession(), getWindowControl());
        objectives.setMaxLength(4000);
        // display options dropdown; labels are positionally matched to displayOptionsKeys
        final String[] optionLabels = new String[] { translate("nodeConfigForm.title_desc_content"), translate("nodeConfigForm.title_content"),
                translate("nodeConfigForm.content_only") };
        displayOptions = uifactory.addDropdownSingleselect("displayOptions", "nodeConfigForm.display_options", formLayout, displayOptionsKeys, optionLabels, null);
        displayOptions.select(displayOption, true);
        // submit and (optional) cancel buttons
        final FormLayoutContainer buttons = FormLayoutContainer.createButtonLayout("buttonLayout", getTranslator());
        formLayout.add(buttons);
        uifactory.addFormSubmitButton("nodeConfigForm.save", buttons);
        if (withCancel) {
            uifactory.addFormCancelButton("search.form.cancel", buttons, ureq, getWindowControl());
        }
    }

    @Override
    protected boolean validateFormLogic(final UserRequest ureq) {
        if (!StringHelper.containsNonWhitespace(shortTitle.getValue())) {
            // the short title is mandatory
            shortTitle.setErrorKey("nodeConfigForm.menumust", new String[] {});
            return false;
        }
        if (!super.validateFormLogic(ureq)) {
            return false;
        }
        shortTitle.clearError();
        return true;
    }

    /**
     * Gets the short title.
     *
     * @return the short title entered by the user
     */
    public String getMenuTitle() {
        return shortTitle.getValue();
    }

    /**
     * Gets the title.
     *
     * @return the display title entered by the user
     */
    public String getDisplayTitle() {
        return title.getValue();
    }

    /**
     * Gets the description of this course's objectives.
     *
     * @return the learning objectives entered by the user
     */
    public String getLearningObjectives() {
        return objectives.getValue();
    }

    /**
     * Gets the selected display option.
     *
     * @return the key of the selected display option
     */
    public String getDisplayOption() {
        return displayOptions.getSelectedKey();
    }
}
| |
/*
* #%L
* OW2 Chameleon - Fuchsia Framework
* %%
* Copyright (C) 2009 - 2014 OW2 Chameleon
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
/*
Calimero - A library for KNX network access
Copyright (C) 2006-2008 W. Kastner
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package tuwien.auto.calimero.tools;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import java.util.HashMap;
import java.util.Map;
import tuwien.auto.calimero.CloseEvent;
import tuwien.auto.calimero.DataUnitBuilder;
import tuwien.auto.calimero.FrameEvent;
import tuwien.auto.calimero.Settings;
import tuwien.auto.calimero.exception.KNXException;
import tuwien.auto.calimero.exception.KNXIllegalArgumentException;
import tuwien.auto.calimero.knxnetip.KNXnetIPConnection;
import tuwien.auto.calimero.link.KNXNetworkMonitor;
import tuwien.auto.calimero.link.KNXNetworkMonitorFT12;
import tuwien.auto.calimero.link.KNXNetworkMonitorIP;
import tuwien.auto.calimero.link.event.LinkListener;
import tuwien.auto.calimero.link.event.MonitorFrameEvent;
import tuwien.auto.calimero.link.medium.KNXMediumSettings;
import tuwien.auto.calimero.link.medium.PLSettings;
import tuwien.auto.calimero.link.medium.RFSettings;
import tuwien.auto.calimero.link.medium.RawFrame;
import tuwien.auto.calimero.link.medium.RawFrameBase;
import tuwien.auto.calimero.link.medium.TPSettings;
import tuwien.auto.calimero.log.LogLevel;
import tuwien.auto.calimero.log.LogManager;
import tuwien.auto.calimero.log.LogStreamWriter;
import tuwien.auto.calimero.log.LogWriter;
/**
* A tool for Calimero allowing monitoring of KNX network messages.
* <p>
* NetworkMonitor is a console based tool implementation allowing a user to track KNX
* network messages in a KNX network. It allows monitoring access using a KNXnet/IP
* connection or FT1.2 connection. It shows the necessary interaction with the Calimero
* API for this particular task. To start monitoring invoke the <code>main</code>-method
* of this class. Note that by default the network monitor will run with common settings,
* if not specified otherwise using command line options. Since these settings might be
* system dependent (for example the local host) and not always predictable, a user may
* want to specify particular settings using available option flags.
* <p>
* The main part of this tool implementation interacts with the type
* {@link KNXNetworkMonitor}, which offers monitoring access to a KNX network. All
* monitoring output, as well as occurring problems are written to <code>System.out
* </code>.
* <p>
* To quit a running monitor in the console, use a user interrupt for termination (
* <code>^C</code> for example).
*
* @author B. Malinowsky
*/
public class NetworkMonitor
{
	// tool name used in usage/version output and error messages
	private static final String tool = "NetworkMonitor";
	private static final String version = "0.2";
	// platform line separator, used to assemble the multi-line usage text
	private static final String sep = System.getProperty("line.separator");
	// parsed command line options; key = option name, value = option argument or null
	private final Map options;
	// the monitor link, created in createMonitor(); null until run() is invoked
	private KNXNetworkMonitor m;
	// log writer receiving the monitor link's log output
	private LogWriter w;
	/**
	 * Listener printing every received (and, if available, decoded raw) frame to
	 * System.out. Used as default listener when this tool is started from main().
	 */
	private final class MonitorListener implements LinkListener
	{
		private MonitorListener()
		{}
		public void indication(FrameEvent e)
		{
			final StringBuffer sb = new StringBuffer();
			sb.append(e.getFrame().toString());
			// since we specified decoding of raw frames in createMonitor(), we
			// can get the decoded raw frame here
			// but note, that on decoding error null is returned
			final RawFrame raw = ((MonitorFrameEvent) e).getRawFrame();
			if (raw != null) {
				sb.append(": ").append(raw.toString());
				if (raw instanceof RawFrameBase) {
					final RawFrameBase f = (RawFrameBase) raw;
					sb.append(": ").append(
						DataUnitBuilder.decode(f.getTPDU(), f.getDestination()));
				}
			}
			System.out.println(sb);
		}
		public void linkClosed(CloseEvent e)
		{}
	}
	/**
	 * Creates a new NetworkMonitor instance using the supplied options.
	 * <p>
	 * See {@link #main(String[])} for a list of options.
	 *
	 * @param args list with options
	 * @param w a log writer, can be <code>null</code>
	 * @throws KNXException on instantiation problems
	 * @throws KNXIllegalArgumentException on unknown/invalid options
	 */
	public NetworkMonitor(String[] args, LogWriter w) throws KNXException
	{
		this.w = w;
		try {
			// read the command line options
			options = new HashMap();
			// parseOptions returns false for -help/-version; treated as abort here
			if (!parseOptions(args, options))
				throw new KNXException("only show usage/version information, abort "
					+ tool);
		}
		catch (final RuntimeException e) {
			// invalid/unknown option; NOTE(review): original cause is dropped here
			throw new KNXIllegalArgumentException(e.getMessage());
		}
	}
	/**
	 * Entry point for running the NetworkMonitor.
	 * <p>
	 * An IP host or port identifier has to be supplied, specifying the endpoint for the
	 * KNX network access.<br>
	 * To show the usage message of this tool on the console, supply the command line
	 * option -help (or -h).<br>
	 * Command line options are treated case sensitive. Available options for network
	 * monitoring:
	 * <ul>
	 * <li><code>-help -h</code> show help message</li>
	 * <li><code>-version</code> show tool/library version and exit</li>
	 * <li><code>-verbose -v</code> enable verbose status output</li>
	 * <li><code>-localhost</code> <i>id</i> local IP/host name</li>
	 * <li><code>-localport</code> <i>number</i> local UDP port (default system
	 * assigned)</li>
	 * <li><code>-port -p</code> <i>number</i> UDP port on host (default 3671)</li>
	 * <li><code>-nat -n</code> enable Network Address Translation</li>
	 * <li><code>-serial -s</code> use FT1.2 serial communication</li>
	 * <li><code>-medium -m</code> <i>id</i> KNX medium [tp0|tp1|p110|p132|rf]
	 * (defaults to tp1)</li>
	 * </ul>
	 *
	 * @param args command line options for network monitoring
	 */
	public static void main(String[] args)
	{
		try {
			final NetworkMonitor m = new NetworkMonitor(args, null);
			// supply a log writer for System.out (console)
			m.w = new ConsoleWriter(m.options.containsKey("verbose"));
			m.run(m.new MonitorListener());
		}
		catch (final Throwable t) {
			// console tool: report the problem and exit instead of rethrowing
			if (t.getMessage() != null)
				System.out.println(t.getMessage());
			else
				System.out.println(t.getClass().getName());
		}
	}
	/**
	 * Runs the network monitor.
	 * <p>
	 * This method returns when the network monitor is closed.
	 *
	 * @param l a link listener for monitor events
	 * @throws KNXException on problems on creating monitor or during monitoring
	 */
	public void run(LinkListener l) throws KNXException
	{
		createMonitor(l);
		final Thread sh = registerShutdownHandler();
		// TODO actually, this waiting block is just necessary if we're in console mode
		// to keep the current thread alive and for clean up
		// when invoked externally by a user, an immediate return could save one
		// additional thread (with the requirement to call quit for cleanup)
		try {
			// just wait for the network monitor to quit
			// notifyAll/notify is issued by quit() or the link-closed listener
			synchronized (this) {
				while (m.isOpen())
					try {
						wait();
					}
					catch (final InterruptedException e) {}
			}
		}
		finally {
			Runtime.getRuntime().removeShutdownHook(sh);
		}
	}
	/**
	 * Quits the network monitor, if running.
	 * <p>
	 */
	public void quit()
	{
		if (m != null && m.isOpen()) {
			m.close();
			// wake up the waiting loop in run()
			synchronized (this) {
				notifyAll();
			}
		}
	}
	/**
	 * Creates a new network monitor using the supplied options.
	 * <p>
	 *
	 * @throws KNXException on problems on monitor creation
	 */
	private void createMonitor(LinkListener l) throws KNXException
	{
		final KNXMediumSettings medium = (KNXMediumSettings) options.get("medium");
		if (options.containsKey("serial")) {
			// FT1.2 serial connection; option value is a port number or identifier
			final String p = (String) options.get("serial");
			try {
				m = new KNXNetworkMonitorFT12(Integer.parseInt(p), medium);
			}
			catch (final NumberFormatException e) {
				// not a number, treat the value as a port identifier string
				m = new KNXNetworkMonitorFT12(p, medium);
			}
		}
		else {
			// create local and remote socket address for monitor link
			final InetSocketAddress local = createLocalSocket((InetAddress) options
				.get("localhost"), (Integer) options.get("localport"));
			final InetSocketAddress host = new InetSocketAddress((InetAddress) options
				.get("host"), ((Integer) options.get("port")).intValue());
			// create the monitor link, based on the KNXnet/IP protocol
			// specify whether network address translation shall be used,
			// and tell the physical medium of the KNX network
			m = new KNXNetworkMonitorIP(local, host, options.containsKey("nat"), medium);
		}
		// add the log writer for monitor log events
		LogManager.getManager().addWriter(m.getName(), w);
		// on console we want to have all possible information, so enable
		// decoding of a received raw frame by the monitor link
		m.setDecodeRawFrames(true);
		// listen to monitor link events
		m.addMonitorListener(l);
		// we always need a link closed notification (even with user supplied listener)
		m.addMonitorListener(new LinkListener() {
			public void indication(FrameEvent e)
			{}
			public void linkClosed(CloseEvent e)
			{
				System.out.println("network monitor exit, " + e.getReason());
				// wake up the waiting loop in run()
				synchronized (NetworkMonitor.this) {
					NetworkMonitor.this.notify();
				}
			}
		});
	}
	/**
	 * Installs a JVM shutdown hook which closes the monitor on process termination,
	 * and returns it so run() can remove the hook on a normal exit.
	 */
	private Thread registerShutdownHandler()
	{
		final class ShutdownHandler extends Thread
		{
			ShutdownHandler()
			{}
			public void run()
			{
				System.out.println("shutdown");
				quit();
			}
		}
		final ShutdownHandler sh = new ShutdownHandler();
		Runtime.getRuntime().addShutdownHook(sh);
		return sh;
	}
	/**
	 * Reads all options in the specified array, and puts relevant options into the
	 * supplied options map.
	 * <p>
	 * On options not relevant for doing network monitoring (like <code>help</code>),
	 * this method will take appropriate action (like showing usage information). On
	 * occurrence of such an option, other options will be ignored. On unknown options, an
	 * IllegalArgumentException is thrown.
	 *
	 * @param args array with command line options
	 * @param options map to store options, optionally with its associated value
	 * @return <code>true</code> if the supplied provide enough information to continue
	 *         with monitoring, <code>false</code> otherwise or if the options were
	 *         handled by this method
	 */
	private static boolean parseOptions(String[] args, Map options)
	{
		if (args.length == 0) {
			System.out.println("A tool for monitoring a KNX network");
			showVersion();
			System.out.println("type -help for help message");
			return false;
		}
		// add defaults
		options.put("port", new Integer(KNXnetIPConnection.IP_PORT));
		options.put("medium", TPSettings.TP1);
		int i = 0;
		for (; i < args.length; i++) {
			final String arg = args[i];
			if (isOption(arg, "-help", "-h")) {
				showUsage();
				return false;
			}
			if (isOption(arg, "-version", null)) {
				showVersion();
				return false;
			}
			if (isOption(arg, "-verbose", "-v"))
				options.put("verbose", null);
			else if (isOption(arg, "-localhost", null))
				parseHost(args[++i], true, options);
			else if (isOption(arg, "-localport", null))
				options.put("localport", Integer.decode(args[++i]));
			else if (isOption(arg, "-port", "-p"))
				options.put("port", Integer.decode(args[++i]));
			else if (isOption(arg, "-nat", "-n"))
				options.put("nat", null);
			else if (isOption(arg, "-serial", "-s"))
				options.put("serial", null);
			else if (isOption(arg, "-medium", "-m"))
				options.put("medium", getMedium(args[++i]));
			else if (options.containsKey("serial"))
				// add port number/identifier to serial option
				options.put("serial", arg);
			else if (!options.containsKey("host"))
				// first free-standing argument is the remote host
				parseHost(arg, false, options);
			else
				throw new IllegalArgumentException("unknown option " + arg);
		}
		return true;
	}
	// prints the multi-line usage message to System.out
	private static void showUsage()
	{
		final StringBuffer sb = new StringBuffer();
		sb.append("usage: ").append(tool).append(" [options] <host|port>").append(sep);
		sb.append("options:").append(sep);
		sb.append(" -help -h show this help message").append(sep);
		sb.append(" -version show tool/library version and exit").append(
			sep);
		sb.append(" -verbose -v enable verbose status output").append(sep);
		sb.append(" -localhost <id> local IP/host name").append(sep);
		sb.append(
			" -localport <number> local UDP port (default system " + "assigned)")
			.append(sep);
		sb.append(" -port -p <number> UDP port on host (default ").append(
			KNXnetIPConnection.IP_PORT).append(")").append(sep);
		sb.append(" -nat -n enable Network Address Translation").append(
			sep);
		sb.append(" -serial -s use FT1.2 serial communication").append(sep);
		sb.append(
			" -medium -m <id> KNX medium [tp0|tp1|p110|p132|rf] "
				+ "(default tp1)").append(sep);
		System.out.println(sb);
	}
	//
	// utility methods
	//
	private static void showVersion()
	{
		System.out.println(tool + " version " + version + " using "
			+ Settings.getLibraryHeader(false));
	}
	/**
	 * Creates a medium settings type for the supplied medium identifier.
	 * <p>
	 *
	 * @param id a medium identifier from command line option
	 * @return medium settings object
	 * @throws KNXIllegalArgumentException on unknown medium identifier
	 */
	private static KNXMediumSettings getMedium(String id)
	{
		if (id.equals("tp0"))
			return TPSettings.TP0;
		else if (id.equals("tp1"))
			return TPSettings.TP1;
		else if (id.equals("p110"))
			return new PLSettings(false);
		else if (id.equals("p132"))
			return new PLSettings(true);
		else if (id.equals("rf"))
			return new RFSettings(null);
		else
			throw new KNXIllegalArgumentException("unknown medium");
	}
	// resolves the host name and stores it under "localhost" or "host";
	// wraps UnknownHostException into IllegalArgumentException for parseOptions
	private static void parseHost(String host, boolean local, Map options)
	{
		try {
			options.put(local ? "localhost" : "host", InetAddress.getByName(host));
		}
		catch (final UnknownHostException e) {
			throw new IllegalArgumentException("failed to read host " + host);
		}
	}
	// builds the local endpoint: explicit host if given, else local host when a
	// port was specified, else null (system chooses)
	private static InetSocketAddress createLocalSocket(InetAddress host, Integer port)
	{
		final int p = port != null ? port.intValue() : 0;
		try {
			return host != null ? new InetSocketAddress(host, p) : p != 0
				? new InetSocketAddress(InetAddress.getLocalHost(), p) : null;
		}
		catch (final UnknownHostException e) {
			throw new IllegalArgumentException("failed to create local host "
				+ e.getMessage());
		}
	}
	// true if arg equals the long option name or the (optional) short one
	private static boolean isOption(String arg, String longOpt, String shortOpt)
	{
		return arg.equals(longOpt) || shortOpt != null && arg.equals(shortOpt);
	}
	/**
	 * Log writer printing to System.out; logs everything in verbose mode,
	 * otherwise warnings and above only.
	 */
	private static final class ConsoleWriter extends LogStreamWriter
	{
		ConsoleWriter(boolean verbose)
		{
			super(verbose ? LogLevel.TRACE : LogLevel.WARN, System.out, true);
		}
		public void close()
		{}
	}
}
| |
/*
* Copyright (c) 2021 - Manifold Systems LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package manifold.preprocessor.android.syms;
import com.sun.source.tree.CompilationUnitTree;
import com.sun.source.tree.Tree;
import com.sun.tools.javac.tree.JCTree;
import manifold.api.fs.IFile;
import manifold.internal.javac.JavacPlugin;
import manifold.preprocessor.api.SymbolProvider;
import manifold.preprocessor.definitions.Definitions;
import manifold.rt.api.util.StreamUtil;
import manifold.util.JreUtil;
import manifold.util.ManExceptionUtil;
import manifold.util.ReflectUtil;
import manifold.util.concurrent.LocklessLazyVar;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.lang.reflect.Modifier;
import java.util.*;
/**
 * Android build-variant preprocessor symbol provider.
 * <p/>
 * Locates the {@code BuildConfig.java} file generated for the current build variant and
 * exposes its {@code public static final} constants (e.g. {@code DEBUG}, {@code FLAVOR},
 * {@code VERSION_NAME}) as preprocessor symbols.
 */
public class BuildVariantSymbols implements SymbolProvider
{
  // Lazily parsed BuildConfig constants: name -> string value. Loaded at most once.
  private final LocklessLazyVar<Map<String, String>> _buildConfigSyms =
    LocklessLazyVar.make( () -> loadBuildConfigSymbols() );

  @Override
  public boolean isDefined( Definitions rootDefinitions, IFile sourceFile, String def )
  {
    return _buildConfigSyms.get().containsKey( def );
  }

  @Override
  public String getValue( Definitions rootDefinitions, IFile sourceFile, String def )
  {
    // null when the symbol is not defined in BuildConfig
    return _buildConfigSyms.get().get( def );
  }

  /**
   * Finds and parses the generated BuildConfig.java. Returns an empty map when the
   * generated-sources directory or the BuildConfig file cannot be located.
   */
  private Map<String, String> loadBuildConfigSymbols()
  {
    String generatedClassesDir = getBuildConfigSourcePath();
    if( generatedClassesDir == null )
    {
      return Collections.emptyMap();
    }

    File dir = new File( generatedClassesDir );
    File buildConfig = findBuildConfig( dir );
    if( buildConfig != null )
    {
      return extractBuildConfigSymbols( buildConfig );
    }
    return Collections.emptyMap();
  }

  /**
   * Parses {@code buildConfig} with the compiler's Java parser and collects the values
   * of its {@code public static final} field initializers.
   */
  private Map<String, String> extractBuildConfigSymbols( File buildConfig )
  {
    Map<String, String> map = new HashMap<>();
    // try-with-resources: the reader was previously never closed (resource leak)
    try( FileReader fileReader = new FileReader( buildConfig ) )
    {
      ArrayList<CompilationUnitTree> trees = new ArrayList<>();
      JavacPlugin.instance().getHost().getJavaParser()
        .parseText( StreamUtil.getContent( fileReader ), trees, null, null, null );
      if( !trees.isEmpty() )
      {
        // BuildConfig has exactly one top-level class; scan its field declarations
        CompilationUnitTree tree = trees.get( 0 );
        List<? extends Tree> typeDecls = tree.getTypeDecls();
        if( typeDecls != null && !typeDecls.isEmpty() )
        {
          Tree cls = typeDecls.get( 0 );
          if( cls instanceof JCTree.JCClassDecl )
          {
            for( JCTree def : ((JCTree.JCClassDecl)cls).defs )
            {
              if( def instanceof JCTree.JCVariableDecl )
              {
                processConstant( map, (JCTree.JCVariableDecl)def );
              }
            }
          }
        }
      }
    }
    catch( IOException e )
    {
      throw ManExceptionUtil.unchecked( e );
    }
    return map;
  }

  /**
   * If {@code def} is a {@code public static final} field with an initializer, adds its
   * name and value to {@code map}: string literals are unquoted, numeric literals kept
   * verbatim, and a "true"-valued boolean maps to the empty string (defined, no value);
   * "false" booleans are not defined at all.
   */
  private void processConstant( Map<String, String> map, JCTree.JCVariableDecl def )
  {
    JCTree.JCModifiers modifiers = def.getModifiers();
    long mods = modifiers == null ? 0 : modifiers.flags;
    int psf = Modifier.PUBLIC | Modifier.STATIC | Modifier.FINAL;
    if( (mods & psf) != psf )
    {
      return;
    }

    JCTree.JCExpression initializer = def.getInitializer();
    if( initializer == null )
    {
      return;
    }

    String value = null;
    String init = initializer.toString();
    if( init.startsWith( "\"" ) )
    {
      // string literal: strip the surrounding quotes
      value = init.substring( 1, init.length()-1 );
    }
    else
    {
      try
      {
        // integral literal, kept verbatim
        Long.parseLong( init );
        value = init;
      }
      catch( Exception e )
      {
        try
        {
          // floating point literal, kept verbatim
          Double.parseDouble( init );
          value = init;
        }
        catch( Exception e2 )
        {
          // hack to handle DEBUG init, which can be like: Boolean.parseBoolean("true")
          if( init.contains( "true" ) )
          {
            // preprocessor definition will be just defined, a "false" value will not be defined
            value = "";
          }
        }
      }
    }
    if( value != null )
    {
      map.put( def.getName().toString(), value );
    }
  }

  /**
   * Derives the generated-sources directory holding BuildConfig.java from the project's
   * Java source path, appending the build-variant directory when it can be determined.
   * Returns null when the source path does not look like a standard Android layout.
   */
  private String getBuildConfigSourcePath()
  {
    Set<String> sourcePath = JavacPlugin.instance().deriveJavaSourcePath();
    String generatedClassesDir = null;
    for( String path: sourcePath )
    {
      int index = path.lastIndexOf( "/app/src/".replace( '/', File.separatorChar ) );
      if( index > 0 )
      {
        generatedClassesDir = path.substring( 0, index ) + "/app/build/generated/source/buildConfig".replace( '/', File.separatorChar );
        String variantPart = getVariantPart();
        if( variantPart != null )
        {
          generatedClassesDir += File.separatorChar + variantPart;
        }
        break;
      }
    }
    return generatedClassesDir;
  }

  /**
   * Derives the build variant name (e.g. "debug", "release") from javac's generated
   * source output path ({@code -s} option), which contains the variant directory.
   * Returns null when the option or the expected path marker is absent.
   */
  private String getVariantPart()
  {
    try
    {
      String variantPart = null;
      if( JreUtil.isJava8() )
      {
        // Java 8: scan the raw argument array for the value following "-s"
        String[] args = (String[])ReflectUtil.field( JavacPlugin.instance().getJavacTask(), "args" ).get();
        boolean found = false;
        for( String arg : args )
        {
          if( arg != null && arg.equalsIgnoreCase( "-s" ) )
          {
            found = true;
          }
          else if( found )
          {
            variantPart = arg;
            break;
          }
        }
      }
      else // Java 9+
      {
        // javacTask.args.options.get( "-s" )
        Object args = ReflectUtil.field( JavacPlugin.instance().getJavacTask(), "args" ).get();
        Object options = ReflectUtil.field( args, "options" ).get();
        variantPart = (String)ReflectUtil.method( options, "get", String.class ).invoke( "-s" );
      }
      if( variantPart == null )
      {
        return null;
      }

      // e.g. ...\app\build\generated\ap_generated_sources\release\out -> "release"
      String marker = File.separatorChar + "ap_generated_sources" + File.separatorChar;
      int index = variantPart.lastIndexOf( marker );
      if( index > 0 )
      {
        variantPart = variantPart.substring( index + marker.length() );
        int outIndex = variantPart.lastIndexOf( File.separatorChar );
        variantPart = variantPart.substring( 0, outIndex );
        return variantPart;
      }
      return null;
    }
    catch( Exception e )
    {
      throw new RuntimeException( e );
    }
  }

  /**
   * Depth-first search for a file named "BuildConfig.java" under {@code file};
   * returns the first match or null.
   */
  private File findBuildConfig( File file )
  {
    if( file.isFile() )
    {
      return file.getName().equals( "BuildConfig.java" ) ? file : null;
    }

    File[] listing = file.listFiles();
    if( listing != null )
    {
      for( File f : listing )
      {
        File buildConfig = findBuildConfig( f );
        if( buildConfig != null )
        {
          return buildConfig;
        }
      }
    }
    return null;
  }
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.python.codeInsight;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Ref;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiPolyVariantReference;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.containers.HashSet;
import com.jetbrains.python.PyNames;
import com.jetbrains.python.psi.*;
import com.jetbrains.python.psi.impl.PyExpressionCodeFragmentImpl;
import com.jetbrains.python.psi.resolve.PyResolveContext;
import com.jetbrains.python.psi.types.*;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static com.intellij.util.containers.ContainerUtil.list;
import static com.jetbrains.python.psi.PyUtil.as;
/**
* @author vlan
*/
public class PyTypingTypeProvider extends PyTypeProviderBase {
  /** Matches a PEP 484 "# type: ..." comment; group 1 captures the annotation text. */
  public static final Pattern TYPE_COMMENT_PATTERN = Pattern.compile("# *type: *(.*)");
  // Maps typing.* generic aliases to the runtime classes/ABCs they parameterize.
  private static ImmutableMap<String, String> COLLECTION_CLASSES = ImmutableMap.<String, String>builder()
    .put("typing.List", "list")
    .put("typing.Dict", "dict")
    .put("typing.Set", PyNames.SET)
    .put("typing.FrozenSet", "frozenset")
    .put("typing.Tuple", PyNames.TUPLE)
    .put("typing.Iterable", PyNames.COLLECTIONS + "." + PyNames.ITERABLE)
    .put("typing.Iterator", PyNames.COLLECTIONS + "." + PyNames.ITERATOR)
    .put("typing.Container", PyNames.COLLECTIONS + "." + PyNames.CONTAINER)
    .put("typing.Sequence", PyNames.COLLECTIONS + "." + PyNames.SEQUENCE)
    .put("typing.MutableSequence", PyNames.COLLECTIONS + "." + "MutableSequence")
    .put("typing.Mapping", PyNames.COLLECTIONS + "." + PyNames.MAPPING)
    .put("typing.MutableMapping", PyNames.COLLECTIONS + "." + "MutableMapping")
    .put("typing.AbstractSet", PyNames.COLLECTIONS + "." + "Set")
    .put("typing.MutableSet", PyNames.COLLECTIONS + "." + "MutableSet")
    .build();
  // Inverse direction for builtin collections: builtin name -> typing.* short name.
  public static ImmutableMap<String, String> TYPING_COLLECTION_CLASSES = ImmutableMap.<String, String>builder()
    .put("list", "List")
    .put("dict", "Dict")
    .put("set", "Set")
    .put("frozenset", "FrozenSet")
    .build();
  // Base classes that make a user class generic when subclassed.
  private static ImmutableSet<String> GENERIC_CLASSES = ImmutableSet.<String>builder()
    .add("typing.Generic")
    .add("typing.AbstractGeneric")
    .add("typing.Protocol")
    .build();
  /**
   * Infers a parameter's type from PEP 484 sources, tried in order: the parameter's
   * annotation, the parameter's own "# type:" comment, and finally the function-level
   * "# type: (...) -> ..." comment.
   *
   * @return a Ref wrapping the inferred type; Ref.create() (explicitly unknown) for a
   *         function comment of the form "(...) -> Type"; null when nothing applies
   */
  @Nullable
  public Ref<PyType> getParameterType(@NotNull PyNamedParameter param, @NotNull PyFunction func, @NotNull TypeEvalContext context) {
    final PyAnnotation annotation = param.getAnnotation();
    if (annotation != null) {
      // XXX: Requires switching from stub to AST
      final PyExpression value = annotation.getValue();
      if (value != null) {
        final PyType type = getType(value, new Context(context));
        if (type != null) {
          // a default of None widens the annotated type to Optional[...]
          final PyType optionalType = getOptionalTypeFromDefaultNone(param, type, context);
          return Ref.create(optionalType != null ? optionalType : type);
        }
      }
    }
    final String paramComment = param.getTypeCommentAnnotation();
    if (paramComment != null) {
      return Ref.create(getStringBasedType(paramComment, param, new Context(context)));
    }
    final String comment = func.getTypeCommentAnnotation();
    if (comment != null) {
      final PyTypeParser.ParseResult result = PyTypeParser.parsePep484FunctionTypeComment(param, comment);
      final PyCallableType functionType = as(result.getType(), PyCallableType.class);
      if (functionType != null) {
        final List<PyCallableParameter> paramTypes = functionType.getParameters(context);
        // Function annotation of kind (...) -> Type
        if (paramTypes == null) {
          return Ref.create();
        }
        final PyParameter[] funcParams = func.getParameterList().getParameters();
        // self/cls is omitted from the comment for instance and class methods
        final int startOffset = omitFirstParamInTypeComment(func) ? 1 : 0;
        for (int paramIndex = 0; paramIndex < funcParams.length; paramIndex++) {
          if (funcParams[paramIndex] == param) {
            final int typeIndex = paramIndex - startOffset;
            if (typeIndex >= 0 && typeIndex < paramTypes.size()) {
              return Ref.create(paramTypes.get(typeIndex).getType(context));
            }
            break;
          }
        }
      }
    }
    return null;
  }
private static boolean omitFirstParamInTypeComment(@NotNull PyFunction func) {
return func.getContainingClass() != null && func.getModifier() != PyFunction.Modifier.STATICMETHOD;
}
/**
 * Infers a callable's return type from its PEP 484 annotation, a generic constructor
 * type (for __init__ of a Generic class), or a function-level type comment, in that order.
 * Returns null when none of those sources yields a type.
 */
@Nullable
@Override
public Ref<PyType> getReturnType(@NotNull PyCallable callable, @NotNull TypeEvalContext context) {
  // Only plain functions carry annotations or type comments.
  if (!(callable instanceof PyFunction)) {
    return null;
  }
  final PyFunction function = (PyFunction)callable;
  final PyAnnotation annotation = function.getAnnotation();
  if (annotation != null) {
    // XXX: Requires switching from stub to AST
    final PyExpression annotationValue = annotation.getValue();
    if (annotationValue != null) {
      final PyType annotatedType = getType(annotationValue, new Context(context));
      // An annotation that resolves to nothing still stops the search.
      return annotatedType == null ? null : Ref.create(annotatedType);
    }
  }
  final PyType constructorType = getGenericConstructorType(function, new Context(context));
  if (constructorType != null) {
    return Ref.create(constructorType);
  }
  final String typeComment = function.getTypeCommentAnnotation();
  if (typeComment == null) {
    return null;
  }
  final PyTypeParser.ParseResult parsed = PyTypeParser.parsePep484FunctionTypeComment(callable, typeComment);
  final PyCallableType commentType = as(parsed.getType(), PyCallableType.class);
  if (commentType == null) {
    return null;
  }
  return Ref.create(commentType.getReturnType(context));
}
/**
 * Handles calls to typing.cast(T, expr), whose result type is exactly the type
 * passed as the first argument. Returns null for any other function.
 */
@Nullable
@Override
public Ref<PyType> getCallType(@NotNull PyFunction function, @Nullable PyCallSiteExpression callSite, @NotNull TypeEvalContext context) {
  if (!"typing.cast".equals(function.getQualifiedName())) {
    return null;
  }
  final PyCallExpression call = as(callSite, PyCallExpression.class);
  if (call == null) {
    return null;
  }
  final PyExpression[] arguments = call.getArguments();
  if (arguments.length == 0) {
    return null;
  }
  final PyType castTarget = getType(arguments[0], new Context(context));
  return castTarget != null ? Ref.create(castTarget) : null;
}
/**
 * Infers the type of an assignment target annotated with a "# type:" comment,
 * e.g. {@code x = f()  # type: List[int]}. For tuple assignments annotated with a
 * tuple type, picks the component that corresponds to this particular target.
 */
@Override
public PyType getReferenceType(@NotNull PsiElement referenceTarget, TypeEvalContext context, @Nullable PsiElement anchor) {
  if (!(referenceTarget instanceof PyTargetExpression)) {
    return null;
  }
  final PyTargetExpression target = (PyTargetExpression)referenceTarget;
  final String typeComment = target.getTypeCommentAnnotation();
  if (typeComment == null) {
    return null;
  }
  final PyType commentType = getStringBasedType(typeComment, referenceTarget, new Context(context));
  if (commentType instanceof PyTupleType) {
    // "a, b = ...  # type: Tuple[X, Y]" — unpack the component for this target.
    final PyTupleExpression enclosingTuple = PsiTreeUtil.getParentOfType(target, PyTupleExpression.class);
    if (enclosingTuple != null) {
      return PyTypeChecker.getTargetTypeFromTupleAssignment(target, enclosingTuple, (PyTupleType)commentType);
    }
  }
  return commentType;
}
/**
 * Checks that the text of a comment starts with the "type:" prefix and returns the trimmed
 * part afterwards. This trailing part is supposed to contain a type annotation in PEP 484
 * compatible format, that can be parsed with either {@link PyTypeParser#parse(PsiElement, String)}
 * or {@link PyTypeParser#parsePep484FunctionTypeComment(PsiElement, String)}.
 */
@Nullable
public static String getTypeCommentValue(@NotNull String text) {
  final Matcher matcher = TYPE_COMMENT_PATTERN.matcher(text);
  return matcher.matches() ? matcher.group(1) : null;
}
/** Returns true when the given type is typing.Any (modeled as a class type). */
private static boolean isAny(@NotNull PyType type) {
  if (!(type instanceof PyClassType)) {
    return false;
  }
  final String qualifiedName = ((PyClassType)type).getPyClass().getQualifiedName();
  return "typing.Any".equals(qualifiedName);
}
/**
 * PEP 484: a parameter annotated as T with a default value of None is implicitly
 * Optional[T]. Returns the widened union, or null when the default is absent or not None.
 */
@Nullable
private static PyType getOptionalTypeFromDefaultNone(@NotNull PyNamedParameter param,
                                                     @NotNull PyType type,
                                                     @NotNull TypeEvalContext context) {
  final PyExpression defaultValue = param.getDefaultValue();
  if (defaultValue == null) {
    return null;
  }
  final PyType defaultType = context.getType(defaultValue);
  if (defaultType instanceof PyNoneType) {
    return PyUnionType.union(type, defaultType);
  }
  return null;
}
/**
 * For the __init__ of a class that declares type parameters (see collectGenericTypes),
 * calling the class produces a parameterized instance of it. Returns null otherwise.
 */
@Nullable
private static PyType getGenericConstructorType(@NotNull PyFunction function, @NotNull Context context) {
  if (!PyUtil.isInit(function)) {
    return null;
  }
  final PyClass containingClass = function.getContainingClass();
  if (containingClass == null) {
    return null;
  }
  final List<PyGenericType> typeParams = collectGenericTypes(containingClass, context);
  if (typeParams.isEmpty()) {
    return null;
  }
  return new PyCollectionTypeImpl(containingClass, false, new ArrayList<PyType>(typeParams));
}
/**
 * Collects the TypeVar parameters a class declares via subscription of a generic base,
 * e.g. {@code class C(Generic[T], ...)}. Returns an empty list when the class does not
 * (transitively) extend one of the known generic base classes.
 */
@NotNull
private static List<PyGenericType> collectGenericTypes(@NotNull PyClass cls, @NotNull Context context) {
  boolean extendsGeneric = false;
  for (PyClass ancestor : cls.getAncestorClasses(context.getTypeContext())) {
    if (GENERIC_CLASSES.contains(ancestor.getQualifiedName())) {
      extendsGeneric = true;
      break;
    }
  }
  if (!extendsGeneric) {
    return Collections.emptyList();
  }
  final ArrayList<PyGenericType> typeParams = new ArrayList<>();
  // XXX: Requires switching from stub to AST
  for (PyExpression superClassExpr : cls.getSuperClassExpressions()) {
    if (!(superClassExpr instanceof PySubscriptionExpression)) {
      continue;
    }
    final PyExpression indexExpr = ((PySubscriptionExpression)superClassExpr).getIndexExpression();
    if (indexExpr == null) {
      continue;
    }
    // Every subscript argument that resolves to a TypeVar becomes a type parameter.
    for (PsiElement resolved : tryResolving(indexExpr, context.getTypeContext())) {
      final PyGenericType typeVar = getGenericType(resolved, context);
      if (typeVar != null) {
        typeParams.add(typeVar);
      }
    }
  }
  return typeParams;
}
/**
 * Resolves {@code expression} and maps each resolution result to a type, combining
 * the results into a union. Returns null when no result produces a type.
 */
@Nullable
private static PyType getType(@NotNull PyExpression expression, @NotNull Context context) {
  // Use the diamond constructor, consistent with the rest of this class, instead of
  // Guava's Lists.newArrayList() (java.util.ArrayList is already imported here).
  final List<PyType> members = new ArrayList<>();
  for (PsiElement resolved : tryResolving(expression, context.getTypeContext())) {
    members.add(getTypeForResolvedElement(resolved, context));
  }
  return PyUnionType.union(members);
}
/**
 * Computes the type denoted by an already-resolved annotation element by trying each
 * supported PEP 484 construct in a fixed order: Union, Optional, Callable, parameterized
 * class, builtin collection alias, TypeVar, plain class, string forward reference.
 * The context's expression cache records elements currently being evaluated so that
 * recursive type definitions terminate instead of looping forever.
 */
@Nullable
private static PyType getTypeForResolvedElement(@NotNull PsiElement resolved, @NotNull Context context) {
  if (context.getExpressionCache().contains(resolved)) {
    // Recursive types are not yet supported
    return null;
  }
  context.getExpressionCache().add(resolved);
  try {
    final PyType unionType = getUnionType(resolved, context);
    if (unionType != null) {
      return unionType;
    }
    // A non-null Ref with a null payload means "recognized but unknown" (e.g. bare Optional).
    final Ref<PyType> optionalType = getOptionalType(resolved, context);
    if (optionalType != null) {
      return optionalType.get();
    }
    final PyType callableType = getCallableType(resolved, context);
    if (callableType != null) {
      return callableType;
    }
    final PyType parameterizedType = getParameterizedType(resolved, context);
    if (parameterizedType != null) {
      return parameterizedType;
    }
    final PyType builtinCollection = getBuiltinCollection(resolved);
    if (builtinCollection != null) {
      return builtinCollection;
    }
    final PyType genericType = getGenericType(resolved, context);
    if (genericType != null) {
      return genericType;
    }
    final Ref<PyType> classType = getClassType(resolved, context.getTypeContext());
    if (classType != null) {
      return classType.get();
    }
    final PyType stringBasedType = getStringBasedType(resolved, context);
    if (stringBasedType != null) {
      return stringBasedType;
    }
    return null;
  }
  finally {
    // Always unwind the cache entry so sibling evaluations are unaffected.
    context.getExpressionCache().remove(resolved);
  }
}
/**
 * Builds a type from a resolved typing construct plus the already-computed types of its
 * subscript arguments (used by the PEP 484 type-comment parser). First validates where a
 * parameter-list placeholder ("[X, Y]") or an ellipsis may legally appear, then
 * dispatches on the construct's qualified name. Returns null for malformed or
 * unsupported combinations.
 */
@Nullable
public static PyType getType(@NotNull PsiElement resolved, @NotNull List<PyType> elementTypes) {
  final String qualifiedName = getQualifiedName(resolved);
  final List<Integer> paramListTypePositions = new ArrayList<>();
  final List<Integer> ellipsisTypePositions = new ArrayList<>();
  for (int i = 0; i < elementTypes.size(); i++) {
    final PyType type = elementTypes.get(i);
    if (type instanceof PyTypeParser.ParameterListType) {
      paramListTypePositions.add(i);
    }
    else if (type instanceof PyTypeParser.EllipsisType) {
      ellipsisTypePositions.add(i);
    }
  }
  // A "[...]" parameter list is only valid as the first argument of typing.Callable.
  if (!paramListTypePositions.isEmpty()) {
    if (!("typing.Callable".equals(qualifiedName) && paramListTypePositions.equals(list(0)))) {
      return null;
    }
  }
  // An ellipsis is only valid as Callable's first argument or as Tuple's second of two.
  if (!ellipsisTypePositions.isEmpty()) {
    if (!("typing.Callable".equals(qualifiedName) && ellipsisTypePositions.equals(list(0)) ||
          "typing.Tuple".equals(qualifiedName) && ellipsisTypePositions.equals(list(1)) && elementTypes.size() == 2)) {
      return null;
    }
  }
  if ("typing.Union".equals(qualifiedName)) {
    return PyUnionType.union(elementTypes);
  }
  if ("typing.Optional".equals(qualifiedName) && elementTypes.size() == 1) {
    return PyUnionType.union(elementTypes.get(0), PyNoneType.INSTANCE);
  }
  if ("typing.Callable".equals(qualifiedName) && elementTypes.size() == 2) {
    final PyTypeParser.ParameterListType paramList = as(elementTypes.get(0), PyTypeParser.ParameterListType.class);
    if (paramList != null) {
      return new PyCallableTypeImpl(paramList.getCallableParameters(), elementTypes.get(1));
    }
    if (elementTypes.get(0) instanceof PyTypeParser.EllipsisType) {
      // Callable[..., R]: the parameter list is unspecified.
      return new PyCallableTypeImpl(null, elementTypes.get(1));
    }
  }
  if ("typing.Tuple".equals(qualifiedName)) {
    if (elementTypes.size() > 1 && elementTypes.get(1) instanceof PyTypeParser.EllipsisType) {
      // Tuple[X, ...]: homogeneous tuple of arbitrary length.
      return PyTupleType.createHomogeneous(resolved, elementTypes.get(0));
    }
    return PyTupleType.create(resolved, elementTypes.toArray(new PyType[elementTypes.size()]));
  }
  // Any other known collection alias becomes a parameterized builtin collection.
  final PyType builtinCollection = getBuiltinCollection(resolved);
  if (builtinCollection instanceof PyClassType) {
    final PyClassType classType = (PyClassType)builtinCollection;
    return new PyCollectionTypeImpl(classType.getPyClass(), false, elementTypes);
  }
  return null;
}
/**
 * Public entry point for inferring the type denoted by an assignment target
 * (e.g. a type alias). Wraps the type-eval context into this provider's caching Context.
 */
@Nullable
public static PyType getTypeFromTargetExpression(@NotNull PyTargetExpression expression, @NotNull TypeEvalContext context) {
  final Context wrappedContext = new Context(context);
  return getTypeFromTargetExpression(expression, wrappedContext);
}
/** Infers the type denoted by the value assigned to a target, or null when there is none. */
@Nullable
private static PyType getTypeFromTargetExpression(@NotNull PyTargetExpression expression,
                                                  @NotNull Context context) {
  // XXX: Requires switching from stub to AST
  final PyExpression assignedValue = expression.findAssignedValue();
  if (assignedValue == null) {
    return null;
  }
  return getTypeForResolvedElement(assignedValue, context);
}
/**
 * Interprets an element as a plain class reference in an annotation context.
 * A reference to a class definition denotes instances of that class; typing.Any
 * yields a Ref holding null ("known unknown"); None's type passes through unchanged.
 */
@Nullable
private static Ref<PyType> getClassType(@NotNull PsiElement element, @NotNull TypeEvalContext context) {
  if (!(element instanceof PyTypedElement)) {
    return null;
  }
  final PyType elementType = context.getType((PyTypedElement)element);
  if (elementType != null && isAny(elementType)) {
    return Ref.create();
  }
  if (elementType instanceof PyClassLikeType) {
    final PyClassLikeType classLikeType = (PyClassLikeType)elementType;
    if (classLikeType.isDefinition()) {
      return Ref.create(classLikeType.toInstance());
    }
    return null;
  }
  if (elementType instanceof PyNoneType) {
    return Ref.create(elementType);
  }
  return null;
}
/**
 * Recognizes typing.Optional[X] and converts it to Union[X, None].
 * Returns a Ref holding null when the subscript argument cannot be typed
 * (still known to be Optional), or null when the element is not an Optional at all.
 */
@Nullable
private static Ref<PyType> getOptionalType(@NotNull PsiElement element, @NotNull Context context) {
  if (!(element instanceof PySubscriptionExpression)) {
    return null;
  }
  final PySubscriptionExpression subscription = (PySubscriptionExpression)element;
  final Collection<String> operandNames = resolveToQualifiedNames(subscription.getOperand(), context.getTypeContext());
  if (!operandNames.contains("typing.Optional")) {
    return null;
  }
  final PyExpression indexExpr = subscription.getIndexExpression();
  if (indexExpr != null) {
    final PyType argumentType = getType(indexExpr, context);
    if (argumentType != null) {
      return Ref.create(PyUnionType.union(argumentType, PyNoneType.INSTANCE));
    }
  }
  return Ref.create();
}
/**
 * Handles forward references: a string literal used in an annotation position contains
 * the actual type expression, which is parsed and evaluated in the literal's context.
 */
@Nullable
private static PyType getStringBasedType(@NotNull PsiElement element, @NotNull Context context) {
  if (!(element instanceof PyStringLiteralExpression)) {
    return null;
  }
  // XXX: Requires switching from stub to AST
  final String typeText = ((PyStringLiteralExpression)element).getStringValue();
  return getStringBasedType(typeText, element, context);
}
/**
 * Parses {@code contents} as a Python expression in the context of {@code anchor}'s file
 * and evaluates it as a type. A top-level tuple (as in "# type: (X, Y)" comments) is
 * converted element-by-element into a tuple type.
 */
@Nullable
private static PyType getStringBasedType(@NotNull String contents, @NotNull PsiElement anchor, @NotNull Context context) {
  final Project project = anchor.getProject();
  final PyExpressionCodeFragmentImpl fragment = new PyExpressionCodeFragmentImpl(project, "dummy.py", contents, false);
  fragment.setContext(anchor.getContainingFile());
  final PsiElement firstChild = fragment.getFirstChild();
  if (!(firstChild instanceof PyExpressionStatement)) {
    return null;
  }
  final PyExpression parsedExpr = ((PyExpressionStatement)firstChild).getExpression();
  if (parsedExpr instanceof PyTupleExpression) {
    final List<PyType> elementTypes = new ArrayList<>();
    for (PyExpression tupleElement : ((PyTupleExpression)parsedExpr).getElements()) {
      elementTypes.add(getType(tupleElement, context));
    }
    return PyTupleType.create(anchor, elementTypes.toArray(new PyType[elementTypes.size()]));
  }
  return getType(parsedExpr, context);
}
/**
 * Recognizes typing.Callable[[P1, P2, ...], R] and typing.Callable[..., R] subscriptions
 * and converts them into callable types. Returns null for anything else.
 */
@Nullable
private static PyType getCallableType(@NotNull PsiElement resolved, @NotNull Context context) {
  if (!(resolved instanceof PySubscriptionExpression)) {
    return null;
  }
  final PySubscriptionExpression subscription = (PySubscriptionExpression)resolved;
  final Collection<String> operandNames = resolveToQualifiedNames(subscription.getOperand(), context.getTypeContext());
  if (!operandNames.contains("typing.Callable")) {
    return null;
  }
  final PyExpression indexExpr = subscription.getIndexExpression();
  if (!(indexExpr instanceof PyTupleExpression)) {
    return null;
  }
  final PyExpression[] indexElements = ((PyTupleExpression)indexExpr).getElements();
  if (indexElements.length != 2) {
    return null;
  }
  final PyExpression parametersExpr = indexElements[0];
  final PyExpression returnTypeExpr = indexElements[1];
  if (parametersExpr instanceof PyListLiteralExpression) {
    // Explicit parameter list: each list element is one positional parameter's type.
    final List<PyCallableParameter> parameters = new ArrayList<>();
    for (PyExpression paramExpr : ((PyListLiteralExpression)parametersExpr).getElements()) {
      parameters.add(new PyCallableParameterImpl(null, getType(paramExpr, context)));
    }
    return new PyCallableTypeImpl(parameters, getType(returnTypeExpr, context));
  }
  if (isEllipsis(parametersExpr)) {
    // Callable[..., R]: the parameter list is unspecified.
    return new PyCallableTypeImpl(null, getType(returnTypeExpr, context));
  }
  return null;
}
/** True when the expression is the "..." literal (modeled as a PyNoneLiteralExpression with the ellipsis flag). */
private static boolean isEllipsis(@NotNull PyExpression parametersExpr) {
  if (parametersExpr instanceof PyNoneLiteralExpression) {
    return ((PyNoneLiteralExpression)parametersExpr).isEllipsis();
  }
  return false;
}
/** Recognizes typing.Union[...] subscriptions and unions the types of all subscript arguments. */
@Nullable
private static PyType getUnionType(@NotNull PsiElement element, @NotNull Context context) {
  if (!(element instanceof PySubscriptionExpression)) {
    return null;
  }
  final PySubscriptionExpression subscription = (PySubscriptionExpression)element;
  final Collection<String> operandNames = resolveToQualifiedNames(subscription.getOperand(), context.getTypeContext());
  if (operandNames.contains("typing.Union")) {
    return PyUnionType.union(getIndexTypes(subscription, context));
  }
  return null;
}
/**
 * Recognizes calls of the form {@code TypeVar('T', ...)} and converts them into a
 * generic type named after the first (string) argument, with the remaining arguments
 * forming its bound. Returns null for anything else.
 */
@Nullable
private static PyGenericType getGenericType(@NotNull PsiElement element, @NotNull Context context) {
  if (!(element instanceof PyCallExpression)) {
    return null;
  }
  final PyCallExpression call = (PyCallExpression)element;
  final PyExpression callee = call.getCallee();
  if (callee == null) {
    return null;
  }
  final Collection<String> calleeNames = resolveToQualifiedNames(callee, context.getTypeContext());
  if (!calleeNames.contains("typing.TypeVar")) {
    return null;
  }
  final PyExpression[] arguments = call.getArguments();
  if (arguments.length == 0 || !(arguments[0] instanceof PyStringLiteralExpression)) {
    return null;
  }
  final String typeVarName = ((PyStringLiteralExpression)arguments[0]).getStringValue();
  if (typeVarName == null) {
    return null;
  }
  return new PyGenericType(typeVarName, getGenericTypeBound(arguments, context));
}
/** Unions all TypeVar constraint arguments (everything after the name) into the TypeVar's bound. */
@Nullable
private static PyType getGenericTypeBound(@NotNull PyExpression[] typeVarArguments, @NotNull Context context) {
  final List<PyType> constraintTypes = new ArrayList<>();
  for (int argIndex = 1; argIndex < typeVarArguments.length; argIndex++) {
    constraintTypes.add(getType(typeVarArguments[argIndex], context));
  }
  return PyUnionType.union(constraintTypes);
}
/**
 * Collects the types of all subscript arguments of a subscription expression:
 * Foo[A, B] yields [typeof A, typeof B]; Foo[A] yields a single-element list.
 */
@NotNull
private static List<PyType> getIndexTypes(@NotNull PySubscriptionExpression expression, @NotNull Context context) {
  final List<PyType> indexTypes = new ArrayList<>();
  final PyExpression indexExpr = expression.getIndexExpression();
  if (indexExpr instanceof PyTupleExpression) {
    for (PyExpression tupleElement : ((PyTupleExpression)indexExpr).getElements()) {
      indexTypes.add(getType(tupleElement, context));
    }
  }
  else if (indexExpr != null) {
    indexTypes.add(getType(indexExpr, context));
  }
  return indexTypes;
}
/**
 * Handles subscription of a class in an annotation, e.g. List[int] or Tuple[int, str].
 * Tuples get dedicated handling (including the homogeneous Tuple[X, ...] form); other
 * classes become parameterized collection types.
 */
@Nullable
private static PyType getParameterizedType(@NotNull PsiElement element, @NotNull Context context) {
  if (!(element instanceof PySubscriptionExpression)) {
    return null;
  }
  final PySubscriptionExpression subscription = (PySubscriptionExpression)element;
  final PyExpression operand = subscription.getOperand();
  final PyExpression indexExpr = subscription.getIndexExpression();
  final PyType operandType = getType(operand, context);
  if (!(operandType instanceof PyClassType)) {
    return null;
  }
  final PyClass operandClass = ((PyClassType)operandType).getPyClass();
  final List<PyType> indexTypes = getIndexTypes(subscription, context);
  if (PyNames.TUPLE.equals(operandClass.getQualifiedName())) {
    if (indexExpr instanceof PyTupleExpression) {
      final PyExpression[] tupleElements = ((PyTupleExpression)indexExpr).getElements();
      // Tuple[X, ...] denotes a homogeneous tuple of arbitrary length.
      if (tupleElements.length == 2 && isEllipsis(tupleElements[1])) {
        return PyTupleType.createHomogeneous(element, indexTypes.get(0));
      }
    }
    return PyTupleType.create(element, indexTypes.toArray(new PyType[indexTypes.size()]));
  }
  if (indexExpr != null) {
    return new PyCollectionTypeImpl(operandClass, false, indexTypes);
  }
  return null;
}
/** Maps typing collection aliases (e.g. typing.List) to their builtin counterparts via COLLECTION_CLASSES. */
@Nullable
private static PyType getBuiltinCollection(@NotNull PsiElement element) {
  final String collectionName = getQualifiedName(element);
  final String builtinName = COLLECTION_CLASSES.get(collectionName);
  if (builtinName == null) {
    return null;
  }
  return PyTypeParser.getTypeByName(element, builtinName);
}
/**
 * Resolves an annotation expression to the PSI elements it denotes.
 * Two substitutions are applied: a resolved __init__ is replaced by its class, and a
 * resolved assignment target is replaced by its assigned value (so type aliases like
 * "MyList = List[int]" are followed). Falls back to the expression itself when nothing
 * resolves, so callers always get at least one element.
 */
@NotNull
private static List<PsiElement> tryResolving(@NotNull PyExpression expression, @NotNull TypeEvalContext context) {
  final List<PsiElement> elements = Lists.newArrayList();
  if (expression instanceof PyReferenceExpression) {
    final PyReferenceExpression referenceExpr = (PyReferenceExpression)expression;
    final PyResolveContext resolveContext = PyResolveContext.noImplicits().withTypeEvalContext(context);
    final PsiPolyVariantReference reference = referenceExpr.getReference(resolveContext);
    final List<PsiElement> resolved = PyUtil.multiResolveTopPriority(reference);
    for (PsiElement element : resolved) {
      if (element instanceof PyFunction) {
        final PyFunction function = (PyFunction)element;
        if (PyUtil.isInit(function)) {
          // Resolving a class reference may land on its __init__; substitute the class.
          final PyClass cls = function.getContainingClass();
          if (cls != null) {
            elements.add(cls);
            continue;
          }
        }
      }
      else if (element instanceof PyTargetExpression) {
        final PyTargetExpression targetExpr = (PyTargetExpression)element;
        // XXX: Requires switching from stub to AST
        final PyExpression assignedValue = targetExpr.findAssignedValue();
        if (assignedValue != null) {
          elements.add(assignedValue);
          continue;
        }
      }
      if (element != null) {
        elements.add(element);
      }
    }
  }
  return !elements.isEmpty() ? elements : Collections.<PsiElement>singletonList(expression);
}
/**
 * Collects the qualified names of everything the expression may resolve to,
 * preserving resolution order and dropping elements without a qualified name.
 */
@NotNull
private static Collection<String> resolveToQualifiedNames(@NotNull PyExpression expression, @NotNull TypeEvalContext context) {
  final Set<String> qualifiedNames = Sets.newLinkedHashSet();
  for (PsiElement resolved : tryResolving(expression, context)) {
    final String qualifiedName = getQualifiedName(resolved);
    if (qualifiedName == null) {
      continue;
    }
    qualifiedNames.add(qualifiedName);
  }
  return qualifiedNames;
}
/** Returns the element's qualified name when it owns one (class, function, target), else null. */
@Nullable
private static String getQualifiedName(@NotNull PsiElement element) {
  if (element instanceof PyQualifiedNameOwner) {
    return ((PyQualifiedNameOwner)element).getQualifiedName();
  }
  return null;
}
/**
 * Wraps a {@link TypeEvalContext} together with the set of expressions currently being
 * evaluated; used by getTypeForResolvedElement() to break cycles in recursive type
 * definitions.
 */
private static class Context {
  @NotNull private final TypeEvalContext myContext;
  // Expressions on the current evaluation stack (pushed/popped in getTypeForResolvedElement).
  @NotNull private final Set<PsiElement> myCache = new HashSet<>();
  private Context(@NotNull TypeEvalContext context) {
    myContext = context;
  }
  @NotNull
  public TypeEvalContext getTypeContext() {
    return myContext;
  }
  @NotNull
  public Set<PsiElement> getExpressionCache() {
    return myCache;
  }
}
}
| |
/*
* Copyright (C) 2011 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import static com.google.common.collect.BoundType.CLOSED;
import static com.google.common.collect.BoundType.OPEN;
import com.google.common.annotations.GwtCompatible;
import com.google.common.base.Objects;
import junit.framework.TestCase;
import java.util.Arrays;
import java.util.List;
/**
* Tests for {@code GeneralRange}.
*
* @author Louis Wasserman
*/
@GwtCompatible(emulated = true)
@GwtCompatible(emulated = true)
public class GeneralRangeTest extends TestCase {
  // nullsFirst() lets null participate as the least element in every comparison below.
  private static final Ordering<Integer> ORDERING = Ordering.natural().nullsFirst();
  private static final List<Integer> IN_ORDER_VALUES = Arrays.asList(null, 1, 2, 3, 4, 5);
  // A range whose lower endpoint exceeds its upper endpoint must be rejected.
  public void testCreateEmptyRangeFails() {
    for (BoundType lboundType : BoundType.values()) {
      for (BoundType uboundType : BoundType.values()) {
        try {
          GeneralRange.range(ORDERING, 4, lboundType, 2, uboundType);
          fail("Expected IAE");
        } catch (IllegalArgumentException expected) {}
      }
    }
  }
  // (i, i), open on both ends, is contradictory and must be rejected.
  public void testCreateEmptyRangeOpenOpenFails() {
    for (Integer i : IN_ORDER_VALUES) {
      try {
        GeneralRange.range(ORDERING, i, OPEN, i, OPEN);
        fail("Expected IAE");
      } catch (IllegalArgumentException expected) {}
    }
  }
  // [i, i) is a valid but empty range.
  public void testCreateEmptyRangeClosedOpenSucceeds() {
    for (Integer i : IN_ORDER_VALUES) {
      GeneralRange<Integer> range = GeneralRange.range(ORDERING, i, CLOSED, i, OPEN);
      for (Integer j : IN_ORDER_VALUES) {
        assertFalse(range.contains(j));
      }
    }
  }
  // (i, i] is likewise valid but empty.
  public void testCreateEmptyRangeOpenClosedSucceeds() {
    for (Integer i : IN_ORDER_VALUES) {
      GeneralRange<Integer> range = GeneralRange.range(ORDERING, i, OPEN, i, CLOSED);
      for (Integer j : IN_ORDER_VALUES) {
        assertFalse(range.contains(j));
      }
    }
  }
  // [i, i] contains exactly i — including i == null, thanks to nullsFirst().
  public void testCreateSingletonRangeSucceeds() {
    for (Integer i : IN_ORDER_VALUES) {
      GeneralRange<Integer> range = GeneralRange.range(ORDERING, i, CLOSED, i, CLOSED);
      for (Integer j : IN_ORDER_VALUES) {
        assertEquals(Objects.equal(i, j), range.contains(j));
      }
    }
  }
  public void testSingletonRange() {
    GeneralRange<Integer> range = GeneralRange.range(ORDERING, 3, CLOSED, 3, CLOSED);
    for (Integer i : IN_ORDER_VALUES) {
      assertEquals(ORDERING.compare(i, 3) == 0, range.contains(i));
    }
  }
  // downTo(): contains/tooLow must honor the bound type; nothing can be too high.
  public void testLowerRange() {
    for (BoundType lBoundType : BoundType.values()) {
      GeneralRange<Integer> range = GeneralRange.downTo(ORDERING, 3, lBoundType);
      for (Integer i : IN_ORDER_VALUES) {
        assertEquals(ORDERING.compare(i, 3) > 0
            || (ORDERING.compare(i, 3) == 0 && lBoundType == CLOSED), range.contains(i));
        assertEquals(ORDERING.compare(i, 3) < 0
            || (ORDERING.compare(i, 3) == 0 && lBoundType == OPEN), range.tooLow(i));
        assertFalse(range.tooHigh(i));
      }
    }
  }
  // upTo(): mirror image of testLowerRange.
  public void testUpperRange() {
    for (BoundType lBoundType : BoundType.values()) {
      GeneralRange<Integer> range = GeneralRange.upTo(ORDERING, 3, lBoundType);
      for (Integer i : IN_ORDER_VALUES) {
        assertEquals(ORDERING.compare(i, 3) < 0
            || (ORDERING.compare(i, 3) == 0 && lBoundType == CLOSED), range.contains(i));
        assertEquals(ORDERING.compare(i, 3) > 0
            || (ORDERING.compare(i, 3) == 0 && lBoundType == OPEN), range.tooHigh(i));
        assertFalse(range.tooLow(i));
      }
    }
  }
  // GeneralRange must agree with Range for all bound-type combinations (Range rejects null).
  public void testDoublyBoundedAgainstRange() {
    for (BoundType lboundType : BoundType.values()) {
      for (BoundType uboundType : BoundType.values()) {
        Range<Integer> range = Range.range(2, lboundType, 4, uboundType);
        GeneralRange<Integer> gRange = GeneralRange.range(ORDERING, 2, lboundType, 4, uboundType);
        for (Integer i : IN_ORDER_VALUES) {
          assertEquals(i != null && range.contains(i), gRange.contains(i));
        }
      }
    }
  }
  // Intersection with matching endpoints keeps the stricter (OPEN) bound on each side.
  public void testIntersectAgainstMatchingEndpointsRange() {
    GeneralRange<Integer> range = GeneralRange.range(ORDERING, 2, CLOSED, 4, OPEN);
    assertEquals(GeneralRange.range(ORDERING, 2, OPEN, 4, OPEN),
        range.intersect(GeneralRange.range(ORDERING, 2, OPEN, 4, CLOSED)));
  }
  public void testIntersectAgainstBiggerRange() {
    GeneralRange<Integer> range = GeneralRange.range(ORDERING, 2, CLOSED, 4, OPEN);
    assertEquals(GeneralRange.range(ORDERING, 2, CLOSED, 4, OPEN),
        range.intersect(GeneralRange.range(ORDERING, null, OPEN, 5, CLOSED)));
    assertEquals(GeneralRange.range(ORDERING, 2, OPEN, 4, OPEN),
        range.intersect(GeneralRange.range(ORDERING, 2, OPEN, 5, CLOSED)));
    assertEquals(GeneralRange.range(ORDERING, 2, CLOSED, 4, OPEN),
        range.intersect(GeneralRange.range(ORDERING, 1, OPEN, 4, OPEN)));
  }
  public void testIntersectAgainstSmallerRange() {
    GeneralRange<Integer> range = GeneralRange.range(ORDERING, 2, OPEN, 4, OPEN);
    assertEquals(GeneralRange.range(ORDERING, 3, CLOSED, 4, OPEN),
        range.intersect(GeneralRange.range(ORDERING, 3, CLOSED, 4, CLOSED)));
  }
  public void testIntersectOverlappingRange() {
    GeneralRange<Integer> range = GeneralRange.range(ORDERING, 2, OPEN, 4, CLOSED);
    assertEquals(GeneralRange.range(ORDERING, 3, CLOSED, 4, CLOSED),
        range.intersect(GeneralRange.range(ORDERING, 3, CLOSED, 5, CLOSED)));
    assertEquals(GeneralRange.range(ORDERING, 2, OPEN, 3, OPEN),
        range.intersect(GeneralRange.range(ORDERING, 1, OPEN, 3, OPEN)));
  }
  // Disjoint (or merely touching at an open endpoint) ranges intersect to an empty range.
  public void testIntersectNonOverlappingRange() {
    GeneralRange<Integer> range = GeneralRange.range(ORDERING, 2, OPEN, 4, CLOSED);
    assertTrue(range.intersect(GeneralRange.range(ORDERING, 5, CLOSED, 6, CLOSED)).isEmpty());
    assertTrue(range.intersect(GeneralRange.range(ORDERING, 1, OPEN, 2, OPEN)).isEmpty());
  }
  public void testFromRangeAll() {
    assertEquals(GeneralRange.all(Ordering.natural()), GeneralRange.from(Range.all()));
  }
  public void testFromRangeOneEnd() {
    for (BoundType endpointType : BoundType.values()) {
      assertEquals(GeneralRange.upTo(Ordering.natural(), 3, endpointType),
          GeneralRange.from(Range.upTo(3, endpointType)));
      assertEquals(GeneralRange.downTo(Ordering.natural(), 3, endpointType),
          GeneralRange.from(Range.downTo(3, endpointType)));
    }
  }
  public void testFromRangeTwoEnds() {
    for (BoundType lowerType : BoundType.values()) {
      for (BoundType upperType : BoundType.values()) {
        assertEquals(GeneralRange.range(Ordering.natural(), 3, lowerType, 4, upperType),
            GeneralRange.from(Range.range(3, lowerType, 4, upperType)));
      }
    }
  }
  // reverse() flips the ordering and swaps the roles (and order) of the two endpoints.
  public void testReverse() {
    assertEquals(GeneralRange.all(ORDERING.reverse()), GeneralRange.all(ORDERING).reverse());
    assertEquals(GeneralRange.downTo(ORDERING.reverse(), 3, CLOSED),
        GeneralRange.upTo(ORDERING, 3, CLOSED).reverse());
    assertEquals(GeneralRange.upTo(ORDERING.reverse(), 3, OPEN),
        GeneralRange.downTo(ORDERING, 3, OPEN).reverse());
    assertEquals(GeneralRange.range(ORDERING.reverse(), 5, OPEN, 3, CLOSED),
        GeneralRange.range(ORDERING, 3, CLOSED, 5, OPEN).reverse());
  }
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.ec2;
import com.amazonaws.services.ec2.model.*;
import com.amazonaws.*;
/**
* Abstract implementation of {@code AmazonEC2}. Convenient method forms pass
* through to the corresponding overload that takes a request object, which
* throws an {@code UnsupportedOperationException}.
*/
public class AbstractAmazonEC2 implements AmazonEC2 {
    // NOTE(review): generated-SDK-style stubs. Every request-taking operation throws
    // UnsupportedOperationException so subclasses can override only what they need;
    // the no-arg convenience overloads delegate to their request-object counterparts.
    protected AbstractAmazonEC2() {
    }
    @Override
    public void setEndpoint(String endpoint) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public void setRegion(com.amazonaws.regions.Region region) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public AcceptVpcPeeringConnectionResult acceptVpcPeeringConnection(
            AcceptVpcPeeringConnectionRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public AcceptVpcPeeringConnectionResult acceptVpcPeeringConnection() {
        return acceptVpcPeeringConnection(new AcceptVpcPeeringConnectionRequest());
    }
    @Override
    public AllocateAddressResult allocateAddress(AllocateAddressRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public AllocateAddressResult allocateAddress() {
        return allocateAddress(new AllocateAddressRequest());
    }
    @Override
    public AllocateHostsResult allocateHosts(AllocateHostsRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public AssignPrivateIpAddressesResult assignPrivateIpAddresses(
            AssignPrivateIpAddressesRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public AssociateAddressResult associateAddress(
            AssociateAddressRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public AssociateDhcpOptionsResult associateDhcpOptions(
            AssociateDhcpOptionsRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public AssociateRouteTableResult associateRouteTable(
            AssociateRouteTableRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public AttachClassicLinkVpcResult attachClassicLinkVpc(
            AttachClassicLinkVpcRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public AttachInternetGatewayResult attachInternetGateway(
            AttachInternetGatewayRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public AttachNetworkInterfaceResult attachNetworkInterface(
            AttachNetworkInterfaceRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public AttachVolumeResult attachVolume(AttachVolumeRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public AttachVpnGatewayResult attachVpnGateway(
            AttachVpnGatewayRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public AuthorizeSecurityGroupEgressResult authorizeSecurityGroupEgress(
            AuthorizeSecurityGroupEgressRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public AuthorizeSecurityGroupIngressResult authorizeSecurityGroupIngress(
            AuthorizeSecurityGroupIngressRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public BundleInstanceResult bundleInstance(BundleInstanceRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public CancelBundleTaskResult cancelBundleTask(
            CancelBundleTaskRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public CancelConversionTaskResult cancelConversionTask(
            CancelConversionTaskRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public CancelExportTaskResult cancelExportTask(
            CancelExportTaskRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public CancelImportTaskResult cancelImportTask(
            CancelImportTaskRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public CancelImportTaskResult cancelImportTask() {
        return cancelImportTask(new CancelImportTaskRequest());
    }
    @Override
    public CancelReservedInstancesListingResult cancelReservedInstancesListing(
            CancelReservedInstancesListingRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public CancelSpotFleetRequestsResult cancelSpotFleetRequests(
            CancelSpotFleetRequestsRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public CancelSpotInstanceRequestsResult cancelSpotInstanceRequests(
            CancelSpotInstanceRequestsRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public ConfirmProductInstanceResult confirmProductInstance(
            ConfirmProductInstanceRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public CopyImageResult copyImage(CopyImageRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public CopySnapshotResult copySnapshot(CopySnapshotRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public CreateCustomerGatewayResult createCustomerGateway(
            CreateCustomerGatewayRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public CreateDhcpOptionsResult createDhcpOptions(
            CreateDhcpOptionsRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public CreateFlowLogsResult createFlowLogs(CreateFlowLogsRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public CreateImageResult createImage(CreateImageRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public CreateInstanceExportTaskResult createInstanceExportTask(
            CreateInstanceExportTaskRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public CreateInternetGatewayResult createInternetGateway(
            CreateInternetGatewayRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public CreateInternetGatewayResult createInternetGateway() {
        return createInternetGateway(new CreateInternetGatewayRequest());
    }
    @Override
    public CreateKeyPairResult createKeyPair(CreateKeyPairRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public CreateNatGatewayResult createNatGateway(
            CreateNatGatewayRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public CreateNetworkAclResult createNetworkAcl(
            CreateNetworkAclRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public CreateNetworkAclEntryResult createNetworkAclEntry(
            CreateNetworkAclEntryRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public CreateNetworkInterfaceResult createNetworkInterface(
            CreateNetworkInterfaceRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public CreatePlacementGroupResult createPlacementGroup(
            CreatePlacementGroupRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public CreateReservedInstancesListingResult createReservedInstancesListing(
            CreateReservedInstancesListingRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public CreateRouteResult createRoute(CreateRouteRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public CreateRouteTableResult createRouteTable(
            CreateRouteTableRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
@Override
public CreateSecurityGroupResult createSecurityGroup(
CreateSecurityGroupRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public CreateSnapshotResult createSnapshot(CreateSnapshotRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public CreateSpotDatafeedSubscriptionResult createSpotDatafeedSubscription(
CreateSpotDatafeedSubscriptionRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public CreateSubnetResult createSubnet(CreateSubnetRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public CreateTagsResult createTags(CreateTagsRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public CreateVolumeResult createVolume(CreateVolumeRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public CreateVpcResult createVpc(CreateVpcRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public CreateVpcEndpointResult createVpcEndpoint(
CreateVpcEndpointRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public CreateVpcPeeringConnectionResult createVpcPeeringConnection(
CreateVpcPeeringConnectionRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public CreateVpcPeeringConnectionResult createVpcPeeringConnection() {
return createVpcPeeringConnection(new CreateVpcPeeringConnectionRequest());
}
@Override
public CreateVpnConnectionResult createVpnConnection(
CreateVpnConnectionRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public CreateVpnConnectionRouteResult createVpnConnectionRoute(
CreateVpnConnectionRouteRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public CreateVpnGatewayResult createVpnGateway(
CreateVpnGatewayRequest request) {
throw new java.lang.UnsupportedOperationException();
}
// Stub implementations of the Delete* operations: each throws
// UnsupportedOperationException unless a subclass overrides it. No-arg
// convenience overloads delegate to the request-taking variant.
@Override
public DeleteCustomerGatewayResult deleteCustomerGateway(DeleteCustomerGatewayRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DeleteDhcpOptionsResult deleteDhcpOptions(DeleteDhcpOptionsRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DeleteFlowLogsResult deleteFlowLogs(DeleteFlowLogsRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DeleteInternetGatewayResult deleteInternetGateway(DeleteInternetGatewayRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DeleteKeyPairResult deleteKeyPair(DeleteKeyPairRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DeleteNatGatewayResult deleteNatGateway(DeleteNatGatewayRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DeleteNetworkAclResult deleteNetworkAcl(DeleteNetworkAclRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DeleteNetworkAclEntryResult deleteNetworkAclEntry(DeleteNetworkAclEntryRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DeleteNetworkInterfaceResult deleteNetworkInterface(DeleteNetworkInterfaceRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DeletePlacementGroupResult deletePlacementGroup(DeletePlacementGroupRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DeleteRouteResult deleteRoute(DeleteRouteRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DeleteRouteTableResult deleteRouteTable(DeleteRouteTableRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DeleteSecurityGroupResult deleteSecurityGroup(DeleteSecurityGroupRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DeleteSnapshotResult deleteSnapshot(DeleteSnapshotRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DeleteSpotDatafeedSubscriptionResult deleteSpotDatafeedSubscription(DeleteSpotDatafeedSubscriptionRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DeleteSpotDatafeedSubscriptionResult deleteSpotDatafeedSubscription() {
    // Delegates with an empty request object.
    return deleteSpotDatafeedSubscription(new DeleteSpotDatafeedSubscriptionRequest());
}

@Override
public DeleteSubnetResult deleteSubnet(DeleteSubnetRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DeleteTagsResult deleteTags(DeleteTagsRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DeleteVolumeResult deleteVolume(DeleteVolumeRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DeleteVpcResult deleteVpc(DeleteVpcRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DeleteVpcEndpointsResult deleteVpcEndpoints(DeleteVpcEndpointsRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DeleteVpcPeeringConnectionResult deleteVpcPeeringConnection(DeleteVpcPeeringConnectionRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DeleteVpnConnectionResult deleteVpnConnection(DeleteVpnConnectionRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DeleteVpnConnectionRouteResult deleteVpnConnectionRoute(DeleteVpnConnectionRouteRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DeleteVpnGatewayResult deleteVpnGateway(DeleteVpnGatewayRequest request) {
    throw new UnsupportedOperationException();
}
// Stub implementations of DeregisterImage and the Describe* operations up to
// DescribeImportSnapshotTasks: each throws UnsupportedOperationException
// unless a subclass overrides it. No-arg convenience overloads delegate to
// the request-taking variant.
@Override
public DeregisterImageResult deregisterImage(DeregisterImageRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeAccountAttributesResult describeAccountAttributes(DescribeAccountAttributesRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeAccountAttributesResult describeAccountAttributes() {
    // Delegates with an empty request object.
    return describeAccountAttributes(new DescribeAccountAttributesRequest());
}

@Override
public DescribeAddressesResult describeAddresses(DescribeAddressesRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeAddressesResult describeAddresses() {
    return describeAddresses(new DescribeAddressesRequest());
}

@Override
public DescribeAvailabilityZonesResult describeAvailabilityZones(DescribeAvailabilityZonesRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeAvailabilityZonesResult describeAvailabilityZones() {
    return describeAvailabilityZones(new DescribeAvailabilityZonesRequest());
}

@Override
public DescribeBundleTasksResult describeBundleTasks(DescribeBundleTasksRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeBundleTasksResult describeBundleTasks() {
    return describeBundleTasks(new DescribeBundleTasksRequest());
}

@Override
public DescribeClassicLinkInstancesResult describeClassicLinkInstances(DescribeClassicLinkInstancesRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeClassicLinkInstancesResult describeClassicLinkInstances() {
    return describeClassicLinkInstances(new DescribeClassicLinkInstancesRequest());
}

@Override
public DescribeConversionTasksResult describeConversionTasks(DescribeConversionTasksRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeConversionTasksResult describeConversionTasks() {
    return describeConversionTasks(new DescribeConversionTasksRequest());
}

@Override
public DescribeCustomerGatewaysResult describeCustomerGateways(DescribeCustomerGatewaysRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeCustomerGatewaysResult describeCustomerGateways() {
    return describeCustomerGateways(new DescribeCustomerGatewaysRequest());
}

@Override
public DescribeDhcpOptionsResult describeDhcpOptions(DescribeDhcpOptionsRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeDhcpOptionsResult describeDhcpOptions() {
    return describeDhcpOptions(new DescribeDhcpOptionsRequest());
}

@Override
public DescribeExportTasksResult describeExportTasks(DescribeExportTasksRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeExportTasksResult describeExportTasks() {
    return describeExportTasks(new DescribeExportTasksRequest());
}

@Override
public DescribeFlowLogsResult describeFlowLogs(DescribeFlowLogsRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeFlowLogsResult describeFlowLogs() {
    return describeFlowLogs(new DescribeFlowLogsRequest());
}

@Override
public DescribeHostsResult describeHosts(DescribeHostsRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeHostsResult describeHosts() {
    return describeHosts(new DescribeHostsRequest());
}

@Override
public DescribeIdFormatResult describeIdFormat(DescribeIdFormatRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeIdFormatResult describeIdFormat() {
    return describeIdFormat(new DescribeIdFormatRequest());
}

@Override
public DescribeIdentityIdFormatResult describeIdentityIdFormat(DescribeIdentityIdFormatRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeImageAttributeResult describeImageAttribute(DescribeImageAttributeRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeImagesResult describeImages(DescribeImagesRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeImagesResult describeImages() {
    return describeImages(new DescribeImagesRequest());
}

@Override
public DescribeImportImageTasksResult describeImportImageTasks(DescribeImportImageTasksRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeImportImageTasksResult describeImportImageTasks() {
    return describeImportImageTasks(new DescribeImportImageTasksRequest());
}

@Override
public DescribeImportSnapshotTasksResult describeImportSnapshotTasks(DescribeImportSnapshotTasksRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeImportSnapshotTasksResult describeImportSnapshotTasks() {
    return describeImportSnapshotTasks(new DescribeImportSnapshotTasksRequest());
}
// Stub implementations of the Describe* operations from DescribeInstanceAttribute
// through DescribeScheduledInstanceAvailability: each throws
// UnsupportedOperationException unless a subclass overrides it. No-arg
// convenience overloads delegate to the request-taking variant.
@Override
public DescribeInstanceAttributeResult describeInstanceAttribute(DescribeInstanceAttributeRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeInstanceStatusResult describeInstanceStatus(DescribeInstanceStatusRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeInstanceStatusResult describeInstanceStatus() {
    return describeInstanceStatus(new DescribeInstanceStatusRequest());
}

@Override
public DescribeInstancesResult describeInstances(DescribeInstancesRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeInstancesResult describeInstances() {
    return describeInstances(new DescribeInstancesRequest());
}

@Override
public DescribeInternetGatewaysResult describeInternetGateways(DescribeInternetGatewaysRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeInternetGatewaysResult describeInternetGateways() {
    return describeInternetGateways(new DescribeInternetGatewaysRequest());
}

@Override
public DescribeKeyPairsResult describeKeyPairs(DescribeKeyPairsRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeKeyPairsResult describeKeyPairs() {
    return describeKeyPairs(new DescribeKeyPairsRequest());
}

@Override
public DescribeMovingAddressesResult describeMovingAddresses(DescribeMovingAddressesRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeMovingAddressesResult describeMovingAddresses() {
    return describeMovingAddresses(new DescribeMovingAddressesRequest());
}

@Override
public DescribeNatGatewaysResult describeNatGateways(DescribeNatGatewaysRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeNetworkAclsResult describeNetworkAcls(DescribeNetworkAclsRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeNetworkAclsResult describeNetworkAcls() {
    return describeNetworkAcls(new DescribeNetworkAclsRequest());
}

@Override
public DescribeNetworkInterfaceAttributeResult describeNetworkInterfaceAttribute(DescribeNetworkInterfaceAttributeRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeNetworkInterfacesResult describeNetworkInterfaces(DescribeNetworkInterfacesRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeNetworkInterfacesResult describeNetworkInterfaces() {
    return describeNetworkInterfaces(new DescribeNetworkInterfacesRequest());
}

@Override
public DescribePlacementGroupsResult describePlacementGroups(DescribePlacementGroupsRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribePlacementGroupsResult describePlacementGroups() {
    return describePlacementGroups(new DescribePlacementGroupsRequest());
}

@Override
public DescribePrefixListsResult describePrefixLists(DescribePrefixListsRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribePrefixListsResult describePrefixLists() {
    return describePrefixLists(new DescribePrefixListsRequest());
}

@Override
public DescribeRegionsResult describeRegions(DescribeRegionsRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeRegionsResult describeRegions() {
    return describeRegions(new DescribeRegionsRequest());
}

@Override
public DescribeReservedInstancesResult describeReservedInstances(DescribeReservedInstancesRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeReservedInstancesResult describeReservedInstances() {
    return describeReservedInstances(new DescribeReservedInstancesRequest());
}

@Override
public DescribeReservedInstancesListingsResult describeReservedInstancesListings(DescribeReservedInstancesListingsRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeReservedInstancesListingsResult describeReservedInstancesListings() {
    return describeReservedInstancesListings(new DescribeReservedInstancesListingsRequest());
}

@Override
public DescribeReservedInstancesModificationsResult describeReservedInstancesModifications(DescribeReservedInstancesModificationsRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeReservedInstancesModificationsResult describeReservedInstancesModifications() {
    return describeReservedInstancesModifications(new DescribeReservedInstancesModificationsRequest());
}

@Override
public DescribeReservedInstancesOfferingsResult describeReservedInstancesOfferings(DescribeReservedInstancesOfferingsRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeReservedInstancesOfferingsResult describeReservedInstancesOfferings() {
    return describeReservedInstancesOfferings(new DescribeReservedInstancesOfferingsRequest());
}

@Override
public DescribeRouteTablesResult describeRouteTables(DescribeRouteTablesRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeRouteTablesResult describeRouteTables() {
    return describeRouteTables(new DescribeRouteTablesRequest());
}

@Override
public DescribeScheduledInstanceAvailabilityResult describeScheduledInstanceAvailability(DescribeScheduledInstanceAvailabilityRequest request) {
    throw new UnsupportedOperationException();
}
// Stub implementations of the Describe* operations from DescribeScheduledInstances
// through DescribeVpnGateways: each throws UnsupportedOperationException unless
// a subclass overrides it. No-arg convenience overloads delegate to the
// request-taking variant.
@Override
public DescribeScheduledInstancesResult describeScheduledInstances(DescribeScheduledInstancesRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeSecurityGroupReferencesResult describeSecurityGroupReferences(DescribeSecurityGroupReferencesRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeSecurityGroupsResult describeSecurityGroups(DescribeSecurityGroupsRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeSecurityGroupsResult describeSecurityGroups() {
    return describeSecurityGroups(new DescribeSecurityGroupsRequest());
}

@Override
public DescribeSnapshotAttributeResult describeSnapshotAttribute(DescribeSnapshotAttributeRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeSnapshotsResult describeSnapshots(DescribeSnapshotsRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeSnapshotsResult describeSnapshots() {
    return describeSnapshots(new DescribeSnapshotsRequest());
}

@Override
public DescribeSpotDatafeedSubscriptionResult describeSpotDatafeedSubscription(DescribeSpotDatafeedSubscriptionRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeSpotDatafeedSubscriptionResult describeSpotDatafeedSubscription() {
    return describeSpotDatafeedSubscription(new DescribeSpotDatafeedSubscriptionRequest());
}

@Override
public DescribeSpotFleetInstancesResult describeSpotFleetInstances(DescribeSpotFleetInstancesRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeSpotFleetRequestHistoryResult describeSpotFleetRequestHistory(DescribeSpotFleetRequestHistoryRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeSpotFleetRequestsResult describeSpotFleetRequests(DescribeSpotFleetRequestsRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeSpotFleetRequestsResult describeSpotFleetRequests() {
    return describeSpotFleetRequests(new DescribeSpotFleetRequestsRequest());
}

@Override
public DescribeSpotInstanceRequestsResult describeSpotInstanceRequests(DescribeSpotInstanceRequestsRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeSpotInstanceRequestsResult describeSpotInstanceRequests() {
    return describeSpotInstanceRequests(new DescribeSpotInstanceRequestsRequest());
}

@Override
public DescribeSpotPriceHistoryResult describeSpotPriceHistory(DescribeSpotPriceHistoryRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeSpotPriceHistoryResult describeSpotPriceHistory() {
    return describeSpotPriceHistory(new DescribeSpotPriceHistoryRequest());
}

@Override
public DescribeStaleSecurityGroupsResult describeStaleSecurityGroups(DescribeStaleSecurityGroupsRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeSubnetsResult describeSubnets(DescribeSubnetsRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeSubnetsResult describeSubnets() {
    return describeSubnets(new DescribeSubnetsRequest());
}

@Override
public DescribeTagsResult describeTags(DescribeTagsRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeTagsResult describeTags() {
    return describeTags(new DescribeTagsRequest());
}

@Override
public DescribeVolumeAttributeResult describeVolumeAttribute(DescribeVolumeAttributeRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeVolumeStatusResult describeVolumeStatus(DescribeVolumeStatusRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeVolumeStatusResult describeVolumeStatus() {
    return describeVolumeStatus(new DescribeVolumeStatusRequest());
}

@Override
public DescribeVolumesResult describeVolumes(DescribeVolumesRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeVolumesResult describeVolumes() {
    return describeVolumes(new DescribeVolumesRequest());
}

@Override
public DescribeVpcAttributeResult describeVpcAttribute(DescribeVpcAttributeRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeVpcClassicLinkResult describeVpcClassicLink(DescribeVpcClassicLinkRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeVpcClassicLinkResult describeVpcClassicLink() {
    return describeVpcClassicLink(new DescribeVpcClassicLinkRequest());
}

@Override
public DescribeVpcClassicLinkDnsSupportResult describeVpcClassicLinkDnsSupport(DescribeVpcClassicLinkDnsSupportRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeVpcEndpointServicesResult describeVpcEndpointServices(DescribeVpcEndpointServicesRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeVpcEndpointServicesResult describeVpcEndpointServices() {
    return describeVpcEndpointServices(new DescribeVpcEndpointServicesRequest());
}

@Override
public DescribeVpcEndpointsResult describeVpcEndpoints(DescribeVpcEndpointsRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeVpcEndpointsResult describeVpcEndpoints() {
    return describeVpcEndpoints(new DescribeVpcEndpointsRequest());
}

@Override
public DescribeVpcPeeringConnectionsResult describeVpcPeeringConnections(DescribeVpcPeeringConnectionsRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeVpcPeeringConnectionsResult describeVpcPeeringConnections() {
    return describeVpcPeeringConnections(new DescribeVpcPeeringConnectionsRequest());
}

@Override
public DescribeVpcsResult describeVpcs(DescribeVpcsRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeVpcsResult describeVpcs() {
    return describeVpcs(new DescribeVpcsRequest());
}

@Override
public DescribeVpnConnectionsResult describeVpnConnections(DescribeVpnConnectionsRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeVpnConnectionsResult describeVpnConnections() {
    return describeVpnConnections(new DescribeVpnConnectionsRequest());
}

@Override
public DescribeVpnGatewaysResult describeVpnGateways(DescribeVpnGatewaysRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DescribeVpnGatewaysResult describeVpnGateways() {
    return describeVpnGateways(new DescribeVpnGatewaysRequest());
}
// Stub implementations of the Detach*/Disable*/Disassociate*/Enable*/Get*/Import*
// operations: each throws UnsupportedOperationException unless a subclass
// overrides it. No-arg convenience overloads delegate to the request-taking
// variant.
@Override
public DetachClassicLinkVpcResult detachClassicLinkVpc(DetachClassicLinkVpcRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DetachInternetGatewayResult detachInternetGateway(DetachInternetGatewayRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DetachNetworkInterfaceResult detachNetworkInterface(DetachNetworkInterfaceRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DetachVolumeResult detachVolume(DetachVolumeRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DetachVpnGatewayResult detachVpnGateway(DetachVpnGatewayRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DisableVgwRoutePropagationResult disableVgwRoutePropagation(DisableVgwRoutePropagationRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DisableVpcClassicLinkResult disableVpcClassicLink(DisableVpcClassicLinkRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DisableVpcClassicLinkDnsSupportResult disableVpcClassicLinkDnsSupport(DisableVpcClassicLinkDnsSupportRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DisassociateAddressResult disassociateAddress(DisassociateAddressRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public DisassociateRouteTableResult disassociateRouteTable(DisassociateRouteTableRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public EnableVgwRoutePropagationResult enableVgwRoutePropagation(EnableVgwRoutePropagationRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public EnableVolumeIOResult enableVolumeIO(EnableVolumeIORequest request) {
    throw new UnsupportedOperationException();
}

@Override
public EnableVpcClassicLinkResult enableVpcClassicLink(EnableVpcClassicLinkRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public EnableVpcClassicLinkDnsSupportResult enableVpcClassicLinkDnsSupport(EnableVpcClassicLinkDnsSupportRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public GetConsoleOutputResult getConsoleOutput(GetConsoleOutputRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public GetConsoleScreenshotResult getConsoleScreenshot(GetConsoleScreenshotRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public GetPasswordDataResult getPasswordData(GetPasswordDataRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public ImportImageResult importImage(ImportImageRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public ImportImageResult importImage() {
    // Delegates with an empty request object.
    return importImage(new ImportImageRequest());
}

@Override
public ImportInstanceResult importInstance(ImportInstanceRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public ImportKeyPairResult importKeyPair(ImportKeyPairRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public ImportSnapshotResult importSnapshot(ImportSnapshotRequest request) {
    throw new UnsupportedOperationException();
}

@Override
public ImportSnapshotResult importSnapshot() {
    return importSnapshot(new ImportSnapshotRequest());
}

@Override
public ImportVolumeResult importVolume(ImportVolumeRequest request) {
    throw new UnsupportedOperationException();
}
@Override
public ModifyHostsResult modifyHosts(ModifyHostsRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ModifyIdFormatResult modifyIdFormat(ModifyIdFormatRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ModifyIdentityIdFormatResult modifyIdentityIdFormat(
ModifyIdentityIdFormatRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ModifyImageAttributeResult modifyImageAttribute(
ModifyImageAttributeRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ModifyInstanceAttributeResult modifyInstanceAttribute(
ModifyInstanceAttributeRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ModifyInstancePlacementResult modifyInstancePlacement(
ModifyInstancePlacementRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ModifyNetworkInterfaceAttributeResult modifyNetworkInterfaceAttribute(
ModifyNetworkInterfaceAttributeRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ModifyReservedInstancesResult modifyReservedInstances(
ModifyReservedInstancesRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ModifySnapshotAttributeResult modifySnapshotAttribute(
ModifySnapshotAttributeRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ModifySpotFleetRequestResult modifySpotFleetRequest(
ModifySpotFleetRequestRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ModifySubnetAttributeResult modifySubnetAttribute(
ModifySubnetAttributeRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ModifyVolumeAttributeResult modifyVolumeAttribute(
ModifyVolumeAttributeRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ModifyVpcAttributeResult modifyVpcAttribute(
ModifyVpcAttributeRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ModifyVpcEndpointResult modifyVpcEndpoint(
ModifyVpcEndpointRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ModifyVpcPeeringConnectionOptionsResult modifyVpcPeeringConnectionOptions(
ModifyVpcPeeringConnectionOptionsRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public MonitorInstancesResult monitorInstances(
MonitorInstancesRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public MoveAddressToVpcResult moveAddressToVpc(
MoveAddressToVpcRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public PurchaseReservedInstancesOfferingResult purchaseReservedInstancesOffering(
PurchaseReservedInstancesOfferingRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public PurchaseScheduledInstancesResult purchaseScheduledInstances(
PurchaseScheduledInstancesRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public RebootInstancesResult rebootInstances(RebootInstancesRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public RegisterImageResult registerImage(RegisterImageRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public RejectVpcPeeringConnectionResult rejectVpcPeeringConnection(
RejectVpcPeeringConnectionRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ReleaseAddressResult releaseAddress(ReleaseAddressRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ReleaseHostsResult releaseHosts(ReleaseHostsRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ReplaceNetworkAclAssociationResult replaceNetworkAclAssociation(
ReplaceNetworkAclAssociationRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ReplaceNetworkAclEntryResult replaceNetworkAclEntry(
ReplaceNetworkAclEntryRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ReplaceRouteResult replaceRoute(ReplaceRouteRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ReplaceRouteTableAssociationResult replaceRouteTableAssociation(
ReplaceRouteTableAssociationRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ReportInstanceStatusResult reportInstanceStatus(
ReportInstanceStatusRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public RequestSpotFleetResult requestSpotFleet(
RequestSpotFleetRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public RequestSpotInstancesResult requestSpotInstances(
RequestSpotInstancesRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ResetImageAttributeResult resetImageAttribute(
ResetImageAttributeRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ResetInstanceAttributeResult resetInstanceAttribute(
ResetInstanceAttributeRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ResetNetworkInterfaceAttributeResult resetNetworkInterfaceAttribute(
ResetNetworkInterfaceAttributeRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ResetSnapshotAttributeResult resetSnapshotAttribute(
ResetSnapshotAttributeRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public RestoreAddressToClassicResult restoreAddressToClassic(
RestoreAddressToClassicRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public RevokeSecurityGroupEgressResult revokeSecurityGroupEgress(
RevokeSecurityGroupEgressRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public RevokeSecurityGroupIngressResult revokeSecurityGroupIngress(
RevokeSecurityGroupIngressRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public RevokeSecurityGroupIngressResult revokeSecurityGroupIngress() {
return revokeSecurityGroupIngress(new RevokeSecurityGroupIngressRequest());
}
@Override
public RunInstancesResult runInstances(RunInstancesRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public RunScheduledInstancesResult runScheduledInstances(
RunScheduledInstancesRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public StartInstancesResult startInstances(StartInstancesRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public StopInstancesResult stopInstances(StopInstancesRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public TerminateInstancesResult terminateInstances(
TerminateInstancesRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public UnassignPrivateIpAddressesResult unassignPrivateIpAddresses(
UnassignPrivateIpAddressesRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public UnmonitorInstancesResult unmonitorInstances(
UnmonitorInstancesRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public <X extends AmazonWebServiceRequest> DryRunResult<X> dryRun(
DryRunSupportedRequest<X> request) throws AmazonServiceException,
AmazonClientException {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void shutdown() {
throw new java.lang.UnsupportedOperationException();
}
@Override
public com.amazonaws.ResponseMetadata getCachedResponseMetadata(
com.amazonaws.AmazonWebServiceRequest request) {
throw new java.lang.UnsupportedOperationException();
}
}
| |
/**
* Copyright (c) 2004-2011 QOS.ch
* All rights reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
* LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
* OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
* WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
*/
package org.slf4j;
import java.io.Closeable;
import java.util.Map;
import org.slf4j.helpers.NOPMDCAdapter;
import org.slf4j.helpers.BasicMDCAdapter;
import org.slf4j.helpers.Util;
import org.slf4j.impl.StaticMDCBinder;
import org.slf4j.spi.MDCAdapter;
/**
* This class hides and serves as a substitute for the underlying logging
* system's MDC implementation.
*
* <p>
* If the underlying logging system offers MDC functionality, then SLF4J's MDC,
* i.e. this class, will delegate to the underlying system's MDC. Note that at
* this time, only two logging systems, namely log4j and logback, offer MDC
* functionality. For java.util.logging which does not support MDC,
* {@link BasicMDCAdapter} will be used. For other systems, i.e slf4j-simple
* and slf4j-nop, {@link NOPMDCAdapter} will be used.
*
* <p>
* Thus, as a SLF4J user, you can take advantage of MDC in the presence of log4j,
* logback, or java.util.logging, but without forcing these systems as
* dependencies upon your users.
*
* <p>
* For more information on MDC please see the <a
* href="http://logback.qos.ch/manual/mdc.html">chapter on MDC</a> in the
* logback manual.
*
* <p>
* Please note that all methods in this class are static.
*
* @author Ceki Gülcü
* @since 1.4.1
*/
public class MDC {

    static final String NULL_MDCA_URL = "http://www.slf4j.org/codes.html#null_MDCA";
    static final String NO_STATIC_MDC_BINDER_URL = "http://www.slf4j.org/codes.html#no_static_mdc_binder";
    static MDCAdapter mdcAdapter;

    /**
     * A handle returned by {@link #putCloseable(String, String)} that removes
     * its key from the MDC when {@link #close()} is called, enabling
     * try-with-resources usage.
     */
    public static class MDCCloseable implements Closeable {
        private final String key;

        private MDCCloseable(String key) {
            this.key = key;
        }

        public void close() {
            MDC.remove(this.key);
        }
    }

    private MDC() {
    }

    static {
        try {
            mdcAdapter = StaticMDCBinder.SINGLETON.getMDCA();
        } catch (NoClassDefFoundError classNotFound) {
            // No binding on the classpath: fall back to a no-op adapter so logging
            // callers keep working, but tell the user what happened.
            mdcAdapter = new NOPMDCAdapter();
            String message = classNotFound.getMessage();
            boolean bindingMissing = message != null && message.contains("StaticMDCBinder");
            if (!bindingMissing) {
                // Some other class failed to load; that is a genuine error.
                throw classNotFound;
            }
            Util.report("Failed to load class \"org.slf4j.impl.StaticMDCBinder\".");
            Util.report("Defaulting to no-operation MDCAdapter implementation.");
            Util.report("See " + NO_STATIC_MDC_BINDER_URL + " for further details.");
        } catch (Exception e) {
            // we should never get here
            Util.report("MDC binding unsuccessful.", e);
        }
    }

    /**
     * Rejects a null MDC key.
     *
     * @throws IllegalArgumentException if {@code key} is null
     */
    private static void checkKey(String key) {
        if (key == null) {
            throw new IllegalArgumentException("key parameter cannot be null");
        }
    }

    /**
     * Returns the bound adapter, failing loudly if binding never happened.
     *
     * @throws IllegalStateException if no MDCAdapter is bound
     */
    private static MDCAdapter adapterOrFail() {
        if (mdcAdapter == null) {
            throw new IllegalStateException("MDCAdapter cannot be null. See also " + NULL_MDCA_URL);
        }
        return mdcAdapter;
    }

    /**
     * Puts a diagnostic context value identified by {@code key} into the current
     * thread's diagnostic context map. The key must be non-null; the value may be
     * null only if the underlying implementation supports it. All work is
     * delegated to the MDC of the underlying logging system.
     *
     * @param key non-null key
     * @param val value to put in the map
     * @throws IllegalArgumentException in case the "key" parameter is null
     */
    public static void put(String key, String val) throws IllegalArgumentException {
        checkKey(key);
        adapterOrFail().put(key, val);
    }

    /**
     * Puts a diagnostic context value exactly like {@link #put(String, String)},
     * and additionally returns a {@link Closeable} that removes {@code key} when
     * {@code close} is called — convenient with try-with-resources:
     * <code>
     * try(MDC.MDCCloseable closeable = MDC.putCloseable(key, value)) { .... }
     * </code>
     *
     * @param key non-null key
     * @param val value to put in the map
     * @return a <code>Closeable</code> that removes <code>key</code> on <code>close</code>
     * @throws IllegalArgumentException in case the "key" parameter is null
     */
    public static MDCCloseable putCloseable(String key, String val) throws IllegalArgumentException {
        put(key, val);
        return new MDCCloseable(key);
    }

    /**
     * Returns the diagnostic context value identified by {@code key}, delegating
     * to the underlying logging system's MDC.
     *
     * @param key non-null key
     * @return the string value identified by the <code>key</code> parameter.
     * @throws IllegalArgumentException in case the "key" parameter is null
     */
    public static String get(String key) throws IllegalArgumentException {
        checkKey(key);
        return adapterOrFail().get(key);
    }

    /**
     * Removes the diagnostic context entry identified by {@code key} using the
     * underlying system's MDC implementation. Does nothing if no value is
     * associated with {@code key}.
     *
     * @param key non-null key
     * @throws IllegalArgumentException in case the "key" parameter is null
     */
    public static void remove(String key) throws IllegalArgumentException {
        checkKey(key);
        adapterOrFail().remove(key);
    }

    /**
     * Clears all entries in the MDC of the underlying implementation.
     */
    public static void clear() {
        adapterOrFail().clear();
    }

    /**
     * Returns a copy of the current thread's context map, with keys and values
     * of type String. The returned value may be null.
     *
     * @return A copy of the current thread's context map. May be null.
     * @since 1.5.1
     */
    public static Map<String, String> getCopyOfContextMap() {
        return adapterOrFail().getCopyOfContextMap();
    }

    /**
     * Sets the current thread's context map by first clearing any existing map
     * and then copying the map passed as parameter. The map must only contain
     * String keys and values.
     *
     * @param contextMap must contain only keys and values of type String
     * @since 1.5.1
     */
    public static void setContextMap(Map<String, String> contextMap) {
        adapterOrFail().setContextMap(contextMap);
    }

    /**
     * Returns the MDCAdapter instance currently in use.
     *
     * @return the MDCAdapter instance currently in use.
     * @since 1.4.2
     */
    public static MDCAdapter getMDCAdapter() {
        return mdcAdapter;
    }
}
| |
/*
* Copyright 2016 Realm Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.realm.log;
import android.util.Log;
import java.util.Locale;
import javax.annotation.Nullable;
/**
* Global logger used by all Realm components.
* Custom loggers can be added by registering classes implementing {@link RealmLogger}.
*/
public final class RealmLog {

    // Constant log tag passed to the native logger; final so it cannot be reassigned.
    @SuppressWarnings("FieldCanBeLocal")
    private static final String REALM_JAVA_TAG = "REALM_JAVA";

    /**
     * Adds a logger implementation that will be notified on log events.
     *
     * @param logger the reference to a {@link RealmLogger} implementation.
     * @throws IllegalArgumentException if {@code logger} is null.
     */
    public static void add(RealmLogger logger) {
        //noinspection ConstantConditions
        if (logger == null) {
            throw new IllegalArgumentException("A non-null logger has to be provided");
        }
        nativeAddLogger(logger);
    }

    /**
     * Sets the current {@link LogLevel}. Setting this will affect all registered loggers.
     *
     * @param level see {@link LogLevel}.
     * @throws IllegalArgumentException if {@code level} is outside [LogLevel.ALL, LogLevel.OFF].
     */
    public static void setLevel(int level) {
        if (level < LogLevel.ALL || level > LogLevel.OFF) {
            throw new IllegalArgumentException("Invalid log level: " + level);
        }
        nativeSetLogLevel(level);
    }

    /**
     * Get the current {@link LogLevel}.
     *
     * @return the current {@link LogLevel}.
     */
    public static int getLevel() {
        return nativeGetLogLevel();
    }

    /**
     * Removes the given logger if it is currently added.
     *
     * @param logger the non-null logger to remove.
     * @return {@code true}. NOTE(review): this method always returns true regardless of
     *         whether the native call actually removed anything — confirm whether
     *         nativeRemoveLogger can report failure before documenting a false case.
     * @throws IllegalArgumentException if {@code logger} is null.
     */
    public static boolean remove(RealmLogger logger) {
        //noinspection ConstantConditions
        if (logger == null) {
            throw new IllegalArgumentException("A non-null logger has to be provided");
        }
        nativeRemoveLogger(logger);
        return true;
    }

    /**
     * Removes all loggers. The default native logger will be removed as well. Use {@link #registerDefaultLogger()} to
     * add it back.
     */
    public static void clear() {
        nativeClearLoggers();
    }

    /**
     * Adds default native logger if it has been removed before. If the default logger has been registered already,
     * it won't be added again. The default logger on Android will log to logcat.
     */
    public static void registerDefaultLogger() {
        nativeRegisterDefaultLogger();
    }

    /**
     * Logs a {@link LogLevel#TRACE} exception.
     *
     * @param throwable exception to log.
     */
    public static void trace(Throwable throwable) {
        trace(throwable, null);
    }

    /**
     * Logs a {@link LogLevel#TRACE} event.
     *
     * @param message message to log.
     * @param args optional args used to format the message using {@link String#format(String, Object...)}.
     */
    public static void trace(String message, Object... args) {
        trace(null, message, args);
    }

    /**
     * Logs a {@link LogLevel#TRACE} event.
     *
     * @param throwable optional exception to log.
     * @param message optional message.
     * @param args optional args used to format the message using {@link String#format(String, Object...)}.
     */
    public static void trace(@Nullable Throwable throwable, @Nullable String message, Object... args) {
        log(LogLevel.TRACE, throwable, message, args);
    }

    /**
     * Logs a {@link LogLevel#DEBUG} exception.
     *
     * @param throwable exception to log.
     */
    public static void debug(@Nullable Throwable throwable) {
        debug(throwable, null);
    }

    /**
     * Logs a {@link LogLevel#DEBUG} event.
     *
     * @param message message to log.
     * @param args optional args used to format the message using {@link String#format(String, Object...)}.
     */
    public static void debug(String message, Object... args) {
        debug(null, message, args);
    }

    /**
     * Logs a {@link LogLevel#DEBUG} event.
     *
     * @param throwable optional exception to log.
     * @param message optional message.
     * @param args optional args used to format the message using {@link String#format(String, Object...)}.
     */
    public static void debug(@Nullable Throwable throwable, @Nullable String message, Object... args) {
        log(LogLevel.DEBUG, throwable, message, args);
    }

    /**
     * Logs an {@link LogLevel#INFO} exception.
     *
     * @param throwable exception to log.
     */
    public static void info(Throwable throwable) {
        info(throwable, null);
    }

    /**
     * Logs an {@link LogLevel#INFO} event.
     *
     * @param message message to log.
     * @param args optional args used to format the message using {@link String#format(String, Object...)}.
     */
    public static void info(String message, Object... args) {
        info(null, message, args);
    }

    /**
     * Logs an {@link LogLevel#INFO} event.
     *
     * @param throwable optional exception to log.
     * @param message optional message.
     * @param args optional args used to format the message using {@link String#format(String, Object...)}.
     */
    public static void info(@Nullable Throwable throwable, @Nullable String message, Object... args) {
        log(LogLevel.INFO, throwable, message, args);
    }

    /**
     * Logs a {@link LogLevel#WARN} exception.
     *
     * @param throwable exception to log.
     */
    public static void warn(Throwable throwable) {
        warn(throwable, null);
    }

    /**
     * Logs a {@link LogLevel#WARN} event.
     *
     * @param message message to log.
     * @param args optional args used to format the message using {@link String#format(String, Object...)}.
     */
    public static void warn(String message, Object... args) {
        warn(null, message, args);
    }

    /**
     * Logs a {@link LogLevel#WARN} event.
     *
     * @param throwable optional exception to log.
     * @param message optional message.
     * @param args optional args used to format the message using {@link String#format(String, Object...)}.
     */
    public static void warn(@Nullable Throwable throwable, @Nullable String message, Object... args) {
        log(LogLevel.WARN, throwable, message, args);
    }

    /**
     * Logs an {@link LogLevel#ERROR} exception.
     *
     * @param throwable exception to log.
     */
    public static void error(Throwable throwable) {
        error(throwable, null);
    }

    /**
     * Logs an {@link LogLevel#ERROR} event.
     *
     * @param message message to log.
     * @param args optional args used to format the message using {@link String#format(String, Object...)}.
     */
    public static void error(String message, Object... args) {
        error(null, message, args);
    }

    /**
     * Logs an {@link LogLevel#ERROR} event.
     *
     * @param throwable optional exception to log.
     * @param message optional message.
     * @param args optional args used to format the message using {@link String#format(String, Object...)}.
     */
    public static void error(@Nullable Throwable throwable, @Nullable String message, Object... args) {
        log(LogLevel.ERROR, throwable, message, args);
    }

    /**
     * Logs a {@link LogLevel#FATAL} exception.
     *
     * @param throwable exception to log.
     */
    public static void fatal(Throwable throwable) {
        fatal(throwable, null);
    }

    /**
     * Logs an {@link LogLevel#FATAL} event.
     *
     * @param message message to log.
     * @param args optional args used to format the message using {@link String#format(String, Object...)}.
     */
    public static void fatal(String message, Object... args) {
        fatal(null, message, args);
    }

    /**
     * Logs a {@link LogLevel#FATAL} event.
     *
     * @param throwable optional exception to log.
     * @param message optional message.
     * @param args optional args used to format the message using {@link String#format(String, Object...)}.
     */
    public static void fatal(@Nullable Throwable throwable, @Nullable String message, Object... args) {
        log(LogLevel.FATAL, throwable, message, args);
    }

    // Formats the message, parses the stacktrace of given throwable and passes them to nativeLog.
    // Events below the current native log level are dropped before any formatting work.
    private static void log(int level, @Nullable Throwable throwable, @Nullable String message, @Nullable Object... args) {
        if (level < getLevel()) {
            return;
        }
        StringBuilder stringBuilder = new StringBuilder();
        if (message != null && args != null && args.length > 0) {
            // Locale.US keeps number formatting stable regardless of device locale.
            message = String.format(Locale.US, message, args);
        }
        if (throwable != null) {
            stringBuilder.append(Log.getStackTraceString(throwable));
        }
        if (message != null) {
            if (throwable != null) {
                stringBuilder.append("\n");
            }
            stringBuilder.append(message);
        }
        nativeLog(level, REALM_JAVA_TAG, throwable, stringBuilder.toString());
    }

    private static native void nativeAddLogger(RealmLogger logger);

    private static native void nativeRemoveLogger(RealmLogger logger);

    private static native void nativeClearLoggers();

    private static native void nativeRegisterDefaultLogger();

    private static native void nativeLog(int level, String tag, @Nullable Throwable throwable, @Nullable String message);

    private static native void nativeSetLogLevel(int level);

    private static native int nativeGetLogLevel();

    // Methods below are used for testing core logger bridge only.
    static native long nativeCreateCoreLoggerBridge(@SuppressWarnings("SameParameterValue") String tag);

    static native void nativeCloseCoreLoggerBridge(long nativePtr);

    static native void nativeLogToCoreLoggerBridge(long nativePtr, int level, String message);
}
| |
/*
* Generator.java
*
* Created on 11 November 2007, 21:41
*
* To change this template, choose Tools | Template Manager
* and open the template in the editor.
*/
package generator.engine.db;
import generator.db.DBGeneratorDefinition;
import generator.engine.ProgressUpdateObserver;
import generator.extenders.IRandomiserFunctionality;
import generator.extenders.RandomiserInstance;
import generator.db.DBFieldGenerator;
import generator.db.DBTableGenerator;
import generator.misc.ApplicationContext;
import generator.misc.Constants;
import generator.misc.DBDriverInfo;
import generator.misc.RandomiserType;
import generator.misc.Utils;
import java.sql.*;
import java.util.*;
import org.apache.log4j.Logger;
/**
*
* @author Administrator
*/
public class Generator
{
    // Randomiser type definitions available to the application (loaded in the constructor).
    private Vector<RandomiserType> vRandomiserTypes;
    // Named randomiser instances configured by the user (loaded in the constructor).
    private Vector<RandomiserInstance> vRandomiserInstances;
    // Known database driver descriptors, including per-type value delimiters.
    private Vector<DBDriverInfo> vDBDriverInfo;
    // The active generation definition: tables, fields, cycles, connection settings.
    private DBGeneratorDefinition dbGenConfig;
    private Logger logger = Logger.getLogger(Generator.class);
    // Observer notified of progress and errors during generation.
    // NOTE(review): no setter is visible in this chunk — confirm how it is assigned.
    private ProgressUpdateObserver observer;
/** Creates a new instance of Generator */
public Generator()
{
//load the randomiser-type definitions from file
vRandomiserTypes = ApplicationContext.getInstance().getRandomiserTypes();
//load the randomiser-instance definitions from the file
vRandomiserInstances = ApplicationContext.getInstance().getRandomiserInstances();
//load the db drivers info
vDBDriverInfo = ApplicationContext.getInstance().getDriverInfo();
}
    /**
     * Sets the generation definition describing what data to produce.
     *
     * @param dbGenConfig the definition used by subsequent generate() calls
     */
    public void setDBGeneratorDefinition(DBGeneratorDefinition dbGenConfig)
    {
        this.dbGenConfig = dbGenConfig;
    }
/**
* Returns a RandomiserInstance object, given a string.
* Preconditions: vRandomiserInstances must NOT be null :)
*/
private RandomiserInstance getRandomiserInstance(String riName)
{
RandomiserInstance randomizerInstance = null;
logger.debug("Retrieving randomiserInstance object for:" + riName);
for (RandomiserInstance ri : vRandomiserInstances)
{
if (ri.getName().equalsIgnoreCase(riName))
{
randomizerInstance = ri;
break;
}
}
logger.debug("Retrieving the randomiserInstance for:" + riName + ". Found:" + randomizerInstance != null ? true : false);
return randomizerInstance;
}
/**
* Returns the name of a RandomiserType class,
* given its name in the application.
* Preconditions: vRandomiserTypes must NOT be null :)
*/
private RandomiserType getRandomiserType(String randomiserType)
{
RandomiserType randomizerType = null;
logger.debug("Retrieving randomiserType object for:" + randomiserType);
for (RandomiserType rt : vRandomiserTypes)
{
if (rt.getName().equalsIgnoreCase(randomiserType))
{
randomizerType = rt;
break;
}
}
logger.debug("Retrieving the randomiserType for:" + randomiserType + ". Found:" + randomizerType != null ? randomizerType.getName() : "false");
return randomizerType;
}
/**
* Retrieves a database connection using the currently set database configuration
*
* @return Connection
*/
private Connection initialiseDBConnection()
{
//we need the driver for this database connection
DBDriverInfo dbDriverInfo;
//we need a connection
Connection connection;
Utils utils = new Utils();
//perform initialisation: find the driver, get a connection for that driver
dbDriverInfo = utils.findDBDriver(dbGenConfig.getDbDriver(), vDBDriverInfo);
connection = utils.getDBConnection(dbDriverInfo, dbGenConfig);
if (connection == null)
{
logger.error("Error while getting database connection:" + dbGenConfig.getDbDriver());
observer.datageGenError("Error while getting database connection.");
return null;
}
return connection;
}
/**
* Sets up the generators, by going through the list of the tables and the generators assigned to them
* Setting up means:
* a) instantiating all the generators, if a generator is used in more than one tables, each table
* will have a different instance of that calculator
* b) for every generator instance, also associate the delimiters it uses
*
* The key for the generators is in the form Table.Field
* If a certain field happens to get its values from another table's key, (foreign key), no generators are assigned to that key
*
* @param alDBGenerators
* @param mapGenerators
* @param mapDelimiters
*/
private void initialiseGenerators( List<DBTableGenerator> alDBGenerators,
Map<String, IRandomiserFunctionality> mapGenerators,
Map<String, String> mapDelimiters)
{
//now we need to set the randomisers
IRandomiserFunctionality iGenerator;
Utils utils = new Utils();
//find the driver, we need the delimiters
DBDriverInfo dbDriverInfo = utils.findDBDriver(dbGenConfig.getDbDriver(), vDBDriverInfo);
for (DBTableGenerator dbTableGenerator : alDBGenerators)
{
System.out.println("xxxxxxxxxxxxxxxxx" + dbTableGenerator.getName());
List<DBFieldGenerator> aDBFieldsGenerators = dbTableGenerator.getDBFieldGenerators();
for (DBFieldGenerator dbFieldGen : aDBFieldsGenerators)
{
//A field may have no generators assigned to it, that's normal, we skip that
if (dbFieldGen.getGenerator() == null || dbFieldGen.getGenerator().length()==0)
{
logger.debug("POSSIBLE ERROR: No generator for " + dbTableGenerator.getName() + "." + dbFieldGen.getField());
continue;
}
//if it's not null, then it may be linked to another Table.Field (which is not a generator, so skip that one too)
if (isForeignKey(dbTableGenerator, dbFieldGen.getField()))
{
logger.debug("Skipping foreign key " + dbFieldGen.getField());
continue;
}
//if we are here, we have a field with a generator assigned to it
//the id of the table field we want to generate data for
String id = dbTableGenerator.getName() + "." + dbFieldGen.getField();
//create the randomiser instance out of the name
RandomiserInstance ri = getRandomiserInstance(dbFieldGen.getGenerator());
//get the randomiser type out of the RI
RandomiserType rt = getRandomiserType(ri.getRandomiserType());
//load and store the generator, set its RI, now it is ready to use
iGenerator = (IRandomiserFunctionality) utils.createObject(rt.getGenerator());
iGenerator.setRandomiserInstance(ri);
// this particular field is associated with the generator created above
mapGenerators.put(id, iGenerator);
//once a value is generated for this field, we need to wrap it up in the appropriate delimeters
String delimiter = "";
if (rt.getJtype() == RandomiserType.TYPE_STRING)
{
delimiter = dbDriverInfo.getCharDelimiter();
} else if (rt.getJtype() == RandomiserType.TYPE_DATE)
{
delimiter = dbDriverInfo.getDateDelimiter();
} else if (rt.getJtype() == RandomiserType.TYPE_DATE)
{
delimiter = dbDriverInfo.getDateDelimiter();
} else if (rt.getJtype() == RandomiserType.TYPE_TIME)
{
delimiter = dbDriverInfo.getTimeDelimiter();
} else if (rt.getJtype() == RandomiserType.TYPE_TIMESTAMP)
{
delimiter = dbDriverInfo.getTimestampDelimiter();
}
if (delimiter != null)
{
mapDelimiters.put(id, new String(delimiter));
}
}//for fields generator
}//for tables
} // initialiseGenerators
    /**
     * Generates and returns a map of the tables for which we need to generate data.
     * Every time we generate data for a certain table we remove it from this map
     * (the map is re-generated after each cycle - we need a more efficient way than this!)
     * @return a map of table name to table generator
     */
private Map<String, DBTableGenerator> getTablesToGenerate()
{
List<DBTableGenerator> alDBGenerators = dbGenConfig.getDBTableGenerators();
Map<String, DBTableGenerator> mapDBGenerators = new HashMap<String, DBTableGenerator>();
for (DBTableGenerator dbT : alDBGenerators)
{
mapDBGenerators.put(dbT.getName(), dbT);
}
return mapDBGenerators;
}
    /**
     * Returns true if fieldName of a certain table
     * appears as a key in another table (foreign key).
     */
private boolean isForeignKey(DBTableGenerator dbTableGen, String fieldName)
{
if (dbTableGen.getForeignKeyForField(fieldName).length() > 0)
{
return true;
}
return false;
}
    /**
     * Returns true if all the fields that are foreign keys
     * have already had their values generated.
     *
     * If TableA.KeyB is a foreign field for TableB.Key,
     * this method returns true if the value for TableB.Key has been generated.
     *
     * If any of the foreign-key fields point to values that have not been
     * generated yet, this method returns false.
     */
private boolean checkForeignKeys(DBTableGenerator dbTableGenerator, Map<String, Object> mapValues)
{
//first check that if this table has foreign keys, the values for these keys
//have already been generated from the master table
List<DBFieldGenerator> aDBFieldsGenerators = dbTableGenerator.getDBFieldGenerators();
for (DBFieldGenerator dbFieldGen : aDBFieldsGenerators)
{
// String id = dbTableGenerator.getName() + "." + dbFieldGen.getField();
String fkey = dbTableGenerator.getForeignKeyForField(dbFieldGen.getField());
if (fkey.length() > 0)
{
Object keyValue = mapValues.get(fkey);
if (keyValue == null)
{
return false;
}
}
}
return true;
}
/**
* Returns the given value enclosed in the appropriate delimiters, which are retrieved
* either by looking at the delimiters of either id1 or id2.
*
* @param id1
* @param id2
* @param objGeneratedValue
* @param mapDelimeters
* @return
*/
private String getDelimitedValue(String id1, String id2, Object obj, Map<String, String> mapDelimeters)
{
String val;
String tmp;
tmp = mapDelimeters.get(id1);
if (tmp == null)
{
tmp = mapDelimeters.get(id2);
}
StringBuffer delimiter = new StringBuffer(tmp);
if (delimiter.indexOf(Constants.DATE_DELIM) >= 0)
{
int start = delimiter.indexOf(Constants.DATE_DELIM);
int end = start + Constants.DATE_DELIM.length();
delimiter.replace(start, end, obj.toString());
val = delimiter.toString();
} else
{
val = delimiter.toString() + obj + delimiter.toString();
}
return val;
}
    /**
     * Generates a single INSERT statement for the given table: first generates
     * (or resolves, for foreign keys) a value for every field, recording each
     * under "Table.Field" in mapValues, then assembles the field list and the
     * delimited value list into the query text.
     *
     * @param dbTableGenerator the table to generate a row for
     * @param mapGenerators Table.Field -> generator (built by initialiseGenerators)
     * @param mapDelimeters Table.Field -> value delimiter
     * @param mapValues in/out: generated values, also consulted for foreign keys
     * @return String - the generated INSERT query
     */
    private String generateQuery(DBTableGenerator dbTableGenerator, Map<String, IRandomiserFunctionality> mapGenerators, Map<String, String> mapDelimeters, Map<String, Object> mapValues)
    {
        //the generators have already been set, start generating data
        IRandomiserFunctionality iGenerator;
        List<DBFieldGenerator> aDBFieldsGenerators = dbTableGenerator.getDBFieldGenerators();
        for (DBFieldGenerator dbFieldGen : aDBFieldsGenerators)
        {
            Object objGeneratedValue = null;
            //the id of the table field we want to generate data for is Table.Field (set in initialiseGenerators)
            String id = dbTableGenerator.getName() + "." + dbFieldGen.getField();
            iGenerator = mapGenerators.get(id);
            if (iGenerator == null)
            {
                //no generator: assume a foreign-key field and copy the value
                //already generated for the referenced Table.Field
                String fkey = dbTableGenerator.getForeignKeyForField(dbFieldGen.getField());
                objGeneratedValue = mapValues.get(fkey);
                if (objGeneratedValue == null)
                {
                    logger.error("POSSIBLE ERROR: No foreign key value found for null generator: " + id);
                }
            } else
            {
                objGeneratedValue = iGenerator.generate();
            }
            //every generated value is stored for future reference
            //(in case it is used as a foreign key somewhere else)
            mapValues.put(id, objGeneratedValue);
        }
        //we have the values, form the query as: INSERT INTO table (f1,f2) VALUES (v1,v2)
        StringBuffer sbQuery = new StringBuffer();
        StringBuffer sbFields = new StringBuffer();
        StringBuffer sbValues = new StringBuffer();
        sbFields.append("(");
        sbValues.append("(");
        sbQuery.append("INSERT INTO ");
        sbQuery.append(dbTableGenerator.getName());
        for (DBFieldGenerator dbFieldGen : aDBFieldsGenerators)
        {
            String id = dbTableGenerator.getName() + "." + dbFieldGen.getField();
            //all the fields have a generator assigned to them except the fields that are linked to other fields
            // which have their generator set as Table.Field
            if (dbFieldGen.getGenerator() != null && dbFieldGen.getGenerator().length() > 0)
            {
                //getValue from Generator
                id = dbTableGenerator.getName() + "." + dbFieldGen.getField();
                Object objGeneratedValue = mapValues.get(id);
                String delimValue = getDelimitedValue(id, dbFieldGen.getGenerator(), objGeneratedValue, mapDelimeters);
                // NOTE(review): when objGeneratedValue is null the comma is still
                // appended, yielding an empty value slot in the VALUES list — confirm
                // whether downstream SQL tolerates this.
                if (objGeneratedValue != null)
                {
                    sbValues.append(delimValue);
                }
                sbValues.append(",");
                sbFields.append(dbFieldGen.getField());
                sbFields.append(",");
            }
        }
        // Drop the trailing commas added inside the loop.
        // NOTE(review): if no field had a generator, these deleteCharAt calls remove
        // the opening parenthesis instead — assumed never to happen in practice.
        sbFields.deleteCharAt(sbFields.length() - 1);
        sbFields.append(")");
        sbValues.deleteCharAt(sbValues.length() - 1);
        sbValues.append(")");
        sbQuery.append(" ");
        sbQuery.append(sbFields);
        sbQuery.append(" VALUES ");
        sbQuery.append(sbValues);
        return sbQuery.toString();
    }
/**
 * The main method used to generate the data.
 *
 * <p>Runs {@code dbGenConfig.getCycles()} generation cycles. In each cycle a row is
 * generated for every configured table; tables whose foreign keys have no generated
 * value yet are skipped and retried on a later pass of the inner while-loop, once
 * the tables they depend on have produced values.
 */
public void generate()
{
    notifyInit();
    int numOfRecs = dbGenConfig.getCycles();
    List<DBTableGenerator> alDBGenerators = dbGenConfig.getDBTableGenerators();
    notifyMaxProgressValue(numOfRecs);
    //get the database connection
    // NOTE(review): this connection is never closed in this method — confirm it is
    // closed elsewhere, or consider closing it once generation completes.
    Connection connection = initialiseDBConnection();
    //get the generators and the delimeters we will need
    Map<String, IRandomiserFunctionality> hmIGenerators = new HashMap<String, IRandomiserFunctionality>();
    Map<String, String> hmDelimeters = new HashMap<String, String>();
    initialiseGenerators(alDBGenerators, hmIGenerators, hmDelimeters);
    //the queries of each table are stored here
    // NOTE(review): aInsertQueries is cleared each cycle but never written to — the
    // batched writeQueries(...) path below is commented out; queries are written
    // one-by-one via writeQuery(...) instead.
    List<String> aInsertQueries = new ArrayList<String>();
    int i=0;
    // noError is never assigned false in this method; the loop exits only on
    // completing all cycles or on cancellation by the observer.
    boolean noError = true;
    boolean cancelled = false;
    while(i < numOfRecs && noError && !cancelled)
    {
        i++;
        //inform the observer (its return value doubles as a cancellation flag)
        cancelled = notifyProgrssUpdate("Cycle = " + i + "/" + numOfRecs, i );
        //in every cycle we are storing the values for all the tables and fields that have been generated
        Map<String, Object> mapValues = new HashMap<String, Object>();
        //initialise the array
        aInsertQueries.clear();
        //every time data for a table are generated, the table is removed from this hashmap
        Map<String, DBTableGenerator> mapTablesToGenerate = getTablesToGenerate();
        boolean skip;
        // NOTE(review): if a table's foreign keys can never be satisfied (e.g. a
        // circular dependency between tables), mapTablesToGenerate never empties and
        // this loop will not terminate — confirm configurations preclude this.
        while (mapTablesToGenerate.size() > 0)
        {
            for (DBTableGenerator dbT : alDBGenerators)
            {
                DBTableGenerator dbTable = mapTablesToGenerate.get(dbT.getName());
                if (dbTable != null)
                {
                    skip = false;
                    int maxRecs = dbTable.getMaxForeignKeyCardinality();
                    int j = 0;
                    if (checkForeignKeys(dbTable, mapValues) == false)
                    {
                        skip = true;
                    }
                    //if we made it this far, it means that:
                    // a) we do not have foreign keys in the table, OR
                    // b) we have foreign keys for which we have previously generated values
                    while (j < maxRecs && skip == false)
                    {
                        j++;
                        String sQuery = generateQuery(dbTable, hmIGenerators, hmDelimeters, mapValues);
                        logger.debug(sQuery);
                        writeQuery(connection,sQuery);
                        // removing inside the loop is redundant after the first pass
                        // but harmless: the table is done once any row is written
                        mapTablesToGenerate.remove(dbT.getName());
                    }
                }
            }
        }//skipped
        //write the quries
        //writeQueries(connection, aInsertQueries);
    }
    notifyEnd();
}
/**
 * Executes a single INSERT statement against the database. Failures are logged
 * and reported to the observer (if one is registered) rather than propagated.
 *
 * @param connection open database connection to execute against
 * @param query the SQL statement to execute
 */
private void writeQuery(Connection connection, String query)
{
    Statement stmt = null;
    try
    {
        stmt = connection.createStatement();
        stmt.executeUpdate(query);
    } catch (Exception e)
    {
        logger.error("Error while writing query :" + query);
        // Guard against a missing observer, consistent with the notify* methods.
        if (observer != null)
        {
            observer.datageGenError("Error while writing query :" + query);
        }
    } finally
    {
        // Always release the Statement; the previous version leaked it on every call.
        if (stmt != null)
        {
            try
            {
                stmt.close();
            } catch (Exception ignored)
            {
                // best-effort close; nothing useful to do here
            }
        }
    }
}
/**
 * Executes a batch of INSERT statements against the database, stopping at the
 * first failure. Failures are logged and reported to the observer (if one is
 * registered) rather than propagated.
 *
 * @param connection open database connection to execute against
 * @param aQueries the SQL statements to execute, in order
 */
@SuppressWarnings("unused")
private void writeQueries(Connection connection, List<String> aQueries)
{
    // Tracks the statement currently being executed so the error report can name it.
    String q2 = null;
    Statement stmt = null;
    try
    {
        stmt = connection.createStatement();
        for (String query : aQueries)
        {
            q2 = query;
            stmt.executeUpdate(query);
        }
    } catch (Exception e)
    {
        logger.error("Error while writing query :" + q2);
        // Guard against a missing observer, consistent with the notify* methods.
        if (observer != null)
        {
            observer.datageGenError("Error while writing query :" + q2);
        }
    } finally
    {
        // Always release the Statement; the previous version leaked it on every call.
        if (stmt != null)
        {
            try
            {
                stmt.close();
            } catch (Exception ignored)
            {
                // best-effort close; nothing useful to do here
            }
        }
    }
}
/**
 * Registers the observer that receives progress callbacks during generation,
 * replacing any previously registered observer.
 *
 * @param observer the progress listener to notify
 */
public void registerObserver(ProgressUpdateObserver observer)
{
    this.observer = observer;
}
/**
 * Removes the currently registered observer; subsequent notify* calls become no-ops.
 */
public void unregisterObserver()
{
    this.observer = null;
}
/** Notifies the registered observer, if any, that data generation has started. */
private void notifyInit()
{
    if (observer != null)
    {
        observer.dataGenStarted();
    }
}
/**
 * Tells the registered observer, if any, the maximum progress value so it can
 * scale its progress display.
 *
 * @param max the upper bound for subsequent progress updates
 */
private void notifyMaxProgressValue(int max)
{
    if (observer != null)
    {
        observer.dataGenMaxProgressValue(max);
    }
}
/**
 * Reports a progress update to the registered observer.
 * (Method name typo is kept because callers depend on it.)
 *
 * @param msg human-readable progress message
 * @param progress current progress value
 * @return the observer's response, or false when no observer is registered
 */
private boolean notifyProgrssUpdate(String msg, int progress)
{
    // Short-circuit: with no observer there is nobody to notify.
    return observer != null && observer.dataGenProgressContinue(msg, progress);
}
/**
 * Notifies the registered observer, if any, that generation has finished, then
 * drops the observer reference so no further callbacks fire.
 */
private void notifyEnd()
{
    if (observer != null)
    {
        observer.dataGenEnd();
        observer = null;
    }
}
/**
 * Reports an error to the registered observer, if any, and signals the end of
 * generation to it.
 *
 * @param msg description of the error
 */
@SuppressWarnings("unused")
private void datageGenError(String msg)
{
    if (observer != null)
    {
        observer.datageGenError(msg);
        observer.dataGenEnd();
    }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
/** Interface that represents the client side information for a file.
 */
@InterfaceAudience.Public
@InterfaceStability.Stable
public class FileStatus implements Writable, Comparable {
  private Path path;                    // the file's path; identity for equals/hashCode/compareTo
  private long length;                  // file length in bytes
  private boolean isdir;                // true iff this status describes a directory
  private short block_replication;      // replication factor (stored narrowed to short)
  private long blocksize;               // block size in bytes
  private long modification_time;       // millis since January 1, 1970 UTC
  private long access_time;             // millis since January 1, 1970 UTC
  private FsPermission permission;
  private String owner;
  private String group;
  private Path symlink;                 // non-null iff this status describes a symlink
  // No-arg constructor required by the Writable deserialization contract.
  public FileStatus() { this(0, false, 0, 0, 0, 0, null, null, null, null); }
  //We should deprecate this soon?
  public FileStatus(long length, boolean isdir, int block_replication,
long blocksize, long modification_time, Path path) {
    this(length, isdir, block_replication, blocksize, modification_time,
0, null, null, null, path);
  }
  /**
   * Constructor for file systems on which symbolic links are not supported
   */
  public FileStatus(long length, boolean isdir,
int block_replication,
long blocksize, long modification_time, long access_time,
FsPermission permission, String owner, String group,
Path path) {
    this(length, isdir, block_replication, blocksize, modification_time,
access_time, permission, owner, group, null, path);
  }
  /**
   * Full constructor; all other constructors delegate here.
   * Null permission/owner/group are replaced with defaults so the accessors
   * never return null for those fields.
   */
  public FileStatus(long length, boolean isdir,
int block_replication,
long blocksize, long modification_time, long access_time,
FsPermission permission, String owner, String group,
Path symlink,
Path path) {
    this.length = length;
    this.isdir = isdir;
    this.block_replication = (short)block_replication;
    this.blocksize = blocksize;
    this.modification_time = modification_time;
    this.access_time = access_time;
    if (permission != null) {
      this.permission = permission;
    } else if (isdir) {
      this.permission = FsPermission.getDirDefault();
    } else if (symlink!=null) {
      this.permission = FsPermission.getDefault();
    } else {
      this.permission = FsPermission.getFileDefault();
    }
    this.owner = (owner == null) ? "" : owner;
    this.group = (group == null) ? "" : group;
    this.symlink = symlink;
    this.path = path;
    // The variables isdir and symlink indicate the type:
    // 1. isdir implies directory, in which case symlink must be null.
    // 2. !isdir implies a file or symlink, symlink != null implies a
    // symlink, otherwise it's a file.
    assert (isdir && symlink == null) || !isdir;
  }
  /**
   * Get the length of this file, in bytes.
   * @return the length of this file, in bytes.
   */
  public long getLen() {
    return length;
  }
  /**
   * Is this a file?
   * @return true if this is a file
   */
  public boolean isFile() {
    return !isdir && !isSymlink();
  }
  /**
   * Is this a directory?
   * @return true if this is a directory
   */
  public boolean isDirectory() {
    return isdir;
  }
  /**
   * Old interface, instead use the explicit {@link FileStatus#isFile()},
   * {@link FileStatus#isDirectory()}, and {@link FileStatus#isSymlink()}
   * @return true if this is a directory.
   * @deprecated Use {@link FileStatus#isFile()},
   * {@link FileStatus#isDirectory()}, and {@link FileStatus#isSymlink()}
   * instead.
   */
  @Deprecated
  public boolean isDir() {
    return isdir;
  }
  /**
   * Is this a symbolic link?
   * @return true if this is a symbolic link
   */
  public boolean isSymlink() {
    return symlink != null;
  }
  /**
   * Get the block size of the file.
   * @return the number of bytes
   */
  public long getBlockSize() {
    return blocksize;
  }
  /**
   * Get the replication factor of a file.
   * @return the replication factor of a file.
   */
  public short getReplication() {
    return block_replication;
  }
  /**
   * Get the modification time of the file.
   * @return the modification time of file in milliseconds since January 1, 1970 UTC.
   */
  public long getModificationTime() {
    return modification_time;
  }
  /**
   * Get the access time of the file.
   * @return the access time of file in milliseconds since January 1, 1970 UTC.
   */
  public long getAccessTime() {
    return access_time;
  }
  /**
   * Get FsPermission associated with the file.
   * @return permssion. If a filesystem does not have a notion of permissions
   * or if permissions could not be determined, then default
   * permissions equivalent of "rwxrwxrwx" is returned.
   */
  public FsPermission getPermission() {
    return permission;
  }
  /**
   * Get the owner of the file.
   * @return owner of the file. The string could be empty if there is no
   * notion of owner of a file in a filesystem or if it could not
   * be determined (rare).
   */
  public String getOwner() {
    return owner;
  }
  /**
   * Get the group associated with the file.
   * @return group for the file. The string could be empty if there is no
   * notion of group of a file in a filesystem or if it could not
   * be determined (rare).
   */
  public String getGroup() {
    return group;
  }
  /** @return the path this status describes; may be null for a default-constructed instance. */
  public Path getPath() {
    return path;
  }
  /** Sets the path this status describes. */
  public void setPath(final Path p) {
    path = p;
  }
  /* These are provided so that these values could be loaded lazily
   * by a filesystem (e.g. local file system).
   */
  /**
   * Sets permission.
   * @param permission if permission is null, default value is set
   */
  protected void setPermission(FsPermission permission) {
    this.permission = (permission == null) ?
FsPermission.getFileDefault() : permission;
  }
  /**
   * Sets owner.
   * @param owner if it is null, default value is set
   */
  protected void setOwner(String owner) {
    this.owner = (owner == null) ? "" : owner;
  }
  /**
   * Sets group.
   * @param group if it is null, default value is set
   */
  protected void setGroup(String group) {
    this.group = (group == null) ? "" : group;
  }
  /**
   * @return The contents of the symbolic link.
   * @throws IOException if this status does not describe a symlink
   */
  public Path getSymlink() throws IOException {
    if (!isSymlink()) {
      throw new IOException("Path " + path + " is not a symbolic link");
    }
    return symlink;
  }
  /** Sets the symlink target; a non-null value makes isSymlink() return true. */
  public void setSymlink(final Path p) {
    symlink = p;
  }
  //////////////////////////////////////////////////
  // Writable
  //////////////////////////////////////////////////
  /**
   * Serializes this status. The field order here is the wire format and must
   * stay in lock-step with {@link #readFields(DataInput)}.
   */
  public void write(DataOutput out) throws IOException {
    Text.writeString(out, getPath().toString());
    out.writeLong(getLen());
    out.writeBoolean(isDirectory());
    out.writeShort(getReplication());
    out.writeLong(getBlockSize());
    out.writeLong(getModificationTime());
    out.writeLong(getAccessTime());
    getPermission().write(out);
    Text.writeString(out, getOwner());
    Text.writeString(out, getGroup());
    // symlink is optional on the wire: a boolean flag followed by the target path
    out.writeBoolean(isSymlink());
    if (isSymlink()) {
      Text.writeString(out, getSymlink().toString());
    }
  }
  /**
   * Deserializes into this instance; field order must match {@link #write(DataOutput)}.
   */
  public void readFields(DataInput in) throws IOException {
    String strPath = Text.readString(in);
    this.path = new Path(strPath);
    this.length = in.readLong();
    this.isdir = in.readBoolean();
    this.block_replication = in.readShort();
    blocksize = in.readLong();
    modification_time = in.readLong();
    access_time = in.readLong();
    // NOTE(review): reads into the FsPermission instance installed by the
    // constructor. If FsPermission.getFileDefault() ever returns a shared/cached
    // instance, this would mutate shared state — confirm it returns a fresh object.
    permission.readFields(in);
    owner = Text.readString(in);
    group = Text.readString(in);
    if (in.readBoolean()) {
      this.symlink = new Path(Text.readString(in));
    } else {
      this.symlink = null;
    }
  }
  /**
   * Compare this object to another object
   *
   * @param o the object to be compared.
   * @return a negative integer, zero, or a positive integer as this object
   * is less than, equal to, or greater than the specified object.
   *
   * @throws ClassCastException if the specified object's is not of
   * type FileStatus
   */
  public int compareTo(Object o) {
    // Ordering is by path only, consistent with equals()/hashCode().
    FileStatus other = (FileStatus)o;
    return this.getPath().compareTo(other.getPath());
  }
  /** Compare if this object is equal to another object
   * @param o the object to be compared.
   * @return true if two file status has the same path name; false if not.
   */
  public boolean equals(Object o) {
    if (o == null) {
      return false;
    }
    if (this == o) {
      return true;
    }
    if (!(o instanceof FileStatus)) {
      return false;
    }
    FileStatus other = (FileStatus)o;
    return this.getPath().equals(other.getPath());
  }
  /**
   * Returns a hash code value for the object, which is defined as
   * the hash code of the path name.
   *
   * @return a hash code value for the path name.
   */
  public int hashCode() {
    return getPath().hashCode();
  }
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append(getClass().getSimpleName());
    sb.append("{");
    sb.append("path=" + path);
    sb.append("; isDirectory=" + isdir);
    // length/replication/blocksize are only meaningful for non-directories
    if(!isDirectory()){
      sb.append("; length=" + length);
      sb.append("; replication=" + block_replication);
      sb.append("; blocksize=" + blocksize);
    }
    sb.append("; modification_time=" + modification_time);
    sb.append("; access_time=" + access_time);
    sb.append("; owner=" + owner);
    sb.append("; group=" + group);
    sb.append("; permission=" + permission);
    sb.append("; isSymlink=" + isSymlink());
    if(isSymlink()) {
      sb.append("; symlink=" + symlink);
    }
    sb.append("}");
    return sb.toString();
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.end2end;
import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_CATALOG_SCHEMA;
import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_CATALOG_TABLE;
import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TYPE_SEQUENCE;
import static org.apache.phoenix.util.TestUtil.ATABLE_NAME;
import static org.apache.phoenix.util.TestUtil.ATABLE_SCHEMA_NAME;
import static org.apache.phoenix.util.TestUtil.BTABLE_NAME;
import static org.apache.phoenix.util.TestUtil.CUSTOM_ENTITY_DATA_FULL_NAME;
import static org.apache.phoenix.util.TestUtil.CUSTOM_ENTITY_DATA_NAME;
import static org.apache.phoenix.util.TestUtil.CUSTOM_ENTITY_DATA_SCHEMA_NAME;
import static org.apache.phoenix.util.TestUtil.GROUPBYTEST_NAME;
import static org.apache.phoenix.util.TestUtil.MDTEST_NAME;
import static org.apache.phoenix.util.TestUtil.MDTEST_SCHEMA_NAME;
import static org.apache.phoenix.util.TestUtil.PTSDB_NAME;
import static org.apache.phoenix.util.TestUtil.STABLE_NAME;
import static org.apache.phoenix.util.TestUtil.TABLE_WITH_SALTING;
import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.Map;
import java.util.Properties;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver;
import org.apache.phoenix.coprocessor.ServerCachingEndpointImpl;
import org.apache.phoenix.coprocessor.UngroupedAggregateRegionObserver;
import org.apache.phoenix.exception.SQLExceptionCode;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
import org.apache.phoenix.query.QueryServices;
import org.apache.phoenix.schema.types.PChar;
import org.apache.phoenix.schema.ColumnNotFoundException;
import org.apache.phoenix.schema.types.PDecimal;
import org.apache.phoenix.schema.types.PInteger;
import org.apache.phoenix.schema.types.PLong;
import org.apache.phoenix.schema.PTable.ViewType;
import org.apache.phoenix.schema.PTableType;
import org.apache.phoenix.schema.ReadOnlyTableException;
import org.apache.phoenix.schema.TableNotFoundException;
import org.apache.phoenix.util.PhoenixRuntime;
import org.apache.phoenix.util.PropertiesUtil;
import org.apache.phoenix.util.ReadOnlyProps;
import org.apache.phoenix.util.SchemaUtil;
import org.apache.phoenix.util.StringUtil;
import org.apache.phoenix.util.TestUtil;
import org.junit.BeforeClass;
import org.junit.Test;
public class QueryDatabaseMetaDataIT extends BaseClientManagedTimeIT {
@BeforeClass
@Shadower(classBeingShadowed = BaseClientManagedTimeIT.class)
public static void doSetup() throws Exception {
    // Start from the shared defaults and enable KEEP_DELETED_CELLS by default
    // for tables created by these tests.
    Map<String,String> props = getDefaultProps();
    props.put(QueryServices.DEFAULT_KEEP_DELETED_CELLS_ATTRIB, "true");
    setUpTestDriver(new ReadOnlyProps(props.entrySet().iterator()));
}
/**
 * Verifies DatabaseMetaData.getTables(): exact schema+name lookup, the full
 * scan (system tables returned before user tables), lookup of a schema-qualified
 * table, and a name-pattern lookup filtered by table type.
 * NOTE(review): the Connection opened here is never closed — assumed to be
 * cleaned up by the test harness; confirm.
 */
@Test
public void testTableMetadataScan() throws SQLException {
    long ts = nextTimestamp();
    ensureTableCreated(getUrl(), ATABLE_NAME, null, ts);
    ensureTableCreated(getUrl(), STABLE_NAME, null, ts);
    ensureTableCreated(getUrl(), CUSTOM_ENTITY_DATA_FULL_NAME, null, ts);
    // Query at a timestamp after the tables were created (client-managed time).
    Properties props = new Properties();
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 5));
    Connection conn = DriverManager.getConnection(getUrl(), props);
    DatabaseMetaData dbmd = conn.getMetaData();
    // Exact-name lookup of ATABLE.
    String aTableName = StringUtil.escapeLike(TestUtil.ATABLE_NAME);
    String aSchemaName = TestUtil.ATABLE_SCHEMA_NAME;
    ResultSet rs = dbmd.getTables(null, aSchemaName, aTableName, null);
    assertTrue(rs.next());
    assertEquals(rs.getString("TABLE_NAME"),aTableName);
    assertEquals(PTableType.TABLE.toString(), rs.getString("TABLE_TYPE"));
    assertEquals(rs.getString(3),aTableName);
    assertEquals(PTableType.TABLE.toString(), rs.getString(4));
    assertFalse(rs.next());
    // Unfiltered scan: system catalog tables come first, then user tables.
    rs = dbmd.getTables(null, null, null, null);
    assertTrue(rs.next());
    assertEquals(rs.getString("TABLE_SCHEM"),SYSTEM_CATALOG_SCHEMA);
    assertEquals(rs.getString("TABLE_NAME"),SYSTEM_CATALOG_TABLE);
    assertEquals(PTableType.SYSTEM.toString(), rs.getString("TABLE_TYPE"));
    assertTrue(rs.next());
    assertEquals(rs.getString("TABLE_SCHEM"),SYSTEM_CATALOG_SCHEMA);
    assertEquals(rs.getString("TABLE_NAME"),TYPE_SEQUENCE);
    assertEquals(PTableType.SYSTEM.toString(), rs.getString("TABLE_TYPE"));
    assertTrue(rs.next());
    assertEquals(rs.getString("TABLE_SCHEM"),SYSTEM_CATALOG_SCHEMA);
    assertEquals(rs.getString("TABLE_NAME"),PhoenixDatabaseMetaData.SYSTEM_STATS_TABLE);
    assertEquals(PTableType.SYSTEM.toString(), rs.getString("TABLE_TYPE"));
    assertTrue(rs.next());
    assertEquals(rs.getString("TABLE_SCHEM"),null);
    assertEquals(rs.getString("TABLE_NAME"),ATABLE_NAME);
    assertEquals(PTableType.TABLE.toString(), rs.getString("TABLE_TYPE"));
    assertTrue(rs.next());
    assertEquals(rs.getString("TABLE_SCHEM"),null);
    assertEquals(rs.getString("TABLE_NAME"),STABLE_NAME);
    assertEquals(PTableType.TABLE.toString(), rs.getString("TABLE_TYPE"));
    assertTrue(rs.next());
    assertEquals(CUSTOM_ENTITY_DATA_SCHEMA_NAME, rs.getString("TABLE_SCHEM"));
    assertEquals(CUSTOM_ENTITY_DATA_NAME, rs.getString("TABLE_NAME"));
    assertEquals(PTableType.TABLE.toString(), rs.getString("TABLE_TYPE"));
    // Schema-qualified lookup.
    rs = dbmd.getTables(null, CUSTOM_ENTITY_DATA_SCHEMA_NAME, CUSTOM_ENTITY_DATA_NAME, null);
    assertTrue(rs.next());
    assertEquals(rs.getString("TABLE_SCHEM"),CUSTOM_ENTITY_DATA_SCHEMA_NAME);
    assertEquals(rs.getString("TABLE_NAME"),CUSTOM_ENTITY_DATA_NAME);
    assertEquals(PTableType.TABLE.toString(), rs.getString("TABLE_TYPE"));
    assertFalse(rs.next());
    // Accessing an unknown column name must fail with ColumnNotFoundException.
    try {
        rs.getString("RANDOM_COLUMN_NAME");
        fail();
    } catch (ColumnNotFoundException e) {
        // expected
    }
    assertFalse(rs.next());
    // Name-pattern lookup ("_TABLE") restricted to TABLE-type tables.
    rs = dbmd.getTables(null, "", "_TABLE", new String[] {PTableType.TABLE.toString()});
    assertTrue(rs.next());
    assertEquals(rs.getString("TABLE_SCHEM"),null);
    assertEquals(rs.getString("TABLE_NAME"),ATABLE_NAME);
    assertEquals(PTableType.TABLE.toString(), rs.getString("TABLE_TYPE"));
    assertTrue(rs.next());
    assertEquals(rs.getString("TABLE_SCHEM"),null);
    assertEquals(rs.getString("TABLE_NAME"),STABLE_NAME);
    assertEquals(PTableType.TABLE.toString(), rs.getString("TABLE_TYPE"));
    assertFalse(rs.next());
}
/**
 * Verifies DatabaseMetaData.getSchemas(): exact schema lookup and the full
 * schema scan (default/null schema, user schema, then SYSTEM), with a null
 * TABLE_CATALOG in every row.
 */
@Test
public void testSchemaMetadataScan() throws SQLException {
    long ts = nextTimestamp();
    ensureTableCreated(getUrl(), CUSTOM_ENTITY_DATA_FULL_NAME, null, ts);
    ensureTableCreated(getUrl(), PTSDB_NAME, null, ts);
    // Query at a timestamp after the tables were created (client-managed time).
    Properties props = new Properties();
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 5));
    Connection conn = DriverManager.getConnection(getUrl(), props);
    DatabaseMetaData dbmd = conn.getMetaData();
    ResultSet rs;
    // Exact lookup of one schema.
    rs = dbmd.getSchemas(null, CUSTOM_ENTITY_DATA_SCHEMA_NAME);
    assertTrue(rs.next());
    assertEquals(rs.getString(1),CUSTOM_ENTITY_DATA_SCHEMA_NAME);
    assertEquals(rs.getString(2),null);
    assertFalse(rs.next());
    // Full scan of all schemas.
    rs = dbmd.getSchemas(null, null);
    assertTrue(rs.next());
    assertEquals(rs.getString("TABLE_SCHEM"),null);
    assertEquals(rs.getString("TABLE_CATALOG"),null);
    assertTrue(rs.next());
    assertEquals(rs.getString("TABLE_SCHEM"),CUSTOM_ENTITY_DATA_SCHEMA_NAME);
    assertEquals(rs.getString("TABLE_CATALOG"),null);
    assertTrue(rs.next());
    assertEquals(rs.getString("TABLE_SCHEM"),PhoenixDatabaseMetaData.SYSTEM_CATALOG_SCHEMA);
    assertEquals(rs.getString("TABLE_CATALOG"),null);
    assertFalse(rs.next());
}
/**
 * Verifies DatabaseMetaData.getColumns(): full column scan of MDTEST (PK plus
 * col1..col5 in ordinal order), filtering by column family, by family+name
 * pattern, by an exact family.column name, and ordinal position on a salted
 * table (salt byte not exposed as a column).
 */
@Test
public void testColumnMetadataScan() throws SQLException {
    long ts = nextTimestamp();
    ensureTableCreated(getUrl(), MDTEST_NAME, null, ts);
    // Query at a timestamp after the table was created (client-managed time).
    Properties props = new Properties();
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 5));
    Connection conn = DriverManager.getConnection(getUrl(), props);
    DatabaseMetaData dbmd = conn.getMetaData();
    ResultSet rs;
    // Full column scan: PK column "id" first, then col1..col5 by ordinal.
    rs = dbmd.getColumns(null, "", MDTEST_NAME, null);
    assertTrue(rs.next());
    assertEquals(rs.getString("TABLE_SCHEM"),null);
    assertEquals(MDTEST_NAME, rs.getString("TABLE_NAME"));
    assertEquals(null, rs.getString("TABLE_CAT"));
    assertEquals(SchemaUtil.normalizeIdentifier("id"), rs.getString("COLUMN_NAME"));
    assertEquals(DatabaseMetaData.attributeNoNulls, rs.getShort("NULLABLE"));
    assertEquals(PChar.INSTANCE.getSqlType(), rs.getInt("DATA_TYPE"));
    assertEquals(1, rs.getInt("ORDINAL_POSITION"));
    assertEquals(1, rs.getInt("COLUMN_SIZE"));
    assertEquals(0, rs.getInt("DECIMAL_DIGITS"));
    assertTrue(rs.next());
    assertEquals(rs.getString("TABLE_SCHEM"),null);
    assertEquals(MDTEST_NAME, rs.getString("TABLE_NAME"));
    assertEquals(SchemaUtil.normalizeIdentifier("a"), rs.getString("COLUMN_FAMILY"));
    assertEquals(SchemaUtil.normalizeIdentifier("col1"), rs.getString("COLUMN_NAME"));
    assertEquals(DatabaseMetaData.attributeNullable, rs.getShort("NULLABLE"));
    assertEquals(PInteger.INSTANCE.getSqlType(), rs.getInt("DATA_TYPE"));
    assertEquals(2, rs.getInt("ORDINAL_POSITION"));
    // COLUMN_SIZE/DECIMAL_DIGITS are SQL NULL for types with no declared precision.
    assertEquals(0, rs.getInt("COLUMN_SIZE"));
    assertTrue(rs.wasNull());
    assertEquals(0, rs.getInt("DECIMAL_DIGITS"));
    assertTrue(rs.wasNull());
    assertTrue(rs.next());
    assertEquals(rs.getString("TABLE_SCHEM"),null);
    assertEquals(MDTEST_NAME, rs.getString("TABLE_NAME"));
    assertEquals(SchemaUtil.normalizeIdentifier("b"), rs.getString("COLUMN_FAMILY"));
    assertEquals(SchemaUtil.normalizeIdentifier("col2"), rs.getString("COLUMN_NAME"));
    assertEquals(DatabaseMetaData.attributeNullable, rs.getShort("NULLABLE"));
    assertEquals(PLong.INSTANCE.getSqlType(), rs.getInt("DATA_TYPE"));
    assertEquals(3, rs.getInt("ORDINAL_POSITION"));
    assertEquals(0, rs.getInt("COLUMN_SIZE"));
    assertTrue(rs.wasNull());
    assertEquals(0, rs.getInt("DECIMAL_DIGITS"));
    assertTrue(rs.wasNull());
    assertTrue(rs.next());
    assertEquals(rs.getString("TABLE_SCHEM"),null);
    assertEquals(MDTEST_NAME, rs.getString("TABLE_NAME"));
    assertEquals(SchemaUtil.normalizeIdentifier("b"), rs.getString("COLUMN_FAMILY"));
    assertEquals(SchemaUtil.normalizeIdentifier("col3"), rs.getString("COLUMN_NAME"));
    assertEquals(DatabaseMetaData.attributeNullable, rs.getShort("NULLABLE"));
    assertEquals(PDecimal.INSTANCE.getSqlType(), rs.getInt("DATA_TYPE"));
    assertEquals(4, rs.getInt("ORDINAL_POSITION"));
    assertEquals(0, rs.getInt("COLUMN_SIZE"));
    assertTrue(rs.wasNull());
    assertEquals(0, rs.getInt("DECIMAL_DIGITS"));
    assertTrue(rs.wasNull());
    assertTrue(rs.next());
    assertEquals(rs.getString("TABLE_SCHEM"),null);
    assertEquals(MDTEST_NAME, rs.getString("TABLE_NAME"));
    assertEquals(SchemaUtil.normalizeIdentifier("b"), rs.getString("COLUMN_FAMILY"));
    assertEquals(SchemaUtil.normalizeIdentifier("col4"), rs.getString("COLUMN_NAME"));
    assertEquals(DatabaseMetaData.attributeNullable, rs.getShort("NULLABLE"));
    assertEquals(PDecimal.INSTANCE.getSqlType(), rs.getInt("DATA_TYPE"));
    assertEquals(5, rs.getInt("ORDINAL_POSITION"));
    assertEquals(5, rs.getInt("COLUMN_SIZE"));
    assertEquals(0, rs.getInt("DECIMAL_DIGITS"));
    assertTrue(rs.next());
    assertEquals(rs.getString("TABLE_SCHEM"),null);
    assertEquals(MDTEST_NAME, rs.getString("TABLE_NAME"));
    assertEquals(SchemaUtil.normalizeIdentifier("b"), rs.getString("COLUMN_FAMILY"));
    assertEquals(SchemaUtil.normalizeIdentifier("col5"), rs.getString("COLUMN_NAME"));
    assertEquals(DatabaseMetaData.attributeNullable, rs.getShort("NULLABLE"));
    assertEquals(PDecimal.INSTANCE.getSqlType(), rs.getInt("DATA_TYPE"));
    assertEquals(6, rs.getInt("ORDINAL_POSITION"));
    assertEquals(6, rs.getInt("COLUMN_SIZE"));
    assertEquals(3, rs.getInt("DECIMAL_DIGITS"));
    assertFalse(rs.next());
    // Look up only columns in a column family
    rs = dbmd.getColumns(null, "", MDTEST_NAME, "A.");
    assertTrue(rs.next());
    assertEquals(rs.getString("TABLE_SCHEM"),null);
    assertEquals(MDTEST_NAME, rs.getString("TABLE_NAME"));
    assertEquals(SchemaUtil.normalizeIdentifier("a"), rs.getString("COLUMN_FAMILY"));
    assertEquals(SchemaUtil.normalizeIdentifier("col1"), rs.getString("COLUMN_NAME"));
    assertEquals(DatabaseMetaData.attributeNullable, rs.getShort("NULLABLE"));
    assertEquals(PInteger.INSTANCE.getSqlType(), rs.getInt("DATA_TYPE"));
    assertEquals(2, rs.getInt("ORDINAL_POSITION"));
    assertEquals(0, rs.getInt("COLUMN_SIZE"));
    assertTrue(rs.wasNull());
    assertEquals(0, rs.getInt("DECIMAL_DIGITS"));
    assertTrue(rs.wasNull());
    assertFalse(rs.next());
    // Look up KV columns in a column family
    rs = dbmd.getColumns("", "", MDTEST_NAME, "%.COL%");
    assertTrue(rs.next());
    assertEquals(rs.getString("TABLE_SCHEM"),null);
    assertEquals(MDTEST_NAME, rs.getString("TABLE_NAME"));
    assertEquals(SchemaUtil.normalizeIdentifier("a"), rs.getString("COLUMN_FAMILY"));
    assertEquals(SchemaUtil.normalizeIdentifier("col1"), rs.getString("COLUMN_NAME"));
    assertEquals(DatabaseMetaData.attributeNullable, rs.getShort("NULLABLE"));
    assertEquals(PInteger.INSTANCE.getSqlType(), rs.getInt("DATA_TYPE"));
    assertEquals(2, rs.getInt("ORDINAL_POSITION"));
    assertEquals(0, rs.getInt("COLUMN_SIZE"));
    assertTrue(rs.wasNull());
    assertEquals(0, rs.getInt("DECIMAL_DIGITS"));
    assertTrue(rs.wasNull());
    assertTrue(rs.next());
    assertEquals(rs.getString("TABLE_SCHEM"),null);
    assertEquals(MDTEST_NAME, rs.getString("TABLE_NAME"));
    assertEquals(SchemaUtil.normalizeIdentifier("b"), rs.getString("COLUMN_FAMILY"));
    assertEquals(SchemaUtil.normalizeIdentifier("col2"), rs.getString("COLUMN_NAME"));
    assertEquals(DatabaseMetaData.attributeNullable, rs.getShort("NULLABLE"));
    assertEquals(PLong.INSTANCE.getSqlType(), rs.getInt("DATA_TYPE"));
    assertEquals(3, rs.getInt("ORDINAL_POSITION"));
    assertEquals(0, rs.getInt("COLUMN_SIZE"));
    assertTrue(rs.wasNull());
    assertEquals(0, rs.getInt("DECIMAL_DIGITS"));
    assertTrue(rs.wasNull());
    assertTrue(rs.next());
    assertEquals(rs.getString("TABLE_SCHEM"),null);
    assertEquals(MDTEST_NAME, rs.getString("TABLE_NAME"));
    assertEquals(SchemaUtil.normalizeIdentifier("b"), rs.getString("COLUMN_FAMILY"));
    assertEquals(SchemaUtil.normalizeIdentifier("col3"), rs.getString("COLUMN_NAME"));
    assertEquals(DatabaseMetaData.attributeNullable, rs.getShort("NULLABLE"));
    assertEquals(PDecimal.INSTANCE.getSqlType(), rs.getInt("DATA_TYPE"));
    assertEquals(4, rs.getInt("ORDINAL_POSITION"));
    assertEquals(0, rs.getInt("COLUMN_SIZE"));
    assertTrue(rs.wasNull());
    assertEquals(0, rs.getInt("DECIMAL_DIGITS"));
    assertTrue(rs.wasNull());
    assertTrue(rs.next());
    assertEquals(rs.getString("TABLE_SCHEM"),null);
    assertEquals(MDTEST_NAME, rs.getString("TABLE_NAME"));
    assertEquals(SchemaUtil.normalizeIdentifier("b"), rs.getString("COLUMN_FAMILY"));
    assertEquals(SchemaUtil.normalizeIdentifier("col4"), rs.getString("COLUMN_NAME"));
    assertEquals(DatabaseMetaData.attributeNullable, rs.getShort("NULLABLE"));
    assertEquals(PDecimal.INSTANCE.getSqlType(), rs.getInt("DATA_TYPE"));
    assertEquals(5, rs.getInt("ORDINAL_POSITION"));
    assertEquals(5, rs.getInt("COLUMN_SIZE"));
    assertEquals(0, rs.getInt("DECIMAL_DIGITS"));
    assertFalse(rs.wasNull());
    assertTrue(rs.next());
    assertEquals(rs.getString("TABLE_SCHEM"),null);
    assertEquals(MDTEST_NAME, rs.getString("TABLE_NAME"));
    assertEquals(SchemaUtil.normalizeIdentifier("b"), rs.getString("COLUMN_FAMILY"));
    assertEquals(SchemaUtil.normalizeIdentifier("col5"), rs.getString("COLUMN_NAME"));
    assertEquals(DatabaseMetaData.attributeNullable, rs.getShort("NULLABLE"));
    assertEquals(PDecimal.INSTANCE.getSqlType(), rs.getInt("DATA_TYPE"));
    assertEquals(6, rs.getInt("ORDINAL_POSITION"));
    assertEquals(6, rs.getInt("COLUMN_SIZE"));
    assertEquals(3, rs.getInt("DECIMAL_DIGITS"));
    assertFalse(rs.next());
    // Look up KV columns in a column family
    rs = dbmd.getColumns("", "", MDTEST_NAME, "B.COL2");
    assertTrue(rs.next());
    assertEquals(rs.getString("TABLE_SCHEM"),null);
    assertEquals(MDTEST_NAME, rs.getString("TABLE_NAME"));
    assertEquals(SchemaUtil.normalizeIdentifier("b"), rs.getString("COLUMN_FAMILY"));
    assertEquals(SchemaUtil.normalizeIdentifier("col2"), rs.getString("COLUMN_NAME"));
    assertFalse(rs.next());
    // Salted table: first user column still reports ordinal 1 (salt byte hidden).
    ensureTableCreated(getUrl(), TABLE_WITH_SALTING, null, ts);
    rs = dbmd.getColumns("", "", TABLE_WITH_SALTING, StringUtil.escapeLike("A_INTEGER"));
    assertTrue(rs.next());
    assertEquals(1, rs.getInt("ORDINAL_POSITION"));
    assertFalse(rs.next());
}
@Test
public void testPrimaryKeyMetadataScan() throws SQLException {
    long ts = nextTimestamp();
    ensureTableCreated(getUrl(), MDTEST_NAME, null, ts);
    ensureTableCreated(getUrl(), CUSTOM_ENTITY_DATA_FULL_NAME, null, ts);
    Properties props = new Properties();
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 5));
    Connection conn = DriverManager.getConnection(getUrl(), props);
    // Fix: close the connection even if an assertion fails; the original leaked it.
    try {
        DatabaseMetaData dbmd = conn.getMetaData();
        ResultSet rs;
        // Single-column PK on a table with no schema and no named PK constraint.
        // Fix: JUnit's assertEquals takes the EXPECTED value first; several of the
        // original assertions had the arguments swapped, which yields misleading
        // failure messages ("expected <actual> but was <null>").
        rs = dbmd.getPrimaryKeys(null, "", MDTEST_NAME);
        assertTrue(rs.next());
        assertEquals(null, rs.getString("TABLE_SCHEM"));
        assertEquals(MDTEST_NAME, rs.getString("TABLE_NAME"));
        assertEquals(null, rs.getString("TABLE_CAT"));
        assertEquals(SchemaUtil.normalizeIdentifier("id"), rs.getString("COLUMN_NAME"));
        assertEquals(1, rs.getInt("KEY_SEQ"));
        assertEquals(null, rs.getString("PK_NAME"));
        assertFalse(rs.next());
        // Multi-column PK: rows come back ordered by column name, with KEY_SEQ
        // giving each column's position within the key.
        rs = dbmd.getPrimaryKeys(null, CUSTOM_ENTITY_DATA_SCHEMA_NAME, CUSTOM_ENTITY_DATA_NAME);
        assertTrue(rs.next());
        assertEquals(CUSTOM_ENTITY_DATA_SCHEMA_NAME, rs.getString("TABLE_SCHEM"));
        assertEquals(CUSTOM_ENTITY_DATA_NAME, rs.getString("TABLE_NAME"));
        assertEquals(null, rs.getString("TABLE_CAT"));
        assertEquals(SchemaUtil.normalizeIdentifier("custom_entity_data_id"), rs.getString("COLUMN_NAME"));
        assertEquals(3, rs.getInt("KEY_SEQ"));
        assertEquals(SchemaUtil.normalizeIdentifier("pk"), rs.getString("PK_NAME"));
        assertTrue(rs.next());
        assertEquals(CUSTOM_ENTITY_DATA_SCHEMA_NAME, rs.getString("TABLE_SCHEM"));
        assertEquals(CUSTOM_ENTITY_DATA_NAME, rs.getString("TABLE_NAME"));
        assertEquals(null, rs.getString("TABLE_CAT"));
        assertEquals(SchemaUtil.normalizeIdentifier("key_prefix"), rs.getString("COLUMN_NAME"));
        assertEquals(2, rs.getInt("KEY_SEQ"));
        assertEquals(SchemaUtil.normalizeIdentifier("pk"), rs.getString("PK_NAME"));
        assertTrue(rs.next());
        assertEquals(CUSTOM_ENTITY_DATA_SCHEMA_NAME, rs.getString("TABLE_SCHEM"));
        assertEquals(CUSTOM_ENTITY_DATA_NAME, rs.getString("TABLE_NAME"));
        assertEquals(null, rs.getString("TABLE_CAT"));
        assertEquals(SchemaUtil.normalizeIdentifier("organization_id"), rs.getString("COLUMN_NAME"));
        assertEquals(1, rs.getInt("KEY_SEQ"));
        assertEquals(SchemaUtil.normalizeIdentifier("pk"), rs.getString("PK_NAME")); // TODO: this is on the table row
        assertFalse(rs.next());
        // getColumns returns all columns, starting with the PK columns in key order.
        rs = dbmd.getColumns("", CUSTOM_ENTITY_DATA_SCHEMA_NAME, CUSTOM_ENTITY_DATA_NAME, null);
        assertTrue(rs.next());
        assertEquals(CUSTOM_ENTITY_DATA_SCHEMA_NAME, rs.getString("TABLE_SCHEM"));
        assertEquals(CUSTOM_ENTITY_DATA_NAME, rs.getString("TABLE_NAME"));
        assertEquals(null, rs.getString("TABLE_CAT"));
        assertEquals(SchemaUtil.normalizeIdentifier("organization_id"), rs.getString("COLUMN_NAME"));
        assertEquals(15, rs.getInt("COLUMN_SIZE"));
        assertTrue(rs.next());
        assertEquals(CUSTOM_ENTITY_DATA_SCHEMA_NAME, rs.getString("TABLE_SCHEM"));
        assertEquals(CUSTOM_ENTITY_DATA_NAME, rs.getString("TABLE_NAME"));
        assertEquals(null, rs.getString("TABLE_CAT"));
        assertEquals(SchemaUtil.normalizeIdentifier("key_prefix"), rs.getString("COLUMN_NAME"));
        assertEquals(3, rs.getInt("COLUMN_SIZE"));
        assertTrue(rs.next());
        assertEquals(CUSTOM_ENTITY_DATA_SCHEMA_NAME, rs.getString("TABLE_SCHEM"));
        assertEquals(CUSTOM_ENTITY_DATA_NAME, rs.getString("TABLE_NAME"));
        assertEquals(null, rs.getString("TABLE_CAT"));
        assertEquals(SchemaUtil.normalizeIdentifier("custom_entity_data_id"), rs.getString("COLUMN_NAME"));
        // Non-PK columns follow; just confirm there is at least one more.
        assertTrue(rs.next());
        // Filtering by column-name pattern returns exactly the matching column.
        rs = dbmd.getColumns("", CUSTOM_ENTITY_DATA_SCHEMA_NAME, CUSTOM_ENTITY_DATA_NAME, "KEY_PREFIX");
        assertTrue(rs.next());
        assertEquals(CUSTOM_ENTITY_DATA_SCHEMA_NAME, rs.getString("TABLE_SCHEM"));
        assertEquals(CUSTOM_ENTITY_DATA_NAME, rs.getString("TABLE_NAME"));
        assertEquals(null, rs.getString("TABLE_CAT"));
        assertEquals(SchemaUtil.normalizeIdentifier("key_prefix"), rs.getString("COLUMN_NAME"));
        // Re-issue the identical query to verify a fresh scan yields the same
        // single row and then exhausts.
        rs = dbmd.getColumns("", CUSTOM_ENTITY_DATA_SCHEMA_NAME, CUSTOM_ENTITY_DATA_NAME, "KEY_PREFIX");
        assertTrue(rs.next());
        assertEquals(CUSTOM_ENTITY_DATA_SCHEMA_NAME, rs.getString("TABLE_SCHEM"));
        assertEquals(CUSTOM_ENTITY_DATA_NAME, rs.getString("TABLE_NAME"));
        assertEquals(null, rs.getString("TABLE_CAT"));
        assertEquals(SchemaUtil.normalizeIdentifier("key_prefix"), rs.getString("COLUMN_NAME"));
        assertFalse(rs.next());
    } finally {
        conn.close();
    }
}
@Test
public void testMultiTableColumnsMetadataScan() throws SQLException {
    long ts = nextTimestamp();
    ensureTableCreated(getUrl(), MDTEST_NAME, null, ts);
    ensureTableCreated(getUrl(), GROUPBYTEST_NAME, null, ts);
    ensureTableCreated(getUrl(), PTSDB_NAME, null, ts);
    ensureTableCreated(getUrl(), CUSTOM_ENTITY_DATA_FULL_NAME, null, ts);
    Properties props = new Properties();
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 5));
    Connection conn = DriverManager.getConnection(getUrl(), props);
    try {
        DatabaseMetaData dbmd = conn.getMetaData();
        // Wildcard table pattern matches GROUPBYTEST and MDTEST (but not PTSDB or
        // CUSTOM_ENTITY_DATA); rows are ordered by table name, then column position.
        // Fix: the original assertions passed arguments as (actual, expected);
        // the helper below asserts with the JUnit-conventional (expected, actual)
        // order and removes ~40 lines of copy/paste.
        ResultSet rs = dbmd.getColumns(null, "", "%TEST%", null);
        assertNextColumn(rs, GROUPBYTEST_NAME, null, "id");
        assertNextColumn(rs, GROUPBYTEST_NAME, PhoenixDatabaseMetaData.TABLE_FAMILY, "uri");
        assertNextColumn(rs, GROUPBYTEST_NAME, PhoenixDatabaseMetaData.TABLE_FAMILY, "appcpu");
        assertNextColumn(rs, MDTEST_NAME, null, "id");
        assertNextColumn(rs, MDTEST_NAME, SchemaUtil.normalizeIdentifier("a"), "col1");
        assertNextColumn(rs, MDTEST_NAME, SchemaUtil.normalizeIdentifier("b"), "col2");
        assertNextColumn(rs, MDTEST_NAME, SchemaUtil.normalizeIdentifier("b"), "col3");
        assertNextColumn(rs, MDTEST_NAME, SchemaUtil.normalizeIdentifier("b"), "col4");
        assertNextColumn(rs, MDTEST_NAME, SchemaUtil.normalizeIdentifier("b"), "col5");
        assertFalse(rs.next());
    } finally {
        conn.close();
    }
}

/**
 * Advances {@code rs} one row and asserts it describes the given column of a
 * schema-less table: null TABLE_SCHEM, the expected TABLE_NAME, the expected
 * COLUMN_FAMILY (null for PK columns) and the normalized COLUMN_NAME.
 */
private static void assertNextColumn(ResultSet rs, String expectedTable, String expectedFamily, String columnName) throws SQLException {
    assertTrue(rs.next());
    assertEquals(null, rs.getString("TABLE_SCHEM"));
    assertEquals(expectedTable, rs.getString("TABLE_NAME"));
    assertEquals(expectedFamily, rs.getString("COLUMN_FAMILY"));
    assertEquals(SchemaUtil.normalizeIdentifier(columnName), rs.getString("COLUMN_NAME"));
}
@Test
public void testCreateDropTable() throws Exception {
    long ts = nextTimestamp();
    String tenantId = getOrganizationId();
    initATableValues(tenantId, getDefaultSplits(tenantId), null, ts);
    ensureTableCreated(getUrl(), BTABLE_NAME, null, ts-2);
    ensureTableCreated(getUrl(), PTSDB_NAME, null, ts-2);
    Properties props = new Properties();
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 5));
    Connection conn5 = DriverManager.getConnection(getUrl(), props);
    String query = "SELECT a_string FROM aTable";
    // Data should still be there b/c we only dropped the schema
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 8));
    assertTrue(conn5.prepareStatement(query).executeQuery().next());
    conn5.createStatement().executeUpdate("DROP TABLE " + ATABLE_NAME);
    // Confirm that data is no longer there because we dropped the table.
    // This needs to be done natively b/c the Phoenix metadata is gone too.
    HTableInterface htable = conn5.unwrap(PhoenixConnection.class).getQueryServices().getTable(SchemaUtil.getTableNameAsBytes(ATABLE_SCHEMA_NAME, ATABLE_NAME));
    try {
        Scan scan = new Scan();
        scan.setFilter(new FirstKeyOnlyFilter());
        scan.setTimeRange(0, ts+9);
        assertNull(htable.getScanner(scan).next());
    } finally {
        htable.close(); // fix: original leaked the native table handle
    }
    conn5.close();
    // Still should work b/c we're at an earlier timestamp than when table was deleted
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 2));
    Connection conn2 = DriverManager.getConnection(getUrl(), props);
    assertTrue(conn2.prepareStatement(query).executeQuery().next());
    conn2.close();
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 10));
    Connection conn10 = DriverManager.getConnection(getUrl(), props);
    try {
        conn10.prepareStatement(query).executeQuery().next();
        fail();
    } catch (TableNotFoundException e) {
        // expected: the table was dropped before this connection's SCN
    } finally {
        conn10.close(); // fix: original never closed this connection
    }
}
@Test
// Verifies that issuing a Phoenix CREATE TABLE over a pre-existing native HBase
// table (a) applies the Phoenix column-family properties to families Phoenix
// manages, (b) leaves non-Phoenix families untouched, (c) installs the Phoenix
// coprocessors, and (d) leaves the table fully usable for upsert/select.
public void testCreateOnExistingTable() throws Exception {
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), PropertiesUtil.deepCopy(TEST_PROPERTIES)).unwrap(PhoenixConnection.class);
String tableName = MDTEST_NAME;
String schemaName = MDTEST_SCHEMA_NAME;
// Families "a" and "b" are Phoenix-managed (normalized identifiers);
// "c" is a raw, non-Phoenix family created directly through HBase.
byte[] cfA = Bytes.toBytes(SchemaUtil.normalizeIdentifier("a"));
byte[] cfB = Bytes.toBytes(SchemaUtil.normalizeIdentifier("b"));
byte[] cfC = Bytes.toBytes("c");
byte[][] familyNames = new byte[][] {cfB, cfC};
byte[] htableName = SchemaUtil.getTableNameAsBytes(schemaName, tableName);
HBaseAdmin admin = pconn.getQueryServices().getAdmin();
// Drop any leftover table from a previous run so we start from a known state.
// NOTE(review): enableTable() on a table just deleted looks odd — presumably it
// throws the TableNotFoundException swallowed below; confirm the intent.
try {
admin.disableTable(htableName);
admin.deleteTable(htableName);
admin.enableTable(htableName);
} catch (org.apache.hadoop.hbase.TableNotFoundException e) {
}
// Pre-create the table natively with only families "b" and "c" — Phoenix will
// later be expected to add "a".
@SuppressWarnings("deprecation")
HTableDescriptor descriptor = new HTableDescriptor(htableName);
for (byte[] familyName : familyNames) {
HColumnDescriptor columnDescriptor = new HColumnDescriptor(familyName);
descriptor.addFamily(columnDescriptor);
}
admin.createTable(descriptor);
long ts = nextTimestamp();
Properties props = new Properties();
props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 5));
PhoenixConnection conn1 = DriverManager.getConnection(getUrl(), props).unwrap(PhoenixConnection.class);
// Run the Phoenix DDL over the existing native table.
ensureTableCreated(getUrl(), tableName, null, ts);
// Re-read the descriptor: Phoenix should have added family "a", giving 3 total.
descriptor = admin.getTableDescriptor(htableName);
assertEquals(3,descriptor.getColumnFamilies().length);
HColumnDescriptor cdA = descriptor.getFamily(cfA);
assertNotEquals(HColumnDescriptor.DEFAULT_KEEP_DELETED, cdA.getKeepDeletedCells());
assertEquals(DataBlockEncoding.NONE, cdA.getDataBlockEncoding()); // Overriden using WITH
assertEquals(1,cdA.getMaxVersions());// Overriden using WITH
HColumnDescriptor cdB = descriptor.getFamily(cfB);
// Allow KEEP_DELETED_CELLS to be false for VIEW
assertEquals(HColumnDescriptor.DEFAULT_KEEP_DELETED, cdB.getKeepDeletedCells());
assertEquals(DataBlockEncoding.NONE, cdB.getDataBlockEncoding()); // Should keep the original value.
// CF c should stay the same since it's not a Phoenix cf.
HColumnDescriptor cdC = descriptor.getFamily(cfC);
assertNotNull("Column family not found", cdC);
assertEquals(HColumnDescriptor.DEFAULT_KEEP_DELETED, cdC.getKeepDeletedCells());
assertFalse(SchemaUtil.DEFAULT_DATA_BLOCK_ENCODING == cdC.getDataBlockEncoding());
// The Phoenix DDL must also have installed the query/cache coprocessors.
assertTrue(descriptor.hasCoprocessor(UngroupedAggregateRegionObserver.class.getName()));
assertTrue(descriptor.hasCoprocessor(GroupedAggregateRegionObserver.class.getName()));
assertTrue(descriptor.hasCoprocessor(ServerCachingEndpointImpl.class.getName()));
admin.close();
// Write 5 rows through Phoenix at SCN ts+5 ...
int rowCount = 5;
String upsert = "UPSERT INTO " + tableName + "(id,col1,col2) VALUES(?,?,?)";
PreparedStatement ps = conn1.prepareStatement(upsert);
for (int i = 0; i < rowCount; i++) {
ps.setString(1, Integer.toString(i));
ps.setInt(2, i+1);
ps.setInt(3, i+2);
ps.execute();
}
conn1.commit();
conn1.close();
// ... and read them back at SCN ts+6, checking count and per-row values.
props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 6));
Connection conn2 = DriverManager.getConnection(getUrl(), props);
String query = "SELECT count(1) FROM " + tableName;
ResultSet rs = conn2.createStatement().executeQuery(query);
assertTrue(rs.next());
assertEquals(rowCount, rs.getLong(1));
query = "SELECT id, col1,col2 FROM " + tableName;
rs = conn2.createStatement().executeQuery(query);
for (int i = 0; i < rowCount; i++) {
assertTrue(rs.next());
assertEquals(Integer.toString(i),rs.getString(1));
assertEquals(i+1, rs.getInt(2));
assertEquals(i+2, rs.getInt(3));
}
assertFalse(rs.next());
conn2.close();
}
@SuppressWarnings("deprecation")
@Test
// Verifies CREATE VIEW over a pre-existing native HBase table: view creation
// must fail for unknown tables/families, succeed when all families exist, keep
// the view read-only for DML, honor IMMUTABLE_ROWS for indexing, support
// DROP VIEW (but not DROP TABLE), and apply a view WHERE clause when scanning.
// The long ts+N progression matters: each connection's CURRENT_SCN must be
// newer than the DDL it depends on.
public void testCreateViewOnExistingTable() throws Exception {
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), PropertiesUtil.deepCopy(TEST_PROPERTIES)).unwrap(PhoenixConnection.class);
String tableName = MDTEST_NAME;
String schemaName = MDTEST_SCHEMA_NAME;
// Only families "b" (Phoenix-normalized) and raw "c" exist on the HBase table.
byte[] cfB = Bytes.toBytes(SchemaUtil.normalizeIdentifier("b"));
byte[] cfC = Bytes.toBytes("c");
byte[][] familyNames = new byte[][] {cfB, cfC};
byte[] htableName = SchemaUtil.getTableNameAsBytes(schemaName, tableName);
HBaseAdmin admin = pconn.getQueryServices().getAdmin();
// Drop any leftover table from a previous run.
// NOTE(review): enableTable() right after deleteTable() presumably just throws
// the swallowed TableNotFoundException — confirm intent.
try {
admin.disableTable(htableName);
admin.deleteTable(htableName);
admin.enableTable(htableName);
} catch (org.apache.hadoop.hbase.TableNotFoundException e) {
} finally {
admin.close();
}
// NOTE(review): admin is closed in the finally above but still used below for
// createTable() — this looks like a use-after-close; verify it works or move
// admin.close() after createTable().
HTableDescriptor descriptor = new HTableDescriptor(htableName);
for (byte[] familyName : familyNames) {
HColumnDescriptor columnDescriptor = new HColumnDescriptor(familyName);
descriptor.addFamily(columnDescriptor);
}
admin.createTable(descriptor);
long ts = nextTimestamp();
Properties props = new Properties();
props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 5));
Connection conn1 = DriverManager.getConnection(getUrl(), props);
// Case 1: view over a table that does not exist at all — must fail.
String createStmt = "create view bogusTable" +
"   (id char(1) not null primary key,\n" +
"    a.col1 integer,\n" +
"    d.col2 bigint)\n";
try {
conn1.createStatement().execute(createStmt);
fail();
} catch (TableNotFoundException e) {
// expected to fail b/c table doesn't exist
} catch (ReadOnlyTableException e) {
// expected to fail b/c table doesn't exist
}
// Case 2: view referencing family "a", which the HBase table lacks — must fail.
createStmt = "create view " + MDTEST_NAME +
"   (id char(1) not null primary key,\n" +
"    a.col1 integer,\n" +
"    b.col2 bigint)\n";
try {
conn1.createStatement().execute(createStmt);
fail();
} catch (ReadOnlyTableException e) {
// expected to fail b/c cf a doesn't exist
}
// Case 3: unquoted "c" normalizes to upper case, which doesn't match the raw
// lower-case family — must fail.
createStmt = "create view " + MDTEST_NAME +
"   (id char(1) not null primary key,\n" +
"    b.col1 integer,\n" +
"    c.col2 bigint)\n";
try {
conn1.createStatement().execute(createStmt);
fail();
} catch (ReadOnlyTableException e) {
// expected to fail b/c cf C doesn't exist (case issue)
}
// Case 4: quoting "c" preserves the case, so every family resolves.
createStmt = "create view " + MDTEST_NAME +
"   (id char(1) not null primary key,\n" +
"    b.col1 integer,\n" +
"    \"c\".col2 bigint) \n";
// should be ok now
conn1.createStatement().execute(createStmt);
conn1.close();
// The new view shows up in metadata as a MAPPED view.
props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 6));
PhoenixConnection conn2 = DriverManager.getConnection(getUrl(), props).unwrap(PhoenixConnection.class);
ResultSet rs = conn2.getMetaData().getTables(null, null, MDTEST_NAME, null);
assertTrue(rs.next());
assertEquals(ViewType.MAPPED.name(), rs.getString(PhoenixDatabaseMetaData.VIEW_TYPE));
assertFalse(rs.next());
// DML against the view must be rejected: DELETE ...
String deleteStmt = "DELETE FROM " + MDTEST_NAME;
PreparedStatement ps = conn2.prepareStatement(deleteStmt);
try {
ps.execute();
fail();
} catch (ReadOnlyTableException e) {
// expected to fail b/c table is read-only
}
try {
// ... and UPSERT.
String upsert = "UPSERT INTO " + MDTEST_NAME + "(id,col1,col2) VALUES(?,?,?)";
ps = conn2.prepareStatement(upsert);
try {
ps.setString(1, Integer.toString(0));
ps.setInt(2, 1);
ps.setInt(3, 2);
ps.execute();
fail();
} catch (ReadOnlyTableException e) {
// expected to fail b/c table is read-only
}
// Mark the view immutable, then write a row natively (views are read-only
// through Phoenix, so test data has to go in via the HBase API).
conn2.createStatement().execute("ALTER VIEW " + MDTEST_NAME + " SET IMMUTABLE_ROWS=TRUE");
HTableInterface htable = conn2.getQueryServices().getTable(SchemaUtil.getTableNameAsBytes(MDTEST_SCHEMA_NAME,MDTEST_NAME));
Put put = new Put(Bytes.toBytes("0"));
put.add(cfB, Bytes.toBytes("COL1"), ts+6, PInteger.INSTANCE.toBytes(1));
put.add(cfC, Bytes.toBytes("COL2"), ts+6, PLong.INSTANCE.toBytes(2));
htable.put(put);
conn2.close();
props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 10));
Connection conn7 = DriverManager.getConnection(getUrl(), props);
// Should be ok b/c we've marked the view with IMMUTABLE_ROWS=true
conn7.createStatement().execute("CREATE INDEX idx ON " + MDTEST_NAME + "(B.COL1)");
// The natively-written row is visible through the view/index.
String select = "SELECT col1 FROM " + MDTEST_NAME + " WHERE col2=?";
ps = conn7.prepareStatement(select);
ps.setInt(1, 2);
rs = ps.executeQuery();
assertTrue(rs.next());
assertEquals(1, rs.getInt(1));
assertFalse(rs.next());
// DROP TABLE on a view must fail; DROP VIEW succeeds.
props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 12));
Connection conn75 = DriverManager.getConnection(getUrl(), props);
String dropTable = "DROP TABLE " + MDTEST_NAME ;
ps = conn75.prepareStatement(dropTable);
try {
ps.execute();
fail();
} catch (TableNotFoundException e) {
// expected to fail b/c it is a view
}
String dropView = "DROP VIEW " + MDTEST_NAME ;
ps = conn75.prepareStatement(dropView);
ps.execute();
conn75.close();
// Re-create the view declaring IMMUTABLE_ROWS up front; indexing should then
// work without a separate ALTER VIEW.
props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 15));
Connection conn8 = DriverManager.getConnection(getUrl(), props);
createStmt = "create view " + MDTEST_NAME +
"   (id char(1) not null primary key,\n" +
"    b.col1 integer,\n" +
"    \"c\".col2 bigint) IMMUTABLE_ROWS=true\n";
// should be ok to create a view with IMMUTABLE_ROWS = true
conn8.createStatement().execute(createStmt);
conn8.close();
props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 20));
Connection conn9 = DriverManager.getConnection(getUrl(), props);
conn9.createStatement().execute("CREATE INDEX idx ON " + MDTEST_NAME + "(B.COL1)");
props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 30));
Connection conn91 = DriverManager.getConnection(getUrl(), props);
ps = conn91.prepareStatement(dropView);
ps.execute();
conn91.close();
// Re-create the view one more time with a WHERE clause (b.col1 = 1).
props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 35));
Connection conn92 = DriverManager.getConnection(getUrl(), props);
createStmt = "create view " + MDTEST_NAME +
"   (id char(1) not null primary key,\n" +
"    b.col1 integer,\n" +
"    \"c\".col2 bigint) as\n" +
" select * from " + MDTEST_NAME +
" where b.col1 = 1";
conn92.createStatement().execute(createStmt);
conn92.close();
// Write a second row that does NOT satisfy the view's WHERE clause (col1=3).
put = new Put(Bytes.toBytes("1"));
put.add(cfB, Bytes.toBytes("COL1"), ts+39, PInteger.INSTANCE.toBytes(3));
put.add(cfC, Bytes.toBytes("COL2"), ts+39, PLong.INSTANCE.toBytes(4));
htable.put(put);
// Only the row matching the WHERE clause is counted through the view.
props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 40));
Connection conn92a = DriverManager.getConnection(getUrl(), props);
rs = conn92a.createStatement().executeQuery("select count(*) from " + MDTEST_NAME);
assertTrue(rs.next());
assertEquals(1,rs.getInt(1));
conn92a.close();
// Dropping a column referenced by the view's WHERE clause must be rejected ...
props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 45));
Connection conn93 = DriverManager.getConnection(getUrl(), props);
try {
String alterView = "alter view " + MDTEST_NAME + " drop column b.col1";
conn93.createStatement().execute(alterView);
fail();
} catch (SQLException e) {
assertEquals(SQLExceptionCode.CANNOT_MUTATE_TABLE.getErrorCode(), e.getErrorCode());
}
conn93.close();
// ... while dropping an unreferenced column is allowed.
props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 50));
Connection conn94 = DriverManager.getConnection(getUrl(), props);
String alterView = "alter view " + MDTEST_NAME + " drop column \"c\".col2";
conn94.createStatement().execute(alterView);
conn94.close();
} finally {
// Clean up the natively-written rows so other tests see an empty table.
HTableInterface htable = pconn.getQueryServices().getTable(SchemaUtil.getTableNameAsBytes(MDTEST_SCHEMA_NAME,MDTEST_NAME));
Delete delete1 = new Delete(Bytes.toBytes("0"));
Delete delete2 = new Delete(Bytes.toBytes("1"));
htable.batch(Arrays.asList(delete1, delete2));
}
}
@Test
public void testAddKVColumnToExistingFamily() throws Throwable {
    long ts = nextTimestamp();
    String tenantId = getOrganizationId();
    initATableValues(tenantId, getDefaultSplits(tenantId), null, ts);
    Properties props = new Properties();

    // Add the column through a connection pinned at SCN ts+5.
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 5));
    Connection alterConn = DriverManager.getConnection(getUrl(), props);
    // Failed attempt to repro table not found bug
    // TestUtil.clearMetaDataCache(conn1);
    // PhoenixConnection pconn = conn1.unwrap(PhoenixConnection.class);
    // pconn.removeTable(ATABLE_SCHEMA_NAME, ATABLE_NAME);
    alterConn.createStatement().executeUpdate("ALTER TABLE " + ATABLE_NAME + " ADD z_integer integer");
    alterConn.close();

    // A connection after the ALTER (ts+6) sees the new column.
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 6));
    Connection afterConn = DriverManager.getConnection(getUrl(), props);
    String query = "SELECT z_integer FROM aTable";
    assertTrue(afterConn.prepareStatement(query).executeQuery().next());
    afterConn.close();

    // A connection before the ALTER (ts+3) must not resolve the column.
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 3));
    Connection beforeConn = DriverManager.getConnection(getUrl(), props);
    try {
        beforeConn.prepareStatement(query).executeQuery().next();
        fail();
    } catch (ColumnNotFoundException e) {
        // expected: the column did not exist yet at this SCN
    }
}
@Test
public void testAddKVColumnToNewFamily() throws Exception {
    long ts = nextTimestamp();
    String tenantId = getOrganizationId();
    initATableValues(tenantId, getDefaultSplits(tenantId), null, ts);
    Properties props = new Properties();

    // Add a KV column in a brand-new column family at SCN ts+5.
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 5));
    Connection alterConn = DriverManager.getConnection(getUrl(), props);
    alterConn.createStatement().executeUpdate("ALTER TABLE " + ATABLE_NAME + " ADD newcf.z_integer integer");
    alterConn.close();

    // Visible from a later SCN (ts+6) ...
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 6));
    Connection afterConn = DriverManager.getConnection(getUrl(), props);
    String query = "SELECT z_integer FROM aTable";
    assertTrue(afterConn.prepareStatement(query).executeQuery().next());
    afterConn.close();

    // ... but unknown to an earlier SCN (ts+3).
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 3));
    Connection beforeConn = DriverManager.getConnection(getUrl(), props);
    try {
        beforeConn.prepareStatement(query).executeQuery().next();
        fail();
    } catch (ColumnNotFoundException e) {
        // expected: the column did not exist yet at this SCN
    }
}
@Test
public void testAddPKColumn() throws Exception {
    long ts = nextTimestamp();
    String tenantId = getOrganizationId();
    initATableValues(tenantId, getDefaultSplits(tenantId), null, ts);
    Properties props = new Properties();
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 5));
    Connection alterConn = DriverManager.getConnection(getUrl(), props);

    // Appending a NOT NULL column to an existing multi-part row key is rejected.
    try {
        alterConn.createStatement().executeUpdate("ALTER TABLE " + ATABLE_NAME + " ADD z_string varchar not null primary key");
        fail();
    } catch (SQLException e) {
        assertTrue(e.getMessage(), e.getMessage().contains("ERROR 1006 (42J04): Only nullable columns may be added to a multi-part row key."));
    }
    // A nullable PK column, however, is allowed.
    alterConn.createStatement().executeUpdate("ALTER TABLE " + ATABLE_NAME + " ADD z_string varchar primary key");
    alterConn.close();

    // Visible after the ALTER (ts+6) ...
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 6));
    Connection afterConn = DriverManager.getConnection(getUrl(), props);
    String query = "SELECT z_string FROM aTable";
    assertTrue(afterConn.prepareStatement(query).executeQuery().next());
    afterConn.close();

    // ... but not before it (ts+3).
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 3));
    Connection beforeConn = DriverManager.getConnection(getUrl(), props);
    try {
        beforeConn.prepareStatement(query).executeQuery().next();
        fail();
    } catch (ColumnNotFoundException e) {
        // expected: the column did not exist yet at this SCN
    }
}
@Test
public void testDropKVColumn() throws Exception {
    long ts = nextTimestamp();
    String tenantId = getOrganizationId();
    initATableValues(tenantId, getDefaultSplits(tenantId), null, ts);
    Properties props = new Properties();

    // Drop b_string at SCN ts+5 (confirming it had data first).
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 5));
    Connection dropConn = DriverManager.getConnection(getUrl(), props);
    assertTrue(dropConn.createStatement().executeQuery("SELECT 1 FROM atable WHERE b_string IS NOT NULL").next());
    dropConn.createStatement().executeUpdate("ALTER TABLE " + ATABLE_NAME + " DROP COLUMN b_string");
    dropConn.close();

    // After the drop (ts+6) the column no longer resolves.
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 6));
    Connection postDropConn = DriverManager.getConnection(getUrl(), props);
    String query = "SELECT b_string FROM aTable";
    try {
        postDropConn.prepareStatement(query).executeQuery().next();
        fail();
    } catch (ColumnNotFoundException e) {
        // expected: column dropped at ts+5
    }
    postDropConn.close();

    // Before the drop (ts+3) the column still resolves and has data.
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 3));
    Connection preDropConn = DriverManager.getConnection(getUrl(), props);
    assertTrue(preDropConn.prepareStatement(query).executeQuery().next());
    preDropConn.close();

    // Re-add the column at ts+7 ...
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 7));
    Connection readdConn = DriverManager.getConnection(getUrl(), props);
    readdConn.createStatement().executeUpdate("ALTER TABLE " + ATABLE_NAME + " ADD b_string VARCHAR");
    readdConn.close();

    // ... and verify at ts+8 that it comes back empty (the old data stayed dropped).
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 8));
    Connection finalConn = DriverManager.getConnection(getUrl(), props);
    assertFalse(finalConn.createStatement().executeQuery("SELECT 1 FROM atable WHERE b_string IS NOT NULL").next());
    finalConn.close();
}
@Test
public void testDropPKColumn() throws Exception {
    long ts = nextTimestamp();
    String tenantId = getOrganizationId();
    initATableValues(tenantId, getDefaultSplits(tenantId), null, ts);
    Properties props = new Properties();
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 5));
    Connection conn = DriverManager.getConnection(getUrl(), props);
    // Dropping a primary-key column must be rejected with error 506.
    try {
        conn.createStatement().executeUpdate("ALTER TABLE " + ATABLE_NAME + " DROP COLUMN entity_id");
        fail();
    } catch (SQLException e) {
        assertTrue(e.getMessage(), e.getMessage().contains("ERROR 506 (42817): Primary key column may not be dropped."));
    }
    conn.close();
}
@Test
public void testDropAllKVCols() throws Exception {
    long ts = nextTimestamp();
    ensureTableCreated(getUrl(), MDTEST_NAME, null, ts);
    Properties props = new Properties();

    // Populate two rows at SCN ts+2.
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 2));
    Connection upsertConn = DriverManager.getConnection(getUrl(), props);
    upsertConn.createStatement().executeUpdate("UPSERT INTO " + MDTEST_NAME + " VALUES('a',1,1)");
    upsertConn.createStatement().executeUpdate("UPSERT INTO " + MDTEST_NAME + " VALUES('b',2,2)");
    upsertConn.commit();
    upsertConn.close();
    assertEquals(2, countMdTestRows(props, ts + 3));

    // Drop the first KV column; both rows must survive.
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 5));
    Connection dropCol1Conn = DriverManager.getConnection(getUrl(), props);
    dropCol1Conn.createStatement().executeUpdate("ALTER TABLE " + MDTEST_NAME + " DROP COLUMN col1");
    dropCol1Conn.close();
    assertEquals(2, countMdTestRows(props, ts + 6));

    // Drop the last KV column; rows must still survive on the row key alone.
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 7));
    Connection dropCol2Conn = DriverManager.getConnection(getUrl(), props);
    dropCol2Conn.createStatement().executeUpdate("ALTER TABLE " + MDTEST_NAME + " DROP COLUMN col2");
    dropCol2Conn.close();
    assertEquals(2, countMdTestRows(props, ts + 8));
}

/**
 * Counts the rows in MDTEST_NAME as seen at the given SCN. Extracted from the
 * three copy/pasted connect/count/close sequences in the original; also closes
 * the connection in a finally block (the original leaked it on assert failure).
 */
private long countMdTestRows(Properties props, long scn) throws SQLException {
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(scn));
    Connection conn = DriverManager.getConnection(getUrl(), props);
    try {
        ResultSet rs = conn.createStatement().executeQuery("SELECT count(1) FROM " + MDTEST_NAME);
        assertTrue(rs.next());
        return rs.getLong(1);
    } finally {
        conn.close();
    }
}
@Test
public void testNewerTableDisallowed() throws Exception {
    long ts = nextTimestamp();
    ensureTableCreated(getUrl(), ATABLE_NAME, null, ts);
    Properties props = new Properties();
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 5));
    Connection conn = DriverManager.getConnection(getUrl(), props);
    // First DDL through this connection succeeds.
    conn.createStatement().executeUpdate("ALTER TABLE " + ATABLE_NAME + " DROP COLUMN x_integer");
    // A second DDL through the same pinned SCN is refused — presumably because
    // the table's metadata is now newer than this connection's CURRENT_SCN.
    try {
        conn.createStatement().executeUpdate("ALTER TABLE " + ATABLE_NAME + " DROP COLUMN y_integer");
        fail();
    } catch (SQLException e) {
        assertTrue(e.getMessage(), e.getMessage().contains("ERROR 1013 (42M04): Table already exists. tableName=ATABLE"));
    }
    conn.close();
}
@Test
// Verifies getTables() handling of schema patterns when a table name exists
// both with and without a schema. (The method name keeps its historical typo,
// "Scema", so any by-name test selection keeps working.)
public void testTableWithScemaMetadataScan() throws SQLException {
    long ts = nextTimestamp();
    Properties props = new Properties();
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts));
    Connection conn = DriverManager.getConnection(getUrl(), props);
    conn.createStatement().execute("create table foo.bar(k varchar primary key)");
    conn.createStatement().execute("create table bar(k varchar primary key)");
    conn.close();
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 10));
    conn = DriverManager.getConnection(getUrl(), props);
    try {
        DatabaseMetaData metaData = conn.getMetaData();
        ResultSet rs;
        // Tricky case that requires returning false for null AND true expression
        rs = metaData.getTables(null, "FOO", "BAR", null);
        assertTrue(rs.next());
        assertEquals("FOO", rs.getString("TABLE_SCHEM"));
        assertEquals("BAR", rs.getString("TABLE_NAME"));
        assertFalse(rs.next());
        // Tricky case that requires end key to maintain trailing nulls
        rs = metaData.getTables("", "FOO", "BAR", null);
        assertTrue(rs.next());
        assertEquals("FOO", rs.getString("TABLE_SCHEM"));
        assertEquals("BAR", rs.getString("TABLE_NAME"));
        assertFalse(rs.next());
        // Null schema pattern matches both the schema-less and the FOO table,
        // schema-less first.
        rs = metaData.getTables("", null, "BAR", null);
        assertTrue(rs.next());
        assertEquals(null, rs.getString("TABLE_SCHEM"));
        assertEquals("BAR", rs.getString("TABLE_NAME"));
        assertTrue(rs.next());
        assertEquals("FOO", rs.getString("TABLE_SCHEM"));
        assertEquals("BAR", rs.getString("TABLE_NAME"));
        assertFalse(rs.next());
    } finally {
        conn.close(); // fix: original never closed the second connection
    }
}
}
| |
package com.leonardofischer.jmautic.parser;
import java.io.InputStream;
import java.io.IOException;
import java.util.Iterator;
import java.util.Date;
import java.util.ArrayList;
import com.leonardofischer.jmautic.MauticException;
import com.leonardofischer.jmautic.model.*;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.module.SimpleModule;
/**
* <p>Parses the Mautic REST API responses and constructs jMautic model objects from it.
*
* <p>Internally, it uses
* <a href="https://github.com/FasterXML/jackson-databind" target="_top">jackson-databind</a> to
* parse the JSON stream and map it to jMautic model classes.
*
 * <p>Although we could use jackson-databind annotations to map JSON fields to jMautic model
 * classes, this parser does not rely on them. We cannot predict when the Mautic developers will
 * change its API, and jMautic's intention is to support all versions of the Mautic API. So the use
 * of annotations could make it harder to support several distinct JSON formats in the same code.
*
* <p>For now, the Parser will change the parsed JSON, in a way that it is easy to map the JSON
* tree into jMautic model objects. This includes renaming fields, moving fields around in the
* tree, and other things.
*
* <p>Future versions of jMautic may handle changes in the Mautic API by creating an interface
* from this class public methods, and then creating multiple implementations of this new
* interface.
*/
public class Parser {

    // Configured once in the constructor and reused for every parse call.
    ObjectMapper mapper;

    /**
     * Creates a parser whose {@link ObjectMapper} deserializes Mautic date strings
     * through the custom {@code DateDeserializer}.
     */
    public Parser() {
        mapper = new ObjectMapper();
        SimpleModule module = new SimpleModule();
        module.addDeserializer(Date.class, new DateDeserializer());
        mapper.registerModule(module);
    }

    /**
     * Parses the JSON returned by the <code>GET /contacts</code> Mautic endpoint, and creates
     * a ListContactsResult instance from it.
     *
     * @param input the InputStream to read the endpoint response
     * @return the ListContactsResult built from the given JSON content in the stream
     * @throws MauticException if an error occurs reading the stream or mapping the input to
     *     the ListContactsResult instance
     */
    public ListContactsResult parseListContacts(InputStream input) throws MauticException {
        ObjectNode jsonTree;
        try {
            jsonTree = (ObjectNode)mapper.readTree(input);
            renameLeadsToContacts(jsonTree);
            JsonNode contacts = jsonTree.get("contacts");
            if( contacts!=null ) {
                Iterator<JsonNode> iterator = contacts.iterator();
                while( iterator.hasNext() ) {
                    normalizeContact( (ObjectNode)iterator.next() );
                }
            }
        }
        catch(IOException e) {
            throw new MauticException("Could not read json: "+e.getMessage(), e);
        }
        try {
            return mapper.treeToValue(jsonTree, ListContactsResult.class);
        }
        catch(JsonProcessingException e) {
            throw new MauticException("Could not convert json to ListContactsResult: "+e.getMessage(), e);
        }
    }

    /**
     * Parses the JSON returned by the <code>GET /contacts/ID</code> Mautic endpoint and creates
     * a GetContactResult object from it.
     *
     * @param input the InputStream to read the returned JSON from the API
     * @return the GetContactResult object built from the JSON read from the stream
     * @throws MauticException if an error occurs while reading or mapping the JSON
     *     to a GetContactResult instance.
     */
    public GetContactResult parseGetContact(InputStream input) throws MauticException {
        ObjectNode jsonTree;
        try {
            jsonTree = (ObjectNode)mapper.readTree(input);
            renameLeadToContact(jsonTree);
            JsonNode contactNode = jsonTree.get("contact");
            // Guard against responses without a "lead"/"contact" entry: previously this
            // was an unchecked ClassCastException instead of a mapping failure below.
            if( contactNode instanceof ObjectNode ) {
                normalizeContact( (ObjectNode)contactNode );
            }
        }
        catch(IOException e) {
            throw new MauticException("Could not read json: "+e.getMessage(), e);
        }
        try {
            return mapper.treeToValue(jsonTree, GetContactResult.class);
        }
        catch(JsonProcessingException e) {
            throw new MauticException("Could not convert json to GetContactResult: "+e.getMessage(), e);
        }
    }

    /**
     * Applies, in order, every JSON rewrite needed to turn a raw Mautic contact node into the
     * shape expected by the jMautic model classes. Shared by parseListContacts and
     * parseGetContact, which previously duplicated this sequence.
     *
     * @throws MauticException propagated from moveFieldsToField
     */
    private void normalizeContact(ObjectNode contact) throws MauticException {
        moveFieldsDotAllToAllFields(contact);
        moveFieldsToField(contact);
        renameFieldOrder(contact);
        simplifyIpAddresses(contact);
    }

    /**
     * Mautic's current documentation uses "Contacts" instead of "Leads", but the API kept the
     * "leads" field to avoid breaking changes. To make jMautic more uniform and allow it to use
     * the current naming scheme, we use only "contact" and "contacts" in the model classes. This
     * method renames the "leads" entry of the <code>GET /contacts</code> endpoint to "contacts",
     * making it easier to map into the "contacts" field of the
     * {@link com.leonardofischer.jmautic.model.ListContactsResult} class.
     */
    private void renameLeadsToContacts(ObjectNode jsonTree) {
        jsonTree.set( "contacts", jsonTree.get("leads") );
        jsonTree.remove("leads");
    }

    /**
     * The "fields" entry of a contact in the JSON has two kinds of sub-entries: groups and an
     * "all" entry. Although all are related to fields, they have very distinct structures: the
     * "group" entries are very uniform, while the "all" entry may have very different sub-fields
     * for each contact. This method removes the entry "all" from the "fields" entry, and puts
     * it in the contact itself, using the name "allFields".
     */
    private void moveFieldsDotAllToAllFields(ObjectNode contact) {
        ObjectNode contactFields = (ObjectNode)contact.get("fields");
        if( contactFields!=null ) {
            contact.set( "allFields", contactFields.get("all") );
            contactFields.remove("all");
        }
    }

    /**
     * The "fields" entry of a contact may have several groups. The group names appear two times
     * in the JSON structure: as an entry inside the "fields" entry, and as a value of the
     * group in a field. In the same way, the field entry inside a "group" entry is replicated in
     * the "alias" entry of a field. This method "flattens" this structure, converting the whole
     * "fields" entry to a list of top-level fields.
     *
     * @throws MauticException if a group is a non-empty array (an empty array is how the API
     *     encodes a group with no fields)
     */
    private void moveFieldsToField(ObjectNode contact) throws MauticException {
        // Factory call: creates a fresh, empty ArrayNode tied to this contact's node factory.
        ArrayNode fields = contact.arrayNode();
        ObjectNode groups = (ObjectNode)contact.get("fields");
        if( groups!=null ) {
            Iterator<String> groupNamesIterator = groups.fieldNames();
            while( groupNamesIterator.hasNext() ) {
                String groupName = groupNamesIterator.next();
                JsonNode groupNode = groups.get(groupName);
                if( groupNode instanceof ObjectNode ) {
                    ObjectNode group = (ObjectNode)groupNode;
                    Iterator<String> fieldNamesIterator = group.fieldNames();
                    while( fieldNamesIterator.hasNext() ) {
                        String fieldName = fieldNamesIterator.next();
                        fields.add( group.get(fieldName) );
                    }
                }
                else if (groupNode instanceof ArrayNode) {
                    ArrayNode groupArray = (ArrayNode) groupNode;
                    if( groupArray.size()!=0 ) {
                        throw new MauticException("Expecting empty array node, but found '"+groupNode+"'");
                    }
                }
            }
        }
        contact.set("fields", fields);
    }

    /**
     * Right now the Mautic API is non-uniform in several points. For example, most of the time
     * the API returns the fields in camel-case (for example, "someField"). But, for some unknown
     * reason, the field order is returned in the JSON result as "field_order". We can't use the
     * ObjectMapper.setPropertyNamingStrategy() method, so we make the JSON more uniform by
     * renaming "field_order" to "fieldOrder". Fields without a "field_order" entry are left
     * untouched (previously a null "fieldOrder" entry was created for them).
     */
    private void renameFieldOrder(ObjectNode contact) {
        ArrayNode fields = (ArrayNode)contact.get("fields");
        if( fields!=null ) {
            Iterator<JsonNode> fieldsIterator = fields.iterator();
            while( fieldsIterator.hasNext() ) {
                ObjectNode field = (ObjectNode)fieldsIterator.next();
                JsonNode order = field.get("field_order");
                if( order!=null ) {
                    field.set("fieldOrder", order);
                    field.remove("field_order");
                }
            }
        }
    }

    /**
     * The "ipAddresses" field of a contact has several entries, one for each IP address. And
     * each IP address has some details to it. This method flattens each IP entry, making the
     * IP address appear in the same level as its details, making it easier to handle.
     */
    private void simplifyIpAddresses(ObjectNode contact) {
        ArrayNode ipAddresses = contact.arrayNode();
        JsonNode ipAddressNamesNode = contact.get("ipAddresses");
        if( ipAddressNamesNode instanceof ObjectNode ) {
            ObjectNode ipAddressNames = (ObjectNode)ipAddressNamesNode;
            Iterator<String> ipAddressNamesIterator = ipAddressNames.fieldNames();
            while( ipAddressNamesIterator.hasNext() ) {
                String ipAddress = ipAddressNamesIterator.next();
                JsonNode ipAddressData = ipAddressNames.get(ipAddress);
                JsonNode detailsNode = ipAddressData.get("ipDetails");
                ObjectNode ipAddressDetails;
                if( detailsNode instanceof ObjectNode ) {
                    ipAddressDetails = (ObjectNode)detailsNode;
                }
                else {
                    // Entry without an "ipDetails" object: keep at least the address itself.
                    // Previously this case crashed with a NullPointerException.
                    ipAddressDetails = contact.objectNode();
                }
                ipAddressDetails.put("ipAddress", ipAddress);
                ipAddresses.add( ipAddressDetails );
            }
        }
        contact.set("ipAddresses", ipAddresses);
    }

    /**
     * Renames the field "lead" to "contact" from a JSON returned by the
     * <code>GET /contacts/ID</code> API. This makes the whole jMautic model more uniform by
     * using only the word "contact".
     */
    private void renameLeadToContact(ObjectNode jsonTree) {
        jsonTree.set( "contact", jsonTree.get("lead") );
        jsonTree.remove("lead");
    }
}
| |
/*
* #%L
* Native ARchive plugin for Maven
* %%
* Copyright (C) 2002 - 2014 NAR Maven Plugin developers.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package com.github.maven_nar.cpptasks.msvc;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.tools.ant.BuildException;
import org.apache.xml.serialize.OutputFormat;
import org.apache.xml.serialize.XMLSerializer;
import org.xml.sax.ContentHandler;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.AttributesImpl;
import com.github.maven_nar.cpptasks.CCTask;
import com.github.maven_nar.cpptasks.CUtil;
import com.github.maven_nar.cpptasks.TargetInfo;
import com.github.maven_nar.cpptasks.compiler.CommandLineCompilerConfiguration;
import com.github.maven_nar.cpptasks.compiler.CommandLineLinkerConfiguration;
import com.github.maven_nar.cpptasks.compiler.ProcessorConfiguration;
import com.github.maven_nar.cpptasks.ide.CommentDef;
import com.github.maven_nar.cpptasks.ide.DependencyDef;
import com.github.maven_nar.cpptasks.ide.ProjectDef;
import com.github.maven_nar.cpptasks.ide.ProjectWriter;
/**
* Writes a Visual Studio.NET project file.
*
* @author curta
*/
public final class VisualStudioNETProjectWriter implements ProjectWriter {
/**
 * Adds a non-namespace-qualified attribute to an attribute list.
 *
 * @param attributes
 *          list of attributes, modified in place.
 * @param attrName
 *          attribute name, may not be null.
 * @param attrValue
 *          attribute value; when null the attribute is simply not added.
 */
private static void addAttribute(final AttributesImpl attributes, final String attrName, final String attrValue) {
  if (attrName == null) {
    throw new IllegalArgumentException("attrName");
  }
  if (attrValue == null) {
    return; // omit the attribute entirely rather than writing an empty value
  }
  attributes.addAttribute(null, attrName, attrName, "#PCDATA", attrValue);
}
/**
 * VisualStudio.NET version string, written as the Version attribute of the
 * VisualStudioProject element.
 */
private final String version;
/**
 * Literal to represent a true attribute value ("true" in VC 2005).
 */
private final String trueLiteral;
/**
 * Literal to represent a false attribute value ("false" in VC 2005).
 */
private final String falseLiteral;
/**
 * Constructor.
 *
 * @param versionArg
 *          String VisualStudio.NET version, may not be null.
 * @param trueArg
 *          literal to represent true, "true" in VC 2005; may not be null.
 * @param falseArg
 *          literal to represent false, "false" in VC 2005; may not be null.
 */
public VisualStudioNETProjectWriter(final String versionArg, final String trueArg, final String falseArg) {
// Fail fast on nulls; IllegalArgumentException (not NullPointerException) is
// the established convention in this class.
if (versionArg == null) {
throw new IllegalArgumentException("versionArg");
}
if (trueArg == null) {
throw new IllegalArgumentException("trueArg");
}
if (falseArg == null) {
throw new IllegalArgumentException("falseArg");
}
this.version = versionArg;
this.trueLiteral = trueArg;
this.falseLiteral = falseArg;
}
/**
 * Get value of AdditionalDependencies property: the space-separated list of
 * libraries/object files the linker needs that are not produced by this build.
 *
 * @param linkTarget
 *          link target.
 * @param projectDependencies
 *          dependencies declared in project.
 * @param targets
 *          all targets.
 * @param basePath
 *          path to directory containing project file.
 * @return value of AdditionalDependencies property, or null when there are none.
 */
private String getAdditionalDependencies(final TargetInfo linkTarget, final List<DependencyDef> projectDependencies,
final Map<String, TargetInfo> targets, final String basePath) {
String dependencies = null;
final File[] linkSources = linkTarget.getAllSources();
final StringBuffer buf = new StringBuffer();
for (int i = 0; i < linkSources.length; i++) {
//
// only consider link sources that were not compiled or otherwise
// generated by one of the build targets
//
if (targets.get(linkSources[i].getName()) == null) {
//
// if source appears to be a system library or object file
// just output the name of the file (advapi.lib for example)
// otherwise construct a relative path.
//
String relPath = linkSources[i].getName();
//
// check if file comes from a project dependency;
// if it does it should not be explicitly linked
// (the base name is everything before the first dot)
boolean fromDependency = false;
if (relPath.indexOf(".") > 0) {
final String baseName = relPath.substring(0, relPath.indexOf("."));
for (DependencyDef depend : projectDependencies) {
if (baseName.compareToIgnoreCase(depend.getName()) == 0) {
fromDependency = true;
}
}
}
if (!fromDependency) {
if (!CUtil.isSystemPath(linkSources[i])) {
relPath = CUtil.getRelativePath(basePath, linkSources[i]);
}
//
// if path has an embedded space then
// must quote
if (relPath.indexOf(' ') > 0) {
buf.append('\"');
buf.append(CUtil.toWindowsPath(relPath));
buf.append('\"');
} else {
buf.append(relPath);
}
buf.append(' ');
}
}
}
// drop the trailing separator space appended above
if (buf.length() > 0) {
buf.setLength(buf.length() - 1);
dependencies = buf.toString();
}
return dependencies;
}
/**
 * Get value of AdditionalIncludeDirectories property by scanning the raw
 * compiler options for /I flags; this also picks up system includes that
 * appear only on the command line.
 *
 * @param baseDir
 *          base for relative paths (currently unused, kept for signature
 *          compatibility with callers).
 * @param compilerConfig
 *          compiler configuration.
 * @return semicolon-separated value of AdditionalIncludeDirectories property,
 *         empty when no /I options are present.
 */
private String getAdditionalIncludeDirectories(final String baseDir,
    final CommandLineCompilerConfiguration compilerConfig) {
  // Darren Sargent Feb 10 2010 -- reverted to older code to ensure sys
  // includes get, erm, included. (The former getIncludePath() result was
  // computed but never used, so that dead local has been removed.)
  final StringBuilder includeDirs = new StringBuilder();
  for (final String arg : compilerConfig.getPreArguments()) {
    if (arg.startsWith("/I")) {
      includeDirs.append(arg.substring(2));
      includeDirs.append(';');
    }
  }
  // drop the trailing semicolon
  if (includeDirs.length() > 0) {
    includeDirs.setLength(includeDirs.length() - 1);
  }
  return includeDirs.toString();
}
/**
 * Gets the first recognized MSVC compiler configuration from the
 * compilation targets.
 *
 * @param targets
 *          compilation targets
 * @return representative (hopefully) compiler configuration, or null when no
 *         target uses the Microsoft C compiler.
 */
private CommandLineCompilerConfiguration getBaseCompilerConfiguration(final Map<String, TargetInfo> targets) {
  // assume the first matching target is representative of the whole build
  for (final TargetInfo targetInfo : targets.values()) {
    final ProcessorConfiguration config = targetInfo.getConfiguration();
    if (config instanceof CommandLineCompilerConfiguration) {
      final CommandLineCompilerConfiguration candidate = (CommandLineCompilerConfiguration) config;
      if (candidate.getCompiler() instanceof MsvcCCompiler) {
        return candidate;
      }
    }
  }
  return null;
}
/**
 * Get value of BasicRuntimeChecks property from the /RTC compiler options.
 *
 * @param compilerConfig
 *          compiler configuration.
 * @return value of BasicRuntimeChecks property ("0" when no check option is
 *         present; the last matching option wins).
 */
private String getBasicRuntimeChecks(final CommandLineCompilerConfiguration compilerConfig) {
  String checks = "0";
  for (final String option : compilerConfig.getPreArguments()) {
    if ("/RTCs".equals(option)) {
      checks = "1";
    } else if ("/RTCu".equals(option)) {
      checks = "2";
    } else if ("/RTC1".equals(option) || "/GZ".equals(option)) {
      checks = "3";
    }
  }
  return checks;
}
/**
 * Get character set for Windows API.
 *
 * @param compilerConfig
 *          compiler configuration, may not be null.
 * @return "1" if TCHAR is Unicode, "2" if TCHAR is multi-byte, "0" if neither
 *         is selected on the command line.
 */
private String getCharacterSet(final CommandLineCompilerConfiguration compilerConfig) {
  String charset = "0";
  for (final String option : compilerConfig.getPreArguments()) {
    if ("/D_UNICODE".equals(option) || "/DUNICODE".equals(option)) {
      charset = "1";
    } else if ("/D_MBCS".equals(option)) {
      charset = "2";
    }
  }
  return charset;
}
/**
 * Gets the configuration type.
 *
 * @param task
 *          cc task, may not be null.
 * @return configuration type code understood by Visual Studio.
 */
private String getConfigurationType(final CCTask task) {
  final String outputType = task.getOuttype();
  if ("executable".equals(outputType)) {
    return "1"; // Win32 (x86) Console Application
  }
  if ("static".equals(outputType)) {
    return "4"; // Win32 (x86) Static Library
  }
  return "2"; // Win32 (x86) Dynamic-Link Library
}
/**
 * Get value of DebugInformationFormat property from the /Z compiler options.
 *
 * @param compilerConfig
 *          compiler configuration.
 * @return value of DebugInformationFormat property ("0" when no debug-info
 *         option is present; the last matching option wins).
 */
private String getDebugInformationFormat(final CommandLineCompilerConfiguration compilerConfig) {
  String format = "0";
  for (final String option : compilerConfig.getPreArguments()) {
    if ("/Z7".equals(option)) {
      format = "1";
    } else if ("/Zd".equals(option)) {
      format = "2";
    } else if ("/Zi".equals(option)) {
      format = "3";
    } else if ("/ZI".equals(option)) {
      format = "4";
    }
  }
  return format;
}
/**
 * Get value of Detect64BitPortabilityProblems property.
 *
 * @param compilerConfig
 *          compiler configuration.
 * @return the configured true literal when /Wp64 is present, null otherwise
 *         (a null value causes the attribute to be omitted).
 */
private String getDetect64BitPortabilityProblems(final CommandLineCompilerConfiguration compilerConfig) {
  for (final String option : compilerConfig.getPreArguments()) {
    if ("/Wp64".equals(option)) {
      return this.trueLiteral;
    }
  }
  return null;
}
/**
 * Get value of LinkIncremental property from the /INCREMENTAL linker options.
 *
 * @param linkerConfig
 *          linker configuration.
 * @return value of LinkIncremental property ("0" = default, "1" = no,
 *         "2" = yes; the last matching option wins).
 */
private String getLinkIncremental(final CommandLineLinkerConfiguration linkerConfig) {
  String incremental = "0";
  for (final String option : linkerConfig.getPreArguments()) {
    if ("/INCREMENTAL:NO".equals(option)) {
      incremental = "1";
    } else if ("/INCREMENTAL:YES".equals(option)) {
      incremental = "2";
    }
  }
  return incremental;
}
/**
 * Get value of Optimization property from the /O compiler options.
 *
 * @param compilerConfig
 *          compiler configuration, may not be null.
 * @return value of Optimization property ("0" when no optimization option is
 *         present; the last matching option wins).
 */
private String getOptimization(final CommandLineCompilerConfiguration compilerConfig) {
  String opt = "0";
  for (final String option : compilerConfig.getPreArguments()) {
    if ("/Od".equals(option)) {
      opt = "0";
    } else if ("/O1".equals(option)) {
      opt = "1";
    } else if ("/O2".equals(option)) {
      opt = "2";
    } else if ("/Ox".equals(option)) {
      opt = "3";
    }
  }
  return opt;
}
/**
 * Get value of PrecompiledHeaderFile property: the file name attached to the
 * last /Fp compiler option, if any.
 *
 * @param compilerConfig
 *          compiler configuration.
 * @return value of PrecompiledHeaderFile property, or null when /Fp is absent.
 */
private String getPrecompiledHeaderFile(final CommandLineCompilerConfiguration compilerConfig) {
  String pch = null;
  for (final String option : compilerConfig.getPreArguments()) {
    if (option.startsWith("/Fp")) {
      pch = option.substring("/Fp".length());
    }
  }
  return pch;
}
/**
 * Get value of PreprocessorDefinitions property from the /D compiler options.
 *
 * @param compilerConfig
 *          compiler configuration.
 * @param isDebug
 *          true if generating debug configuration.
 * @return semicolon-separated value of PreprocessorDefinitions property.
 */
private String getPreprocessorDefinitions(final CommandLineCompilerConfiguration compilerConfig, final boolean isDebug) {
  final StringBuilder defines = new StringBuilder();
  for (final String option : compilerConfig.getPreArguments()) {
    if (!option.startsWith("/D")) {
      continue;
    }
    String macro = option.substring(2);
    // Force the debug macro that matches the configuration being emitted:
    // NDEBUG -> _DEBUG for debug builds, _DEBUG -> NDEBUG for release builds.
    if (isDebug) {
      if ("NDEBUG".equals(macro)) {
        macro = "_DEBUG";
      }
    } else {
      if ("_DEBUG".equals(macro)) {
        macro = "NDEBUG";
      }
    }
    defines.append(macro).append(';');
  }
  // drop the trailing semicolon
  if (defines.length() > 0) {
    defines.setLength(defines.length() - 1);
  }
  return defines.toString();
}
/**
 * Get value of RuntimeLibrary property from the /MT and /MD compiler options.
 *
 * @param compilerConfig
 *          compiler configuration.
 * @param isDebug
 *          true if generating debug configuration.
 * @return value of RuntimeLibrary property, or null when neither option is
 *         present (the last matching option wins).
 */
private String getRuntimeLibrary(final CommandLineCompilerConfiguration compilerConfig, final boolean isDebug) {
  String rtl = null;
  for (final String option : compilerConfig.getPreArguments()) {
    if (option.startsWith("/MT")) {
      rtl = isDebug ? "1" : "0";
    } else if (option.startsWith("/MD")) {
      rtl = isDebug ? "3" : "2";
    }
  }
  return rtl;
}
/**
 * Get value of Subsystem property from the /SUBSYSTEM linker options.
 *
 * @param linkerConfig
 *          linker configuration.
 * @return value of Subsystem property ("0" when no subsystem option is
 *         present; the last matching option wins).
 */
private String getSubsystem(final CommandLineLinkerConfiguration linkerConfig) {
  String subsystem = "0";
  for (final String option : linkerConfig.getPreArguments()) {
    if ("/SUBSYSTEM:CONSOLE".equals(option)) {
      subsystem = "1";
    } else if ("/SUBSYSTEM:WINDOWS".equals(option)) {
      subsystem = "2";
    } else if ("/SUBSYSTEM:WINDOWSCE".equals(option)) {
      subsystem = "9";
    }
  }
  return subsystem;
}
/**
 * Get value of TargetMachine property from the /MACHINE linker options.
 *
 * @param linkerConfig
 *          linker configuration.
 * @return value of TargetMachine property ("1" for x86, "0" otherwise).
 */
private String getTargetMachine(final CommandLineLinkerConfiguration linkerConfig) {
  // local renamed from the copy-pasted "subsystem" to reflect its meaning
  String machine = "0";
  for (final String option : linkerConfig.getPreArguments()) {
    if ("/MACHINE:X86".equals(option)) {
      machine = "1";
    }
  }
  return machine;
}
/**
 * Get value of UsePrecompiledHeader property.
 *
 * <p>Recognizes both the bare forms (/Yc, /Yu) and the forms with an attached
 * header name (for example /Ycstdafx.h), which the previous exact-match
 * comparison silently ignored; this matches how /Fp is already handled with
 * startsWith in getPrecompiledHeaderFile.</p>
 *
 * @param compilerConfig
 *          compiler configuration.
 * @return "1" when a precompiled header is created (/Yc), "2" when one is
 *         used (/Yu), "0" otherwise (the last matching option wins).
 */
private String getUsePrecompiledHeader(final CommandLineCompilerConfiguration compilerConfig) {
  String usePCH = "0";
  for (final String arg : compilerConfig.getPreArguments()) {
    if (arg.startsWith("/Yc")) {
      usePCH = "1";
    } else if (arg.startsWith("/Yu")) {
      usePCH = "2";
    }
  }
  return usePCH;
}
/**
 * Get value of WarningLevel property from the /W0../W4 compiler options
 * (/W4 handling added by Darren Sargent, 2/26/2008).
 *
 * @param compilerConfig
 *          compiler configuration.
 * @return value of WarningLevel property, or null when no /W option is
 *         present (the last matching option wins).
 */
private String getWarningLevel(final CommandLineCompilerConfiguration compilerConfig) {
  String warn = null;
  for (final String option : compilerConfig.getPreArguments()) {
    for (int level = 0; level <= 4; level++) {
      if (option.equals("/W" + level)) {
        warn = String.valueOf(level);
      }
    }
  }
  return warn;
}
/**
 * Returns true if the file has an extension that
 * appears in the group filter.
 *
 * @param filter
 *          semicolon-separated list of lower-case extensions
 * @param candidate
 *          file to test
 * @return true if the candidate's extension is a member of the group
 */
private boolean isGroupMember(final String filter, final File candidate) {
  final String fileName = candidate.getName();
  final int dotPos = fileName.lastIndexOf('.');
  // no dot, or the dot is the last character: no usable extension
  if (dotPos < 0 || dotPos >= fileName.length() - 1) {
    return false;
  }
  // surround both sides with ';' so "c" cannot match inside "rc" etc.
  final String extension = ";" + fileName.substring(dotPos + 1).toLowerCase() + ";";
  return (";" + filter + ";").indexOf(extension) >= 0;
}
/**
 * Write the Tool element describing the compiler (VCCLCompilerTool).
 *
 * @param content
 *          serialization content handler.
 * @param isDebug
 *          true if generating debug configuration.
 * @param basePath
 *          base for relative file paths.
 * @param compilerConfig
 *          compiler configuration.
 * @throws SAXException
 *          thrown if error during serialization.
 */
private void writeCompilerElement(final ContentHandler content, final boolean isDebug, final String basePath,
final CommandLineCompilerConfiguration compilerConfig) throws SAXException {
final AttributesImpl attributes = new AttributesImpl();
addAttribute(attributes, "Name", "VCCLCompilerTool");
String optimization = getOptimization(compilerConfig);
String debugFormat = getDebugInformationFormat(compilerConfig);
// Adjust the command-line-derived values to the configuration being emitted:
// debug builds disable optimization and default to debug info format "4" (/ZI);
// release builds default "0" optimization to "2" (/O2) and drop debug info.
if (isDebug) {
optimization = "0";
if ("0".equals(debugFormat)) {
debugFormat = "4";
}
} else {
if ("0".equals(optimization)) {
optimization = "2";
}
debugFormat = "0";
}
addAttribute(attributes, "Optimization", optimization);
addAttribute(attributes, "AdditionalIncludeDirectories", getAdditionalIncludeDirectories(basePath, compilerConfig));
addAttribute(attributes, "PreprocessorDefinitions", getPreprocessorDefinitions(compilerConfig, isDebug));
addAttribute(attributes, "MinimalRebuild", this.trueLiteral);
addAttribute(attributes, "BasicRuntimeChecks", getBasicRuntimeChecks(compilerConfig));
addAttribute(attributes, "RuntimeLibrary", getRuntimeLibrary(compilerConfig, isDebug));
addAttribute(attributes, "UsePrecompiledHeader", getUsePrecompiledHeader(compilerConfig));
addAttribute(attributes, "PrecompiledHeaderFile", getPrecompiledHeaderFile(compilerConfig));
addAttribute(attributes, "WarningLevel", getWarningLevel(compilerConfig));
addAttribute(attributes, "Detect64BitPortabilityProblems", getDetect64BitPortabilityProblems(compilerConfig));
addAttribute(attributes, "DebugInformationFormat", debugFormat);
// the tool element carries all information as attributes and has no children
content.startElement(null, "Tool", "Tool", attributes);
content.endElement(null, "Tool", "Tool");
}
/**
 * Write the start tag of the Configuration element.
 *
 * @param content
 *          serialization content handler.
 * @param isDebug
 *          if true, write a debug configuration.
 * @param task
 *          cc task.
 * @param compilerConfig
 *          compiler configuration.
 * @throws SAXException
 *          thrown if serialization error.
 */
private void writeConfigurationStartTag(final ContentHandler content, final boolean isDebug, final CCTask task,
    final CommandLineCompilerConfiguration compilerConfig) throws SAXException {
  // the configuration name doubles as the output/intermediate directory name
  final String configName = isDebug ? "Debug" : "Release";
  final AttributesImpl attributes = new AttributesImpl();
  addAttribute(attributes, "Name", configName + "|Win32");
  addAttribute(attributes, "OutputDirectory", configName);
  addAttribute(attributes, "IntermediateDirectory", configName);
  addAttribute(attributes, "ConfigurationType", getConfigurationType(task));
  addAttribute(attributes, "CharacterSet", getCharacterSet(compilerConfig));
  content.startElement(null, "Configuration", "Configuration", attributes);
}
/**
 * Writes a cluster of source files to the project.
 *
 * @param name
 *          name of filter
 * @param filter
 *          semicolon-separated file extensions
 * @param basePath
 *          base path for files
 * @param sortedSources
 *          array of source files
 * @param content
 *          generated project
 * @throws SAXException
 *          if invalid content
 */
private void writeFilteredSources(final String name, final String filter, final String basePath,
    final File[] sortedSources, final ContentHandler content) throws SAXException {
  final AttributesImpl filterAttrs = new AttributesImpl();
  filterAttrs.addAttribute(null, "Name", "Name", "#PCDATA", name);
  filterAttrs.addAttribute(null, "Filter", "Filter", "#PCDATA", filter);
  content.startElement(null, "Filter", "Filter", filterAttrs);
  // one reusable attribute list; index 0 (RelativePath) is rewritten per file
  final AttributesImpl fileAttrs = new AttributesImpl();
  fileAttrs.addAttribute(null, "RelativePath", "RelativePath", "#PCDATA", "");
  for (final File source : sortedSources) {
    if (!isGroupMember(filter, source)) {
      continue;
    }
    fileAttrs.setValue(0, CUtil.getRelativePath(basePath, source));
    content.startElement(null, "File", "File", fileAttrs);
    content.endElement(null, "File", "File");
  }
  content.endElement(null, "Filter", "Filter");
}
/**
 * Write Tool element for linker (VCLinkerTool).
 *
 * @param content
 *          serialization content handler.
 * @param isDebug
 *          true if generating debug configuration.
 * @param dependencies
 *          project dependencies.
 * @param basePath
 *          path to directory containing project file.
 * @param linkTarget
 *          link target.
 * @param targets
 *          all targets.
 * @throws SAXException
 *          thrown if error during serialization.
 */
private void writeLinkerElement(final ContentHandler content, final boolean isDebug,
final List<DependencyDef> dependencies, final String basePath, final TargetInfo linkTarget,
final Map<String, TargetInfo> targets) throws SAXException {
final AttributesImpl attributes = new AttributesImpl();
addAttribute(attributes, "Name", "VCLinkerTool");
// MSVC-specific linker attributes are only emitted when the link step
// actually uses an MSVC-compatible linker
final ProcessorConfiguration config = linkTarget.getConfiguration();
if (config instanceof CommandLineLinkerConfiguration) {
final CommandLineLinkerConfiguration linkerConfig = (CommandLineLinkerConfiguration) config;
if (linkerConfig.getLinker() instanceof MsvcCompatibleLinker) {
addAttribute(attributes, "LinkIncremental", getLinkIncremental(linkerConfig));
if (isDebug) {
addAttribute(attributes, "GenerateDebugInformation", this.trueLiteral);
} else {
addAttribute(attributes, "GenerateDebugInformation", this.falseLiteral);
}
addAttribute(attributes, "SubSystem", getSubsystem(linkerConfig));
addAttribute(attributes, "TargetMachine", getTargetMachine(linkerConfig));
}
}
addAttribute(attributes, "AdditionalDependencies",
getAdditionalDependencies(linkTarget, dependencies, targets, basePath));
content.startElement(null, "Tool", "Tool", attributes);
content.endElement(null, "Tool", "Tool");
}
/**
 * Writes a project definition file ("fileName.vcproj").
 *
 * <p>The output stream is now closed in a finally block, so the file handle
 * is released even when serialization fails; the original implementation
 * leaked the FileOutputStream.</p>
 *
 * @param fileName
 *          base file name (without extension) for the generated project
 * @param task
 *          cc task for which to write project
 * @param projectDef
 *          project element
 * @param sources
 *          source files
 * @param targets
 *          compilation targets
 * @param linkTarget
 *          link target
 * @throws IOException
 *          if I/O error
 * @throws SAXException
 *          if XML serialization error
 */
@Override
public void writeProject(final File fileName, final CCTask task, final ProjectDef projectDef,
    final List<File> sources, final Map<String, TargetInfo> targets, final TargetInfo linkTarget)
    throws IOException, SAXException {
  String projectName = projectDef.getName();
  if (projectName == null) {
    projectName = fileName.getName();
  }
  // refuse to clobber existing project/solution files unless allowed
  final File vcprojFile = new File(fileName + ".vcproj");
  if (!projectDef.getOverwrite() && vcprojFile.exists()) {
    throw new BuildException("Not allowed to overwrite project file " + vcprojFile.toString());
  }
  final File slnFile = new File(fileName + ".sln");
  if (!projectDef.getOverwrite() && slnFile.exists()) {
    throw new BuildException("Not allowed to overwrite project file " + slnFile.toString());
  }
  // a representative MSVC compiler configuration is required to fill in the
  // compiler settings of both configurations
  final CommandLineCompilerConfiguration compilerConfig = getBaseCompilerConfiguration(targets);
  if (compilerConfig == null) {
    throw new BuildException("Unable to generate Visual Studio.NET project " + "when Microsoft C++ is not used.");
  }
  final OutputStream outStream = new FileOutputStream(vcprojFile);
  try {
    final OutputFormat format = new OutputFormat("xml", "UTF-8", true);
    final XMLSerializer serializer = new XMLSerializer(outStream, format);
    final ContentHandler content = serializer.asContentHandler();
    final String basePath = fileName.getParentFile().getAbsolutePath();
    content.startDocument();
    for (final CommentDef commentDef : projectDef.getComments()) {
      serializer.comment(commentDef.getText());
    }
    final AttributesImpl emptyAttrs = new AttributesImpl();
    final AttributesImpl attributes = new AttributesImpl();
    addAttribute(attributes, "ProjectType", "Visual C++");
    addAttribute(attributes, "Version", this.version);
    addAttribute(attributes, "Name", projectName);
    content.startElement(null, "VisualStudioProject", "VisualStudioProject", attributes);
    content.startElement(null, "Platforms", "Platforms", emptyAttrs);
    attributes.clear();
    addAttribute(attributes, "Name", "Win32");
    content.startElement(null, "Platform", "Platform", attributes);
    content.endElement(null, "Platform", "Platform");
    content.endElement(null, "Platforms", "Platforms");
    content.startElement(null, "Configurations", "Configurations", emptyAttrs);
    //
    // write debug configuration
    //
    writeConfigurationStartTag(content, true, task, compilerConfig);
    writeCompilerElement(content, true, basePath, compilerConfig);
    writeLinkerElement(content, true, projectDef.getDependencies(), basePath, linkTarget, targets);
    content.endElement(null, "Configuration", "Configuration");
    //
    // write release configuration
    //
    writeConfigurationStartTag(content, false, task, compilerConfig);
    writeCompilerElement(content, false, basePath, compilerConfig);
    writeLinkerElement(content, false, projectDef.getDependencies(), basePath, linkTarget, targets);
    content.endElement(null, "Configuration", "Configuration");
    content.endElement(null, "Configurations", "Configurations");
    content.startElement(null, "References", "References", emptyAttrs);
    content.endElement(null, "References", "References");
    content.startElement(null, "Files", "Files", emptyAttrs);
    // sort sources by simple file name so the generated project is stable
    final File[] sortedSources = sources.toArray(new File[sources.size()]);
    Arrays.sort(sortedSources, new Comparator<File>() {
      @Override
      public int compare(final File o1, final File o2) {
        return o1.getName().compareTo(o2.getName());
      }
    });
    writeFilteredSources("Source Files", "cpp;c;cxx;def;odl;idl;hpj;bat;asm;asmx", basePath, sortedSources, content);
    writeFilteredSources("Header Files", "h;hpp;hxx;hm;inl;inc;xsd", basePath, sortedSources, content);
    writeFilteredSources("Resource Files", "rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx", basePath,
        sortedSources, content);
    content.endElement(null, "Files", "Files");
    content.startElement(null, "Globals", "Globals", emptyAttrs);
    content.endElement(null, "Globals", "Globals");
    content.endElement(null, "VisualStudioProject", "VisualStudioProject");
    content.endDocument();
  } finally {
    outStream.close();
  }
}
}
| |
/**
* Copyright 2017-2019 The GreyCat Authors. All rights reserved.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package greycat.ml.math;
import greycat.struct.DMatrix;
import greycat.struct.matrix.MatrixOps;
import greycat.struct.matrix.PInvSVD;
import greycat.struct.matrix.VolatileDMatrix;
/**
 * Multivariate normal (Gaussian) distribution backed by a pseudo-inverse SVD
 * factorization ({@link PInvSVD}) of the covariance matrix, so rank-deficient
 * covariances can still be handled via the reported rank and (pseudo-)determinant.
 */
public class MultivariateNormalDistribution {
    double[] min;
    double[] max;
    // Mean vector of the distribution.
    double[] means;
    // Diagonal of the covariance matrix (per-feature variances).
    double[] covDiag;
    // Pseudo-inverse of the covariance matrix.
    DMatrix inv;
    DMatrix covariance;
    PInvSVD pinvsvd;
    // Numerical rank of the covariance matrix.
    int rank;
    // (Pseudo-)determinant of the covariance matrix.
    double det;

    /**
     * Builds the distribution and factorizes its covariance.
     *
     * @param means         mean vector (stored by reference, not copied)
     * @param cov           covariance matrix; may be {@code null}, in which case no
     *                      factorization happens (used by {@link #clone(double[])})
     * @param allowSingular if {@code false} and the covariance is rank-deficient, the
     *                      matrix is regularized by slightly shrinking its off-diagonal
     *                      entries and re-factorized
     */
    public MultivariateNormalDistribution(double[] means, DMatrix cov, boolean allowSingular) {
        this.means = means;
        if (cov != null) {
            this.covariance = cov;
            covDiag = new double[cov.rows()];
            for (int i = 0; i < covDiag.length; i++) {
                covDiag[i] = cov.get(i, i);
            }
            this.pinvsvd = new PInvSVD();
            this.pinvsvd.factor(covariance, false);
            this.inv = pinvsvd.getPInv();
            this.det = pinvsvd.getDeterminant();
            this.rank = pinvsvd.getRank();
            if (!allowSingular && this.rank < cov.rows()) {
                // Singular covariance: work on a copy and shrink each off-diagonal
                // entry by a small fraction of sqrt(var_i * var_j) to break exact
                // linear dependence, then factor again.
                this.covariance = VolatileDMatrix.cloneFrom(cov);
                double[] stdev = new double[covDiag.length];
                for (int i = 0; i < covDiag.length; i++) {
                    stdev[i] = Math.sqrt(covDiag[i]);
                }
                for (int i = 0; i < covDiag.length; i++) {
                    for (int j = i + 1; j < covDiag.length; j++) {
                        double d = this.covariance.get(i, j) - 0.0001 * stdev[i] * stdev[j];
                        this.covariance.set(i, j, d);
                        this.covariance.set(j, i, d);
                    }
                }
                pinvsvd = new PInvSVD();
                pinvsvd.factor(this.covariance, false);
                inv = pinvsvd.getPInv();
                det = pinvsvd.getDeterminant();
                rank = pinvsvd.getRank();
            }
        }
    }

    /**
     * Computes the sample covariance matrix from running sums.
     *
     * @param sum        per-feature sum of observations
     * @param sumsquares upper-triangle (row-major) sums of cross products, see
     *                   {@link #getDistribution(double[], double[], int, boolean)}
     * @param total      number of observations
     * @return the covariance matrix, or {@code null} when fewer than 2 observations
     */
    public static DMatrix getCovariance(double[] sum, double[] sumsquares, int total) {
        if (total < 2) {
            return null;
        }
        double[] avg = averages(sum, total);
        int features = sum.length;
        return VolatileDMatrix.wrap(covarianceEntries(avg, sumsquares, total), features, features);
    }

    // Per-feature averages: sum[i] / total.
    private static double[] averages(double[] sum, int total) {
        double[] avg = new double[sum.length];
        for (int i = 0; i < avg.length; i++) {
            avg[i] = sum[i] / total;
        }
        return avg;
    }

    // Dense (features x features) covariance entries with Bessel's correction
    // total / (total - 1), shared by getCovariance and getDistribution.
    private static double[] covarianceEntries(double[] avg, double[] sumsquares, int total) {
        int features = avg.length;
        double[] covariances = new double[features * features];
        double correction = (double) total / (total - 1);
        int count = 0;
        for (int i = 0; i < features; i++) {
            for (int j = i; j < features; j++) {
                covariances[i * features + j] = (sumsquares[count] / total - avg[i] * avg[j]) * correction;
                covariances[j * features + i] = covariances[i * features + j];
                count++;
            }
        }
        return covariances;
    }

    //Sum is a n-vector sum of features
    //Sum squares is a n(n+1)/2 vector of sumsquares of features, in upper-triangle row shapes
    //Example: for (int i = 0; i < features; i++) { for (int j = i; j < features; j++) { sumsquares[count] += x[i] * x[j]; count++; } }
    //Total is the number of observations
    public static MultivariateNormalDistribution getDistribution(double[] sum, double[] sumsquares, int total, boolean allowSingular) {
        if (total < 2) {
            return null;
        }
        double[] avg = averages(sum, total);
        int features = sum.length;
        DMatrix cov = VolatileDMatrix.wrap(covarianceEntries(avg, sumsquares, total), features, features);
        return new MultivariateNormalDistribution(avg, cov, allowSingular);
    }

    public double[] getMin() {
        return min;
    }

    public void setMin(double[] min) {
        this.min = min;
    }

    public double[] getMax() {
        return max;
    }

    public void setMax(double[] max) {
        this.max = max;
    }

    public double[] getAvg() {
        return means;
    }

    public double[] getCovDiag() {
        return covDiag;
    }

    /**
     * Probability density at {@code features}.
     *
     * @param normalizeOnAvg if {@code true}, returns only the exponent term (i.e. the
     *                       density rescaled so its value at the mean is 1), skipping
     *                       the (2*pi)^(-rank/2) * det^(-1/2) normalization constant
     */
    public double density(double[] features, boolean normalizeOnAvg) {
        if (normalizeOnAvg) {
            return getExponentTerm(features);
        } else {
            return Math.pow(2 * Math.PI, -0.5 * rank) *
                    Math.pow(det, -0.5) * getExponentTerm(features);
        }
    }

    /** Exponent term exp(-0.5 * (x - mean)^T * pinv(cov) * (x - mean)). */
    public double getExponentTerm(double[] features) {
        return Math.exp(densityExponent(features));
    }

    /**
     * Shallow clone that shares this instance's factorization but uses a different mean.
     * NOTE(review): {@code covariance}, {@code min} and {@code max} are not carried
     * over to the clone — confirm callers do not rely on them on cloned instances.
     */
    public MultivariateNormalDistribution clone(double[] avg) {
        MultivariateNormalDistribution res = new MultivariateNormalDistribution(avg, null, false);
        res.pinvsvd = this.pinvsvd;
        res.inv = this.inv;
        res.det = this.det;
        res.rank = this.rank;
        res.covDiag = this.covDiag;
        return res;
    }

    /** Log of the exponent term: -0.5 * (x - mean)^T * pinv(cov) * (x - mean). */
    public double densityExponent(double[] features) {
        // Copy so the caller's array is not mutated while centering on the mean.
        double[] f = new double[features.length];
        System.arraycopy(features, 0, f, 0, features.length);
        for (int i = 0; i < features.length; i++) {
            f[i] = f[i] - means[i];
        }
        DMatrix ft = VolatileDMatrix.wrap(f, 1, f.length);
        DMatrix ftt = VolatileDMatrix.wrap(f, f.length, 1);
        DMatrix res = MatrixOps.multiply(ft, inv);
        DMatrix res2 = MatrixOps.multiply(res, ftt);
        return -0.5 * res2.get(0, 0);
    }
}
| |
// This is a generated file. Not intended for manual editing.
package com.intellij.plugin.buck.lang;
import com.intellij.lang.PsiBuilder;
import com.intellij.lang.PsiBuilder.Marker;
import static com.intellij.plugin.buck.lang.psi.BuckTypes.*;
import static com.intellij.lang.parser.GeneratedParserUtilBase.*;
import com.intellij.psi.tree.IElementType;
import com.intellij.lang.ASTNode;
import com.intellij.psi.tree.TokenSet;
import com.intellij.lang.PsiParser;
import com.intellij.lang.LightPsiParser;
@SuppressWarnings({"SimplifiableIfStatement", "UnusedAssignment"})
/**
 * Generated PSI parser for the Buck build-file language (Grammar-Kit output).
 *
 * <p>Each {@code public static boolean} method parses one grammar rule; the comment
 * above each method is the rule it implements. Do not hand-edit the parsing logic —
 * regenerate from the grammar instead.</p>
 */
public class BuckParser implements PsiParser, LightPsiParser {

  /** Parses {@code t} as the root element and returns the built AST. */
  public ASTNode parse(IElementType t, PsiBuilder b) {
    parseLight(t, b);
    return b.getTreeBuilt();
  }

  /**
   * Light-parsing entry point: dispatches to the rule method matching the requested
   * element type, falling back to {@link #parse_root_} for a whole file.
   */
  public void parseLight(IElementType t, PsiBuilder b) {
    boolean r;
    b = adapt_builder_(t, b, this, null);
    Marker m = enter_section_(b, 0, _COLLAPSE_, null);
    if (t == ARRAY_ELEMENTS) {
      r = array_elements(b, 0);
    }
    else if (t == EXPRESSION) {
      r = expression(b, 0);
    }
    else if (t == GLOB_BLOCK) {
      r = glob_block(b, 0);
    }
    else if (t == GLOB_ELEMENTS) {
      r = glob_elements(b, 0);
    }
    else if (t == LIST) {
      r = list(b, 0);
    }
    else if (t == LIST_ELEMENTS) {
      r = list_elements(b, 0);
    }
    else if (t == OBJECT) {
      r = object(b, 0);
    }
    else if (t == OBJECT_ELEMENTS) {
      r = object_elements(b, 0);
    }
    else if (t == PAIR) {
      r = pair(b, 0);
    }
    else if (t == PROPERTY) {
      r = property(b, 0);
    }
    else if (t == PROPERTY_LVALUE) {
      r = property_lvalue(b, 0);
    }
    else if (t == RULE_BLOCK) {
      r = rule_block(b, 0);
    }
    else if (t == RULE_BODY) {
      r = rule_body(b, 0);
    }
    else if (t == RULE_CALL) {
      r = rule_call(b, 0);
    }
    else if (t == RULE_NAME) {
      r = rule_name(b, 0);
    }
    else if (t == VALUE) {
      r = value(b, 0);
    }
    else if (t == VALUE_ARRAY) {
      r = value_array(b, 0);
    }
    else {
      r = parse_root_(t, b, 0);
    }
    exit_section_(b, 0, m, t, r, true, TRUE_CONDITION);
  }

  /** Root rule: a Buck file is a sequence of top-level items. */
  protected boolean parse_root_(IElementType t, PsiBuilder b, int l) {
    return buckFile(b, l + 1);
  }

  /* ********************************************************** */
  // (value ',')* [value [',']]
  public static boolean array_elements(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "array_elements")) return false;
    boolean r;
    Marker m = enter_section_(b, l, _NONE_, "<array elements>");
    r = array_elements_0(b, l + 1);
    r = r && array_elements_1(b, l + 1);
    exit_section_(b, l, m, ARRAY_ELEMENTS, r, false, null);
    return r;
  }

  // (value ',')*
  private static boolean array_elements_0(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "array_elements_0")) return false;
    int c = current_position_(b);
    while (true) {
      if (!array_elements_0_0(b, l + 1)) break;
      if (!empty_element_parsed_guard_(b, "array_elements_0", c)) break;
      c = current_position_(b);
    }
    return true;
  }

  // value ','
  private static boolean array_elements_0_0(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "array_elements_0_0")) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = value(b, l + 1);
    r = r && consumeToken(b, COMMA);
    exit_section_(b, m, null, r);
    return r;
  }

  // [value [',']]
  private static boolean array_elements_1(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "array_elements_1")) return false;
    array_elements_1_0(b, l + 1);
    return true;
  }

  // value [',']
  private static boolean array_elements_1_0(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "array_elements_1_0")) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = value(b, l + 1);
    r = r && array_elements_1_0_1(b, l + 1);
    exit_section_(b, m, null, r);
    return r;
  }

  // [',']
  private static boolean array_elements_1_0_1(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "array_elements_1_0_1")) return false;
    consumeToken(b, COMMA);
    return true;
  }

  /* ********************************************************** */
  // item_*
  static boolean buckFile(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "buckFile")) return false;
    int c = current_position_(b);
    while (true) {
      if (!item_(b, l + 1)) break;
      if (!empty_element_parsed_guard_(b, "buckFile", c)) break;
      c = current_position_(b);
    }
    return true;
  }

  /* ********************************************************** */
  // (value operator)* [value]
  public static boolean expression(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "expression")) return false;
    boolean r;
    Marker m = enter_section_(b, l, _NONE_, "<expression>");
    r = expression_0(b, l + 1);
    r = r && expression_1(b, l + 1);
    exit_section_(b, l, m, EXPRESSION, r, false, null);
    return r;
  }

  // (value operator)*
  private static boolean expression_0(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "expression_0")) return false;
    int c = current_position_(b);
    while (true) {
      if (!expression_0_0(b, l + 1)) break;
      if (!empty_element_parsed_guard_(b, "expression_0", c)) break;
      c = current_position_(b);
    }
    return true;
  }

  // value operator
  private static boolean expression_0_0(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "expression_0_0")) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = value(b, l + 1);
    r = r && operator(b, l + 1);
    exit_section_(b, m, null, r);
    return r;
  }

  // [value]
  private static boolean expression_1(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "expression_1")) return false;
    value(b, l + 1);
    return true;
  }

  /* ********************************************************** */
  // GLOB_KEYWORD '(' glob_elements ')'
  public static boolean glob_block(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "glob_block")) return false;
    if (!nextTokenIs(b, GLOB_KEYWORD)) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = consumeToken(b, GLOB_KEYWORD);
    r = r && consumeToken(b, L_PARENTHESES);
    r = r && glob_elements(b, l + 1);
    r = r && consumeToken(b, R_PARENTHESES);
    exit_section_(b, m, GLOB_BLOCK, r);
    return r;
  }

  /* ********************************************************** */
  // value_array [',' GLOB_EXCLUDES_KEYWORD '=' expression]
  public static boolean glob_elements(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "glob_elements")) return false;
    if (!nextTokenIs(b, L_BRACKET)) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = value_array(b, l + 1);
    r = r && glob_elements_1(b, l + 1);
    exit_section_(b, m, GLOB_ELEMENTS, r);
    return r;
  }

  // [',' GLOB_EXCLUDES_KEYWORD '=' expression]
  private static boolean glob_elements_1(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "glob_elements_1")) return false;
    glob_elements_1_0(b, l + 1);
    return true;
  }

  // ',' GLOB_EXCLUDES_KEYWORD '=' expression
  private static boolean glob_elements_1_0(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "glob_elements_1_0")) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = consumeToken(b, COMMA);
    r = r && consumeToken(b, GLOB_EXCLUDES_KEYWORD);
    r = r && consumeToken(b, EQUAL);
    r = r && expression(b, l + 1);
    exit_section_(b, m, null, r);
    return r;
  }

  /* ********************************************************** */
  // rule_call | rule_block | property | LINE_COMMENT
  static boolean item_(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "item_")) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = rule_call(b, l + 1);
    if (!r) r = rule_block(b, l + 1);
    if (!r) r = property(b, l + 1);
    if (!r) r = consumeToken(b, LINE_COMMENT);
    exit_section_(b, m, null, r);
    return r;
  }

  /* ********************************************************** */
  // '(' list_elements ')'
  public static boolean list(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "list")) return false;
    if (!nextTokenIs(b, L_PARENTHESES)) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = consumeToken(b, L_PARENTHESES);
    r = r && list_elements(b, l + 1);
    r = r && consumeToken(b, R_PARENTHESES);
    exit_section_(b, m, LIST, r);
    return r;
  }

  /* ********************************************************** */
  // (value ',')* [value [',']]
  public static boolean list_elements(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "list_elements")) return false;
    boolean r;
    Marker m = enter_section_(b, l, _NONE_, "<list elements>");
    r = list_elements_0(b, l + 1);
    r = r && list_elements_1(b, l + 1);
    exit_section_(b, l, m, LIST_ELEMENTS, r, false, null);
    return r;
  }

  // (value ',')*
  private static boolean list_elements_0(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "list_elements_0")) return false;
    int c = current_position_(b);
    while (true) {
      if (!list_elements_0_0(b, l + 1)) break;
      if (!empty_element_parsed_guard_(b, "list_elements_0", c)) break;
      c = current_position_(b);
    }
    return true;
  }

  // value ','
  private static boolean list_elements_0_0(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "list_elements_0_0")) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = value(b, l + 1);
    r = r && consumeToken(b, COMMA);
    exit_section_(b, m, null, r);
    return r;
  }

  // [value [',']]
  private static boolean list_elements_1(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "list_elements_1")) return false;
    list_elements_1_0(b, l + 1);
    return true;
  }

  // value [',']
  private static boolean list_elements_1_0(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "list_elements_1_0")) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = value(b, l + 1);
    r = r && list_elements_1_0_1(b, l + 1);
    exit_section_(b, m, null, r);
    return r;
  }

  // [',']
  private static boolean list_elements_1_0_1(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "list_elements_1_0_1")) return false;
    consumeToken(b, COMMA);
    return true;
  }

  /* ********************************************************** */
  // '{' object_elements '}'
  public static boolean object(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "object")) return false;
    if (!nextTokenIs(b, L_CURLY)) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = consumeToken(b, L_CURLY);
    r = r && object_elements(b, l + 1);
    r = r && consumeToken(b, R_CURLY);
    exit_section_(b, m, OBJECT, r);
    return r;
  }

  /* ********************************************************** */
  // (pair ',')* [pair [',']]
  public static boolean object_elements(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "object_elements")) return false;
    boolean r;
    Marker m = enter_section_(b, l, _NONE_, "<object elements>");
    r = object_elements_0(b, l + 1);
    r = r && object_elements_1(b, l + 1);
    exit_section_(b, l, m, OBJECT_ELEMENTS, r, false, null);
    return r;
  }

  // (pair ',')*
  private static boolean object_elements_0(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "object_elements_0")) return false;
    int c = current_position_(b);
    while (true) {
      if (!object_elements_0_0(b, l + 1)) break;
      if (!empty_element_parsed_guard_(b, "object_elements_0", c)) break;
      c = current_position_(b);
    }
    return true;
  }

  // pair ','
  private static boolean object_elements_0_0(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "object_elements_0_0")) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = pair(b, l + 1);
    r = r && consumeToken(b, COMMA);
    exit_section_(b, m, null, r);
    return r;
  }

  // [pair [',']]
  private static boolean object_elements_1(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "object_elements_1")) return false;
    object_elements_1_0(b, l + 1);
    return true;
  }

  // pair [',']
  private static boolean object_elements_1_0(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "object_elements_1_0")) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = pair(b, l + 1);
    r = r && object_elements_1_0_1(b, l + 1);
    exit_section_(b, m, null, r);
    return r;
  }

  // [',']
  private static boolean object_elements_1_0_1(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "object_elements_1_0_1")) return false;
    consumeToken(b, COMMA);
    return true;
  }

  /* ********************************************************** */
  // PLUS
  static boolean operator(PsiBuilder b, int l) {
    return consumeToken(b, PLUS);
  }

  /* ********************************************************** */
  // string ':' value
  public static boolean pair(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "pair")) return false;
    if (!nextTokenIs(b, "<pair>", DOUBLE_QUOTED_STRING, SINGLE_QUOTED_STRING)) return false;
    boolean r;
    Marker m = enter_section_(b, l, _NONE_, "<pair>");
    r = string(b, l + 1);
    r = r && consumeToken(b, COLON);
    r = r && value(b, l + 1);
    exit_section_(b, l, m, PAIR, r, false, null);
    return r;
  }

  /* ********************************************************** */
  // property_lvalue '=' expression
  public static boolean property(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "property")) return false;
    if (!nextTokenIs(b, "<property>", IDENTIFIER, MACROS)) return false;
    boolean r;
    Marker m = enter_section_(b, l, _NONE_, "<property>");
    r = property_lvalue(b, l + 1);
    r = r && consumeToken(b, EQUAL);
    r = r && expression(b, l + 1);
    exit_section_(b, l, m, PROPERTY, r, false, null);
    return r;
  }

  /* ********************************************************** */
  // IDENTIFIER | MACROS
  public static boolean property_lvalue(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "property_lvalue")) return false;
    if (!nextTokenIs(b, "<property lvalue>", IDENTIFIER, MACROS)) return false;
    boolean r;
    Marker m = enter_section_(b, l, _NONE_, "<property lvalue>");
    r = consumeToken(b, IDENTIFIER);
    if (!r) r = consumeToken(b, MACROS);
    exit_section_(b, l, m, PROPERTY_LVALUE, r, false, null);
    return r;
  }

  /* ********************************************************** */
  // rule_name '(' rule_body ')'
  public static boolean rule_block(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "rule_block")) return false;
    if (!nextTokenIs(b, IDENTIFIER)) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = rule_name(b, l + 1);
    r = r && consumeToken(b, L_PARENTHESES);
    r = r && rule_body(b, l + 1);
    r = r && consumeToken(b, R_PARENTHESES);
    exit_section_(b, m, RULE_BLOCK, r);
    return r;
  }

  /* ********************************************************** */
  // (property ',')* [property [',']]
  public static boolean rule_body(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "rule_body")) return false;
    boolean r;
    Marker m = enter_section_(b, l, _NONE_, "<rule body>");
    r = rule_body_0(b, l + 1);
    r = r && rule_body_1(b, l + 1);
    exit_section_(b, l, m, RULE_BODY, r, false, null);
    return r;
  }

  // (property ',')*
  private static boolean rule_body_0(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "rule_body_0")) return false;
    int c = current_position_(b);
    while (true) {
      if (!rule_body_0_0(b, l + 1)) break;
      if (!empty_element_parsed_guard_(b, "rule_body_0", c)) break;
      c = current_position_(b);
    }
    return true;
  }

  // property ','
  private static boolean rule_body_0_0(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "rule_body_0_0")) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = property(b, l + 1);
    r = r && consumeToken(b, COMMA);
    exit_section_(b, m, null, r);
    return r;
  }

  // [property [',']]
  private static boolean rule_body_1(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "rule_body_1")) return false;
    rule_body_1_0(b, l + 1);
    return true;
  }

  // property [',']
  private static boolean rule_body_1_0(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "rule_body_1_0")) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = property(b, l + 1);
    r = r && rule_body_1_0_1(b, l + 1);
    exit_section_(b, m, null, r);
    return r;
  }

  // [',']
  private static boolean rule_body_1_0_1(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "rule_body_1_0_1")) return false;
    consumeToken(b, COMMA);
    return true;
  }

  /* ********************************************************** */
  // rule_name '(' list_elements ')'
  public static boolean rule_call(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "rule_call")) return false;
    if (!nextTokenIs(b, IDENTIFIER)) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = rule_name(b, l + 1);
    r = r && consumeToken(b, L_PARENTHESES);
    r = r && list_elements(b, l + 1);
    r = r && consumeToken(b, R_PARENTHESES);
    exit_section_(b, m, RULE_CALL, r);
    return r;
  }

  /* ********************************************************** */
  // IDENTIFIER
  public static boolean rule_name(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "rule_name")) return false;
    if (!nextTokenIs(b, IDENTIFIER)) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = consumeToken(b, IDENTIFIER);
    exit_section_(b, m, RULE_NAME, r);
    return r;
  }

  /* ********************************************************** */
  // DOUBLE_QUOTED_STRING | SINGLE_QUOTED_STRING
  static boolean string(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "string")) return false;
    if (!nextTokenIs(b, "", DOUBLE_QUOTED_STRING, SINGLE_QUOTED_STRING)) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = consumeToken(b, DOUBLE_QUOTED_STRING);
    if (!r) r = consumeToken(b, SINGLE_QUOTED_STRING);
    exit_section_(b, m, null, r);
    return r;
  }

  /* ********************************************************** */
  // NONE | BOOLEAN | NUMBER | MACROS | string | value_array | list | object | glob_block
  public static boolean value(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "value")) return false;
    boolean r;
    Marker m = enter_section_(b, l, _NONE_, "<value>");
    r = consumeToken(b, NONE);
    if (!r) r = consumeToken(b, BOOLEAN);
    if (!r) r = consumeToken(b, NUMBER);
    if (!r) r = consumeToken(b, MACROS);
    if (!r) r = string(b, l + 1);
    if (!r) r = value_array(b, l + 1);
    if (!r) r = list(b, l + 1);
    if (!r) r = object(b, l + 1);
    if (!r) r = glob_block(b, l + 1);
    exit_section_(b, l, m, VALUE, r, false, null);
    return r;
  }

  /* ********************************************************** */
  // '[' array_elements ']'
  public static boolean value_array(PsiBuilder b, int l) {
    if (!recursion_guard_(b, l, "value_array")) return false;
    if (!nextTokenIs(b, L_BRACKET)) return false;
    boolean r;
    Marker m = enter_section_(b);
    r = consumeToken(b, L_BRACKET);
    r = r && array_elements(b, l + 1);
    r = r && consumeToken(b, R_BRACKET);
    exit_section_(b, m, VALUE_ARRAY, r);
    return r;
  }
}
| |
// Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.jetbrains.python.inspections;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import com.intellij.psi.impl.source.PsiFileImpl;
import com.intellij.testFramework.LightProjectDescriptor;
import com.jetbrains.python.fixtures.PyInspectionTestCase;
import com.jetbrains.python.psi.LanguageLevel;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
* @author vlan
*/
public class PyTypeCheckerInspectionTest extends PyInspectionTestCase {
  /** Runs this test class against the Python 2 SDK project descriptor. */
  @Override
  protected @Nullable LightProjectDescriptor getProjectDescriptor() {
    return ourPy2Descriptor;
  }

  /** The inspection under test. */
  @NotNull
  @Override
  protected Class<? extends PyInspection> getInspectionClass() {
    return PyTypeCheckerInspection.class;
  }

  /** Keep fixture file names capitalized exactly like the test method names. */
  @Override
  protected boolean isLowerCaseTestFile() {
    return false;
  }
public void testSimple() {
doTest();
}
public void testStrUnicode() {
doTest();
}
public void testListTuple() {
doTest();
}
public void testBuiltinNumeric() {
doTest();
}
public void testGenerator() {
doTest();
}
// PY-4025
public void testFunctionAssignments() {
doTest();
}
public void testOldStyleClasses() {
doTest();
}
public void testPartlyUnknownType() {
doTest();
}
public void testTypeAssertions() {
doTest();
}
public void testLocalTypeResolve() {
doTest();
}
public void testSubscript() {
doTest();
}
public void testComparisonOperators() {
doTest();
}
public void testRightOperators() {
doTest();
}
public void testStringInteger() {
doTest();
}
public void testIsInstanceImplicitSelfTypes() {
doTest();
}
public void testNotNone() {
doTest();
}
public void testUnionReturnTypes() {
doTest();
}
public void testEnumerateIterator() {
doTest();
}
public void testGenericUserFunctions() {
doTest();
}
public void testGenericUserClasses() {
doTest();
}
public void testDictGenerics() {
doTest();
}
// PY-5474
public void testBadSubscriptExpression() {
doTest();
}
// PY-5873
public void testTypeOfRaiseException() {
doTest();
}
// PY-6542
public void testDictLiterals() {
doTest();
}
// PY-6570
public void testDictLiteralIndexing() {
doTest();
}
// PY-6606
public void testBuiltinBaseClass() {
doTest();
}
// PY-18096
public void testNamedTupleBaseClass() {
doTest();
}
// PY-6803
public void testPropertyAndFactoryFunction() {
doTest();
}
// PY-7179
public void testDecoratedFunction() {
doTest();
}
// PY-6925
public void testAssignedOperator() {
doTest();
}
// PY-7244
public void testGenericArguments() {
doTest();
}
// PY-7757
public void testOpenRead2K() {
doTest();
}
// PY-8182
public void testUnionWithSameMethods() {
doTest();
}
// PY-8181
public void testBytesSubclassAsStr() {
doTest();
}
// PY-9118
public void testNegativeIsInstance() {
doTest();
}
// PY-7340
public void testFieldWithNoneInStub() {
doMultiFileTest();
}
public void testBoundedGeneric() {
doTest();
}
public void testNotImportedClassInDocString() {
doMultiFileTest();
}
// PY-6728
public void testForLoopIteration() {
doTest();
}
// PY-4285
public void testMapReturnElementType() {
doTest();
}
// PY-10413
public void testFunctionParameterReturnType() {
doTest();
}
// PY-10095
public void testStringStartsWith() {
doTest();
}
// PY-10854
public void testSecondFormIter() {
doTest();
}
public void testMetaClassIteration() {
doTest();
}
// PY-10967
public void testDefaultTupleParameter() {
doTest();
}
// PY-14222
public void testRecursiveDictAttribute() {
doTest();
}
// PY-13394
public void testContainsArguments() {
doTest();
}
public void testExpectedStructuralType() {
doTest();
}
public void testActualStructuralType() {
doTest();
}
public void testStructuralTypesForNestedCalls() {
doTest();
}
public void testIterateOverParamWithNoAttributes() {
doTest();
}
public void testGetAttributeAgainstStructuralType() {
doTest();
}
public void testComparisonOperatorsForNumericTypes() {
doTest();
}
public void testClassNew() {
doTest();
}
// PY-18275
public void testStrFormat() {
doTest();
}
// PY-9924
public void testTupleGetItemWithSlice() {
doTest();
}
// PY-9924
public void testListGetItemWithSlice() {
doTest();
}
// PY-20460
public void testStringGetItemWithSlice() {
doTest();
}
// PY-20460
public void testUnicodeGetItemWithSlice() {
doTest();
}
// PY-19884
public void testAbsSetAndMutableSet() {
doTest();
}
// PY-19884
public void testSetMethods() {
doTest();
}
// PY-11943
public void testMutableMapping() {
doTest();
}
// PY-16055
public void testFunctionReturnType() {
doTest();
}
// PY-20364
public void testActualBasestringExpectedUnionStrUnicode() {
doTest();
}
// PY-21083
public void testFloatFromhex() {
doTest();
}
// PY-20073
public void testMapArgumentsInOppositeOrderPy2() {
doTest();
}
public void testPositionalArguments() {
doTest();
}
// PY-19723
public void testKeywordArguments() {
doTest();
}
// PY-21350
public void testBuiltinInputPy2() {
doTest();
}
// PY-21350
public void testBuiltinRawInput() {
doTest();
}
// PY-22222, PY-29233
public void testPassClassWithDunderSlotsToMethodThatUsesSlottedAttribute() {
doTest();
}
// PY-22391
public void testIteratingOverListAfterIfNot() {
doTest();
}
// EA-98555, EA-98663
public void testNullArgumentMappedToPositionalParameter() {
doTest();
}
// PY-23138
public void testHomogeneousTuplePlusHeterogeneousTupleWithTheSameElementsType() {
doTest();
}
// PY-22763
public void testChainedComparisons() {
doTest();
}
// PY-22971
public void testTopLevelOverloadsAndImplementation() {
runWithLanguageLevel(LanguageLevel.PYTHON35, this::doTest);
}
// Each test below delegates to the shared inspection check (doTest / doMultiFileTest),
// optionally pinned to a specific Python language level via runWithLanguageLevel.
// The leading "// PY-xxxxx" comments reference the tracker issue each test covers.
// PY-22971
public void testOverloadsAndImplementationInClass() {
runWithLanguageLevel(LanguageLevel.PYTHON35, this::doTest);
}
// PY-22971
public void testOverloadsAndImplementationInImportedModule() {
runWithLanguageLevel(LanguageLevel.PYTHON35, this::doMultiFileTest);
}
// PY-22971
public void testOverloadsAndImplementationInImportedClass() {
runWithLanguageLevel(LanguageLevel.PYTHON35, this::doMultiFileTest);
}
// PY-23429
public void testMatchingModuleAgainstStructuralType() {
doMultiFileTest();
}
// PY-24287
public void testPromotingBytearrayToStrAndUnicode() {
doTest();
}
// PY-24930
public void testCallOperator() {
runWithLanguageLevel(LanguageLevel.PYTHON35, this::doTest);
}
// PY-24763
public void testAnnotatedDunderInitInGenericClass() {
runWithLanguageLevel(LanguageLevel.PYTHON35, this::doTest);
}
// (no tracker reference in the original)
public void testDunderInitAnnotatedAsNonNone() {
runWithLanguageLevel(LanguageLevel.PYTHON35, this::doTest);
}
// PY-23367
public void testComparingFloatAndInt() {
doTest();
}
// PY-25120
public void testIterateOverDictValueWhenItsTypeIsUnion() {
doTest();
}
// PY-9662
public void testBinaryExpressionWithUnknownOperand() {
runWithLanguageLevel(LanguageLevel.PYTHON35, this::doTest);
}
// PY-16066
public void testBasestringMatchesType() {
doTest();
}
// PY-23864
public void testClassObjectAndMetaclassCompatibility() {
doTest();
}
// PY-21408
public void testCallableAgainstStructural() {
doTest();
}
public void testMatchingOpenFunctionCallTypesPy2() {
doMultiFileTest();
}
// PY-21408
public void testClassMetaAttrsAgainstStructural() {
runWithLanguageLevel(LanguageLevel.PYTHON34, this::doTest);
}
public void testCallableInstanceAgainstCallable() {
runWithLanguageLevel(LanguageLevel.PYTHON35, this::doTest);
}
// PY-26163
public void testTypingNTAgainstStructural() {
runWithLanguageLevel(LanguageLevel.PYTHON36, this::doTest);
}
// PY-26163
public void testDefinitionAgainstStructural() {
runWithLanguageLevel(LanguageLevel.PYTHON36, this::doTest);
}
// PY-28017
public void testModuleWithGetAttr() {
runWithLanguageLevel(LanguageLevel.PYTHON37, this::doMultiFileTest);
}
// PY-26628
public void testAgainstTypingProtocol() {
runWithLanguageLevel(LanguageLevel.PYTHON35, this::doTest);
}
// PY-26628
public void testAgainstTypingProtocolWithImplementedMethod() {
runWithLanguageLevel(LanguageLevel.PYTHON35, this::doTest);
}
// PY-26628
public void testAgainstTypingProtocolWithImplementedVariable() {
runWithLanguageLevel(LanguageLevel.PYTHON36, this::doTest);
}
// PY-26628
public void testAgainstMergedTypingProtocols() {
runWithLanguageLevel(LanguageLevel.PYTHON35, this::doTest);
}
// PY-26628
public void testAgainstGenericTypingProtocol() {
runWithLanguageLevel(LanguageLevel.PYTHON36, this::doTest);
}
// PY-26628
public void testAgainstRecursiveTypingProtocol() {
runWithLanguageLevel(LanguageLevel.PYTHON35, this::doTest);
}
// PY-26628
public void testAgainstTypingProtocolWrongTypes() {
runWithLanguageLevel(LanguageLevel.PYTHON36, this::doTest);
}
// PY-26628
public void testTypingProtocolAgainstProtocol() {
runWithLanguageLevel(LanguageLevel.PYTHON36, this::doTest);
}
// PY-26628
public void testAgainstTypingProtocolDefinition() {
runWithLanguageLevel(LanguageLevel.PYTHON35, this::doTest);
}
// PY-26628
public void testTypingProtocolsInheritorAgainstHashable() {
runWithLanguageLevel(LanguageLevel.PYTHON35, this::doTest);
}
// PY-11977
public void testMetaclassInstanceMembersProvidedAndNoTypeCheckWarningWhenPassIntoMethodUseThisMembers() {
runWithLanguageLevel(LanguageLevel.getLatest(), this::doTest);
}
// Protocol-matching tests: fixtures are given inline via doTestByText. The embedded
// Python snippets (including <warning> markers) are test data and must not be edited.
// PY-28720
public void testOverriddenBuiltinMethodAgainstTypingProtocol() {
runWithLanguageLevel(
LanguageLevel.PYTHON35,
() ->
doTestByText("import typing\n" +
"class Proto(typing.Protocol):\n" +
" def function(self) -> None:\n" +
" pass\n" +
"class Cls:\n" +
" def __eq__(self, other) -> 'Cls':\n" +
" pass\n" +
" def function(self) -> None:\n" +
" pass\n" +
"def method(p: Proto):\n" +
" pass\n" +
"method(Cls())")
);
}
// PY-28720
public void testAgainstInvalidProtocol() {
runWithLanguageLevel(
LanguageLevel.PYTHON34,
() ->
doTestByText(
"from typing import Any, Protocol\n" +
"class B:\n" +
" def foo(self):\n" +
" ...\n" +
"class C(B, Protocol):\n" +
" def bar(self):\n" +
" ...\n" +
"class Bar:\n" +
" def bar(self):\n" +
" ...\n" +
"def f(x: C) -> Any:\n" +
" ...\n" +
"f(Bar())"
)
);
}
// PY-43133
public void testHierarchyAgainstProtocol() {
runWithLanguageLevel(
LanguageLevel.getLatest(),
() -> doTestByText(
"from typing import Protocol\n" +
"\n" +
"class A:\n" +
" def f1(self, x: str):\n" +
" pass\n" +
"\n" +
"class B(A):\n" +
" def f2(self, y: str):\n" +
" pass\n" +
"\n" +
"class P(Protocol):\n" +
" def f1(self, x: str): ...\n" +
" def f2(self, y: str): ...\n" +
"\n" +
"def test(p: P):\n" +
" pass\n" +
"\n" +
"b = B()\n" +
"test(b)"
)
);
}
// PY-23161
public void testGenericWithTypeVarBounds() {
runWithLanguageLevel(LanguageLevel.PYTHON35, this::doTest);
}
// PY-27788
public void testOverloadedFunctionAssignedToTargetInStub() {
doMultiFileTest();
}
// PY-27949
public void testAssigningToDictEntry() {
doTest();
}
// Structural-typing tests around None narrowing and abstract methods.
// PY-27231
public void testStructuralAndNone() {
doTestByText("def func11(value):\n" +
" if value is not None and value != 1:\n" +
" pass\n" +
"\n" +
"\n" +
"def func12(value):\n" +
" if None is not value and value != 1:\n" +
" pass\n" +
"\n" +
"\n" +
"def func21(value):\n" +
" if value is None and value != 1:\n" +
" pass\n" +
"\n" +
"\n" +
"def func22(value):\n" +
" if None is value and value != 1:\n" +
" pass\n" +
"\n" +
"\n" +
"func11(None)\n" +
"func12(None)\n" +
"func21(None)\n" +
"func22(None)\n" +
"\n" +
"\n" +
"def func31(value):\n" +
" if value and None and value != 1:\n" +
" pass\n" +
"\n" +
"\n" +
"def func32(value):\n" +
" if value is value and value != 1:\n" +
" pass\n" +
"\n" +
"\n" +
"def func33(value):\n" +
" if None is None and value != 1:\n" +
" pass\n" +
"\n" +
"\n" +
"def func34(value):\n" +
" a = 2\n" +
" if a is a and value != 1:\n" +
" pass\n" +
"\n" +
"\n" +
"func31(<warning descr=\"Expected type '{__ne__}', got 'None' instead\">None</warning>)\n" +
"func32(<warning descr=\"Expected type '{__ne__}', got 'None' instead\">None</warning>)\n" +
"func33(<warning descr=\"Expected type '{__ne__}', got 'None' instead\">None</warning>)\n" +
"func34(<warning descr=\"Expected type '{__ne__}', got 'None' instead\">None</warning>)");
}
// PY-29704
public void testPassingAbstractMethodResult() {
doTestByText("import abc\n" +
"\n" +
"class Foo:\n" +
" __metaclass__ = abc.ABCMeta\n" +
"\n" +
" @abc.abstractmethod\n" +
" def get_int(self):\n" +
" pass\n" +
"\n" +
" def foo(self, i):\n" +
" # type: (int) -> None\n" +
" print(i)\n" +
"\n" +
" def bar(self):\n" +
" self.foo(self.get_int())");
}
// PY-30629
public void testIteratingOverAbstractMethodResult() {
runWithLanguageLevel(
LanguageLevel.PYTHON35,
() -> doTestByText("from abc import ABCMeta, abstractmethod\n" +
"\n" +
"class A(metaclass=ABCMeta):\n" +
"\n" +
" @abstractmethod\n" +
" def foo(self):\n" +
" pass\n" +
"\n" +
"def something(derived: A):\n" +
" for _, _ in derived.foo():\n" +
" pass\n")
);
}
// PY-30357
public void testClassWithNestedAgainstStructural() {
doTestByText("def f(cls):\n" +
" print(cls.Meta)\n" +
"\n" +
"class A:\n" +
" class Meta:\n" +
" pass\n" +
"\n" +
"f(A)");
}
// Operator, TypeVar-bound, and typing.Literal initialization tests.
// PY-32205
public void testRightShift() {
runWithLanguageLevel(
LanguageLevel.PYTHON35,
() -> doTestByText("class Bin:\n" +
" def __rshift__(self, other: int):\n" +
" pass\n" +
"\n" +
"Bin() >> 1")
);
}
// PY-32313
public void testMatchingAgainstMultipleBoundTypeVar() {
runWithLanguageLevel(
LanguageLevel.PYTHON35,
() -> doTestByText("from typing import Type, TypeVar\n" +
"\n" +
"class A:\n" +
" pass\n" +
"\n" +
"class B(A):\n" +
" pass\n" +
"\n" +
"class C:\n" +
" pass\n" +
"\n" +
"T = TypeVar('T', A, B)\n" +
"\n" +
"def f(cls: Type[T], arg: int) -> T:\n" +
" pass\n" +
"\n" +
"f(A, 1)\n" +
"f(B, 2)\n" +
"f(<warning descr=\"Expected type 'Type[T]', got 'Type[C]' instead\">C</warning>, 3)")
);
}
// PY-32375
public void testMatchingReturnAgainstBoundedTypeVar() {
runWithLanguageLevel(
LanguageLevel.PYTHON35,
() -> doTestByText("from typing import TypeVar\n" +
"\n" +
"F = TypeVar('F', bound=int)\n" +
"\n" +
"def deco(func: F) -> F:\n" +
" return <warning descr=\"Expected type 'F', got 'str' instead\">\"\"</warning>")
);
}
// PY-35544
public void testLessSpecificCallableAgainstMoreSpecific() {
runWithLanguageLevel(
LanguageLevel.PYTHON35,
() -> doTestByText(
"from typing import Callable\n" +
"\n" +
"class MainClass:\n" +
" pass\n" +
"\n" +
"class SubClass(MainClass):\n" +
" pass\n" +
"\n" +
"def f(p: Callable[[SubClass], int]):\n" +
" pass\n" +
"\n" +
"def g(p: MainClass) -> int:\n" +
" pass\n" +
"\n" +
"f(g)"
)
);
}
// PY-35235
public void testTypingLiteralInitialization() {
runWithLanguageLevel(
LanguageLevel.PYTHON36,
() -> doTestByText("from typing_extensions import Literal\n" +
"\n" +
"a: Literal[20] = 20\n" +
"b: Literal[30] = <warning descr=\"Expected type 'Literal[30]', got 'Literal[25]' instead\">25</warning>\n" +
"c: Literal[2, 3, 4] = 3")
);
}
// PY-35235
public void testTypingLiteralInitializationWithDifferentExpressions() {
runWithLanguageLevel(
LanguageLevel.PYTHON36,
() -> doTestByText("from typing_extensions import Literal\n" +
"\n" +
"a1: Literal[0x14] = 20\n" +
"a2: Literal[20] = 0x14\n" +
"b1: Literal[0] = <warning descr=\"Expected type 'Literal[0]', got 'Literal[False]' instead\">False</warning>\n" +
"b2: Literal[False] = <warning descr=\"Expected type 'Literal[False]', got 'Literal[0]' instead\">0</warning>")
);
}
// PY-35235
public void testExplicitTypingLiteralArgument() {
runWithLanguageLevel(
LanguageLevel.PYTHON36,
() -> doTestByText("from typing_extensions import Literal\n" +
"\n" +
"a: Literal[20] = undefined\n" +
"b: Literal[30] = undefined\n" +
"c: int = 20\n" +
"\n" +
"def foo1(p1: Literal[20]):\n" +
" pass\n" +
"\n" +
"foo1(a)\n" +
"foo1(<warning descr=\"Expected type 'Literal[20]', got 'Literal[30]' instead\">b</warning>)\n" +
"foo1(<warning descr=\"Expected type 'Literal[20]', got 'int' instead\">c</warning>)\n" +
"\n" +
"def foo2(p1: int):\n" +
" pass\n" +
"\n" +
"foo2(a)\n" +
"foo2(b)\n" +
"foo2(c)")
);
}
// typing.Literal string/numeric matching tests.
// PY-35235
public void testTypingLiteralStrings() {
doTestByText("from typing_extensions import Literal\n" +
"\n" +
"a = undefined # type: Literal[\"abc\"]\n" +
"b = undefined # type: Literal[u\"abc\"]\n" +
"\n" +
"def foo1(p1):\n" +
" # type: (Literal[\"abc\"]) -> None\n" +
" pass\n" +
"foo1(a)\n" +
"foo1(<warning descr=\"Expected type 'Literal[\\\"abc\\\"]', got 'Literal[u\\\"abc\\\"]' instead\">b</warning>)\n" +
"\n" +
"def foo2(p1):\n" +
" # type: (Literal[u\"abc\"]) -> None\n" +
" pass\n" +
"foo2(<warning descr=\"Expected type 'Literal[u\\\"abc\\\"]', got 'Literal[\\\"abc\\\"]' instead\">a</warning>)\n" +
"foo2(b)\n" +
"\n" +
"def foo3(p1):\n" +
" # type: (bytes) -> None\n" +
" pass\n" +
"foo3(a)\n" +
"foo3(<warning descr=\"Expected type 'str', got 'Literal[u\\\"abc\\\"]' instead\">b</warning>)\n" +
"\n" +
"def foo4(p1):\n" +
" # type: (unicode) -> None\n" +
" pass\n" +
"foo4(a)\n" +
"foo4(b)\n");
}
// PY-35235
public void testNegativeTypingLiterals() {
doTestByText("from typing_extensions import Literal\n" +
"a = undefined # type: Literal[-10]\n" +
"b = undefined # type: Literal[-20]\n" +
"a = <warning descr=\"Expected type 'Literal[-10]', got 'Literal[-20]' instead\">b</warning>");
}
// PY-35235
public void testDistinguishTypingLiteralsFromTypeHintOrValue() {
doTestByText("from typing_extensions import Literal\n" +
"# no warning because `Literal[10]` as an expression has type `Any`\n" +
"a = Literal[10] # type: Literal[0]");
}
// PY-35235
public void testLiteralAgainstTypeVarBoundedWithTypingLiteral() {
runWithLanguageLevel(
LanguageLevel.PYTHON36,
() -> doTestByText("from typing_extensions import Literal\n" +
"from typing import TypeVar\n" +
"T = TypeVar('T', Literal[\"a\"], Literal[\"b\"], Literal[\"c\"])\n" +
"\n" +
"def repeat(x: T, n: int):\n" +
" return [x] * n\n" +
"\n" +
"repeat(\"c\", 2)")
);
}
// PY-35235
public void testKeywordArgumentAgainstTypingLiteral() {
runWithLanguageLevel(
LanguageLevel.PYTHON36,
() -> doTestByText("from typing_extensions import Literal\n" +
"def f(a: Literal[\"b\"]):\n" +
" pass\n" +
"f(a='b')\n" +
"f(<warning descr=\"Expected type 'Literal[\\\"b\\\"]', got 'Literal['c']' instead\">a='c'</warning>)")
);
}
// PY-35235
public void testNumericMatchingAndTypingLiteral() {
runWithLanguageLevel(
LanguageLevel.PYTHON36,
() -> doTestByText("from typing import Literal\n" +
"def expects_str(x: float) -> None: ...\n" +
"var: Literal[1] = 1\n" +
"expects_str(var)")
);
}
// PY-35235
public void testNonPlainStringAsTypingLiteralValue() {
runWithLanguageLevel(
LanguageLevel.PYTHON36,
() -> doTestByText("from typing import Literal\n" +
"a: Literal[\"22\"] = f\"22\"\n" +
"b: Literal[\"22\"] = <warning descr=\"Expected type 'Literal[\\\"22\\\"]', got 'Literal[f\\\"32\\\"]' instead\">f\"32\"</warning>\n" +
"two = \"2\"\n" +
"c: Literal[\"22\"] = <warning descr=\"Expected type 'Literal[\\\"22\\\"]', got 'str' instead\">f\"2{two}\"</warning>")
);
}
// PY-35235, PY-42281
public void testExpectedTypingLiteralReturnType() {
runWithLanguageLevel(
LanguageLevel.getLatest(),
() -> doTestByText("from typing import Literal\n" +
"def foo() -> Literal[\"ok\"]:\n" +
" return \"ok\"")
);
}
// Generic __call__ and TypedDict tests.
// PY-33500
public void testImplicitGenericDunderCallCallOnTypedElement() {
runWithLanguageLevel(
LanguageLevel.getLatest(),
() -> doTestByText("from typing import TypeVar, Generic\n" +
"\n" +
"_T = TypeVar('_T')\n" +
"\n" +
"class Callback(Generic[_T]):\n" +
" def __call__(self, arg: _T):\n" +
" pass\n" +
"\n" +
"def foo(cb: Callback[int]):\n" +
" cb(<warning descr=\"Expected type 'int' (matched generic type '_T'), got 'str' instead\">\"42\"</warning>)")
);
}
// PY-36008
public void testTypedDictUsageAlternativeSyntax() {
doTestByText("from typing import TypedDict\n" +
"\n" +
"Movie = TypedDict('Movie', {'name': str, 'year': int}, total=False)\n" +
"movie = <warning descr=\"Expected type 'Movie', got 'Dict[str, Union[str, int]]' instead\">{'name': 'Blade Runner', 'lo': 1234}</warning> # type: Movie\n");
}
// PY-36008
public void testTypedDictAsArgument() {
runWithLanguageLevel(
LanguageLevel.getLatest(),
() -> doTestByText("from typing import TypedDict\n" +
"class Movie(TypedDict):\n" +
" name: str\n" +
" year: int\n" +
"def record_movie(movie: Movie) -> None: ...\n" +
"record_movie({'name': 'Blade Runner', 'year': 1982})\n" +
"record_movie(<warning descr=\"Expected type 'Movie', got 'dict[str, int]' instead\">{'name': 1984}</warning>)")
);
}
// PY-36008
public void testTypedDictSubscriptionAsArgument() {
runWithLanguageLevel(
LanguageLevel.getLatest(),
() -> doTestByText("from typing import TypedDict\n" +
"class Movie(TypedDict):\n" +
" name: str\n" +
" year: int\n" +
"m1: Movie = dict(name='Alien', year=1979)\n" +
"m2 = Movie(name='Garden State', year=2004)\n" +
"def foo(p: int):\n" +
" pass\n" +
"foo(m2[\"year\"])\n" +
"foo(<warning descr=\"Expected type 'int', got 'str' instead\">m2[\"name\"]</warning>)\n" +
"foo(<warning descr=\"Expected type 'int', got 'str' instead\">m1[\"name\"]</warning>)")
);
}
// PY-36008
public void testTypedDictAssignment() {
runWithLanguageLevel(
LanguageLevel.getLatest(),
() -> doTestByText("from typing import TypedDict\n" +
"class Movie(TypedDict):\n" +
" name: str\n" +
" year: int\n" +
"m1: Movie = dict(name='Alien', year=1979)\n" +
"m2: Movie = <warning descr=\"Expected type 'Movie', got 'dict[str, str]' instead\">dict(name='Alien', year='1979')</warning>\n" +
"m3: Movie = typing.cast(Movie, dict(zip(['name', 'year'], ['Alien', 1979])))\n" +
"m4: Movie = <warning descr=\"Expected type 'Movie', got 'dict[str, str]' instead\">{'name': 'Alien', 'year': '1979'}</warning>\n" +
"m5 = Movie(name='Garden State', year=2004)"));
}
// PY-36008
public void testTypedDictAlternativeSyntaxAssignment() {
runWithLanguageLevel(
LanguageLevel.getLatest(),
() -> doTestByText("from typing import TypedDict\n" +
"Movie = TypedDict('Movie', {'name': str, 'year': int})\n" +
"m1: Movie = dict(name='Alien', year=1979)\n" +
"m2: Movie = <warning descr=\"Expected type 'Movie', got 'dict[str, str]' instead\">dict(name='Alien', year='1979')</warning>\n" +
"m3: Movie = typing.cast(Movie, dict(zip(['name', 'year'], ['Alien', 1979])))\n" +
"m4: Movie = <warning descr=\"Expected type 'Movie', got 'dict[str, str]' instead\">{'name': 'Alien', 'year': '1979'}</warning>\n" +
"m5 = Movie(name='Garden State', year=2004)"));
}
// PY-36008
public void testTypedDictDefinition() {
runWithLanguageLevel(
LanguageLevel.getLatest(),
() -> doTestByText("from typing import TypedDict\n" +
"class Employee(TypedDict):\n" +
" name: str\n" +
" id: int\n" +
"class Employee2(Employee, total=False):\n" +
" director: str\n" +
"em = Employee2(name='John Dorian', id=1234, director='3')\n" +
"em2 = Employee2(name='John Dorian', id=1234, <warning descr=\"Expected type 'str', got 'int' instead\">director=3</warning>)"));
}
// PY-36008
public void testTypedDictDefinitionAlternativeSyntax() {
runWithLanguageLevel(
LanguageLevel.PYTHON36,
() -> doTestByText("from typing import TypedDict\n" +
"Movie = TypedDict(<warning descr=\"Expected type 'str', got 'int' instead\">3</warning>, <warning descr=\"Expected type 'Dict[str, Any]', got 'List[int]' instead\">[1, 2, 3]</warning>)\n" +
"Movie = TypedDict('Movie', {})"));
}
// Further TypedDict tests plus a PSI/stub regression test.
// PY-36008
public void testTypedDictConsistency() {
runWithLanguageLevel(LanguageLevel.getLatest(), this::doTest);
}
// PY-36008
public void testTypedDictKeyValueRead() {
runWithLanguageLevel(
LanguageLevel.getLatest(),
() -> doTestByText("from typing import TypedDict\n" +
"\n" +
"Movie = TypedDict('Movie', {'name': str, 'year': int}, total=False)\n" +
"class Movie2(TypedDict, total=False):\n" +
" name: str\n" +
" year: int\n" +
"movie = Movie()\n" +
"movie2 = Movie2()\n" +
"s: str = <warning descr=\"Expected type 'str', got 'int' instead\">movie['year']</warning>\n" +
"s2: str = <warning descr=\"Expected type 'str', got 'int' instead\">movie2['year']</warning>\n"));
}
// PY-38873
public void testTypedDictWithListField() {
runWithLanguageLevel(
LanguageLevel.getLatest(),
() -> doTestByText("from typing import TypedDict, List\n" +
"\n" +
"Movie = TypedDict('Movie', {'address': List[str]}, total=False)\n" +
"class Movie2(TypedDict, total=False):\n" +
" address: List[str]\n" +
"movie = Movie()\n" +
"movie2 = Movie2()\n" +
"s: str = movie['address'][0]\n" +
"s: str = movie2['address'][0]\n" +
"s: str = movie['address'][<warning descr=\"Unexpected type(s):(str)Possible type(s):(int)(slice)\">'i'</warning>]\n" +
"s2: str = movie2['address'][<warning descr=\"Unexpected type(s):(str)Possible type(s):(int)(slice)\">'i'</warning>]\n"));
}
// PY-36008
public void testIncorrectTotalityValue() {
doTestByText("from typing import TypedDict\n" +
"Movie = TypedDict(\"Movie\", {}, <warning descr=\"Expected type 'bool', got 'int' instead\">total=2</warning>)");
}
// PY-33548
public void testTypeVarsChainBeforeNonTypeVarSubstitution() {
runWithLanguageLevel(
LanguageLevel.getLatest(),
() -> doTestByText(
"from typing import TypeVar, Mapping\n" +
"\n" +
"MyKT = TypeVar(\"MyKT\")\n" +
"MyVT = TypeVar(\"MyVT\")\n" +
"\n" +
"class MyMapping(Mapping[MyKT, MyVT]):\n" +
" pass\n" +
"\n" +
"d: MyMapping[str, str] = undefined1\n" +
"d.get(undefined2)\n" +
"d.get(\"str\")\n" +
"d.get(<warning descr=\"Expected type 'str' (matched generic type '_KT'), got 'int' instead\">1</warning>)"
)
);
}
// PY-38412
public void testTypedDictInStub() {
runWithLanguageLevel(LanguageLevel.getLatest(), this::doMultiFileTest);
}
// PY-28364
public void testDefinitionAgainstCallableInstance() {
runWithLanguageLevel(
LanguageLevel.getLatest(),
() -> doTestByText("class B:\n" +
" def __call__(self, *args, **kwargs):\n" +
" pass\n" +
"\n" +
"def some_fn(arg: B):\n" +
" pass\n" +
"\n" +
"some_fn(<warning descr=\"Expected type 'B', got 'Type[B]' instead\">B</warning>)")
);
}
// PY-29993
public void testCallableInstanceAgainstOtherCallableInstance() {
runWithLanguageLevel(
LanguageLevel.getLatest(),
() -> doTestByText("class MyCls:\n" +
" def __call__(self):\n" +
" return True\n" +
"\n" +
"class DifferentCls:\n" +
" def __call__(self):\n" +
" return True\n" +
"\n" +
"def foo(arg: MyCls):\n" +
" pass\n" +
"\n" +
"foo(MyCls())\n" +
"foo(<warning descr=\"Expected type 'MyCls', got 'DifferentCls' instead\">DifferentCls()</warning>)")
);
}
// Regression test: forces the foreign file's AST to be loaded (it starts out
// stub-backed/unparsed) before running the inspection.
public void testNewTypeInForeignUnstubbedFile() {
runWithLanguageLevel(LanguageLevel.getLatest(), () -> {
myFixture.copyDirectoryToProject(getTestDirectoryPath(), "");
myFixture.configureFromTempProjectFile("a.py");
VirtualFile foreignVFile = myFixture.findFileInTempDir("b.py");
assertNotNull(foreignVFile);
PsiFile foreignFilePsi = PsiManager.getInstance(myFixture.getProject()).findFile(foreignVFile);
assertNotNull(foreignFilePsi);
assertNotParsed(foreignFilePsi);
//noinspection ResultOfMethodCallIgnored
foreignFilePsi.getNode();
assertNotNull(((PsiFileImpl)foreignFilePsi).getTreeElement());
configureInspection();
});
}
// Callable/generic-parameter matching tests.
// PY-42205
public void testNonReferenceCallee() {
runWithLanguageLevel(
LanguageLevel.getLatest(),
() -> doTestByText("class CallableTest:\n" +
" def __call__(self, arg=None):\n" +
" pass\n" +
"CallableTest()(\"bad 1\")")
);
}
// PY-37876
public void testGenericCallablesInGenericClasses() {
runWithLanguageLevel(
LanguageLevel.getLatest(),
() -> doTestByText("from typing import Iterable, TypeVar, Generic\n" +
"T = TypeVar(\"T\")\n" +
"class MyClass(Generic[T]):\n" +
" def __init__(self, data: Iterable[T]):\n" +
" sorted(data, key=self.my_func)\n" +
" def my_func(self, elem: T) -> int:\n" +
" pass")
);
}
// PY-37876
public void testBoundedGenericParameterOfExpectedCallableParameter1() {
runWithLanguageLevel(
LanguageLevel.getLatest(),
() -> doTestByText("from typing import Callable, TypeVar\n" +
"\n" +
"T = TypeVar('T', bound=int)\n" +
"\n" +
"def func(c: Callable[[T], None]):\n" +
" pass\n" +
"\n" +
"def accepts_anything(x: object) -> None:\n" +
" pass\n" +
"\n" +
"func(accepts_anything)\n")
);
}
// PY-37876
public void testBoundedGenericParameterOfExpectedCallableParameter2() {
runWithLanguageLevel(
LanguageLevel.getLatest(),
() -> doTestByText("from typing import Callable, TypeVar\n" +
"\n" +
"T = TypeVar('T', bound=int)\n" +
"\n" +
"def func(c: Callable[[T], None]):\n" +
" pass\n" +
"\n" +
"def accepts_anything(x: str) -> None:\n" +
" pass\n" +
"\n" +
"func(<warning descr=\"Expected type '(Any) -> None' (matched generic type '(T) -> None'), got '(x: str) -> None' instead\">accepts_anything</warning>)\n")
);
}
// PY-37876
public void testGenericParameterOfExpectedCallableMappedByOtherArgument() {
runWithLanguageLevel(
LanguageLevel.getLatest(),
() -> doTestByText("from typing import Callable, TypeVar\n" +
"\n" +
"T = TypeVar('T')\n" +
"\n" +
"def func(x: T, c: Callable[[T], None]) -> None:\n" +
" pass\n" +
"\n" +
"def accepts_anything(x: str) -> None:\n" +
" pass\n" +
"\n" +
"func(42, <warning descr=\"Expected type '(int) -> None' (matched generic type '(T) -> None'), got '(x: str) -> None' instead\">accepts_anything</warning>)")
);
}
public void testCallByClass() {
doTest();
}
// PY-41806
public void testClassDefinitionAgainstProtocolDunderCall() {
runWithLanguageLevel(LanguageLevel.getLatest(), this::doTest);
}
// PY-41806
public void testClassInstanceAgainstProtocolDunderCall() {
runWithLanguageLevel(LanguageLevel.getLatest(), this::doTest);
}
// PY-36062
public void testModuleTypeParameter() {
runWithLanguageLevel(LanguageLevel.getLatest(), this::doMultiFileTest);
}
// PY-43841
public void testPyFunctionAgainstBuiltinFunction() {
runWithLanguageLevel(LanguageLevel.getLatest(), this::doTest);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.client.gateway.local;
import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.api.common.JobID;
import org.apache.flink.api.dag.Pipeline;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.client.cli.CliFrontend;
import org.apache.flink.client.cli.CliFrontendParser;
import org.apache.flink.client.cli.CustomCommandLine;
import org.apache.flink.client.deployment.ClusterClientServiceLoader;
import org.apache.flink.client.deployment.ClusterDescriptor;
import org.apache.flink.client.deployment.DefaultClusterClientServiceLoader;
import org.apache.flink.client.program.ClusterClient;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.DeploymentOptions;
import org.apache.flink.configuration.GlobalConfiguration;
import org.apache.flink.core.execution.JobClient;
import org.apache.flink.core.fs.FileSystem;
import org.apache.flink.core.fs.Path;
import org.apache.flink.core.plugin.PluginUtils;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.api.internal.TableEnvironmentInternal;
import org.apache.flink.table.catalog.UnresolvedIdentifier;
import org.apache.flink.table.client.SqlClientException;
import org.apache.flink.table.client.config.Environment;
import org.apache.flink.table.client.gateway.Executor;
import org.apache.flink.table.client.gateway.ProgramTargetDescriptor;
import org.apache.flink.table.client.gateway.ResultDescriptor;
import org.apache.flink.table.client.gateway.SessionContext;
import org.apache.flink.table.client.gateway.SqlExecutionException;
import org.apache.flink.table.client.gateway.TypedResult;
import org.apache.flink.table.client.gateway.local.result.ChangelogResult;
import org.apache.flink.table.client.gateway.local.result.DynamicResult;
import org.apache.flink.table.client.gateway.local.result.MaterializedResult;
import org.apache.flink.table.delegation.Parser;
import org.apache.flink.table.expressions.ResolvedExpression;
import org.apache.flink.table.operations.Operation;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.logical.utils.LogicalTypeUtils;
import org.apache.flink.table.types.utils.DataTypeUtils;
import org.apache.flink.types.Row;
import org.apache.flink.util.JarUtils;
import org.apache.flink.util.StringUtils;
import org.apache.commons.cli.Options;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Pattern;
import static org.apache.flink.util.Preconditions.checkNotNull;
/**
 * Executor that performs the Flink communication locally. The calls are blocking depending on the
 * response time to the Flink cluster. Flink jobs are not blocking.
 *
 * <p>Sessions are tracked in {@code contextMap}; each session owns an {@code ExecutionContext}
 * built from the default environment plus session-specific state.
 */
public class LocalExecutor implements Executor {
private static final Logger LOG = LoggerFactory.getLogger(LocalExecutor.class);
// File name searched in the Flink config directory when no default environment is supplied.
private static final String DEFAULT_ENV_FILE = "sql-client-defaults.yaml";
// Map to hold all the available sessions. The key is the session identifier, and the value is
// the ExecutionContext created by the session context.
private final ConcurrentHashMap<String, ExecutionContext<?>> contextMap;
// deployment
private final ClusterClientServiceLoader clusterClientServiceLoader;
private final Environment defaultEnvironment;
private final List<URL> dependencies;
private final Configuration flinkConfig;
private final List<CustomCommandLine> commandLines;
private final Options commandLineOptions;
// result maintenance
private final ResultStore resultStore;
// insert into sql match pattern
private static final Pattern INSERT_SQL_PATTERN = Pattern.compile("(INSERT\\s+(INTO|OVERWRITE).*)",
Pattern.CASE_INSENSITIVE | Pattern.DOTALL);
/**
 * Creates a local executor for submitting table programs and retrieving results.
 *
 * <p>Loads the global Flink configuration from the configuration directory resolved from the
 * environment, initializes the default file system, and discovers deployment command lines.
 * When {@code defaultEnv} is null, searches the configuration directory for
 * {@code sql-client-defaults.yaml} instead.
 *
 * @param defaultEnv URL of the default environment file, or null to search for one
 * @param jars user jar URLs to add as session dependencies
 * @param libraries library URLs to add as session dependencies
 * @throws SqlClientException if the Flink configuration cannot be loaded or the default
 *     environment file cannot be read
 */
public LocalExecutor(URL defaultEnv, List<URL> jars, List<URL> libraries) {
// discover configuration
final String flinkConfigDir;
try {
// find the configuration directory
flinkConfigDir = CliFrontend.getConfigurationDirectoryFromEnv();
// load the global configuration
this.flinkConfig = GlobalConfiguration.loadConfiguration(flinkConfigDir);
// initialize default file system
FileSystem.initialize(flinkConfig, PluginUtils.createPluginManagerFromRootFolder(flinkConfig));
// load command lines for deployment
this.commandLines = CliFrontend.loadCustomCommandLines(flinkConfig, flinkConfigDir);
this.commandLineOptions = collectCommandLineOptions(commandLines);
} catch (Exception e) {
throw new SqlClientException("Could not load Flink configuration.", e);
}
// try to find a default environment
if (defaultEnv == null) {
final String defaultFilePath = flinkConfigDir + "/" + DEFAULT_ENV_FILE;
System.out.println("No default environment specified.");
System.out.print("Searching for '" + defaultFilePath + "'...");
final File file = new File(defaultFilePath);
if (file.exists()) {
System.out.println("found.");
try {
defaultEnv = Path.fromLocalFile(file).toUri().toURL();
} catch (MalformedURLException e) {
throw new SqlClientException(e);
}
LOG.info("Using default environment file: {}", defaultEnv);
} else {
System.out.println("not found.");
}
}
// inform user
if (defaultEnv != null) {
System.out.println("Reading default environment from: " + defaultEnv);
try {
defaultEnvironment = Environment.parse(defaultEnv);
} catch (IOException e) {
throw new SqlClientException("Could not read default environment file at: " + defaultEnv, e);
}
} else {
defaultEnvironment = new Environment();
}
this.contextMap = new ConcurrentHashMap<>();
// discover dependencies
dependencies = discoverDependencies(jars, libraries);
// prepare result store
resultStore = new ResultStore(flinkConfig);
clusterClientServiceLoader = new DefaultClusterClientServiceLoader();
}
/**
 * Constructor for testing purposes: collaborators are injected directly instead of being
 * discovered from the local Flink installation.
 *
 * @param defaultEnvironment base environment for new sessions
 * @param dependencies jar/library URLs added to each session
 * @param flinkConfig cluster configuration
 * @param commandLine the single custom command line to use for deployment
 * @param clusterClientServiceLoader loader for cluster client factories; must not be null
 */
public LocalExecutor(
Environment defaultEnvironment,
List<URL> dependencies,
Configuration flinkConfig,
CustomCommandLine commandLine,
ClusterClientServiceLoader clusterClientServiceLoader) {
this.defaultEnvironment = defaultEnvironment;
this.dependencies = dependencies;
this.flinkConfig = flinkConfig;
this.commandLines = Collections.singletonList(commandLine);
this.commandLineOptions = collectCommandLineOptions(commandLines);
this.contextMap = new ConcurrentHashMap<>();
// prepare result store
this.resultStore = new ResultStore(flinkConfig);
this.clusterClientServiceLoader = checkNotNull(clusterClientServiceLoader);
}
/** Starts the executor. Currently a no-op: all state is initialized in the constructor. */
@Override
public void start() {
// nothing to do yet
}
/**
 * Returns an {@link ExecutionContext.Builder} pre-populated with this executor's shared
 * configuration (default environment, dependencies, Flink config, deployment command lines)
 * for the given {@link SessionContext}.
 */
private ExecutionContext.Builder createExecutionContextBuilder(SessionContext sessionContext) {
return ExecutionContext.builder(
defaultEnvironment,
sessionContext,
this.dependencies,
this.flinkConfig,
this.clusterClientServiceLoader,
this.commandLineOptions,
this.commandLines);
}
/**
 * Registers a new session and builds its {@link ExecutionContext}.
 *
 * @param sessionContext the session descriptor; its id becomes the registry key
 * @return the session identifier taken from {@code sessionContext}
 * @throws SqlExecutionException if a session with the same identifier is already open
 */
@Override
public String openSession(SessionContext sessionContext) throws SqlExecutionException {
    final String sessionId = sessionContext.getSessionId();
    // Register atomically. The previous containsKey()/put() sequence was a check-then-act
    // race on the ConcurrentHashMap: two concurrent openSession calls with the same id
    // could both pass the containsKey check and silently overwrite each other's context.
    // putIfAbsent makes duplicate detection and insertion a single atomic step (at the
    // cost of eagerly building a context that is discarded in the duplicate error path).
    final ExecutionContext<?> previous = this.contextMap.putIfAbsent(
            sessionId,
            createExecutionContextBuilder(sessionContext).build());
    if (previous != null) {
        throw new SqlExecutionException("Found another session with the same session identifier: " + sessionId);
    }
    return sessionId;
}
@Override
public void closeSession(String sessionId) throws SqlExecutionException {
// Cancel running queries first, then dispose of the session's context.
// NOTE(review): this iterates over ALL result ids in the shared result store, invoking
// cancelQuery with this sessionId for each — presumably results from other live sessions
// are unaffected because cancelQuery validates ownership; confirm before relying on it.
resultStore.getResults().forEach((resultId) -> {
try {
cancelQuery(sessionId, resultId);
} catch (Throwable t) {
// ignore any throwable to keep the clean up running
}
});
// Remove the session's ExecutionContext from contextMap and close it.
ExecutionContext<?> context = this.contextMap.remove(sessionId);
if (context != null) {
context.close();
}
}
/**
 * Looks up the {@link ExecutionContext} registered under the given session identifier.
 *
 * @param sessionId key into the session registry
 * @return the context stored for the session
 * @throws SqlExecutionException if no context is registered for the identifier
 */
@VisibleForTesting
protected ExecutionContext<?> getExecutionContext(String sessionId) throws SqlExecutionException {
    final ExecutionContext<?> context = this.contextMap.get(sessionId);
    if (context != null) {
        return context;
    }
    throw new SqlExecutionException("Invalid session identifier: " + sessionId);
}
/**
 * Flattens the session's environment into a single string-to-string property map.
 * Later sections win on duplicate keys: execution, then deployment, then configuration.
 */
@Override
public Map<String, String> getSessionProperties(String sessionId) throws SqlExecutionException {
    final Environment env = getExecutionContext(sessionId).getEnvironment();
    final Map<String, String> properties = new HashMap<>(env.getExecution().asTopLevelMap());
    properties.putAll(env.getDeployment().asTopLevelMap());
    properties.putAll(env.getConfiguration().asMap());
    return properties;
}
@Override
public void resetSessionProperties(String sessionId) throws SqlExecutionException {
ExecutionContext<?> context = getExecutionContext(sessionId);
// Renew the ExecutionContext by merging the default environment with original session context.
// Book keep all the session states of current ExecutionContext then
// re-register them into the new one.
// NOTE(review): the replaced context is not closed here (unlike closeSession) — confirm
// that dropping the reference does not leak resources held by the old context.
ExecutionContext<?> newContext = createExecutionContextBuilder(
context.getOriginalSessionContext())
.sessionState(context.getSessionState())
.build();
this.contextMap.put(sessionId, newContext);
}
@Override
public void setSessionProperty(String sessionId, String key, String value) throws SqlExecutionException {
    final ExecutionContext<?> current = getExecutionContext(sessionId);
    // Enrich the existing environment with the single new key/value pair.
    final Environment enriched;
    try {
        enriched = Environment.enrich(current.getEnvironment(), Collections.singletonMap(key, value));
    } catch (Throwable t) {
        throw new SqlExecutionException("Could not set session property.", t);
    }
    // Rebuild the ExecutionContext with the enriched environment, carrying over
    // the session state of the current context, and swap it into the map.
    this.contextMap.put(
        sessionId,
        createExecutionContextBuilder(current.getOriginalSessionContext())
            .env(enriched)
            .sessionState(current.getSessionState())
            .build());
}
@Override
public TableResult executeSql(String sessionId, String statement) throws SqlExecutionException {
    final ExecutionContext<?> context = getExecutionContext(sessionId);
    final TableEnvironment tableEnvironment = context.getTableEnvironment();
    try {
        // Execute under the session class loader so user classes (UDFs, connectors) resolve.
        return context.wrapClassLoader(() -> tableEnvironment.executeSql(statement));
    } catch (Exception e) {
        throw new SqlExecutionException("Could not execute statement: " + statement, e);
    }
}
@Override
public List<String> listModules(String sessionId) throws SqlExecutionException {
    final ExecutionContext<?> context = getExecutionContext(sessionId);
    final TableEnvironment tableEnvironment = context.getTableEnvironment();
    // Listing runs under the session class loader; the returned array is wrapped as a list.
    return context.wrapClassLoader(() -> {
        final String[] modules = tableEnvironment.listModules();
        return Arrays.asList(modules);
    });
}
/**
 * Returns a {@link Parser} for the session that delegates every call to the table
 * environment's internal parser while running it under the session's class loader
 * (so user-defined functions and catalog classes resolve correctly).
 *
 * <p>NOTE(review): {@code getExecutionContext} can throw {@code SqlExecutionException}
 * for an unknown session id although this method declares no checked exception —
 * presumably that exception type is unchecked; confirm.
 */
@Override
public Parser getSqlParser(String sessionId) {
final ExecutionContext<?> context = getExecutionContext(sessionId);
final TableEnvironment tableEnv = context.getTableEnvironment();
final Parser parser = ((TableEnvironmentInternal) tableEnv).getParser();
// Wrap each Parser operation so that parsing happens with the session class loader active.
return new Parser() {
@Override
public List<Operation> parse(String statement) {
return context.wrapClassLoader(() -> parser.parse(statement));
}
@Override
public UnresolvedIdentifier parseIdentifier(String identifier) {
return context.wrapClassLoader(() -> parser.parseIdentifier(identifier));
}
@Override
public ResolvedExpression parseSqlExpression(String sqlExpression, TableSchema inputSchema) {
return context.wrapClassLoader(() -> parser.parseSqlExpression(sqlExpression, inputSchema));
}
};
}
@Override
public List<String> completeStatement(String sessionId, String statement, int position) {
    final ExecutionContext<?> context = getExecutionContext(sessionId);
    final TableEnvironment tableEnvironment = context.getTableEnvironment();
    try {
        final String[] hints =
            context.wrapClassLoader(() -> tableEnvironment.getCompletionHints(statement, position));
        return Arrays.asList(hints);
    } catch (Throwable t) {
        // catch everything such that the query does not crash the executor
        if (LOG.isDebugEnabled()) {
            LOG.debug("Could not complete statement at " + position + ":" + statement, t);
        }
        return Collections.emptyList();
    }
}
@Override
public ResultDescriptor executeQuery(String sessionId, String query) throws SqlExecutionException {
    // Resolve the session's context first so an invalid session fails fast.
    return executeQueryInternal(sessionId, getExecutionContext(sessionId), query);
}
@Override
public TypedResult<List<Tuple2<Boolean, Row>>> retrieveResultChanges(
        String sessionId,
        String resultId) throws SqlExecutionException {
    final DynamicResult<?> result = resultStore.getResult(resultId);
    if (result == null) {
        throw new SqlExecutionException("Could not find a result with result identifier '" + resultId + "'.");
    }
    // Changelog retrieval is only valid for non-materialized results.
    if (result.isMaterialized()) {
        throw new SqlExecutionException("Invalid result retrieval mode.");
    }
    final ChangelogResult<?> changelog = (ChangelogResult<?>) result;
    return changelog.retrieveChanges();
}
@Override
public TypedResult<Integer> snapshotResult(String sessionId, String resultId, int pageSize) throws SqlExecutionException {
    final DynamicResult<?> result = resultStore.getResult(resultId);
    if (result == null) {
        throw new SqlExecutionException("Could not find a result with result identifier '" + resultId + "'.");
    }
    // Snapshots are only valid for materialized results.
    if (!result.isMaterialized()) {
        throw new SqlExecutionException("Invalid result retrieval mode.");
    }
    final MaterializedResult<?> materialized = (MaterializedResult<?>) result;
    return materialized.snapshot(pageSize);
}
@Override
public List<Row> retrieveResultPage(String resultId, int page) throws SqlExecutionException {
    final DynamicResult<?> result = resultStore.getResult(resultId);
    if (result == null) {
        throw new SqlExecutionException("Could not find a result with result identifier '" + resultId + "'.");
    }
    // Paging is only valid for materialized results.
    if (!result.isMaterialized()) {
        throw new SqlExecutionException("Invalid result retrieval mode.");
    }
    final MaterializedResult<?> materialized = (MaterializedResult<?>) result;
    return materialized.retrievePage(page);
}
@Override
public void cancelQuery(String sessionId, String resultId) throws SqlExecutionException {
    // Delegate with the session's context so the right cluster is contacted.
    cancelQueryInternal(getExecutionContext(sessionId), resultId);
}
@Override
public ProgramTargetDescriptor executeUpdate(String sessionId, String statement) throws SqlExecutionException {
    // Resolve the session's context first so an invalid session fails fast.
    return executeUpdateInternal(sessionId, getExecutionContext(sessionId), statement);
}
// --------------------------------------------------------------------------------------------
/**
 * Stops result retrieval for the given result and cancels the corresponding Flink job.
 *
 * <p>NOTE(review): the result identifier doubles as the job's JobID in hex form (results
 * are stored under the JobID string), which is why it can be parsed back into a
 * {@code JobID} below.
 */
private <T> void cancelQueryInternal(ExecutionContext<T> context, String resultId) {
final DynamicResult<T> result = resultStore.getResult(resultId);
if (result == null) {
throw new SqlExecutionException("Could not find a result with result identifier '" + resultId + "'.");
}
// stop retrieval and remove the result
LOG.info("Cancelling job {} and result retrieval.", resultId);
result.close();
resultStore.removeResult(resultId);
// stop Flink job
try (final ClusterDescriptor<T> clusterDescriptor = context.createClusterDescriptor()) {
ClusterClient<T> clusterClient = null;
try {
// retrieve existing cluster
clusterClient = clusterDescriptor.retrieve(context.getClusterId()).getClusterClient();
try {
// Block until the cancellation round trip completes.
clusterClient.cancel(new JobID(StringUtils.hexStringToByte(resultId))).get();
} catch (Throwable t) {
// the job might has finished earlier
}
} catch (Exception e) {
throw new SqlExecutionException("Could not retrieve or create a cluster.", e);
} finally {
// Always release the cluster client, even if cancellation failed.
try {
if (clusterClient != null) {
clusterClient.close();
}
} catch (Exception e) {
// ignore
}
}
} catch (SqlExecutionException e) {
// Re-throw our own exception unchanged instead of wrapping it again below.
throw e;
} catch (Exception e) {
throw new SqlExecutionException("Could not locate a cluster.", e);
}
}
/**
 * Applies an update statement to the session's table environment and, for INSERT
 * statements, builds and deploys the resulting pipeline in detached mode.
 *
 * @return the target descriptor of the submitted job, or {@code null} for
 *     non-INSERT statements (which are fully handled by {@code applyUpdate})
 */
private <C> ProgramTargetDescriptor executeUpdateInternal(
String sessionId,
ExecutionContext<C> context,
String statement) {
applyUpdate(context, statement);
//Todo: we should refactor following condition after TableEnvironment has support submit job directly.
if (!INSERT_SQL_PATTERN.matcher(statement.trim()).matches()) {
return null;
}
// create pipeline
final String jobName = sessionId + ": " + statement;
final Pipeline pipeline;
try {
pipeline = context.createPipeline(jobName);
} catch (Throwable t) {
// catch everything such that the statement does not crash the executor
throw new SqlExecutionException("Invalid SQL statement.", t);
}
// create a copy so that we can change settings without affecting the original config
Configuration configuration = new Configuration(context.getFlinkConfig());
// for update queries we don't wait for the job result, so run in detached mode
configuration.set(DeploymentOptions.ATTACHED, false);
// create execution
final ProgramDeployer deployer = new ProgramDeployer(configuration, jobName, pipeline);
// wrap in classloader because CodeGenOperatorFactory#getStreamOperatorClass
// requires to access UDF in deployer.deploy().
return context.wrapClassLoader(() -> {
try {
// blocking deployment
JobClient jobClient = deployer.deploy().get();
return ProgramTargetDescriptor.of(jobClient.getJobID());
} catch (Exception e) {
throw new RuntimeException("Error running SQL job.", e);
}
});
}
/**
 * Executes a SELECT query: wires the query into a temporary sink, deploys the
 * pipeline in attached mode, and registers the running result for retrieval.
 *
 * @return a descriptor identifying the stored result (keyed by the JobID string)
 * @throws SqlExecutionException if the query is invalid or submission fails
 */
private <C> ResultDescriptor executeQueryInternal(String sessionId, ExecutionContext<C> context, String query) {
    // create table
    final Table table = createTable(context, context.getTableEnvironment(), query);
    // TODO refactor this after Table#execute support all kinds of changes
    // initialize result
    final DynamicResult<C> result = resultStore.createResult(
        context.getEnvironment(),
        removeTimeAttributes(table.getSchema()),
        context.getExecutionConfig(),
        context.getClassLoader());
    final String jobName = sessionId + ": " + query;
    // Derive a temporary sink name from the query's hash. Integer.toUnsignedString
    // is used instead of Math.abs: Math.abs(Integer.MIN_VALUE) is still negative,
    // which would have produced a name containing '-'. For all non-negative hashes
    // the generated name is identical to the previous scheme.
    // NOTE(review): distinct queries can share a hash code; concurrent execution of
    // colliding queries in one session could clash on the temp table name — verify.
    final String tableName = String.format("_tmp_table_%s", Integer.toUnsignedString(query.hashCode()));
    final Pipeline pipeline;
    try {
        // writing to a sink requires an optimization step that might reference UDFs during code compilation
        context.wrapClassLoader(() -> {
            ((TableEnvironmentInternal) context.getTableEnvironment()).registerTableSinkInternal(tableName, result.getTableSink());
            table.insertInto(tableName);
        });
        pipeline = context.createPipeline(jobName);
    } catch (Throwable t) {
        // the result needs to be closed as long as
        // it not stored in the result store
        result.close();
        // catch everything such that the query does not crash the executor
        throw new SqlExecutionException("Invalid SQL query.", t);
    } finally {
        // Remove the temporal table object.
        context.wrapClassLoader(() -> {
            context.getTableEnvironment().dropTemporaryTable(tableName);
        });
    }
    // create a copy so that we can change settings without affecting the original config
    Configuration configuration = new Configuration(context.getFlinkConfig());
    // for queries we wait for the job result, so run in attached mode
    configuration.set(DeploymentOptions.ATTACHED, true);
    // shut down the cluster if the shell is closed
    configuration.set(DeploymentOptions.SHUTDOWN_IF_ATTACHED, true);
    // create execution
    final ProgramDeployer deployer = new ProgramDeployer(
        configuration, jobName, pipeline);
    JobClient jobClient;
    // wrap in classloader because CodeGenOperatorFactory#getStreamOperatorClass
    // requires to access UDF in deployer.deploy().
    jobClient = context.wrapClassLoader(() -> {
        try {
            // blocking deployment
            return deployer.deploy().get();
        } catch (Exception e) {
            throw new SqlExecutionException("Error while submitting job.", e);
        }
    });
    String jobId = jobClient.getJobID().toString();
    // store the result under the JobID
    resultStore.storeResult(jobId, result);
    // start result retrieval
    result.startRetrieval(jobClient);
    return new ResultDescriptor(
        jobId,
        removeTimeAttributes(table.getSchema()),
        result.isMaterialized(),
        context.getEnvironment().getExecution().isTableauMode());
}
/**
 * Creates a table using the given query in the given table environment.
 * Parsing and validation may load user classes, so the call runs under the
 * session's class loader.
 */
private <C> Table createTable(ExecutionContext<C> context, TableEnvironment tableEnv, String selectQuery) {
    final Table parsed;
    try {
        parsed = context.wrapClassLoader(() -> tableEnv.sqlQuery(selectQuery));
    } catch (Throwable t) {
        // catch everything such that the query does not crash the executor
        throw new SqlExecutionException("Invalid SQL statement.", t);
    }
    return parsed;
}
/**
 * Applies the given update statement to the given table environment with query configuration.
 */
private <C> void applyUpdate(ExecutionContext<C> context, String updateStatement) {
    final TableEnvironment environment = context.getTableEnvironment();
    try {
        // TODO replace sqlUpdate with executeSql
        // This needs we do more refactor, because we can't set the flinkConfig in ExecutionContext
        // into StreamExecutionEnvironment
        context.wrapClassLoader(() -> environment.sqlUpdate(updateStatement));
    } catch (Throwable t) {
        // catch everything such that the statement does not crash the executor
        throw new SqlExecutionException("Invalid SQL update statement.", t);
    }
}
// --------------------------------------------------------------------------------------------
/**
 * Collects all dependency JARs: the explicitly listed jar URLs plus every {@code *.jar}
 * file found directly (non-recursively) inside each library directory.
 *
 * @param jars explicit jar file URLs; each is validated via {@code JarUtils.checkJarFile}
 * @param libraries URLs of local directories to scan for jar files
 * @return the discovered dependency URLs
 * @throws SqlClientException if a URL is invalid, a directory cannot be read,
 *     or a jar file fails validation
 */
private static List<URL> discoverDependencies(List<URL> jars, List<URL> libraries) {
    final List<URL> dependencies = new ArrayList<>();
    try {
        // find jar files
        for (URL url : jars) {
            JarUtils.checkJarFile(url);
            dependencies.add(url);
        }
        // find jar files in library directories
        for (URL libUrl : libraries) {
            final File dir = new File(libUrl.toURI());
            if (!dir.isDirectory()) {
                throw new SqlClientException("Directory expected: " + dir);
            } else if (!dir.canRead()) {
                throw new SqlClientException("Directory cannot be read: " + dir);
            }
            final File[] files = dir.listFiles();
            if (files == null) {
                throw new SqlClientException("Directory cannot be read: " + dir);
            }
            for (File f : files) {
                // Only consider regular files with a ".jar" extension. The check uses
                // regionMatches(ignoreCase=true), which compares characters
                // locale-independently; the previous toLowerCase() call was
                // locale-sensitive (e.g. under a Turkish locale "FOO.JAR".toLowerCase()
                // produces a dotless 'ı' and no longer ends with ".jar").
                final String name = f.getName();
                final boolean isJar = name.length() >= 4
                        && name.regionMatches(true, name.length() - 4, ".jar", 0, 4);
                if (f.isFile() && isJar) {
                    final URL url = f.toURI().toURL();
                    JarUtils.checkJarFile(url);
                    dependencies.add(url);
                }
            }
        }
    } catch (Exception e) {
        throw new SqlClientException("Could not load all required JAR files.", e);
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("Using the following dependencies: {}", dependencies);
    }
    return dependencies;
}
/**
 * Merges the standard run-command options with every option contributed by the
 * given custom command lines.
 */
private static Options collectCommandLineOptions(List<CustomCommandLine> commandLines) {
    final Options contributed = new Options();
    commandLines.forEach(commandLine -> {
        commandLine.addGeneralOptions(contributed);
        commandLine.addRunOptions(contributed);
    });
    return CliFrontendParser.mergeOptions(
        CliFrontendParser.getRunCommandOptions(),
        contributed);
}
/**
 * Returns a copy of the schema in which every field's logical type has had its
 * time attributes stripped.
 */
private static TableSchema removeTimeAttributes(TableSchema schema) {
    final TableSchema.Builder builder = TableSchema.builder();
    final String[] fieldNames = schema.getFieldNames();
    final DataType[] fieldTypes = schema.getFieldDataTypes();
    for (int i = 0; i < schema.getFieldCount(); i++) {
        final DataType original = fieldTypes[i];
        final DataType stripped = DataTypeUtils.replaceLogicalType(
            original,
            LogicalTypeUtils.removeTimeAttributes(original.getLogicalType()));
        builder.field(fieldNames[i], stripped);
    }
    return builder.build();
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.internal;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.Counter;
import org.elasticsearch.action.search.SearchTask;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.concurrent.AbstractRefCounted;
import org.elasticsearch.common.util.concurrent.RefCounted;
import org.elasticsearch.common.util.iterable.Iterables;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ObjectMapper;
import org.elasticsearch.search.collapse.CollapseContext;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.search.SearchExtBuilder;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.aggregations.SearchContextAggregations;
import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.fetch.FetchPhase;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.InnerHitsContext;
import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext;
import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.profile.Profilers;
import org.elasticsearch.search.query.QuerySearchResult;
import org.elasticsearch.search.rescore.RescoreContext;
import org.elasticsearch.search.sort.SortAndFormats;
import org.elasticsearch.search.suggest.SuggestionSearchContext;
import java.util.ArrayList;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* This class encapsulates the state needed to execute a search. It holds a reference to the
* shards point in time snapshot (IndexReader / ContextIndexSearcher) and allows passing on
* state from one query / fetch phase to another.
*
* This class also implements {@link RefCounted} since in some situations like in {@link org.elasticsearch.search.SearchService}
* a SearchContext can be closed concurrently due to independent events ie. when an index gets removed. To prevent accessing closed
* IndexReader / IndexSearcher instances the SearchContext can be guarded by a reference count and fail if it's been closed by
* an external event.
*/
// For reference why we use RefCounted here see #20095
public abstract class SearchContext extends AbstractRefCounted implements Releasable {
public static final int DEFAULT_TERMINATE_AFTER = 0;
// Resources scheduled for release, grouped by the Lifetime at which they must be
// closed. Lazily initialized in addReleasable.
private Map<Lifetime, List<Releasable>> clearables = null;
// Guards against double-closing: close() decrements the ref count exactly once.
private final AtomicBoolean closed = new AtomicBoolean(false);
// Lazily created on first innerHits() access.
private InnerHitsContext innerHitsContext;
protected SearchContext() {
super("search_context");
}
public abstract void setTask(SearchTask task);
public abstract SearchTask getTask();
public abstract boolean isCancelled();
@Override
public final void close() {
if (closed.compareAndSet(false, true)) { // prevent double closing
decRef();
}
}
// Invoked by AbstractRefCounted once the reference count drops to zero: release
// CONTEXT-lifetime resources first, then delegate to the subclass.
@Override
protected final void closeInternal() {
try {
clearReleasables(Lifetime.CONTEXT);
} finally {
doClose();
}
}
@Override
protected void alreadyClosed() {
throw new IllegalStateException("search context is already closed can't increment refCount current count [" + refCount() + "]");
}
protected abstract void doClose();
/**
* Should be called before executing the main query and after all other parameters have been set.
* @param rewrite if the set query should be rewritten against the searcher returned from {@link #searcher()}
*/
public abstract void preProcess(boolean rewrite);
/** Automatically apply all required filters to the given query such as
* alias filters, types filters, etc. */
public abstract Query buildFilteredQuery(Query query);
public abstract long id();
public abstract String source();
public abstract ShardSearchRequest request();
public abstract SearchType searchType();
public abstract SearchShardTarget shardTarget();
public abstract int numberOfShards();
public abstract float queryBoost();
public abstract long getOriginNanoTime();
public abstract ScrollContext scrollContext();
public abstract SearchContext scrollContext(ScrollContext scroll);
public abstract SearchContextAggregations aggregations();
public abstract SearchContext aggregations(SearchContextAggregations aggregations);
public abstract void addSearchExt(SearchExtBuilder searchExtBuilder);
public abstract SearchExtBuilder getSearchExt(String name);
public abstract SearchContextHighlight highlight();
public abstract void highlight(SearchContextHighlight highlight);
public InnerHitsContext innerHits() {
if (innerHitsContext == null) {
innerHitsContext = new InnerHitsContext();
}
return innerHitsContext;
}
public abstract SuggestionSearchContext suggest();
public abstract void suggest(SuggestionSearchContext suggest);
/**
* @return list of all rescore contexts. empty if there aren't any.
*/
public abstract List<RescoreContext> rescore();
public abstract void addRescore(RescoreContext rescore);
public abstract boolean hasScriptFields();
public abstract ScriptFieldsContext scriptFields();
/**
* A shortcut function to see whether there is a fetchSourceContext and it says the source is requested.
*/
public abstract boolean sourceRequested();
public abstract boolean hasFetchSourceContext();
public abstract FetchSourceContext fetchSourceContext();
public abstract SearchContext fetchSourceContext(FetchSourceContext fetchSourceContext);
public abstract DocValueFieldsContext docValueFieldsContext();
public abstract SearchContext docValueFieldsContext(DocValueFieldsContext docValueFieldsContext);
public abstract ContextIndexSearcher searcher();
public abstract IndexShard indexShard();
public abstract MapperService mapperService();
public abstract SimilarityService similarityService();
public abstract BigArrays bigArrays();
public abstract BitsetFilterCache bitsetFilterCache();
public abstract <IFD extends IndexFieldData<?>> IFD getForField(MappedFieldType fieldType);
public abstract TimeValue timeout();
public abstract void timeout(TimeValue timeout);
public abstract int terminateAfter();
public abstract void terminateAfter(int terminateAfter);
/**
* Indicates if the current index should perform frequent low level search cancellation check.
*
* Enabling low-level checks will make long running searches to react to the cancellation request faster. However,
* since it will produce more cancellation checks it might slow the search performance down.
*/
public abstract boolean lowLevelCancellation();
public abstract SearchContext minimumScore(float minimumScore);
public abstract Float minimumScore();
public abstract SearchContext sort(SortAndFormats sort);
public abstract SortAndFormats sort();
public abstract SearchContext trackScores(boolean trackScores);
public abstract boolean trackScores();
public abstract SearchContext trackTotalHits(boolean trackTotalHits);
/**
* Indicates if the total hit count for the query should be tracked. Defaults to {@code true}
*/
public abstract boolean trackTotalHits();
public abstract SearchContext searchAfter(FieldDoc searchAfter);
public abstract FieldDoc searchAfter();
public abstract SearchContext collapse(CollapseContext collapse);
public abstract CollapseContext collapse();
public abstract SearchContext parsedPostFilter(ParsedQuery postFilter);
public abstract ParsedQuery parsedPostFilter();
public abstract Query aliasFilter();
public abstract SearchContext parsedQuery(ParsedQuery query);
public abstract ParsedQuery parsedQuery();
/**
* The query to execute, might be rewritten.
*/
public abstract Query query();
public abstract int from();
public abstract SearchContext from(int from);
public abstract int size();
public abstract SearchContext size(int size);
public abstract boolean hasStoredFields();
public abstract boolean hasStoredFieldsContext();
/**
* A shortcut function to see whether there is a storedFieldsContext and it says the fields are requested.
*/
public abstract boolean storedFieldsRequested();
public abstract StoredFieldsContext storedFieldsContext();
public abstract SearchContext storedFieldsContext(StoredFieldsContext storedFieldsContext);
public abstract boolean explain();
public abstract void explain(boolean explain);
@Nullable
public abstract List<String> groupStats();
public abstract void groupStats(List<String> groupStats);
public abstract boolean version();
public abstract void version(boolean version);
public abstract int[] docIdsToLoad();
public abstract int docIdsToLoadFrom();
public abstract int docIdsToLoadSize();
public abstract SearchContext docIdsToLoad(int[] docIdsToLoad, int docsIdsToLoadFrom, int docsIdsToLoadSize);
public abstract void accessed(long accessTime);
public abstract long lastAccessTime();
public abstract long keepAlive();
public abstract void keepAlive(long keepAlive);
public SearchLookup lookup() {
return getQueryShardContext().lookup();
}
public abstract DfsSearchResult dfsResult();
public abstract QuerySearchResult queryResult();
public abstract FetchPhase fetchPhase();
public abstract FetchSearchResult fetchResult();
/**
* Return a handle over the profilers for the current search request, or {@code null} if profiling is not enabled.
*/
public abstract Profilers getProfilers();
/**
* Schedule the release of a resource. The time when {@link Releasable#close()} will be called on this object
* is function of the provided {@link Lifetime}.
*/
public void addReleasable(Releasable releasable, Lifetime lifetime) {
if (clearables == null) {
clearables = new EnumMap<>(Lifetime.class);
}
List<Releasable> releasables = clearables.get(lifetime);
if (releasables == null) {
releasables = new ArrayList<>();
clearables.put(lifetime, releasables);
}
releasables.add(releasable);
}
// Releases every resource whose lifetime is at or below the given one. Lifetime's
// declaration order (COLLECTION < PHASE < CONTEXT) makes compareTo meaningful here.
public void clearReleasables(Lifetime lifetime) {
if (clearables != null) {
List<List<Releasable>>releasables = new ArrayList<>();
for (Lifetime lc : Lifetime.values()) {
if (lc.compareTo(lifetime) > 0) {
break;
}
List<Releasable> remove = clearables.remove(lc);
if (remove != null) {
releasables.add(remove);
}
}
Releasables.close(Iterables.flatten(releasables));
}
}
/**
* @return true if the request contains only suggest
*/
public final boolean hasOnlySuggest() {
return request().source() != null
&& request().source().isSuggestOnly();
}
/**
* Looks up the given field, but does not restrict to fields in the types set on this context.
*/
public abstract MappedFieldType smartNameFieldType(String name);
public abstract ObjectMapper getObjectMapper(String name);
public abstract Counter timeEstimateCounter();
/** Return a view of the additional query collectors that should be run for this context. */
public abstract Map<Class<?>, Collector> queryCollectors();
/**
* The life time of an object that is used during search execution.
*/
public enum Lifetime {
/**
* This life time is for objects that only live during collection time.
*/
COLLECTION,
/**
* This life time is for objects that need to live until the end of the current search phase.
*/
PHASE,
/**
* This life time is for objects that need to live until the search context they are attached to is destroyed.
*/
CONTEXT
}
public abstract QueryShardContext getQueryShardContext();
@Override
public String toString() {
StringBuilder result = new StringBuilder().append(shardTarget());
if (searchType() != SearchType.DEFAULT) {
result.append("searchType=[").append(searchType()).append("]");
}
if (scrollContext() != null) {
if (scrollContext().scroll != null) {
result.append("scroll=[").append(scrollContext().scroll.keepAlive()).append("]");
} else {
result.append("scroll=[null]");
}
}
result.append(" query=[").append(query()).append("]");
return result.toString();
}
}
| |
/*
* Copyright 2019 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.stunner.client.widgets.editor;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.workbench.common.stunner.core.client.session.ClientSession;
import org.kie.workbench.common.stunner.core.client.session.command.ManagedClientSessionCommands;
import org.kie.workbench.common.stunner.core.client.session.command.impl.ClearSessionCommand;
import org.kie.workbench.common.stunner.core.client.session.command.impl.CopySelectionSessionCommand;
import org.kie.workbench.common.stunner.core.client.session.command.impl.CutSelectionSessionCommand;
import org.kie.workbench.common.stunner.core.client.session.command.impl.DeleteSelectionSessionCommand;
import org.kie.workbench.common.stunner.core.client.session.command.impl.ExportToJpgSessionCommand;
import org.kie.workbench.common.stunner.core.client.session.command.impl.ExportToPdfSessionCommand;
import org.kie.workbench.common.stunner.core.client.session.command.impl.ExportToPngSessionCommand;
import org.kie.workbench.common.stunner.core.client.session.command.impl.ExportToRawFormatSessionCommand;
import org.kie.workbench.common.stunner.core.client.session.command.impl.ExportToSvgSessionCommand;
import org.kie.workbench.common.stunner.core.client.session.command.impl.PasteSelectionSessionCommand;
import org.kie.workbench.common.stunner.core.client.session.command.impl.RedoSessionCommand;
import org.kie.workbench.common.stunner.core.client.session.command.impl.SaveDiagramSessionCommand;
import org.kie.workbench.common.stunner.core.client.session.command.impl.SwitchGridSessionCommand;
import org.kie.workbench.common.stunner.core.client.session.command.impl.UndoSessionCommand;
import org.kie.workbench.common.stunner.core.client.session.command.impl.ValidateSessionCommand;
import org.kie.workbench.common.stunner.core.client.session.command.impl.VisitGraphSessionCommand;
import org.mockito.InOrder;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@RunWith(MockitoJUnitRunner.class)
public class EditorSessionCommandsTest {
@Mock
private ManagedClientSessionCommands commands;
@Mock
private ClientSession session;
private EditorSessionCommands editorSessionCommands;
@Before
@SuppressWarnings("unchecked")
public void setup() {
    editorSessionCommands = new EditorSessionCommands(commands);
    // Let register() return the mock itself so registration calls can be chained.
    when(commands.register(any(Class.class))).thenReturn(commands);
}
@Test
public void testInit() {
    editorSessionCommands.init();
    // init() must register the commands in exactly this order.
    final Class[] expectedRegistrationOrder = {
            VisitGraphSessionCommand.class,
            SwitchGridSessionCommand.class,
            ClearSessionCommand.class,
            DeleteSelectionSessionCommand.class,
            UndoSessionCommand.class,
            RedoSessionCommand.class,
            ValidateSessionCommand.class,
            ExportToPngSessionCommand.class,
            ExportToJpgSessionCommand.class,
            ExportToPdfSessionCommand.class,
            ExportToSvgSessionCommand.class,
            ExportToRawFormatSessionCommand.class,
            CopySelectionSessionCommand.class,
            PasteSelectionSessionCommand.class,
            CutSelectionSessionCommand.class,
            SaveDiagramSessionCommand.class
    };
    final InOrder inOrder = inOrder(commands);
    for (Class commandClass : expectedRegistrationOrder) {
        inOrder.verify(commands).register(commandClass);
    }
}
@Test
public void testBind() {
    // Binding a session must be delegated to the managed commands.
    editorSessionCommands.bind(session);
    verify(commands).bind(eq(session));
}
@Test
public void testClear() {
    // Clearing must be delegated to the managed commands' clearCommands().
    editorSessionCommands.clear();
    verify(commands).clearCommands();
}
@Test
public void testDestory() {
    // NOTE(review): method name contains a typo ("Destory"); kept unchanged for stability.
    // Destroying must be delegated to the managed commands.
    editorSessionCommands.destroy();
    verify(commands).destroy();
}
@Test
public void testGetCommands() {
    // The accessor must expose the managed commands instance it was built with.
    assertEquals(commands, editorSessionCommands.getCommands());
}
@Test
public void testGetVisitGraphSessionCommand() {
    editorSessionCommands.getVisitGraphSessionCommand();
    final Class<VisitGraphSessionCommand> expected = VisitGraphSessionCommand.class;
    verify(commands).get(eq(expected));
}
@Test
public void testGetSwitchGridSessionCommand() {
    editorSessionCommands.getSwitchGridSessionCommand();
    final Class<SwitchGridSessionCommand> expected = SwitchGridSessionCommand.class;
    verify(commands).get(eq(expected));
}
@Test
public void testGetClearSessionCommand() {
    editorSessionCommands.getClearSessionCommand();
    final Class<ClearSessionCommand> expected = ClearSessionCommand.class;
    verify(commands).get(eq(expected));
}
@Test
public void testGetDeleteSelectionSessionCommand() {
    // Getter must delegate the lookup of DeleteSelectionSessionCommand to the registry.
    editorSessionCommands.getDeleteSelectionSessionCommand();
    verify(commands).get(eq(DeleteSelectionSessionCommand.class));
}
@Test
public void testGetUndoSessionCommand() {
    // Getter must delegate the lookup of UndoSessionCommand to the registry.
    editorSessionCommands.getUndoSessionCommand();
    verify(commands).get(eq(UndoSessionCommand.class));
}
@Test
public void testGetRedoSessionCommand() {
    // Getter must delegate the lookup of RedoSessionCommand to the registry.
    editorSessionCommands.getRedoSessionCommand();
    verify(commands).get(eq(RedoSessionCommand.class));
}
@Test
public void testGetValidateSessionCommand() {
    // Getter must delegate the lookup of ValidateSessionCommand to the registry.
    editorSessionCommands.getValidateSessionCommand();
    verify(commands).get(eq(ValidateSessionCommand.class));
}
@Test
public void testGetExportToPngSessionCommand() {
    // Getter must delegate the lookup of ExportToPngSessionCommand to the registry.
    editorSessionCommands.getExportToPngSessionCommand();
    verify(commands).get(eq(ExportToPngSessionCommand.class));
}
@Test
public void testGetExportToJpgSessionCommand() {
    // Getter must delegate the lookup of ExportToJpgSessionCommand to the registry.
    editorSessionCommands.getExportToJpgSessionCommand();
    verify(commands).get(eq(ExportToJpgSessionCommand.class));
}
@Test
public void testGetExportToPdfSessionCommand() {
    // Getter must delegate the lookup of ExportToPdfSessionCommand to the registry.
    editorSessionCommands.getExportToPdfSessionCommand();
    verify(commands).get(eq(ExportToPdfSessionCommand.class));
}
@Test
public void testGetExportToSvgSessionCommand() {
    // Getter must delegate the lookup of ExportToSvgSessionCommand to the registry.
    editorSessionCommands.getExportToSvgSessionCommand();
    verify(commands).get(eq(ExportToSvgSessionCommand.class));
}
@Test
public void testGetExportToRawSessionCommand() {
    // Getter must delegate the lookup of ExportToRawFormatSessionCommand to the registry.
    editorSessionCommands.getExportToRawFormatSessionCommand();
    verify(commands).get(eq(ExportToRawFormatSessionCommand.class));
}
@Test
public void testGetCopySelectionSessionCommand() {
    // Getter must delegate the lookup of CopySelectionSessionCommand to the registry.
    editorSessionCommands.getCopySelectionSessionCommand();
    verify(commands).get(eq(CopySelectionSessionCommand.class));
}
@Test
public void testGetPasteSelectionSessionCommand() {
    // Getter must delegate the lookup of PasteSelectionSessionCommand to the registry.
    editorSessionCommands.getPasteSelectionSessionCommand();
    verify(commands).get(eq(PasteSelectionSessionCommand.class));
}
@Test
public void testGetCutSelectionSessionCommand() {
    // Getter must delegate the lookup of CutSelectionSessionCommand to the registry.
    editorSessionCommands.getCutSelectionSessionCommand();
    verify(commands).get(eq(CutSelectionSessionCommand.class));
}
@Test
public void testGetSaveDiagramSessionCommand() {
    // Getter must delegate the lookup of SaveDiagramSessionCommand to the registry.
    editorSessionCommands.getSaveDiagramSessionCommand();
    verify(commands).get(eq(SaveDiagramSessionCommand.class));
}
}
| |
package org.cloudiator.messaging.services;
import com.google.inject.Inject;
import javax.annotation.Nullable;
import javax.inject.Named;
import org.cloudiator.messages.Process;
import org.cloudiator.messages.Process.CreateFaasProcessRequest;
import org.cloudiator.messages.Process.CreateHdfsClusterRequest;
import org.cloudiator.messages.Process.CreateHdfsProcessRequest;
import org.cloudiator.messages.Process.CreateLanceProcessRequest;
import org.cloudiator.messages.Process.CreateProcessRequest;
import org.cloudiator.messages.Process.CreateScheduleRequest;
import org.cloudiator.messages.Process.CreateSparkClusterRequest;
import org.cloudiator.messages.Process.CreateSparkProcessRequest;
import org.cloudiator.messages.Process.DeleteLanceProcessRequest;
import org.cloudiator.messages.Process.DeleteProcessRequest;
import org.cloudiator.messages.Process.DeleteScheduleRequest;
import org.cloudiator.messages.Process.FaasProcessCreatedResponse;
import org.cloudiator.messages.Process.FinishProcessRequest;
import org.cloudiator.messages.Process.FinishProcessResponse;
import org.cloudiator.messages.Process.HdfsClusterCreatedResponse;
import org.cloudiator.messages.Process.HdfsProcessCreatedResponse;
import org.cloudiator.messages.Process.LanceProcessCreatedResponse;
import org.cloudiator.messages.Process.LanceProcessDeletedResponse;
import org.cloudiator.messages.Process.LanceUpdateRequest;
import org.cloudiator.messages.Process.LanceUpdateResponse;
import org.cloudiator.messages.Process.ProcessCreatedResponse;
import org.cloudiator.messages.Process.ProcessDeletedResponse;
import org.cloudiator.messages.Process.ProcessEvent;
import org.cloudiator.messages.Process.ProcessQueryRequest;
import org.cloudiator.messages.Process.ProcessQueryResponse;
import org.cloudiator.messages.Process.ProcessStatusQuery;
import org.cloudiator.messages.Process.ProcessStatusResponse;
import org.cloudiator.messages.Process.ScaleRequest;
import org.cloudiator.messages.Process.ScaleResponse;
import org.cloudiator.messages.Process.ScheduleCreatedResponse;
import org.cloudiator.messages.Process.ScheduleDeleteResponse;
import org.cloudiator.messages.Process.ScheduleEvent;
import org.cloudiator.messages.Process.ScheduleGraphRequest;
import org.cloudiator.messages.Process.ScheduleGraphResponse;
import org.cloudiator.messages.Process.ScheduleQueryRequest;
import org.cloudiator.messages.Process.ScheduleQueryResponse;
import org.cloudiator.messages.Process.SparkClusterCreatedResponse;
import org.cloudiator.messages.Process.SparkProcessCreatedResponse;
import org.cloudiator.messaging.MessageCallback;
import org.cloudiator.messaging.MessageInterface;
import org.cloudiator.messaging.ResponseCallback;
import org.cloudiator.messaging.ResponseException;
/**
 * Default {@link ProcessService} implementation that delegates every operation to the
 * message bus through a {@link MessageInterface}.
 *
 * <p>Conventions used throughout:
 * synchronous operations use {@code call(request, ResponseType.class, timeout)} and may throw
 * {@link ResponseException}; asynchronous operations use {@code callAsync} with a
 * {@link ResponseCallback}; {@code subscribe*} methods register a {@link MessageCallback} for
 * incoming requests; {@code announce*} methods publish events without expecting a response.
 */
public class ProcessServiceImpl implements ProcessService {

  private final MessageInterface messageInterface;

  // Timeout in milliseconds for synchronous calls. Defaults to 20s; Guice may override it
  // through the optional "responseTimeout" binding.
  @Inject(optional = true)
  @Named("responseTimeout")
  private long timeout = 20000;

  @Inject
  public ProcessServiceImpl(MessageInterface messageInterface) {
    this.messageInterface = messageInterface;
  }

  /** Synchronously queries schedules. */
  @Override
  public ScheduleQueryResponse querySchedules(ScheduleQueryRequest scheduleQueryRequest)
      throws ResponseException {
    return messageInterface.call(scheduleQueryRequest, ScheduleQueryResponse.class, timeout);
  }

  /** Synchronously queries processes. */
  @Override
  public ProcessQueryResponse queryProcesses(ProcessQueryRequest processQueryRequest)
      throws ResponseException {
    return messageInterface.call(processQueryRequest, ProcessQueryResponse.class, timeout);
  }

  @Override
  public void subscribeScheduleQueryRequest(MessageCallback<ScheduleQueryRequest> callback) {
    messageInterface.subscribe(ScheduleQueryRequest.class, ScheduleQueryRequest.parser(), callback);
  }

  @Override
  public void subscribeScheduleDeleteRequest(MessageCallback<DeleteScheduleRequest> callback) {
    messageInterface.subscribe(DeleteScheduleRequest.class, DeleteScheduleRequest.parser(),
        callback);
  }

  @Override
  public void subscribeProcessQueryRequest(MessageCallback<ProcessQueryRequest> callback) {
    messageInterface.subscribe(ProcessQueryRequest.class, ProcessQueryRequest.parser(), callback);
  }

  /** Synchronously creates a schedule. */
  @Override
  public ScheduleCreatedResponse createSchedule(CreateScheduleRequest createScheduleRequest)
      throws ResponseException {
    return messageInterface.call(createScheduleRequest, ScheduleCreatedResponse.class, timeout);
  }

  @Override
  public void createScheduleAsync(CreateScheduleRequest createScheduleRequest,
      ResponseCallback<ScheduleCreatedResponse> callback) {
    messageInterface.callAsync(createScheduleRequest, ScheduleCreatedResponse.class, callback);
  }

  @Override
  public void deleteScheduleAsync(DeleteScheduleRequest deleteScheduleRequest,
      ResponseCallback<ScheduleDeleteResponse> callback) {
    messageInterface.callAsync(deleteScheduleRequest, ScheduleDeleteResponse.class, callback);
  }

  /** Synchronously creates a generic process. */
  @Override
  public ProcessCreatedResponse createProcess(CreateProcessRequest createProcessRequest)
      throws ResponseException {
    return messageInterface.call(createProcessRequest, ProcessCreatedResponse.class, timeout);
  }

  /** Synchronously deletes a generic process. */
  @Override
  public ProcessDeletedResponse deleteProcess(DeleteProcessRequest deleteProcessRequest)
      throws ResponseException {
    return messageInterface.call(deleteProcessRequest, ProcessDeletedResponse.class, timeout);
  }

  @Override
  public void createProcessAsync(CreateProcessRequest createProcessRequest,
      ResponseCallback<ProcessCreatedResponse> callback) {
    messageInterface.callAsync(createProcessRequest, ProcessCreatedResponse.class, callback);
  }

  @Override
  public void deleteProcessAsync(DeleteProcessRequest deleteProcessRequest,
      ResponseCallback<ProcessDeletedResponse> callback) {
    messageInterface.callAsync(deleteProcessRequest, ProcessDeletedResponse.class, callback);
  }

  @Override
  public void subscribeCreateProcessRequest(MessageCallback<CreateProcessRequest> callback) {
    messageInterface.subscribe(CreateProcessRequest.class, CreateProcessRequest.parser(), callback);
  }

  @Override
  public void subscribeDeleteProcessRequest(MessageCallback<DeleteProcessRequest> callback) {
    messageInterface.subscribe(DeleteProcessRequest.class, DeleteProcessRequest.parser(), callback);
  }

  /** Synchronously creates a Lance process. */
  @Override
  public LanceProcessCreatedResponse createLanceProcess(
      CreateLanceProcessRequest createLanceProcessRequest) throws ResponseException {
    return messageInterface.call(createLanceProcessRequest, LanceProcessCreatedResponse.class,
        timeout);
  }

  @Override
  public void createLanceProcessAsync(CreateLanceProcessRequest createLanceProcessRequest,
      ResponseCallback<LanceProcessCreatedResponse> callback) {
    messageInterface.callAsync(createLanceProcessRequest, LanceProcessCreatedResponse.class,
        callback);
  }

  @Override
  public void subscribeCreateLanceProcessRequest(
      MessageCallback<CreateLanceProcessRequest> callback) {
    messageInterface.subscribe(CreateLanceProcessRequest.class, CreateLanceProcessRequest.parser(),
        callback);
  }

  @Override
  public void deleteLanceProcessAsync(DeleteLanceProcessRequest deleteLanceProcessRequest,
      ResponseCallback<LanceProcessDeletedResponse> callback) {
    messageInterface.callAsync(deleteLanceProcessRequest, LanceProcessDeletedResponse.class,
        callback);
  }

  @Override
  public void subscribeDeleteLanceProcessRequest(
      MessageCallback<DeleteLanceProcessRequest> callback) {
    messageInterface.subscribe(DeleteLanceProcessRequest.class, DeleteLanceProcessRequest.parser(),
        callback);
  }

  /** Synchronously creates a Spark process. */
  @Override
  public SparkProcessCreatedResponse createSparkProcess(
      CreateSparkProcessRequest createSparkProcessRequest) throws ResponseException {
    return messageInterface.call(createSparkProcessRequest, SparkProcessCreatedResponse.class,
        timeout);
  }

  @Override
  public void createSparkProcessAsync(CreateSparkProcessRequest createSparkProcessRequest,
      ResponseCallback<SparkProcessCreatedResponse> callback) {
    messageInterface.callAsync(createSparkProcessRequest, SparkProcessCreatedResponse.class,
        callback);
  }

  @Override
  public void subscribeCreateSparkProcessRequest(
      MessageCallback<CreateSparkProcessRequest> callback) {
    messageInterface.subscribe(CreateSparkProcessRequest.class, CreateSparkProcessRequest.parser(),
        callback);
  }

  /** Synchronously creates a Spark cluster. */
  @Override
  public SparkClusterCreatedResponse createSparkCluster(
      CreateSparkClusterRequest createSparkClusterRequest) throws ResponseException {
    return messageInterface.call(createSparkClusterRequest, SparkClusterCreatedResponse.class,
        timeout);
  }

  @Override
  public void createSparkClusterAsync(
      CreateSparkClusterRequest createSparkClusterRequest,
      ResponseCallback<SparkClusterCreatedResponse> callback) {
    messageInterface.callAsync(createSparkClusterRequest, SparkClusterCreatedResponse.class,
        callback);
  }

  @Override
  public void subscribeCreateSparkClusterRequest(
      MessageCallback<CreateSparkClusterRequest> callback) {
    messageInterface.subscribe(CreateSparkClusterRequest.class, CreateSparkClusterRequest.parser(),
        callback);
  }

  /** Synchronously creates an HDFS process. */
  @Override
  public HdfsProcessCreatedResponse createHdfsProcess(
      CreateHdfsProcessRequest createHdfsProcessRequest) throws ResponseException {
    return messageInterface.call(createHdfsProcessRequest, HdfsProcessCreatedResponse.class,
        timeout);
  }

  @Override
  public void createHdfsProcessAsync(CreateHdfsProcessRequest createHdfsProcessRequest,
      ResponseCallback<HdfsProcessCreatedResponse> callback) {
    messageInterface.callAsync(createHdfsProcessRequest, HdfsProcessCreatedResponse.class,
        callback);
  }

  @Override
  public void subscribeCreateHdfsProcessRequest(
      MessageCallback<CreateHdfsProcessRequest> callback) {
    messageInterface.subscribe(CreateHdfsProcessRequest.class, CreateHdfsProcessRequest.parser(),
        callback);
  }

  /** Synchronously creates an HDFS cluster. */
  @Override
  public HdfsClusterCreatedResponse createHdfsCluster(
      CreateHdfsClusterRequest createHdfsClusterRequest) throws ResponseException {
    return messageInterface.call(createHdfsClusterRequest, HdfsClusterCreatedResponse.class,
        timeout);
  }

  @Override
  public void createHdfsClusterAsync(
      CreateHdfsClusterRequest createHdfsClusterRequest,
      ResponseCallback<HdfsClusterCreatedResponse> callback) {
    messageInterface.callAsync(createHdfsClusterRequest, HdfsClusterCreatedResponse.class,
        callback);
  }

  @Override
  public void subscribeCreateHdfsClusterRequest(
      MessageCallback<CreateHdfsClusterRequest> callback) {
    messageInterface.subscribe(CreateHdfsClusterRequest.class, CreateHdfsClusterRequest.parser(),
        callback);
  }

  /** Synchronously creates a FaaS process. */
  @Override
  public FaasProcessCreatedResponse createFaasProcess(
      CreateFaasProcessRequest createFaasProcessRequest) throws ResponseException {
    return messageInterface.call(createFaasProcessRequest, FaasProcessCreatedResponse.class,
        timeout);
  }

  @Override
  public void createFaasProcessAsync(CreateFaasProcessRequest createFaasProcessRequest,
      ResponseCallback<FaasProcessCreatedResponse> callback) {
    messageInterface.callAsync(createFaasProcessRequest, FaasProcessCreatedResponse.class,
        callback);
  }

  @Override
  public void subscribeCreateFaasProcessRequest(
      MessageCallback<CreateFaasProcessRequest> callback) {
    messageInterface.subscribe(CreateFaasProcessRequest.class, CreateFaasProcessRequest.parser(),
        callback);
  }

  @Override
  public void subscribeSchedule(MessageCallback<CreateScheduleRequest> callback) {
    messageInterface.subscribe(CreateScheduleRequest.class, CreateScheduleRequest.parser(),
        callback);
  }

  /** Publishes a process event; fire-and-forget, no response expected. */
  @Override
  public void announceProcessEvent(ProcessEvent processEvent) {
    messageInterface.publish(processEvent);
  }

  @Override
  public void subscribeProcessEvent(MessageCallback<ProcessEvent> callback) {
    messageInterface.subscribe(ProcessEvent.class, ProcessEvent.parser(), callback);
  }

  /** Synchronously retrieves the dependency graph of a schedule. */
  @Override
  public ScheduleGraphResponse graph(ScheduleGraphRequest scheduleGraphRequest)
      throws ResponseException {
    return messageInterface.call(scheduleGraphRequest, ScheduleGraphResponse.class, timeout);
  }

  /**
   * Synchronously queries process status.
   *
   * @param timeout per-call timeout in milliseconds; {@code null} uses the configured default
   */
  @Override
  public ProcessStatusResponse queryProcessStatus(ProcessStatusQuery processStatusQuery,
      @Nullable Long timeout)
      throws ResponseException {
    // Fall back to the service-wide timeout when the caller does not supply one.
    if (timeout == null) {
      timeout = this.timeout;
    }
    return messageInterface.call(processStatusQuery, ProcessStatusResponse.class, timeout);
  }

  /** Publishes a schedule event; fire-and-forget, no response expected. */
  @Override
  public void announceScheduleEvent(ScheduleEvent scheduleEvent) {
    messageInterface.publish(scheduleEvent);
  }

  /** Synchronously requests a scale operation. */
  @Override
  public ScaleResponse createScale(ScaleRequest scaleRequest) throws ResponseException {
    return messageInterface.call(scaleRequest, ScaleResponse.class, timeout);
  }

  @Override
  public void createScaleRequestAsync(ScaleRequest scaleRequest,
      ResponseCallback<ScaleResponse> callback) {
    messageInterface.callAsync(scaleRequest, ScaleResponse.class, callback);
  }

  @Override
  public void subscribeScaleRequest(MessageCallback<ScaleRequest> callback) {
    messageInterface.subscribe(ScaleRequest.class, ScaleRequest.parser(), callback);
  }

  @Override
  public void updateLanceEnvironmentAsync(LanceUpdateRequest lanceUpdateRequest,
      ResponseCallback<LanceUpdateResponse> callback) {
    messageInterface.callAsync(lanceUpdateRequest, LanceUpdateResponse.class, callback);
  }

  /** Synchronously marks a process as finished. */
  @Override
  public FinishProcessResponse finishProcess(FinishProcessRequest finishProcessRequest)
      throws ResponseException {
    return messageInterface.call(finishProcessRequest, FinishProcessResponse.class, timeout);
  }
}
| |
/*
* Copyright (c) 2011, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*/
/*
* The Apache Software License, Version 1.1
*
*
* Copyright (c) 1999-2002 The Apache Software Foundation. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution,
* if any, must include the following acknowledgment:
* "This product includes software developed by the
* Apache Software Foundation (http://www.apache.org/)."
* Alternately, this acknowledgment may appear in the software itself,
* if and wherever such third-party acknowledgments normally appear.
*
* 4. The names "Xerces" and "Apache Software Foundation" must
* not be used to endorse or promote products derived from this
* software without prior written permission. For written
* permission, please contact apache@apache.org.
*
* 5. Products derived from this software may not be called "Apache",
* nor may "Apache" appear in their name, without prior written
* permission of the Apache Software Foundation.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation and was
* originally based on software copyright (c) 1999, International
* Business Machines, Inc., http://www.apache.org. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*/
package com.sun.org.apache.xerces.internal.impl.dtd.models;
/**
 * This class is a very simple bitset class. The DFA content model code needs
 * to support a bit set, but the java BitSet class is way, way overkill. Our
 * bitset never needs to be expanded after creation, hash itself, etc...
 *
 * <p>Since the vast majority of content models will never require more than 64
 * bits, and since allocation of anything in Java is expensive, this class
 * provides a hybrid implementation that uses two ints for instances that use
 * 64 bits or fewer. It has a byte array reference member which will only be
 * used if more than 64 bits are required.
 *
 * <p>Note that the code that uses this class will never perform operations
 * on sets of different sizes, so that check does not have to be made here
 * (except in {@link #setTo}, which enforces it explicitly).
 *
 * @xerces.internal
 */
// made this class public so it can be accessed by
// the XS content models from the schema package -neilg.
public class CMStateSet
{
    // -------------------------------------------------------------------
    // Constructors
    // -------------------------------------------------------------------

    /**
     * Creates a set able to hold {@code bitCount} bits, all initially zero.
     *
     * @param bitCount number of addressable bits; must be non-negative
     * @throws RuntimeException if {@code bitCount} is negative
     */
    public CMStateSet(int bitCount)
    {
        // Store the required bit count and insure its legal
        fBitCount = bitCount;
        if (fBitCount < 0)
            throw new RuntimeException("ImplementationMessages.VAL_CMSI");

        // Allocate the byte array only when we cannot live within the
        // 64-bit (two int) high-performance scheme.
        if (fBitCount > 64)
        {
            // Round the byte count up so a partial trailing byte still fits.
            fByteCount = fBitCount / 8;
            if (fBitCount % 8 != 0)
                fByteCount++;
            fByteArray = new byte[fByteCount];
        }

        // Init all the bits to zero
        zeroBits();
    }

    // -------------------------------------------------------------------
    // Public inherited methods
    // -------------------------------------------------------------------

    /**
     * Returns a textual dump of the set bits, e.g. <code>{ 1 5 9 }</code>
     * (an empty set renders as <code>{ }</code>).
     */
    @Override
    public String toString()
    {
        // StringBuilder suffices here; no synchronization is needed, and the
        // loop bound guarantees getBit() cannot throw, so no try/catch either.
        final StringBuilder strRet = new StringBuilder();
        strRet.append("{");
        for (int index = 0; index < fBitCount; index++)
        {
            if (getBit(index))
                strRet.append(" ").append(index);
        }
        strRet.append(" }");
        return strRet.toString();
    }

    // -------------------------------------------------------------------
    // Package final methods
    // -------------------------------------------------------------------
    // the XS content models from the schema package -neilg.

    /** ANDs this set in place with {@code setToAnd} (assumed same size). */
    public final void intersection(CMStateSet setToAnd)
    {
        if (fBitCount < 65)
        {
            fBits1 &= setToAnd.fBits1;
            fBits2 &= setToAnd.fBits2;
        }
        else
        {
            for (int index = fByteCount - 1; index >= 0; index--)
                fByteArray[index] &= setToAnd.fByteArray[index];
        }
    }

    /**
     * Tests bit {@code bitToGet}.
     *
     * @throws RuntimeException if the index is out of range
     */
    public final boolean getBit(int bitToGet)
    {
        if (bitToGet >= fBitCount)
            throw new RuntimeException("ImplementationMessages.VAL_CMSI");

        if (fBitCount < 65)
        {
            // Small-set path: the bit lives in fBits1 (0-31) or fBits2 (32-63).
            final int mask = (0x1 << (bitToGet % 32));
            if (bitToGet < 32)
                return (fBits1 & mask) != 0;
            else
                return (fBits2 & mask) != 0;
        }
        else
        {
            // Large-set path: locate the byte (index / 8) and the bit within it.
            final byte mask = (byte)(0x1 << (bitToGet % 8));
            final int ofs = bitToGet >> 3;
            return ((fByteArray[ofs] & mask) != 0);
        }
    }

    /** Returns true if no bit is set. */
    public final boolean isEmpty()
    {
        if (fBitCount < 65)
        {
            return ((fBits1 == 0) && (fBits2 == 0));
        }
        else
        {
            for (int index = fByteCount - 1; index >= 0; index--)
            {
                if (fByteArray[index] != 0)
                    return false;
            }
        }
        return true;
    }

    /** Returns true if both sets have the same size and identical bits. */
    final boolean isSameSet(CMStateSet setToCompare)
    {
        if (fBitCount != setToCompare.fBitCount)
            return false;

        if (fBitCount < 65)
        {
            return ((fBits1 == setToCompare.fBits1)
                    && (fBits2 == setToCompare.fBits2));
        }

        for (int index = fByteCount - 1; index >= 0; index--)
        {
            if (fByteArray[index] != setToCompare.fByteArray[index])
                return false;
        }
        return true;
    }

    // the XS content models from the schema package -neilg.
    /** ORs this set in place with {@code setToOr} (assumed same size). */
    public final void union(CMStateSet setToOr)
    {
        if (fBitCount < 65)
        {
            fBits1 |= setToOr.fBits1;
            fBits2 |= setToOr.fBits2;
        }
        else
        {
            for (int index = fByteCount - 1; index >= 0; index--)
                fByteArray[index] |= setToOr.fByteArray[index];
        }
    }

    /**
     * Sets bit {@code bitToSet}.
     *
     * @throws RuntimeException if the index is out of range
     */
    public final void setBit(int bitToSet)
    {
        if (bitToSet >= fBitCount)
            throw new RuntimeException("ImplementationMessages.VAL_CMSI");

        if (fBitCount < 65)
        {
            final int mask = (0x1 << (bitToSet % 32));
            // A plain OR sets the bit; the original clear-then-set was redundant.
            if (bitToSet < 32)
                fBits1 |= mask;
            else
                fBits2 |= mask;
        }
        else
        {
            final byte mask = (byte)(0x1 << (bitToSet % 8));
            fByteArray[bitToSet >> 3] |= mask;
        }
    }

    // the XS content models from the schema package -neilg.
    /**
     * Makes this set an exact copy of {@code srcSet}.
     *
     * @throws RuntimeException if the two sets differ in size
     */
    public final void setTo(CMStateSet srcSet)
    {
        // They have to be the same size
        if (fBitCount != srcSet.fBitCount)
            throw new RuntimeException("ImplementationMessages.VAL_CMSI");

        if (fBitCount < 65)
        {
            fBits1 = srcSet.fBits1;
            fBits2 = srcSet.fBits2;
        }
        else
        {
            for (int index = fByteCount - 1; index >= 0; index--)
                fByteArray[index] = srcSet.fByteArray[index];
        }
    }

    // had to make this method public so it could be accessed from
    // schema package - neilg.
    /** Clears every bit in the set. */
    public final void zeroBits()
    {
        if (fBitCount < 65)
        {
            fBits1 = 0;
            fBits2 = 0;
        }
        else
        {
            for (int index = fByteCount - 1; index >= 0; index--)
                fByteArray[index] = 0;
        }
    }

    // -------------------------------------------------------------------
    // Private data members
    //
    // fBitCount
    //      The count of bits that the outside world wants to support,
    //      so its the max bit index plus one.
    //
    // fByteCount
    //      If the bit count is > 64, then we use the fByteArray member to
    //      store the bits, and this indicates its size in bytes. Otherwise
    //      its value is meaningless.
    //
    // fBits1
    // fBits2
    //      When the bit count is 64 or fewer (very common), these hold the
    //      bits. Otherwise, the fByteArray member holds them.
    // -------------------------------------------------------------------
    int fBitCount;
    int fByteCount;
    int fBits1;
    int fBits2;
    byte[] fByteArray;

    /* Optimization(Jan, 2001) */
    /** Equality is defined by {@link #isSameSet}: same size, same bits. */
    @Override
    public boolean equals(Object o) {
        if (!(o instanceof CMStateSet)) return false;
        return isSameSet((CMStateSet)o);
    }

    /** Hash derived from the stored bits; consistent with {@link #equals}. */
    @Override
    public int hashCode() {
        if (fBitCount < 65)
        {
            return fBits1 + fBits2 * 31;
        }
        else
        {
            int hash = 0;
            for (int index = fByteCount - 1; index >= 0; index--)
                hash = fByteArray[index] + hash * 31;
            return hash;
        }
    }
    /* Optimization(Jan, 2001) */
}
| |
//
// Copyright 2018 SenX S.A.S.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package io.warp10.crypto;
import io.warp10.crypto.SipHashInline;
import java.nio.charset.StandardCharsets;
import java.security.SecureRandom;
import java.util.Arrays;
import java.util.Random;
import org.junit.Assert;
import org.junit.Test;
/**
 * Tests for the corrected {@code SipHashInline} implementation used by Warp 10.
 *
 * <p>Correctness tests use the official SipHash-2-4 reference vectors; the
 * {@code *_perf} methods are micro-benchmarks that print timings rather than assert.
 *
 * @see <a href="https://github.com/nahi/siphash-java-inline/blob/master/src/test/java/SipHashInlineTest.java">upstream SipHashInlineTest</a>
 */
public class SipHashInlineTest {

  // SipHash-2-4 reference outputs for messages of length 0..63 under the standard test key.
  private static final long[] EXPECTED = new long[] { 0x726fdb47dd0e0e31L,
      0x74f839c593dc67fdL, 0x0d6c8009d9a94f5aL, 0x85676696d7fb7e2dL,
      0xcf2794e0277187b7L, 0x18765564cd99a68dL, 0xcbc9466e58fee3ceL,
      0xab0200f58b01d137L, 0x93f5f5799a932462L, 0x9e0082df0ba9e4b0L,
      0x7a5dbbc594ddb9f3L, 0xf4b32f46226bada7L, 0x751e8fbc860ee5fbL,
      0x14ea5627c0843d90L, 0xf723ca908e7af2eeL, 0xa129ca6149be45e5L,
      0x3f2acc7f57c29bdbL, 0x699ae9f52cbe4794L, 0x4bc1b3f0968dd39cL,
      0xbb6dc91da77961bdL, 0xbed65cf21aa2ee98L, 0xd0f2cbb02e3b67c7L,
      0x93536795e3a33e88L, 0xa80c038ccd5ccec8L, 0xb8ad50c6f649af94L,
      0xbce192de8a85b8eaL, 0x17d835b85bbb15f3L, 0x2f2e6163076bcfadL,
      0xde4daaaca71dc9a5L, 0xa6a2506687956571L, 0xad87a3535c49ef28L,
      0x32d892fad841c342L, 0x7127512f72f27cceL, 0xa7f32346f95978e3L,
      0x12e0b01abb051238L, 0x15e034d40fa197aeL, 0x314dffbe0815a3b4L,
      0x027990f029623981L, 0xcadcd4e59ef40c4dL, 0x9abfd8766a33735cL,
      0x0e3ea96b5304a7d0L, 0xad0c42d6fc585992L, 0x187306c89bc215a9L,
      0xd4a60abcf3792b95L, 0xf935451de4f21df2L, 0xa9538f0419755787L,
      0xdb9acddff56ca510L, 0xd06c98cd5c0975ebL, 0xe612a3cb9ecba951L,
      0xc766e62cfcadaf96L, 0xee64435a9752fe72L, 0xa192d576b245165aL,
      0x0a8787bf8ecb74b2L, 0x81b3e73d20b49b6fL, 0x7fa8220ba3b2eceaL,
      0x245731c13ca42499L, 0xb78dbfaf3a8d83bdL, 0xea1ad565322a1a0bL,
      0x60e61c23a3795013L, 0x6606d7e446282b93L, 0x6ca4ecb15c5f91e1L,
      0x9f626da15c9625f3L, 0xe51b38608ef25f57L, 0x958a324ceb064572L };

  // Ported from test vectors in siphash24.c at
  // https://www.131002.net/siphash/siphash24.c
  /** Checks hash24 against the reference vectors for every length from 0 to 63. */
  @Test
  public void testVectors() {
    long k0 = 0x0706050403020100L;
    long k1 = 0x0f0e0d0c0b0a0908L;
    for (int i = 0; i < EXPECTED.length; ++i) {
      byte[] msg = new byte[i];
      for (int j = 0; j < i; ++j) {
        msg[j] = (byte) j;
      }
      Assert.assertEquals(EXPECTED[i], SipHashInline.hash24(k0, k1, msg, 0, msg.length));
    }
  }

  /**
   * Verifies that hashing a message with the 'reversed' flag matches hashing the
   * byte-reversed message without it, in both directions. Also prints the mean
   * nanoseconds per iteration over one million rounds.
   */
  @Test
  public void testReversedHashes() {
    long k0 = 0x0706050403020100L;
    long k1 = 0x0f0e0d0c0b0a0908L;
    byte[] msg = "Too many secrets, Marty!".getBytes(StandardCharsets.UTF_8);
    byte[] rmsg = "!ytraM ,sterces ynam ooT".getBytes(StandardCharsets.UTF_8);
    double ms = 0D;
    for (int i = 0; i < 1000000; i++) {
      long nano = System.nanoTime();
      Assert.assertEquals(SipHashInline.hash24(k0, k1, msg, 0, msg.length, true),
          SipHashInline.hash24(k0, k1, rmsg, 0, rmsg.length, false));
      Assert.assertEquals(SipHashInline.hash24(k0, k1, msg, 0, msg.length, false),
          SipHashInline.hash24(k0, k1, rmsg, 0, rmsg.length, true));
      ms += (System.nanoTime() - nano);
    }
    System.out.println(ms / 1000000.0D);
  }

  /**
   * This is a test to make sure we use a corrected version of SipHashInline which is not
   * subject to negative values errors: the two inputs collide under the broken variant.
   * (NOTE(review): method name keeps its historical typo, "Collission".)
   */
  @Test
  public void testCollission() {
    byte[] b1 = new byte[] { 109, -45, -99, -85, -72, 37, -51, 120, -56, -10, -17, -53, -83, 84, -127, 67 };
    byte[] b2 = new byte[] { 109, -45, -99, -85, -72, 37, -51, 120, -56, 80, 111, 67, -59, 92, 100, 2 };
    long h1 = SipHashInline.hash24(new byte[16], b1);
    long h2 = SipHashInline.hash24(new byte[16], b2);
    Assert.assertTrue(h1 != h2);
  }

  /**
   * Checks that hash24_palindromic(data) equals hash24 over the buffer concatenated
   * with its own reversal, for content lengths 1..100 at a non-zero offset.
   */
  @Test
  public void testHash24_palindromic() {
    // Test for content length varying from 1 to 16 bytes
    Random r = new Random();
    int from = 1;
    int n = 100;
    int offset = 10;
    byte[] data = new byte[n + offset];
    r.nextBytes(data);
    byte[] dblbuf = new byte[2 * n];
    // Allocate random key
    long k0 = r.nextLong();
    long k1 = r.nextLong();
    for (int i = from; i <= n; i++) {
      r.nextBytes(data);
      long palindromic = SipHashInline.hash24_palindromic(k0, k1, data, offset, i);
      // Build data[offset..offset+i) followed by its byte-reversal.
      for (int j = 0; j < i; j++) {
        dblbuf[j] = data[offset + j];
        dblbuf[i + j] = data[offset + (i - 1) - j];
      }
      long hash = SipHashInline.hash24(k0, k1, dblbuf, 0, i * 2);
      Assert.assertEquals(palindromic, hash);
    }
  }

  /** Micro-benchmark: prints the time for 10M palindromic hashes; no assertions. */
  @Test
  public void testHash24_palindromic_perf() {
    Random r = new Random();
    byte[] data = new byte[100];
    r.nextBytes(data);
    // Allocate random key
    long k0 = r.nextLong();
    long k1 = r.nextLong();
    long nano = System.nanoTime();
    for (int i = 0; i < 10000000; i++) {
      long hash = SipHashInline.hash24_palindromic(k0, k1, data, 0, data.length);
    }
    System.out.println((System.nanoTime() - nano) / 1000000.0D);
  }

  /** Micro-benchmark: the explicit concatenate-and-hash baseline for the test above. */
  @Test
  public void testHash24_dbl_perf() {
    Random r = new Random();
    byte[] data = new byte[1000000];
    r.nextBytes(data);
    // Allocate random key
    long k0 = r.nextLong();
    long k1 = r.nextLong();
    long nano = System.nanoTime();
    for (int i = 0; i < 1000; i++) {
      byte[] dbl = new byte[data.length * 2];
      for (int j = 0; j < data.length; j++) {
        dbl[j] = data[j];
        dbl[data.length + j] = data[(data.length - 1) - j];
      }
      long hash = SipHashInline.hash24(k0, k1, dbl, 0, dbl.length);
    }
    System.out.println((System.nanoTime() - nano) / 1000000.0D);
  }

  /**
   * Micro-benchmark: getKey() called two billion times; expect this to take a while.
   * NOTE(review): at 2e9 iterations this dominates suite runtime; consider excluding
   * it from CI runs.
   */
  @Test
  public void testGetKey_perf() {
    byte[] key = new byte[16];
    int n = 2000000000;
    long nano = System.nanoTime();
    for (int i = 0; i < n; i++) {
      long[] sipkey = SipHashInline.getKey(key);
    }
    System.out.println((System.nanoTime() - nano) / 1000000.0D);
  }

  /** Micro-benchmark: 1000 hashes of a random 256-byte buffer. */
  @Test
  public void testPerf() {
    byte[] buf = new byte[256];
    SecureRandom sr = new SecureRandom();
    sr.nextBytes(buf);
    long nano = System.nanoTime();
    for (int i = 0; i < 1000; i++) {
      long hash = SipHashInline.hash24(0L, 1L, buf, 0, 256);
    }
    System.out.println((System.nanoTime() - nano) / 1000000.0D);
  }
}
| |
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.rds.model;
import java.io.Serializable;
import com.amazonaws.AmazonWebServiceRequest;
/**
* Container for the parameters to the {@link com.amazonaws.services.rds.AmazonRDS#modifyDBParameterGroup(ModifyDBParameterGroupRequest) ModifyDBParameterGroup operation}.
* <p>
* Modifies the parameters of a DB parameter group. To modify more than
* one parameter, submit a list of the following:
* <code>ParameterName</code> ,
* <code>ParameterValue</code> , and
* <code>ApplyMethod</code> . A maximum of 20 parameters can be modified
* in a single request.
* </p>
* <p>
* <b>NOTE:</b> Changes to dynamic parameters are applied immediately.
* Changes to static parameters require a reboot without failover to the
* DB instance associated with the parameter group before the change can
* take effect.
* </p>
* <p>
* <b>IMPORTANT:</b> After you modify a DB parameter group, you should
* wait at least 5 minutes before creating your first DB instance that
* uses that DB parameter group as the default parameter group. This
* allows Amazon RDS to fully complete the modify action before the
* parameter group is used as the default for a new DB instance. This is
* especially important for parameters that are critical when creating
* the default database for a DB instance, such as the character set for
* the default database defined by the character_set_database parameter.
* You can use the Parameter Groups option of the Amazon RDS console or
* the DescribeDBParameters command to verify that your DB parameter
* group has been created or modified.
* </p>
*
* @see com.amazonaws.services.rds.AmazonRDS#modifyDBParameterGroup(ModifyDBParameterGroupRequest)
*/
public class ModifyDBParameterGroupRequest extends AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * The name of the DB parameter group. <p>Constraints: <ul> <li>Must be
     * the name of an existing DB parameter group</li> <li>Must be 1 to 255
     * alphanumeric characters</li> <li>First character must be a letter</li>
     * <li>Cannot end with a hyphen or contain two consecutive hyphens</li>
     * </ul>
     */
    private String dBParameterGroupName;

    /**
     * An array of parameter names, values, and the apply method for the
     * parameter update. At least one parameter name, value, and apply method
     * must be supplied; subsequent arguments are optional. A maximum of 20
     * parameters may be modified in a single request. <p>Valid Values (for
     * the application method): <code>immediate | pending-reboot</code>
     * <note>You can use the immediate value with dynamic parameters only.
     * You can use the pending-reboot value for both dynamic and static
     * parameters, and changes are applied when you reboot the DB instance
     * without failover. </note>
     */
    private com.amazonaws.internal.ListWithAutoConstructFlag<Parameter> parameters;

    /**
     * Default constructor for a new ModifyDBParameterGroupRequest object. Callers should use the
     * setter or fluent setter (with...) methods to initialize this object after creating it.
     */
    public ModifyDBParameterGroupRequest() {}

    /**
     * Constructs a new ModifyDBParameterGroupRequest object with the required members.
     * Callers should use the setter or fluent setter (with...) methods to
     * initialize any additional object members.
     *
     * @param dBParameterGroupName The name of an existing DB parameter group
     *        (1 to 255 alphanumeric characters, first character a letter, no
     *        trailing hyphen or consecutive hyphens).
     * @param parameters The parameter names, values, and apply methods for the
     *        update; at most 20 parameters per request. Valid apply methods:
     *        <code>immediate | pending-reboot</code>.
     */
    public ModifyDBParameterGroupRequest(String dBParameterGroupName, java.util.List<Parameter> parameters) {
        setDBParameterGroupName(dBParameterGroupName);
        setParameters(parameters);
    }

    /**
     * Returns the name of the DB parameter group.
     *
     * @return The name of the DB parameter group.
     */
    public String getDBParameterGroupName() {
        return dBParameterGroupName;
    }

    /**
     * Sets the name of the DB parameter group. <p>Constraints: must be the
     * name of an existing DB parameter group, 1 to 255 alphanumeric
     * characters, first character a letter; cannot end with a hyphen or
     * contain two consecutive hyphens.
     *
     * @param dBParameterGroupName The name of the DB parameter group.
     */
    public void setDBParameterGroupName(String dBParameterGroupName) {
        this.dBParameterGroupName = dBParameterGroupName;
    }

    /**
     * Fluent variant of {@link #setDBParameterGroupName(String)}.
     *
     * @param dBParameterGroupName The name of the DB parameter group.
     * @return A reference to this updated object so that method calls can be
     *         chained together.
     */
    public ModifyDBParameterGroupRequest withDBParameterGroupName(String dBParameterGroupName) {
        this.dBParameterGroupName = dBParameterGroupName;
        return this;
    }

    /**
     * Returns the parameter names, values, and apply methods for the update.
     * Never returns <code>null</code>: an empty auto-construct list is created
     * lazily on first access.
     *
     * @return The parameters for the update.
     */
    public java.util.List<Parameter> getParameters() {
        if (parameters == null) {
            parameters = new com.amazonaws.internal.ListWithAutoConstructFlag<Parameter>();
            parameters.setAutoConstruct(true);
        }
        return parameters;
    }

    /**
     * Sets the parameter names, values, and apply methods for the update.
     * Stores a defensive copy of the supplied collection.
     *
     * @param parameters The parameters for the update, or <code>null</code>
     *        to clear the list.
     */
    public void setParameters(java.util.Collection<Parameter> parameters) {
        if (parameters == null) {
            this.parameters = null;
            return;
        }
        com.amazonaws.internal.ListWithAutoConstructFlag<Parameter> parametersCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<Parameter>(parameters.size());
        parametersCopy.addAll(parameters);
        this.parameters = parametersCopy;
    }

    /**
     * Appends the given parameters to the existing list (if any).
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if
     * any). Use {@link #setParameters(java.util.Collection)} or {@link
     * #withParameters(java.util.Collection)} if you want to override the
     * existing values.
     *
     * @param parameters The parameters to append.
     * @return A reference to this updated object so that method calls can be
     *         chained together.
     */
    public ModifyDBParameterGroupRequest withParameters(Parameter... parameters) {
        // Bug fix: the original checked getParameters() == null, which is
        // always false because the getter lazily constructs an empty list —
        // making the capacity-pre-sizing branch unreachable. Check the field.
        if (this.parameters == null) setParameters(new java.util.ArrayList<Parameter>(parameters.length));
        for (Parameter value : parameters) {
            getParameters().add(value);
        }
        return this;
    }

    /**
     * Replaces the parameter list with a copy of the given collection.
     *
     * @param parameters The parameters for the update, or <code>null</code>
     *        to clear the list.
     * @return A reference to this updated object so that method calls can be
     *         chained together.
     */
    public ModifyDBParameterGroupRequest withParameters(java.util.Collection<Parameter> parameters) {
        if (parameters == null) {
            this.parameters = null;
        } else {
            com.amazonaws.internal.ListWithAutoConstructFlag<Parameter> parametersCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<Parameter>(parameters.size());
            parametersCopy.addAll(parameters);
            this.parameters = parametersCopy;
        }
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getDBParameterGroupName() != null) sb.append("DBParameterGroupName: " + getDBParameterGroupName() + ",");
        if (getParameters() != null) sb.append("Parameters: " + getParameters() );
        sb.append("}");
        return sb.toString();
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getDBParameterGroupName() == null) ? 0 : getDBParameterGroupName().hashCode());
        hashCode = prime * hashCode + ((getParameters() == null) ? 0 : getParameters().hashCode());
        return hashCode;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (obj == null) return false;
        if (obj instanceof ModifyDBParameterGroupRequest == false) return false;
        ModifyDBParameterGroupRequest other = (ModifyDBParameterGroupRequest)obj;
        if (other.getDBParameterGroupName() == null ^ this.getDBParameterGroupName() == null) return false;
        if (other.getDBParameterGroupName() != null && other.getDBParameterGroupName().equals(this.getDBParameterGroupName()) == false) return false;
        if (other.getParameters() == null ^ this.getParameters() == null) return false;
        if (other.getParameters() != null && other.getParameters().equals(this.getParameters()) == false) return false;
        return true;
    }

    @Override
    public ModifyDBParameterGroupRequest clone() {
        return (ModifyDBParameterGroupRequest) super.clone();
    }
}
| |
/*
Copyright 2006 Jerry Huxtable
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.jhlabs.image;
import java.awt.*;
import java.awt.image.*;
/**
* An abstract superclass for filters which distort images in some way. The subclass only needs to override
* two methods to provide the mapping between source and destination pixels.
*/
public abstract class TransformFilter extends AbstractBufferedImageOp {
/**
 * Treat pixels off the edge as zero.
 */
public final static int ZERO = 0;
/**
 * Clamp pixels to the image edges.
 */
public final static int CLAMP = 1;
/**
 * Wrap pixels off the edge onto the opposite edge.
 */
public final static int WRAP = 2;
/**
 * Clamp pixels RGB to the image edges, but zero the alpha. This prevents gray borders on your image.
 */
public final static int RGB_CLAMP = 3;
/**
 * Use nearest-neighbour interpolation.
 */
public final static int NEAREST_NEIGHBOUR = 0;
/**
 * Use bilinear interpolation.
 */
public final static int BILINEAR = 1;
/**
 * The action to take for pixels off the image edge.
 */
protected int edgeAction = RGB_CLAMP;
/**
 * The type of interpolation to use.
 */
protected int interpolation = BILINEAR;
/**
 * The output image rectangle.
 */
protected Rectangle transformedSpace;
/**
 * The input image rectangle.
 */
protected Rectangle originalSpace;
/**
 * Set the action to perform for pixels off the edge of the image.
 * @param edgeAction one of ZERO, CLAMP, WRAP or RGB_CLAMP
 * @see #getEdgeAction
 */
public void setEdgeAction(int edgeAction) {
this.edgeAction = edgeAction;
}
/**
 * Get the action to perform for pixels off the edge of the image.
 * @return one of ZERO, CLAMP, WRAP or RGB_CLAMP
 * @see #setEdgeAction
 */
public int getEdgeAction() {
return edgeAction;
}
/**
 * Set the type of interpolation to perform.
 * @param interpolation one of NEAREST_NEIGHBOUR or BILINEAR
 * @see #getInterpolation
 */
public void setInterpolation(int interpolation) {
this.interpolation = interpolation;
}
/**
 * Get the type of interpolation to perform.
 * @return one of NEAREST_NEIGHBOUR or BILINEAR
 * @see #setInterpolation
 */
public int getInterpolation() {
return interpolation;
}
/**
 * Inverse transform a point. This method needs to be overridden by all subclasses.
 * @param x the X position of the pixel in the output image
 * @param y the Y position of the pixel in the output image
 * @param out the position of the pixel in the input image (written by this method)
 */
protected abstract void transformInverse(int x, int y, float[] out);
/**
 * Forward transform a rectangle. Used to determine the size of the output image.
 * The default implementation leaves the rectangle unchanged (output has the same
 * size as the input); subclasses that resize the image override this.
 * @param rect the rectangle to transform (modified in place)
 */
protected void transformSpace(Rectangle rect) {
}
/**
 * Filter the image by inverse-mapping each destination pixel back into the
 * source image and sampling there, using the configured interpolation mode
 * and edge action.
 * @param src the source image
 * @param dst the destination image, or null to allocate a compatible one
 * @return the filtered destination image
 */
public BufferedImage filter( BufferedImage src, BufferedImage dst ) {
int width = src.getWidth();
int height = src.getHeight();
// int type = src.getType();
// WritableRaster srcRaster = src.getRaster();
originalSpace = new Rectangle(0, 0, width, height);
transformedSpace = new Rectangle(0, 0, width, height);
// Let the subclass compute the output bounds.
transformSpace(transformedSpace);
if ( dst == null ) {
ColorModel dstCM = src.getColorModel();
dst = new BufferedImage(dstCM, dstCM.createCompatibleWritableRaster(transformedSpace.width, transformedSpace.height), dstCM.isAlphaPremultiplied(), null);
}
// WritableRaster dstRaster = dst.getRaster();
int[] inPixels = getRGB( src, 0, 0, width, height, null );
if ( interpolation == NEAREST_NEIGHBOUR )
return filterPixelsNN( dst, width, height, inPixels, transformedSpace );
int srcWidth = width;
int srcHeight = height;
int srcWidth1 = width-1;
int srcHeight1 = height-1;
int outWidth = transformedSpace.width;
int outHeight = transformedSpace.height;
int outX, outY;
// int index = 0;
int[] outPixels = new int[outWidth];
outX = transformedSpace.x;
outY = transformedSpace.y;
float[] out = new float[2];
// Bilinear path: for each output pixel, find its fractional source position,
// fetch the four surrounding source pixels and blend them by the fractional
// weights.
for (int y = 0; y < outHeight; y++) {
for (int x = 0; x < outWidth; x++) {
transformInverse(outX+x, outY+y, out);
// floor() (not a cast) so negative coordinates round toward -infinity
int srcX = (int)Math.floor( out[0] );
int srcY = (int)Math.floor( out[1] );
float xWeight = out[0]-srcX;
float yWeight = out[1]-srcY;
int nw, ne, sw, se;
if ( srcX >= 0 && srcX < srcWidth1 && srcY >= 0 && srcY < srcHeight1) {
// Easy case, all corners are in the image
int i = srcWidth*srcY + srcX;
nw = inPixels[i];
ne = inPixels[i+1];
sw = inPixels[i+srcWidth];
se = inPixels[i+srcWidth+1];
} else {
// Some of the corners are off the image
nw = getPixel( inPixels, srcX, srcY, srcWidth, srcHeight );
ne = getPixel( inPixels, srcX+1, srcY, srcWidth, srcHeight );
sw = getPixel( inPixels, srcX, srcY+1, srcWidth, srcHeight );
se = getPixel( inPixels, srcX+1, srcY+1, srcWidth, srcHeight );
}
outPixels[x] = ImageMath.bilinearInterpolate(xWeight, yWeight, nw, ne, sw, se);
}
// Write the destination one scanline at a time.
setRGB( dst, 0, y, transformedSpace.width, 1, outPixels );
}
return dst;
}
/**
 * Sample a pixel, applying the configured edge action when (x, y) falls
 * outside the image bounds.
 * @param pixels the source pixels (width*height ARGB values, row-major)
 * @param x the X sample position (may be off-image)
 * @param y the Y sample position (may be off-image)
 * @param width the source image width
 * @param height the source image height
 * @return the sampled ARGB value
 */
final private int getPixel( int[] pixels, int x, int y, int width, int height ) {
if (x < 0 || x >= width || y < 0 || y >= height) {
switch (edgeAction) {
case ZERO:
default:
return 0;
case WRAP:
return pixels[(ImageMath.mod(y, height) * width) + ImageMath.mod(x, width)];
case CLAMP:
return pixels[(ImageMath.clamp(y, 0, height-1) * width) + ImageMath.clamp(x, 0, width-1)];
case RGB_CLAMP:
// Clamp the RGB but zero the alpha so edges fade out instead of smearing.
return pixels[(ImageMath.clamp(y, 0, height-1) * width) + ImageMath.clamp(x, 0, width-1)] & 0x00ffffff;
}
}
return pixels[ y*width+x ];
}
/**
 * Nearest-neighbour variant of the inverse-mapping loop in {@link #filter}.
 * @param dst the destination image
 * @param width the source image width
 * @param height the source image height
 * @param inPixels the source pixels (row-major ARGB)
 * @param transformedSpace the output bounds
 * @return the filtered destination image
 */
protected BufferedImage filterPixelsNN( BufferedImage dst, int width, int height, int[] inPixels, Rectangle transformedSpace ) {
int srcWidth = width;
int srcHeight = height;
int outWidth = transformedSpace.width;
int outHeight = transformedSpace.height;
int outX, outY, srcX, srcY;
int[] outPixels = new int[outWidth];
outX = transformedSpace.x;
outY = transformedSpace.y;
// int[] rgb = new int[4];
float[] out = new float[2];
for (int y = 0; y < outHeight; y++) {
for (int x = 0; x < outWidth; x++) {
transformInverse(outX+x, outY+y, out);
srcX = (int)out[0];
srcY = (int)out[1];
// int casting rounds towards zero, so we check out[0] < 0, not srcX < 0
if (out[0] < 0 || srcX >= srcWidth || out[1] < 0 || srcY >= srcHeight) {
int p;
switch (edgeAction) {
case ZERO:
default:
p = 0;
break;
case WRAP:
p = inPixels[(ImageMath.mod(srcY, srcHeight) * srcWidth) + ImageMath.mod(srcX, srcWidth)];
break;
case CLAMP:
p = inPixels[(ImageMath.clamp(srcY, 0, srcHeight-1) * srcWidth) + ImageMath.clamp(srcX, 0, srcWidth-1)];
break;
case RGB_CLAMP:
// Last case: no break needed.
p = inPixels[(ImageMath.clamp(srcY, 0, srcHeight-1) * srcWidth) + ImageMath.clamp(srcX, 0, srcWidth-1)] & 0x00ffffff;
}
outPixels[x] = p;
} else {
int i = srcWidth*srcY + srcX;
// rgb[0] = inPixels[i];
outPixels[x] = inPixels[i];
}
}
setRGB( dst, 0, y, transformedSpace.width, 1, outPixels );
}
return dst;
}
}
| |
/*
* Copyright 2004 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import com.google.javascript.jscomp.CompilerOptions.LanguageMode;
import com.google.javascript.rhino.IR;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
import com.google.javascript.rhino.jstype.TernaryValue;
import junit.framework.TestCase;
import java.util.Collection;
import java.util.Set;
/**
* Tests for NodeUtil
*/
public class NodeUtilTest extends TestCase {
private static Node parse(String js) {
    // Parse as ES5 test code; a parse error fails the calling test via the
    // error-count assertion.
    Compiler comp = new Compiler();
    comp.initCompilerOptionsIfTesting();
    comp.getOptions().setLanguageIn(LanguageMode.ECMASCRIPT5);
    Node root = comp.parseTestCode(js);
    assertEquals(0, comp.getErrorCount());
    return root;
}
static Node getNode(String js) {
    // Wrap the expression in "var a=(...)" and dig down to the initializer:
    // SCRIPT -> VAR -> NAME -> <expression>.
    Node script = parse("var a=(" + js + ");");
    Node varNode = script.getFirstChild();
    Node nameNode = varNode.getFirstChild();
    return nameNode.getFirstChild();
}
public void testIsLiteralOrConstValue() {
// Numbers, strings, null/undefined are literal AND immutable; composite
// literals (arrays, objects, regexes) are literal but mutable; anything
// containing a name or a call is not a literal at all.
assertLiteralAndImmutable(getNode("10"));
assertLiteralAndImmutable(getNode("-10"));
assertLiteralButNotImmutable(getNode("[10, 20]"));
assertLiteralButNotImmutable(getNode("{'a': 20}"));
assertLiteralButNotImmutable(getNode("[10, , 1.0, [undefined], 'a']"));
assertLiteralButNotImmutable(getNode("/abc/"));
assertLiteralAndImmutable(getNode("\"string\""));
assertLiteralAndImmutable(getNode("'aaa'"));
assertLiteralAndImmutable(getNode("null"));
assertLiteralAndImmutable(getNode("undefined"));
assertLiteralAndImmutable(getNode("void 0"));
assertNotLiteral(getNode("abc"));
assertNotLiteral(getNode("[10, foo(), 20]"));
assertNotLiteral(getNode("foo()"));
assertNotLiteral(getNode("c + d"));
assertNotLiteral(getNode("{'a': foo()}"));
assertNotLiteral(getNode("void foo()"));
}
public void assertLiteralAndImmutable(Node node) {
    // Must be a literal under both strictness flags, and also immutable.
    assertTrue(NodeUtil.isLiteralValue(node, true));
    assertTrue(NodeUtil.isLiteralValue(node, false));
    assertTrue(NodeUtil.isImmutableValue(node));
}
public void assertLiteralButNotImmutable(Node node) {
    // A literal under both strictness flags, but a mutable one.
    assertTrue(NodeUtil.isLiteralValue(node, true));
    assertTrue(NodeUtil.isLiteralValue(node, false));
    assertFalse(NodeUtil.isImmutableValue(node));
}
public void assertNotLiteral(Node node) {
    // Not a literal under either strictness flag, and not immutable.
    assertFalse(NodeUtil.isLiteralValue(node, true));
    assertFalse(NodeUtil.isLiteralValue(node, false));
    assertFalse(NodeUtil.isImmutableValue(node));
}
public void testGetBooleanValue() {
// getPureBooleanValue folds side-effect-free expressions to TRUE/FALSE and
// returns UNKNOWN for names, calls, and anything with side effects.
assertPureBooleanTrue("true");
assertPureBooleanTrue("10");
assertPureBooleanTrue("'0'");
assertPureBooleanTrue("/a/");
assertPureBooleanTrue("{}");
assertPureBooleanTrue("[]");
assertPureBooleanFalse("false");
assertPureBooleanFalse("null");
assertPureBooleanFalse("0");
assertPureBooleanFalse("''");
assertPureBooleanFalse("undefined");
assertPureBooleanFalse("void 0");
assertPureBooleanUnknown("void foo()");
assertPureBooleanUnknown("b");
assertPureBooleanUnknown("-'0.0'");
// Known but getBooleanValue return false for expressions with side-effects
assertPureBooleanUnknown("{a:foo()}");
assertPureBooleanUnknown("[foo()]");
}
private void assertPureBooleanTrue(String js) {
    // Side-effect-free boolean folding must yield TRUE.
    TernaryValue actual = NodeUtil.getPureBooleanValue(getNode(js));
    assertEquals(TernaryValue.TRUE, actual);
}
private void assertPureBooleanFalse(String js) {
    // Side-effect-free boolean folding must yield FALSE.
    TernaryValue actual = NodeUtil.getPureBooleanValue(getNode(js));
    assertEquals(TernaryValue.FALSE, actual);
}
private void assertPureBooleanUnknown(String js) {
    // Side-effect-free boolean folding must give up with UNKNOWN.
    TernaryValue actual = NodeUtil.getPureBooleanValue(getNode(js));
    assertEquals(TernaryValue.UNKNOWN, actual);
}
public void testGetExpressionBooleanValue() {
// getImpureBooleanValue also looks through assignments, comma expressions
// and "!", so expressions with side effects can still fold to TRUE/FALSE.
assertImpureBooleanTrue("a=true");
assertImpureBooleanFalse("a=false");
assertImpureBooleanTrue("a=(false,true)");
assertImpureBooleanFalse("a=(true,false)");
assertImpureBooleanTrue("a=(false || true)");
assertImpureBooleanFalse("a=(true && false)");
assertImpureBooleanTrue("a=!(true && false)");
assertImpureBooleanTrue("a,true");
assertImpureBooleanFalse("a,false");
assertImpureBooleanTrue("true||false");
assertImpureBooleanFalse("false||false");
assertImpureBooleanTrue("true&&true");
assertImpureBooleanFalse("true&&false");
assertImpureBooleanFalse("!true");
assertImpureBooleanTrue("!false");
assertImpureBooleanTrue("!''");
// Assignment ops other than ASSIGN are unknown.
assertImpureBooleanUnknown("a *= 2");
// Complex expressions that contain anything other then "=", ",", or "!" are
// unknown.
assertImpureBooleanUnknown("2 + 2");
assertImpureBooleanTrue("a=1");
assertImpureBooleanTrue("a=/a/");
assertImpureBooleanTrue("a={}");
assertImpureBooleanTrue("true");
assertImpureBooleanTrue("10");
assertImpureBooleanTrue("'0'");
assertImpureBooleanTrue("/a/");
assertImpureBooleanTrue("{}");
assertImpureBooleanTrue("[]");
assertImpureBooleanFalse("false");
assertImpureBooleanFalse("null");
assertImpureBooleanFalse("0");
assertImpureBooleanFalse("''");
assertImpureBooleanFalse("undefined");
assertImpureBooleanFalse("void 0");
assertImpureBooleanFalse("void foo()");
// Ternaries fold only when both arms agree.
assertImpureBooleanTrue("a?true:true");
assertImpureBooleanFalse("a?false:false");
assertImpureBooleanUnknown("a?true:false");
assertImpureBooleanUnknown("a?true:foo()");
assertImpureBooleanUnknown("b");
assertImpureBooleanUnknown("-'0.0'");
assertImpureBooleanTrue("{a:foo()}");
assertImpureBooleanTrue("[foo()]");
}
private void assertImpureBooleanTrue(String js) {
    // Folding that tolerates side effects must yield TRUE.
    TernaryValue actual = NodeUtil.getImpureBooleanValue(getNode(js));
    assertEquals(TernaryValue.TRUE, actual);
}
private void assertImpureBooleanFalse(String js) {
    // Folding that tolerates side effects must yield FALSE.
    TernaryValue actual = NodeUtil.getImpureBooleanValue(getNode(js));
    assertEquals(TernaryValue.FALSE, actual);
}
private void assertImpureBooleanUnknown(String js) {
    // Folding that tolerates side effects must give up with UNKNOWN.
    TernaryValue actual = NodeUtil.getImpureBooleanValue(getNode(js));
    assertEquals(TernaryValue.UNKNOWN, actual);
}
public void testGetStringValue() {
// getStringValue mirrors JS ToString for foldable constants and returns
// null where the string form cannot be determined statically (regexes,
// arbitrary names).
assertEquals("true", NodeUtil.getStringValue(getNode("true")));
assertEquals("10", NodeUtil.getStringValue(getNode("10")));
assertEquals("1", NodeUtil.getStringValue(getNode("1.0")));
assertEquals("0", NodeUtil.getStringValue(getNode("'0'")));
assertEquals(null, NodeUtil.getStringValue(getNode("/a/")));
assertEquals("[object Object]", NodeUtil.getStringValue(getNode("{}")));
assertEquals("", NodeUtil.getStringValue(getNode("[]")));
assertEquals("false", NodeUtil.getStringValue(getNode("false")));
assertEquals("null", NodeUtil.getStringValue(getNode("null")));
assertEquals("0", NodeUtil.getStringValue(getNode("0")));
assertEquals("", NodeUtil.getStringValue(getNode("''")));
assertEquals("undefined", NodeUtil.getStringValue(getNode("undefined")));
assertEquals("undefined", NodeUtil.getStringValue(getNode("void 0")));
assertEquals("undefined", NodeUtil.getStringValue(getNode("void foo()")));
assertEquals("NaN", NodeUtil.getStringValue(getNode("NaN")));
assertEquals("Infinity", NodeUtil.getStringValue(getNode("Infinity")));
assertEquals(null, NodeUtil.getStringValue(getNode("x")));
}
public void testGetArrayStringValue() {
// Array literals stringify like Array.prototype.join: holes, null and
// undefined become empty, and any non-foldable element makes the whole
// result null.
assertEquals("", NodeUtil.getStringValue(getNode("[]")));
assertEquals("", NodeUtil.getStringValue(getNode("['']")));
assertEquals("", NodeUtil.getStringValue(getNode("[null]")));
assertEquals("", NodeUtil.getStringValue(getNode("[undefined]")));
assertEquals("", NodeUtil.getStringValue(getNode("[void 0]")));
assertEquals("NaN", NodeUtil.getStringValue(getNode("[NaN]")));
assertEquals(",", NodeUtil.getStringValue(getNode("[,'']")));
assertEquals(",,", NodeUtil.getStringValue(getNode("[[''],[''],['']]")));
assertEquals("1,2", NodeUtil.getStringValue(getNode("[[1.0],[2.0]]")));
assertEquals(null, NodeUtil.getStringValue(getNode("[a]")));
assertEquals(null, NodeUtil.getStringValue(getNode("[1,a]")));
}
public void testIsObjectLiteralKey1() throws Exception {
// Only the property entries of an object literal (including get/set
// members) count as object-literal keys; the literal itself, names, and
// plain values do not.
testIsObjectLiteralKey(
parseExpr("({})"), false);
testIsObjectLiteralKey(
parseExpr("a"), false);
testIsObjectLiteralKey(
parseExpr("'a'"), false);
testIsObjectLiteralKey(
parseExpr("1"), false);
testIsObjectLiteralKey(
parseExpr("({a: 1})").getFirstChild(), true);
testIsObjectLiteralKey(
parseExpr("({1: 1})").getFirstChild(), true);
testIsObjectLiteralKey(
parseExpr("({get a(){}})").getFirstChild(), true);
testIsObjectLiteralKey(
parseExpr("({set a(b){}})").getFirstChild(), true);
}
private Node parseExpr(String js) {
    // Parse with explicit ES5 options. Unlike parse(), this does not assert
    // on the error count.
    CompilerOptions opts = new CompilerOptions();
    opts.setLanguageIn(LanguageMode.ECMASCRIPT5);
    Compiler comp = new Compiler();
    comp.initOptions(opts);
    // SCRIPT -> EXPR_RESULT -> expression
    return comp.parseTestCode(js).getFirstChild().getFirstChild();
}
private void testIsObjectLiteralKey(Node node, boolean expected) {
    // Ask NodeUtil whether the node is a key of its own parent.
    boolean actual = NodeUtil.isObjectLitKey(node, node.getParent());
    assertEquals(expected, actual);
}
public void testGetFunctionName1() throws Exception {
    // function name(){}  ->  "name"
    Node script = new Compiler().parseTestCode("function name(){}");
    testGetFunctionName(script.getFirstChild(), "name");
}
public void testGetFunctionName2() throws Exception {
    // var name = function(){}  ->  "name"
    Node nameNode = new Compiler().parseTestCode("var name = function(){}")
        .getFirstChild().getFirstChild();
    testGetFunctionName(nameNode.getFirstChild(), "name");
}
public void testGetFunctionName3() throws Exception {
    // qualified.name = function(){}  ->  "qualified.name"
    Node assign = new Compiler().parseTestCode("qualified.name = function(){}")
        .getFirstChild().getFirstChild();
    testGetFunctionName(assign.getLastChild(), "qualified.name");
}
public void testGetFunctionName4() throws Exception {
    // var name2 = function name1(){}  ->  the outer "name2" wins
    Node nameNode = new Compiler().parseTestCode("var name2 = function name1(){}")
        .getFirstChild().getFirstChild();
    testGetFunctionName(nameNode.getFirstChild(), "name2");
}
public void testGetFunctionName5() throws Exception {
  // A named function expression assigned to a qualified name reports the
  // qualified name rather than the inner function name.
  Compiler compiler = new Compiler();
  Node assign = compiler.parseTestCode("qualified.name2 = function name1(){}")
      .getFirstChild().getFirstChild();
  testGetFunctionName(assign.getLastChild(), "qualified.name2");
}
/** Checks the node is a FUNCTION and that NodeUtil reports {@code name} for it. */
private void testGetFunctionName(Node function, String name) {
  assertEquals(Token.FUNCTION, function.getType());
  String actual = NodeUtil.getFunctionName(function);
  assertEquals(name, actual);
}
public void testContainsFunctionDeclaration() {
  // containsFunction finds FUNCTION nodes anywhere in the subtree, whether
  // declarations or expressions; plain calls don't count.
  assertTrue(NodeUtil.containsFunction(getNode("function foo(){}")));
  assertTrue(NodeUtil.containsFunction(getNode("(b?function(){}:null)")));
  assertFalse(NodeUtil.containsFunction(getNode("(b?foo():null)")));
  assertFalse(NodeUtil.containsFunction(getNode("foo()")));
}
/** Asserts mayHaveSideEffects() for the first statement of the parsed source. */
private void assertSideEffect(boolean se, String js) {
  Node statement = parse(js).getFirstChild();
  assertEquals(se, NodeUtil.mayHaveSideEffects(statement));
}
/**
 * Like {@link #assertSideEffect(boolean, String)}, but with a compiler whose
 * "global RegExp properties referenced" bit is set explicitly.
 */
private void assertSideEffect(boolean se, String js, boolean globalRegExp) {
  Compiler compiler = new Compiler();
  compiler.setHasRegExpGlobalReferences(globalRegExp);
  Node statement = parse(js).getFirstChild();
  assertEquals(se, NodeUtil.mayHaveSideEffects(statement, compiler));
}
// Exercises NodeUtil.mayHaveSideEffects via assertSideEffect.
public void testMayHaveSideEffects() {
// Assignments, increments/decrements and calls are side-effectful, even
// when nested inside larger expressions.
assertSideEffect(true, "i++");
assertSideEffect(true, "[b, [a, i++]]");
assertSideEffect(true, "i=3");
assertSideEffect(true, "[0, i=3]");
assertSideEffect(true, "b()");
assertSideEffect(true, "[1, b()]");
assertSideEffect(true, "b.b=4");
assertSideEffect(true, "b.b--");
assertSideEffect(true, "i--");
assertSideEffect(true, "a[0][i=4]");
assertSideEffect(true, "a += 3");
assertSideEffect(true, "a, b, z += 4");
assertSideEffect(true, "a ? c : d++");
assertSideEffect(true, "a + c++");
assertSideEffect(true, "a + c - d()");
assertSideEffect(true, "a + c - d()");
// Statements: declarations and control structures are treated as effects;
// an if whose branch is a pure read is not.
assertSideEffect(true, "function foo() {}");
assertSideEffect(true, "while(true);");
assertSideEffect(true, "if(true){a()}");
assertSideEffect(false, "if(true){a}");
// Function expressions are just values here — their bodies don't execute.
assertSideEffect(false, "(function() { })");
assertSideEffect(false, "(function() { i++ })");
assertSideEffect(false, "[function a(){}]");
// Pure reads, literals and operator combinations of them.
assertSideEffect(false, "a");
assertSideEffect(false, "[b, c [d, [e]]]");
assertSideEffect(false, "({a: x, b: y, c: z})");
assertSideEffect(false, "/abc/gi");
assertSideEffect(false, "'a'");
assertSideEffect(false, "0");
assertSideEffect(false, "a + c");
assertSideEffect(false, "'c' + a[0]");
assertSideEffect(false, "a[0][1]");
assertSideEffect(false, "'a' + c");
assertSideEffect(false, "'a' + a.name");
assertSideEffect(false, "1, 2, 3");
assertSideEffect(false, "a, b, 3");
assertSideEffect(false, "(function(a, b) { })");
assertSideEffect(false, "a ? c : d");
assertSideEffect(false, "'1' + navigator.userAgent");
// Known constructors are effect-free unless an argument has effects;
// unknown constructors are conservatively side-effectful.
assertSideEffect(false, "new RegExp('foobar', 'i')");
assertSideEffect(true, "new RegExp(SomethingWacky(), 'i')");
assertSideEffect(false, "new Array()");
assertSideEffect(false, "new Array");
assertSideEffect(false, "new Array(4)");
assertSideEffect(false, "new Array('a', 'b', 'c')");
assertSideEffect(true, "new SomeClassINeverHeardOf()");
assertSideEffect(true, "new SomeClassINeverHeardOf()");
// Assigning to a property of a freshly-created local object is harmless;
// assigning to a property of an escaping/unknown object is not.
assertSideEffect(false, "({}).foo = 4");
assertSideEffect(false, "([]).foo = 4");
assertSideEffect(false, "(function() {}).foo = 4");
assertSideEffect(true, "this.foo = 4");
assertSideEffect(true, "a.foo = 4");
assertSideEffect(true, "(function() { return n; })().foo = 4");
assertSideEffect(true, "([]).foo = bar()");
// void / special values: only the operand's effects matter.
assertSideEffect(false, "undefined");
assertSideEffect(false, "void 0");
assertSideEffect(true, "void foo()");
assertSideEffect(false, "-Infinity");
assertSideEffect(false, "Infinity");
assertSideEffect(false, "NaN");
assertSideEffect(false, "({}||[]).foo = 2;");
assertSideEffect(false, "(true ? {} : []).foo = 2;");
assertSideEffect(false, "({},[]).foo = 2;");
assertSideEffect(true, "delete a.b");
}
// Side-effect modeling of well-known vs. unknown object methods.
public void testObjectMethodSideEffects() {
// "toString" and "valueOf" are assumed to be side-effect free
assertSideEffect(false, "o.toString()");
assertSideEffect(false, "o.valueOf()");
// other methods depend on the extern definitions
assertSideEffect(true, "o.watch()");
}
// RegExp-related side effects. The boolean third argument of
// assertSideEffect is whether the compilation references the global RegExp
// properties (RegExp.$1 etc.), which makes regex-evaluating calls effectful.
public void testRegExpSideEffect() {
// A RegExp Object by itself doesn't have any side-effects
assertSideEffect(false, "/abc/gi", true);
assertSideEffect(false, "/abc/gi", false);
// RegExp instance methods have global side-effects, so whether they are
// considered side-effect free depends on whether the global properties
// are referenced.
assertSideEffect(true, "(/abc/gi).test('')", true);
assertSideEffect(false, "(/abc/gi).test('')", false);
assertSideEffect(true, "(/abc/gi).test(a)", true);
assertSideEffect(false, "(/abc/gi).test(b)", false);
assertSideEffect(true, "(/abc/gi).exec('')", true);
assertSideEffect(false, "(/abc/gi).exec('')", false);
// Some RegExp object method that may have side-effects.
assertSideEffect(true, "(/abc/gi).foo('')", true);
assertSideEffect(true, "(/abc/gi).foo('')", false);
// Try the string RegExp ops.
assertSideEffect(true, "''.match('a')", true);
assertSideEffect(false, "''.match('a')", false);
assertSideEffect(true, "''.match(/(a)/)", true);
assertSideEffect(false, "''.match(/(a)/)", false);
assertSideEffect(true, "''.replace('a')", true);
assertSideEffect(false, "''.replace('a')", false);
assertSideEffect(true, "''.search('a')", true);
assertSideEffect(false, "''.search('a')", false);
assertSideEffect(true, "''.split('a')", true);
assertSideEffect(false, "''.split('a')", false);
// Some non-RegExp string op that may have side-effects.
assertSideEffect(true, "''.foo('a')", true);
assertSideEffect(true, "''.foo('a')", false);
// 'a' might be a RegExp object with the 'g' flag, in which case
// the state might change by running any of the string ops.
// Specifically, using these methods resets the "lastIndex" if used
// in combination with a RegExp instance "exec" method.
assertSideEffect(true, "''.match(a)", true);
assertSideEffect(true, "''.match(a)", false);
}
/** Asserts mayEffectMutableState() for the first statement of the parsed source. */
private void assertMutableState(boolean se, String js) {
  Node statement = parse(js).getFirstChild();
  assertEquals(se, NodeUtil.mayEffectMutableState(statement));
}
// Exercises NodeUtil.mayEffectMutableState. This is stricter than
// mayHaveSideEffects: creating any mutable value (arrays, objects,
// functions, RegExps, new expressions) counts as affecting mutable state.
public void testMayEffectMutableState() {
assertMutableState(true, "i++");
assertMutableState(true, "[b, [a, i++]]");
assertMutableState(true, "i=3");
assertMutableState(true, "[0, i=3]");
assertMutableState(true, "b()");
assertMutableState(true, "void b()");
assertMutableState(true, "[1, b()]");
assertMutableState(true, "b.b=4");
assertMutableState(true, "b.b--");
assertMutableState(true, "i--");
assertMutableState(true, "a[0][i=4]");
assertMutableState(true, "a += 3");
assertMutableState(true, "a, b, z += 4");
assertMutableState(true, "a ? c : d++");
assertMutableState(true, "a + c++");
assertMutableState(true, "a + c - d()");
assertMutableState(true, "a + c - d()");
assertMutableState(true, "function foo() {}");
assertMutableState(true, "while(true);");
assertMutableState(true, "if(true){a()}");
assertMutableState(false, "if(true){a}");
// Unlike mayHaveSideEffects, function expressions count: they create a
// new mutable object.
assertMutableState(true, "(function() { })");
assertMutableState(true, "(function() { i++ })");
assertMutableState(true, "[function a(){}]");
assertMutableState(false, "a");
assertMutableState(true, "[b, c [d, [e]]]");
assertMutableState(true, "({a: x, b: y, c: z})");
// Note: RegExp objects are not immutable, for instance, the exec
// method maintains state for "global" searches.
assertMutableState(true, "/abc/gi");
// Primitive values and pure reads do not affect mutable state.
assertMutableState(false, "'a'");
assertMutableState(false, "0");
assertMutableState(false, "a + c");
assertMutableState(false, "'c' + a[0]");
assertMutableState(false, "a[0][1]");
assertMutableState(false, "'a' + c");
assertMutableState(false, "'a' + a.name");
assertMutableState(false, "1, 2, 3");
assertMutableState(false, "a, b, 3");
assertMutableState(true, "(function(a, b) { })");
assertMutableState(false, "a ? c : d");
assertMutableState(false, "'1' + navigator.userAgent");
// All constructions produce fresh mutable objects.
assertMutableState(true, "new RegExp('foobar', 'i')");
assertMutableState(true, "new RegExp(SomethingWacky(), 'i')");
assertMutableState(true, "new Array()");
assertMutableState(true, "new Array");
assertMutableState(true, "new Array(4)");
assertMutableState(true, "new Array('a', 'b', 'c')");
assertMutableState(true, "new SomeClassINeverHeardOf()");
}
// Classifies a named function in various syntactic positions as a function
// expression (true) or a function declaration/statement (false).
public void testIsFunctionExpression() {
// Expression contexts: operand of a call, array element, assignment RHS,
// condition of a control structure, etc.
assertContainsAnonFunc(true, "(function(){})");
assertContainsAnonFunc(true, "[function a(){}]");
// NOTE(review): an object-literal value is reported false here —
// presumably because the helper finds the STRING key's parent, not the
// function's; confirm before relying on this case.
assertContainsAnonFunc(false, "{x: function a(){}}");
assertContainsAnonFunc(true, "(function a(){})()");
assertContainsAnonFunc(true, "x = function a(){};");
assertContainsAnonFunc(true, "var x = function a(){};");
assertContainsAnonFunc(true, "if (function a(){});");
assertContainsAnonFunc(true, "while (function a(){});");
assertContainsAnonFunc(true, "do; while (function a(){});");
assertContainsAnonFunc(true, "for (function a(){};;);");
assertContainsAnonFunc(true, "for (;function a(){};);");
assertContainsAnonFunc(true, "for (;;function a(){});");
assertContainsAnonFunc(true, "for (p in function a(){});");
assertContainsAnonFunc(true, "with (function a(){}) {}");
// Statement contexts: top level or as the body of a control structure —
// these are declarations, not expressions.
assertContainsAnonFunc(false, "function a(){}");
assertContainsAnonFunc(false, "if (x) function a(){};");
assertContainsAnonFunc(false, "if (x) { function a(){} }");
assertContainsAnonFunc(false, "if (x); else function a(){};");
assertContainsAnonFunc(false, "while (x) function a(){};");
assertContainsAnonFunc(false, "do function a(){} while (0);");
assertContainsAnonFunc(false, "for (;;) function a(){}");
assertContainsAnonFunc(false, "for (p in o) function a(){};");
assertContainsAnonFunc(false, "with (x) function a(){}");
}
/**
 * Locates the single FUNCTION node in the parse tree of {@code js} and checks
 * whether NodeUtil classifies it as a function expression.
 */
private void assertContainsAnonFunc(boolean expected, String js) {
  Node funcParent = findParentOfFuncDescendant(parse(js));
  assertNotNull("Expected function node in parse tree of: " + js, funcParent);
  assertEquals(expected, NodeUtil.isFunctionExpression(getFuncChild(funcParent)));
}
/**
 * Depth-first search: returns the parent of the first FUNCTION node found
 * under {@code n}, or null when the subtree contains no function.
 */
private Node findParentOfFuncDescendant(Node n) {
  for (Node child = n.getFirstChild(); child != null; child = child.getNext()) {
    if (child.isFunction()) {
      return n;
    }
    Node found = findParentOfFuncDescendant(child);
    if (found != null) {
      return found;
    }
  }
  return null;
}
/** Returns the first direct child of {@code n} that is a FUNCTION, or null. */
private Node getFuncChild(Node n) {
  Node child = n.getFirstChild();
  while (child != null) {
    if (child.isFunction()) {
      return child;
    }
    child = child.getNext();
  }
  return null;
}
public void testContainsType() {
  // containsType finds THIS anywhere in the tree, including inside
  // function bodies.
  assertTrue(NodeUtil.containsType(parse("this"), Token.THIS));
  assertTrue(NodeUtil.containsType(parse("function foo(){}(this)"), Token.THIS));
  assertTrue(NodeUtil.containsType(parse("b?this:null"), Token.THIS));
  // Trees with no THIS node at all.
  assertFalse(NodeUtil.containsType(parse("a"), Token.THIS));
  assertFalse(NodeUtil.containsType(parse("function foo(){}"), Token.THIS));
  assertFalse(NodeUtil.containsType(parse("(b?foo():null)"), Token.THIS));
}
public void testReferencesThis() {
  // A bare 'this' at the top level is a reference.
  assertTrue(NodeUtil.referencesThis(parse("this")));
  // Don't descend into functions (starts at the script node)
  assertFalse(NodeUtil.referencesThis(parse("function foo(){this}")));
  // But starting with a function properly check for 'this'
  Node n = parse("function foo(){this}").getFirstChild();
  // Fixed: JUnit's assertEquals takes the expected value first (matches the
  // ordering used elsewhere in this file, e.g. testGetFunctionName).
  assertEquals(Token.FUNCTION, n.getType());
  assertTrue(NodeUtil.referencesThis(n));
  assertTrue(NodeUtil.referencesThis(parse("b?this:null")));
  assertFalse(NodeUtil.referencesThis(parse("a")));
  // A function with no 'this' in its body.
  n = parse("function foo(){}").getFirstChild();
  assertEquals(Token.FUNCTION, n.getType());
  assertFalse(NodeUtil.referencesThis(n));
  assertFalse(NodeUtil.referencesThis(parse("(b?foo():null)")));
}
public void testGetNodeTypeReferenceCount() {
  // Counts THIS occurrences accepted by an always-true predicate.
  assertEquals(0, NodeUtil.getNodeTypeReferenceCount(
      parse("function foo(){}"), Token.THIS, Predicates.<Node>alwaysTrue()));
  assertEquals(1, NodeUtil.getNodeTypeReferenceCount(
      parse("this"), Token.THIS, Predicates.<Node>alwaysTrue()));
  assertEquals(2, NodeUtil.getNodeTypeReferenceCount(
      parse("this;function foo(){}(this)"), Token.THIS,
      Predicates.<Node>alwaysTrue()));
}
public void testIsNameReferenceCount() {
  // Declarations (function names, var names) count as references.
  assertTrue(NodeUtil.isNameReferenced(parse("function foo(){}"), "foo"));
  assertTrue(NodeUtil.isNameReferenced(parse("var foo = function(){}"), "foo"));
  assertFalse(NodeUtil.isNameReferenced(parse("function foo(){}"), "undefined"));
  assertTrue(NodeUtil.isNameReferenced(parse("undefined"), "undefined"));
  assertTrue(NodeUtil.isNameReferenced(
      parse("undefined;function foo(){}(undefined)"), "undefined"));
  // Only the base of a property access is a NAME reference; the property
  // itself is a string key.
  assertTrue(NodeUtil.isNameReferenced(parse("goo.foo"), "goo"));
  assertFalse(NodeUtil.isNameReferenced(parse("goo.foo"), "foo"));
}
public void testGetNameReferenceCount() {
  // Counts NAME nodes matching the given string.
  assertEquals(0, NodeUtil.getNameReferenceCount(
      parse("function foo(){}"), "undefined"));
  assertEquals(1, NodeUtil.getNameReferenceCount(parse("undefined"), "undefined"));
  assertEquals(2, NodeUtil.getNameReferenceCount(
      parse("undefined;function foo(){}(undefined)"), "undefined"));
  // Property names are not NAME references; the base is.
  assertEquals(1, NodeUtil.getNameReferenceCount(parse("goo.foo"), "goo"));
  assertEquals(0, NodeUtil.getNameReferenceCount(parse("goo.foo"), "foo"));
  // Declarations count.
  assertEquals(1, NodeUtil.getNameReferenceCount(
      parse("function foo(){}"), "foo"));
  assertEquals(1, NodeUtil.getNameReferenceCount(
      parse("var foo = function(){}"), "foo"));
}
public void testGetVarsDeclaredInBranch() {
  // Collects the NAME nodes declared by VAR statements in a branch, without
  // descending into nested functions.
  // (Removed an unused local `Compiler compiler` left over from an earlier
  // version of this test.)
  assertNodeNames(Sets.newHashSet("foo"),
      NodeUtil.getVarsDeclaredInBranch(parse("var foo;")));
  assertNodeNames(Sets.newHashSet("foo", "goo"),
      NodeUtil.getVarsDeclaredInBranch(parse("var foo,goo;")));
  assertNodeNames(Sets.<String>newHashSet(),
      NodeUtil.getVarsDeclaredInBranch(parse("foo();")));
  assertNodeNames(Sets.<String>newHashSet(),
      NodeUtil.getVarsDeclaredInBranch(parse("function f(){var foo;}")));
  assertNodeNames(Sets.newHashSet("goo"),
      NodeUtil.getVarsDeclaredInBranch(parse("var goo;function f(){var foo;}")));
}
/** Asserts that the string payloads of {@code nodes} are exactly {@code nodeNames}. */
private void assertNodeNames(Set<String> nodeNames, Collection<Node> nodes) {
  Set<String> found = Sets.newHashSet();
  for (Node node : nodes) {
    found.add(node.getString());
  }
  assertEquals(nodeNames, found);
}
public void testIsControlStructureCodeBlock() {
  // For an if/else, the then- and else-branches are code blocks; the
  // condition is not.
  Node ifNode = parse("if (x) foo(); else boo();").getFirstChild();
  Node condition = ifNode.getFirstChild();
  Node thenBranch = condition.getNext();
  Node elseBranch = ifNode.getLastChild();
  assertFalse(NodeUtil.isControlStructureCodeBlock(ifNode, condition));
  assertTrue(NodeUtil.isControlStructureCodeBlock(ifNode, thenBranch));
  assertTrue(NodeUtil.isControlStructureCodeBlock(ifNode, elseBranch));
}
public void testIsFunctionExpression1() {
  // A parenthesized named function is a function expression.
  Node statementNode = parse("(function foo() {})").getFirstChild();
  assertTrue(statementNode.isExprResult());
  Node functionNode = statementNode.getFirstChild();
  assertTrue(functionNode.isFunction());
  assertTrue(NodeUtil.isFunctionExpression(functionNode));
}
public void testIsFunctionExpression2() {
  // A top-level named function is a declaration, not an expression.
  Node functionNode = parse("function foo() {}").getFirstChild();
  assertTrue(functionNode.isFunction());
  assertFalse(NodeUtil.isFunctionExpression(functionNode));
}
public void testRemoveChildBlock() {
  // Removing a synthetic inner block leaves an empty block behind rather
  // than deleting it outright.
  Node actual = parse("{{x()}}");
  Node outerBlockNode = actual.getFirstChild();
  Node innerBlockNode = outerBlockNode.getFirstChild();
  innerBlockNode.setIsSyntheticBlock(true);
  NodeUtil.removeChild(outerBlockNode, innerBlockNode);
  String expected = "{{}}";
  String difference = parse(expected).checkTreeEquals(actual);
  // assertNull reports the tree diff on failure — replaces the
  // `if (diff != null) assertTrue(msg, false)` anti-idiom, matching the
  // style already used by testRemoveForChild.
  assertNull("Nodes do not match:\n" + difference, difference);
}
public void testRemoveTryChild1() {
  // Test removing the finally clause.
  Node actual = parse("try {foo()} catch(e) {} finally {}");
  Node tryNode = actual.getFirstChild();
  Node finallyBlock = tryNode.getLastChild();
  NodeUtil.removeChild(tryNode, finallyBlock);
  String expected = "try {foo()} catch(e) {}";
  String difference = parse(expected).checkTreeEquals(actual);
  // assertNull replaces the assertTrue(msg, false) anti-idiom and unused
  // locals (tryBlock, catchBlocks) were removed.
  assertNull("Nodes do not match:\n" + difference, difference);
}
public void testRemoveTryChild2() {
  // Test removing the try clause: the try body becomes an empty block.
  Node actual = parse("try {foo()} catch(e) {} finally {}");
  Node tryNode = actual.getFirstChild();
  Node tryBlock = tryNode.getFirstChild();
  NodeUtil.removeChild(tryNode, tryBlock);
  String expected = "try {} catch(e) {} finally {}";
  String difference = parse(expected).checkTreeEquals(actual);
  // assertNull replaces the assertTrue(msg, false) anti-idiom; the unused
  // catchBlocks local was removed.
  assertNull("Nodes do not match:\n" + difference, difference);
}
public void testRemoveTryChild3() {
  // Test removing the catch clause (the CATCH inside the catch-block list).
  Node actual = parse("try {foo()} catch(e) {} finally {}");
  Node tryNode = actual.getFirstChild();
  Node catchBlocks = tryNode.getFirstChild().getNext();
  Node catchBlock = catchBlocks.getFirstChild();
  NodeUtil.removeChild(catchBlocks, catchBlock);
  String expected = "try {foo()} finally {}";
  String difference = parse(expected).checkTreeEquals(actual);
  // assertNull replaces the assertTrue(msg, false) anti-idiom; unused
  // locals (tryBlock, finallyBlock) were removed.
  assertNull("Nodes do not match:\n" + difference, difference);
}
public void testRemoveTryChild4() {
  // Test removing the whole catch-blocks node from the TRY (the original
  // comment claimed "without a finally", but the parsed source does include
  // a finally clause, which is what keeps the try valid afterwards).
  Node actual = parse("try {foo()} catch(e) {} finally {}");
  Node tryNode = actual.getFirstChild();
  Node catchBlocks = tryNode.getFirstChild().getNext();
  NodeUtil.removeChild(tryNode, catchBlocks);
  String expected = "try {foo()} finally {}";
  String difference = parse(expected).checkTreeEquals(actual);
  // assertNull replaces the assertTrue(msg, false) anti-idiom; unused
  // locals (tryBlock, catchBlock, finallyBlock) were removed.
  assertNull("Nodes do not match:\n" + difference, difference);
}
public void testRemoveTryChild5() {
  // NOTE(review): this duplicates testRemoveTryChild3 exactly — consider
  // deleting one of the two or differentiating the scenario.
  Node actual = parse("try {foo()} catch(e) {} finally {}");
  Node tryNode = actual.getFirstChild();
  Node catchBlocks = tryNode.getFirstChild().getNext();
  Node catchBlock = catchBlocks.getFirstChild();
  NodeUtil.removeChild(catchBlocks, catchBlock);
  String expected = "try {foo()} finally {}";
  String difference = parse(expected).checkTreeEquals(actual);
  // assertNull replaces the assertTrue(msg, false) anti-idiom; unused
  // locals (tryBlock, finallyBlock) were removed.
  assertNull("Nodes do not match:\n" + difference, difference);
}
public void testRemoveVarChild() {
  // Removing NAME children from a VAR statement. Failures are reported via
  // assertNull (replacing four assertTrue(msg, false) occurrences); an
  // unused `Compiler compiler` local was removed.
  // Test removing the first child.
  Node actual = parse("var foo, goo, hoo");
  Node varNode = actual.getFirstChild();
  Node nameNode = varNode.getFirstChild();
  NodeUtil.removeChild(varNode, nameNode);
  String expected = "var goo, hoo";
  String difference = parse(expected).checkTreeEquals(actual);
  assertNull("Nodes do not match:\n" + difference, difference);
  // Test removing the second child.
  actual = parse("var foo, goo, hoo");
  varNode = actual.getFirstChild();
  nameNode = varNode.getFirstChild().getNext();
  NodeUtil.removeChild(varNode, nameNode);
  expected = "var foo, hoo";
  difference = parse(expected).checkTreeEquals(actual);
  assertNull("Nodes do not match:\n" + difference, difference);
  // Test removing the last child of several children.
  actual = parse("var foo, hoo");
  varNode = actual.getFirstChild();
  nameNode = varNode.getFirstChild().getNext();
  NodeUtil.removeChild(varNode, nameNode);
  expected = "var foo";
  difference = parse(expected).checkTreeEquals(actual);
  assertNull("Nodes do not match:\n" + difference, difference);
  // Removing the only NAME removes the entire VAR statement.
  actual = parse("var hoo");
  varNode = actual.getFirstChild();
  nameNode = varNode.getFirstChild();
  NodeUtil.removeChild(varNode, nameNode);
  expected = "";
  difference = parse(expected).checkTreeEquals(actual);
  assertNull("Nodes do not match:\n" + difference, difference);
}
public void testRemoveLabelChild1() {
  // Removing a label's statement removes the whole labeled statement.
  // (Removed an unused `Compiler compiler` local; assertNull replaces the
  // assertTrue(msg, false) anti-idiom.)
  Node actual = parse("foo: goo()");
  Node labelNode = actual.getFirstChild();
  Node callExpressNode = labelNode.getLastChild();
  NodeUtil.removeChild(labelNode, callExpressNode);
  String expected = "";
  String difference = parse(expected).checkTreeEquals(actual);
  assertNull("Nodes do not match:\n" + difference, difference);
}
public void testRemoveLabelChild2() {
  // Removing the statement of a nested label removes the entire labeled
  // chain (the original comment, "Test removing the first child", was a
  // copy-paste leftover). assertNull replaces the assertTrue(msg, false)
  // anti-idiom.
  Node actual = parse("achoo: foo: goo()");
  Node labelNode = actual.getFirstChild();
  Node callExpressNode = labelNode.getLastChild();
  NodeUtil.removeChild(labelNode, callExpressNode);
  String expected = "";
  String difference = parse(expected).checkTreeEquals(actual);
  assertNull("Nodes do not match:\n" + difference, difference);
}
public void testRemoveForChild() {
  // Removing each part of a FOR node. (Removed an unused
  // `Compiler compiler` local.)
  // Test removing the initializer.
  Node actual = parse("for(var a=0;a<0;a++)foo()");
  Node forNode = actual.getFirstChild();
  Node child = forNode.getFirstChild();
  NodeUtil.removeChild(forNode, child);
  String expected = "for(;a<0;a++)foo()";
  String difference = parse(expected).checkTreeEquals(actual);
  assertNull("Nodes do not match:\n" + difference, difference);
  // Test removing the condition.
  actual = parse("for(var a=0;a<0;a++)foo()");
  forNode = actual.getFirstChild();
  child = forNode.getFirstChild().getNext();
  NodeUtil.removeChild(forNode, child);
  expected = "for(var a=0;;a++)foo()";
  difference = parse(expected).checkTreeEquals(actual);
  assertNull("Nodes do not match:\n" + difference, difference);
  // Test removing the increment.
  actual = parse("for(var a=0;a<0;a++)foo()");
  forNode = actual.getFirstChild();
  child = forNode.getFirstChild().getNext().getNext();
  NodeUtil.removeChild(forNode, child);
  expected = "for(var a=0;a<0;)foo()";
  difference = parse(expected).checkTreeEquals(actual);
  assertNull("Nodes do not match:\n" + difference, difference);
  // Test removing the body.
  actual = parse("for(var a=0;a<0;a++)foo()");
  forNode = actual.getFirstChild();
  child = forNode.getLastChild();
  NodeUtil.removeChild(forNode, child);
  expected = "for(var a=0;a<0;a++);";
  difference = parse(expected).checkTreeEquals(actual);
  assertNull("Nodes do not match:\n" + difference, difference);
  // Test removing the body of a for-in loop (the original comment
  // duplicated "Test removing the body").
  actual = parse("for(a in ack)foo();");
  forNode = actual.getFirstChild();
  child = forNode.getLastChild();
  NodeUtil.removeChild(forNode, child);
  expected = "for(a in ack);";
  difference = parse(expected).checkTreeEquals(actual);
  assertNull("Nodes do not match:\n" + difference, difference);
}
public void testMergeBlock1() {
  // A nested block folds into its parent block. (The original comment,
  // "Test removing the initializer", was a copy-paste leftover; an unused
  // `Compiler compiler` local was removed.)
  Node actual = parse("{{a();b();}}");
  Node parentBlock = actual.getFirstChild();
  Node childBlock = parentBlock.getFirstChild();
  assertTrue(NodeUtil.tryMergeBlock(childBlock));
  String expected = "{a();b();}";
  String difference = parse(expected).checkTreeEquals(actual);
  assertNull("Nodes do not match:\n" + difference, difference);
}
public void testMergeBlock2() {
  // A block that is the body of a label cannot be merged away. (Removed a
  // stale "Test removing the initializer" comment and an unused
  // `Compiler compiler` local.)
  Node actual = parse("foo:{a();}");
  Node parentLabel = actual.getFirstChild();
  Node childBlock = parentLabel.getLastChild();
  assertFalse(NodeUtil.tryMergeBlock(childBlock));
}
public void testMergeBlock3() {
  // A labeled multi-statement block cannot be merged, and the failed
  // attempt must leave the tree untouched. (Removed a stale comment and an
  // unused `Compiler compiler` local; the `code` constant is now used for
  // both parses instead of duplicating the literal.)
  String code = "foo:{a();boo()}";
  Node actual = parse(code);
  Node parentLabel = actual.getFirstChild();
  Node childBlock = parentLabel.getLastChild();
  assertFalse(NodeUtil.tryMergeBlock(childBlock));
  String difference = parse(code).checkTreeEquals(actual);
  assertNull("Nodes do not match:\n" + difference, difference);
}
public void testGetSourceName() {
  // A node with no source file of its own inherits the name from its parent.
  Node child = new Node(Token.BLOCK);
  Node parent = new Node(Token.BLOCK, child);
  parent.setSourceFileForTesting("foo");
  assertEquals("foo", NodeUtil.getSourceName(child));
}
// Exercises NodeUtil.evaluatesToLocalValue (via the testLocalValue helper):
// whether an expression's result is known to be a locally-created,
// unaliased value.
public void testLocalValue1() throws Exception {
// Names are not known to be local.
assertFalse(testLocalValue("x"));
assertFalse(testLocalValue("x()"));
assertFalse(testLocalValue("this"));
assertFalse(testLocalValue("arguments"));
// We can't know if new objects are local unless we know
// that they don't alias themselves.
assertFalse(testLocalValue("new x()"));
// property references are assume to be non-local
assertFalse(testLocalValue("(new x()).y"));
assertFalse(testLocalValue("(new x())['y']"));
// Primitive values are local
assertTrue(testLocalValue("null"));
assertTrue(testLocalValue("undefined"));
assertTrue(testLocalValue("Infinity"));
assertTrue(testLocalValue("NaN"));
assertTrue(testLocalValue("1"));
assertTrue(testLocalValue("'a'"));
assertTrue(testLocalValue("true"));
assertTrue(testLocalValue("false"));
assertTrue(testLocalValue("[]"));
assertTrue(testLocalValue("{}"));
// The contents of arrays and objects don't matter
assertTrue(testLocalValue("[x]"));
assertTrue(testLocalValue("{'a':x}"));
// Pre-increment results in primitive number
assertTrue(testLocalValue("++x"));
assertTrue(testLocalValue("--x"));
// Post-increment, the previous value matters.
assertFalse(testLocalValue("x++"));
assertFalse(testLocalValue("x--"));
// The left side of an only assign matters if it is an alias or mutable.
assertTrue(testLocalValue("x=1"));
assertFalse(testLocalValue("x=[]"));
assertFalse(testLocalValue("x=y"));
// The right hand side of assignment opts don't matter, as they force
// a local result.
assertTrue(testLocalValue("x+=y"));
assertTrue(testLocalValue("x*=y"));
// Comparisons always result in locals, as they force a local boolean
// result.
assertTrue(testLocalValue("x==y"));
assertTrue(testLocalValue("x!=y"));
assertTrue(testLocalValue("x>y"));
// Only the right side of a comma matters
assertTrue(testLocalValue("(1,2)"));
assertTrue(testLocalValue("(x,1)"));
assertFalse(testLocalValue("(x,y)"));
// Both the operands of OR matter
assertTrue(testLocalValue("1||2"));
assertFalse(testLocalValue("x||1"));
assertFalse(testLocalValue("x||y"));
assertFalse(testLocalValue("1||y"));
// Both the operands of AND matter
assertTrue(testLocalValue("1&&2"));
assertFalse(testLocalValue("x&&1"));
assertFalse(testLocalValue("x&&y"));
assertFalse(testLocalValue("1&&y"));
// Only the results of HOOK matter
assertTrue(testLocalValue("x?1:2"));
assertFalse(testLocalValue("x?x:2"));
assertFalse(testLocalValue("x?1:x"));
assertFalse(testLocalValue("x?x:y"));
// Results of ops are local values
assertTrue(testLocalValue("!y"));
assertTrue(testLocalValue("~y"));
assertTrue(testLocalValue("y + 1"));
assertTrue(testLocalValue("y + z"));
assertTrue(testLocalValue("y * z"));
assertTrue(testLocalValue("'a' in x"));
assertTrue(testLocalValue("typeof x"));
assertTrue(testLocalValue("x instanceof y"));
assertTrue(testLocalValue("void x"));
assertTrue(testLocalValue("void 0"));
// Property reads are non-local except for known-safe built-in methods
// (toString is modeled as returning a local string; valueOf is not).
assertFalse(testLocalValue("{}.x"));
assertTrue(testLocalValue("{}.toString()"));
assertTrue(testLocalValue("o.toString()"));
assertFalse(testLocalValue("o.valueOf()"));
assertTrue(testLocalValue("delete a.b"));
}
// Checks evaluatesToLocalValue on a NEW expression as its side-effect
// flags change. The order of scenarios matters: flags are cleared and
// reassigned before each check.
public void testLocalValue2() {
Node newExpr = getNode("new x()");
// Without flag information, a NEW is assumed non-local.
assertFalse(NodeUtil.evaluatesToLocalValue(newExpr));
Preconditions.checkState(newExpr.isNew());
Node.SideEffectFlags flags = new Node.SideEffectFlags();
// No flags at all: result is local.
flags.clearAllFlags();
newExpr.setSideEffectFlags(flags.valueOf());
assertTrue(NodeUtil.evaluatesToLocalValue(newExpr));
// Mutating 'this' inside the constructor still yields a local result.
flags.clearAllFlags();
flags.setMutatesThis();
newExpr.setSideEffectFlags(flags.valueOf());
assertTrue(NodeUtil.evaluatesToLocalValue(newExpr));
// NOTE(review): a NEW with "returns tainted" is still treated as local
// here — presumably because 'new' always produces a fresh object; confirm
// against NodeUtil's handling of Token.NEW.
flags.clearAllFlags();
flags.setReturnsTainted();
newExpr.setSideEffectFlags(flags.valueOf());
assertTrue(NodeUtil.evaluatesToLocalValue(newExpr));
// Throwing, mutating arguments, or mutating global state makes the
// expression non-local.
flags.clearAllFlags();
flags.setThrows();
newExpr.setSideEffectFlags(flags.valueOf());
assertFalse(NodeUtil.evaluatesToLocalValue(newExpr));
flags.clearAllFlags();
flags.setMutatesArguments();
newExpr.setSideEffectFlags(flags.valueOf());
assertFalse(NodeUtil.evaluatesToLocalValue(newExpr));
flags.clearAllFlags();
flags.setMutatesGlobalState();
newExpr.setSideEffectFlags(flags.valueOf());
assertFalse(NodeUtil.evaluatesToLocalValue(newExpr));
}
// Checks how a call expression's side-effect flags (and those of the NEW
// it is invoked on) drive evaluatesToLocalValue, functionCallHasSideEffects
// and mayHaveSideEffects. Flags are re-cleared before each scenario, so
// statement order matters.
public void testCallSideEffects() {
Node callExpr = getNode("new x().method()");
// With no flag information, an unknown call is assumed side-effectful.
assertTrue(NodeUtil.functionCallHasSideEffects(callExpr));
// callExpr is CALL -> GETPROP -> NEW.
Node newExpr = callExpr.getFirstChild().getFirstChild();
Preconditions.checkState(newExpr.isNew());
Node.SideEffectFlags flags = new Node.SideEffectFlags();
// No side effects, local result
flags.clearAllFlags();
newExpr.setSideEffectFlags(flags.valueOf());
flags.clearAllFlags();
callExpr.setSideEffectFlags(flags.valueOf());
assertTrue(NodeUtil.evaluatesToLocalValue(callExpr));
assertFalse(NodeUtil.functionCallHasSideEffects(callExpr));
assertFalse(NodeUtil.mayHaveSideEffects(callExpr));
// Modifies this, local result
flags.clearAllFlags();
newExpr.setSideEffectFlags(flags.valueOf());
flags.clearAllFlags();
flags.setMutatesThis();
callExpr.setSideEffectFlags(flags.valueOf());
assertTrue(NodeUtil.evaluatesToLocalValue(callExpr));
assertFalse(NodeUtil.functionCallHasSideEffects(callExpr));
assertFalse(NodeUtil.mayHaveSideEffects(callExpr));
// Modifies this, non-local result
flags.clearAllFlags();
newExpr.setSideEffectFlags(flags.valueOf());
flags.clearAllFlags();
flags.setMutatesThis();
flags.setReturnsTainted();
callExpr.setSideEffectFlags(flags.valueOf());
assertFalse(NodeUtil.evaluatesToLocalValue(callExpr));
assertFalse(NodeUtil.functionCallHasSideEffects(callExpr));
assertFalse(NodeUtil.mayHaveSideEffects(callExpr));
// No modifications, non-local result
flags.clearAllFlags();
newExpr.setSideEffectFlags(flags.valueOf());
flags.clearAllFlags();
flags.setReturnsTainted();
callExpr.setSideEffectFlags(flags.valueOf());
assertFalse(NodeUtil.evaluatesToLocalValue(callExpr));
assertFalse(NodeUtil.functionCallHasSideEffects(callExpr));
assertFalse(NodeUtil.mayHaveSideEffects(callExpr));
// The new modifies global state, no side-effect call, non-local result
// This call could be removed, but not the new.
flags.clearAllFlags();
flags.setMutatesGlobalState();
newExpr.setSideEffectFlags(flags.valueOf());
flags.clearAllFlags();
callExpr.setSideEffectFlags(flags.valueOf());
assertTrue(NodeUtil.evaluatesToLocalValue(callExpr));
assertFalse(NodeUtil.functionCallHasSideEffects(callExpr));
assertTrue(NodeUtil.mayHaveSideEffects(callExpr));
}
/** Returns whether the parsed expression evaluates to a locally-created value. */
private boolean testLocalValue(String js) {
  Node node = getNode(js);
  return NodeUtil.evaluatesToLocalValue(node);
}
// Valid @define values are compile-time constant expressions built from
// number/string/boolean literals and operators over them; bare names,
// null, undefined and NaN are rejected.
public void testValidDefine() {
assertTrue(testValidDefineValue("1"));
assertTrue(testValidDefineValue("-3"));
assertTrue(testValidDefineValue("true"));
assertTrue(testValidDefineValue("false"));
assertTrue(testValidDefineValue("'foo'"));
assertFalse(testValidDefineValue("x"));
assertFalse(testValidDefineValue("null"));
assertFalse(testValidDefineValue("undefined"));
assertFalse(testValidDefineValue("NaN"));
// Unary and binary operators over literals are allowed; mixing in an
// unknown name is not.
assertTrue(testValidDefineValue("!true"));
assertTrue(testValidDefineValue("-true"));
assertTrue(testValidDefineValue("1 & 8"));
assertTrue(testValidDefineValue("1 + 8"));
assertTrue(testValidDefineValue("'a' + 'b'"));
assertFalse(testValidDefineValue("1 & foo"));
}
/**
 * Parses {@code var test = <js>;} and checks the initializer against an
 * empty set of known define names.
 */
private boolean testValidDefineValue(String js) {
  Node script = parse("var test = " + js + ";");
  Node initializer = script.getFirstChild()  // VAR
      .getFirstChild()                       // NAME
      .getFirstChild();                      // value expression
  return NodeUtil.isValidDefineValue(initializer, ImmutableSet.<String>of());
}
/**
 * Tests {@link NodeUtil#getNumberValue}, which folds a literal node (or a
 * string literal containing a number) to its JavaScript numeric value.
 * A {@code null} result means the node is not something the method converts.
 */
public void testGetNumberValue() {
  // Strings
  assertEquals(1.0, NodeUtil.getNumberValue(getNode("'\\uFEFF1'")));
  assertEquals(0.0, NodeUtil.getNumberValue(getNode("''")));
  assertEquals(0.0, NodeUtil.getNumberValue(getNode("' '")));
  assertEquals(0.0, NodeUtil.getNumberValue(getNode("' \\t'")));
  assertEquals(0.0, NodeUtil.getNumberValue(getNode("'+0'")));
  assertEquals(-0.0, NodeUtil.getNumberValue(getNode("'-0'")));
  assertEquals(2.0, NodeUtil.getNumberValue(getNode("'+2'")));
  assertEquals(-1.6, NodeUtil.getNumberValue(getNode("'-1.6'")));
  assertEquals(16.0, NodeUtil.getNumberValue(getNode("'16'")));
  assertEquals(16.0, NodeUtil.getNumberValue(getNode("' 16 '")));
  assertEquals(16.0, NodeUtil.getNumberValue(getNode("' 16 '")));
  assertEquals(12300.0, NodeUtil.getNumberValue(getNode("'123e2'")));
  assertEquals(12300.0, NodeUtil.getNumberValue(getNode("'123E2'")));
  assertEquals(1.23, NodeUtil.getNumberValue(getNode("'123e-2'")));
  assertEquals(1.23, NodeUtil.getNumberValue(getNode("'123E-2'")));
  assertEquals(-1.23, NodeUtil.getNumberValue(getNode("'-123e-2'")));
  assertEquals(-1.23, NodeUtil.getNumberValue(getNode("'-123E-2'")));
  assertEquals(1.23, NodeUtil.getNumberValue(getNode("'+123e-2'")));
  assertEquals(1.23, NodeUtil.getNumberValue(getNode("'+123E-2'")));
  assertEquals(12300.0, NodeUtil.getNumberValue(getNode("'+123e+2'")));
  assertEquals(12300.0, NodeUtil.getNumberValue(getNode("'+123E+2'")));
  assertEquals(15.0, NodeUtil.getNumberValue(getNode("'0xf'")));
  assertEquals(15.0, NodeUtil.getNumberValue(getNode("'0xF'")));
  // Chrome and Rhino behave differently from FF and IE. FF and IE
  // consider a signed hex number to be invalid; we follow FF/IE here.
  assertEquals(null, NodeUtil.getNumberValue(getNode("'-0xf'")));
  assertEquals(null, NodeUtil.getNumberValue(getNode("'-0xF'")));
  assertEquals(null, NodeUtil.getNumberValue(getNode("'+0xf'")));
  assertEquals(null, NodeUtil.getNumberValue(getNode("'+0xF'")));
  assertEquals(16.0, NodeUtil.getNumberValue(getNode("'0X10'")));
  assertEquals(Double.NaN, NodeUtil.getNumberValue(getNode("'0X10.8'")));
  // Note: leading-zero strings are treated as octal here.
  assertEquals(77.0, NodeUtil.getNumberValue(getNode("'077'")));
  assertEquals(-77.0, NodeUtil.getNumberValue(getNode("'-077'")));
  assertEquals(-77.5, NodeUtil.getNumberValue(getNode("'-077.5'")));
  assertEquals(
      Double.NEGATIVE_INFINITY,
      NodeUtil.getNumberValue(getNode("'-Infinity'")));
  assertEquals(
      Double.POSITIVE_INFINITY,
      NodeUtil.getNumberValue(getNode("'Infinity'")));
  assertEquals(
      Double.POSITIVE_INFINITY,
      NodeUtil.getNumberValue(getNode("'+Infinity'")));
  // Firefox treats "infinity" as "Infinity", IE treats it as NaN
  assertEquals(null, NodeUtil.getNumberValue(getNode("'-infinity'")));
  assertEquals(null, NodeUtil.getNumberValue(getNode("'infinity'")));
  assertEquals(null, NodeUtil.getNumberValue(getNode("'+infinity'")));
  assertEquals(Double.NaN, NodeUtil.getNumberValue(getNode("'NaN'")));
  assertEquals(
      Double.NaN, NodeUtil.getNumberValue(getNode("'some unknown string'")));
  assertEquals(Double.NaN, NodeUtil.getNumberValue(getNode("'123 blah'")));
  // Literals
  assertEquals(1.0, NodeUtil.getNumberValue(getNode("1")));
  // "-1" is parsed as a literal
  assertEquals(-1.0, NodeUtil.getNumberValue(getNode("-1")));
  // "+1" is parse as an op + literal
  assertEquals(null, NodeUtil.getNumberValue(getNode("+1")));
  assertEquals(22.0, NodeUtil.getNumberValue(getNode("22")));
  assertEquals(18.0, NodeUtil.getNumberValue(getNode("022")));
  assertEquals(34.0, NodeUtil.getNumberValue(getNode("0x22")));
  // Boolean and null literals coerce per JS ToNumber semantics.
  assertEquals(
      1.0, NodeUtil.getNumberValue(getNode("true")));
  assertEquals(
      0.0, NodeUtil.getNumberValue(getNode("false")));
  assertEquals(
      0.0, NodeUtil.getNumberValue(getNode("null")));
  assertEquals(
      Double.NaN, NodeUtil.getNumberValue(getNode("void 0")));
  assertEquals(
      Double.NaN, NodeUtil.getNumberValue(getNode("void f")));
  // values with side-effects are ignored.
  assertEquals(
      null, NodeUtil.getNumberValue(getNode("void f()")));
  assertEquals(
      Double.NaN, NodeUtil.getNumberValue(getNode("NaN")));
  assertEquals(
      Double.POSITIVE_INFINITY,
      NodeUtil.getNumberValue(getNode("Infinity")));
  assertEquals(
      Double.NEGATIVE_INFINITY,
      NodeUtil.getNumberValue(getNode("-Infinity")));
  // "infinity" is not a known name.
  assertEquals(null, NodeUtil.getNumberValue(getNode("infinity")));
  assertEquals(null, NodeUtil.getNumberValue(getNode("-infinity")));
  // getNumberValue only converts literals
  assertEquals(null, NodeUtil.getNumberValue(getNode("x")));
  assertEquals(null, NodeUtil.getNumberValue(getNode("x.y")));
  assertEquals(null, NodeUtil.getNumberValue(getNode("1/2")));
  assertEquals(null, NodeUtil.getNumberValue(getNode("1-2")));
  assertEquals(null, NodeUtil.getNumberValue(getNode("+1")));
}
/**
 * Tests {@link NodeUtil#isNumericResult}: whether an expression is known to
 * always evaluate to a number. {@code false} means "not provably numeric".
 * NOTE(review): the method name contains a typo ("Numberic"); it is left
 * unchanged because renaming a public test method changes how the runner
 * discovers it.
 */
public void testIsNumbericResult() {
  assertTrue(NodeUtil.isNumericResult(getNode("1")));
  assertFalse(NodeUtil.isNumericResult(getNode("true")));
  // Unary +/- coerce their operand to a number.
  assertTrue(NodeUtil.isNumericResult(getNode("+true")));
  assertTrue(NodeUtil.isNumericResult(getNode("+1")));
  assertTrue(NodeUtil.isNumericResult(getNode("-1")));
  assertTrue(NodeUtil.isNumericResult(getNode("-Infinity")));
  assertTrue(NodeUtil.isNumericResult(getNode("Infinity")));
  assertTrue(NodeUtil.isNumericResult(getNode("NaN")));
  assertFalse(NodeUtil.isNumericResult(getNode("undefined")));
  assertFalse(NodeUtil.isNumericResult(getNode("void 0")));
  // Shift operators always produce numbers.
  assertTrue(NodeUtil.isNumericResult(getNode("a << b")));
  assertTrue(NodeUtil.isNumericResult(getNode("a >> b")));
  assertTrue(NodeUtil.isNumericResult(getNode("a >>> b")));
  // Comparisons produce booleans, not numbers.
  assertFalse(NodeUtil.isNumericResult(getNode("a == b")));
  assertFalse(NodeUtil.isNumericResult(getNode("a != b")));
  assertFalse(NodeUtil.isNumericResult(getNode("a === b")));
  assertFalse(NodeUtil.isNumericResult(getNode("a !== b")));
  assertFalse(NodeUtil.isNumericResult(getNode("a < b")));
  assertFalse(NodeUtil.isNumericResult(getNode("a > b")));
  assertFalse(NodeUtil.isNumericResult(getNode("a <= b")));
  assertFalse(NodeUtil.isNumericResult(getNode("a >= b")));
  assertFalse(NodeUtil.isNumericResult(getNode("a in b")));
  assertFalse(NodeUtil.isNumericResult(getNode("a instanceof b")));
  // "+" may concatenate strings, so these are not provably numeric.
  assertFalse(NodeUtil.isNumericResult(getNode("'a'")));
  assertFalse(NodeUtil.isNumericResult(getNode("'a'+b")));
  assertFalse(NodeUtil.isNumericResult(getNode("a+'b'")));
  assertFalse(NodeUtil.isNumericResult(getNode("a+b")));
  assertFalse(NodeUtil.isNumericResult(getNode("a()")));
  assertFalse(NodeUtil.isNumericResult(getNode("''.a")));
  assertFalse(NodeUtil.isNumericResult(getNode("a.b")));
  assertFalse(NodeUtil.isNumericResult(getNode("a.b()")));
  assertFalse(NodeUtil.isNumericResult(getNode("a().b()")));
  assertFalse(NodeUtil.isNumericResult(getNode("new a()")));
  // Definitely not numeric
  assertFalse(NodeUtil.isNumericResult(getNode("([1,2])")));
  assertFalse(NodeUtil.isNumericResult(getNode("({a:1})")));
  // Recurse into the expression when necessary.
  assertTrue(NodeUtil.isNumericResult(getNode("1 && 2")));
  assertTrue(NodeUtil.isNumericResult(getNode("1 || 2")));
  assertTrue(NodeUtil.isNumericResult(getNode("a ? 2 : 3")));
  assertTrue(NodeUtil.isNumericResult(getNode("a,1")));
  assertTrue(NodeUtil.isNumericResult(getNode("a=1")));
}
/**
 * Tests {@link NodeUtil#isBooleanResult}: whether an expression is known to
 * always evaluate to a boolean. {@code false} means "not provably boolean".
 */
public void testIsBooleanResult() {
  assertFalse(NodeUtil.isBooleanResult(getNode("1")));
  assertTrue(NodeUtil.isBooleanResult(getNode("true")));
  // Unary +/- produce numbers, not booleans.
  assertFalse(NodeUtil.isBooleanResult(getNode("+true")));
  assertFalse(NodeUtil.isBooleanResult(getNode("+1")));
  assertFalse(NodeUtil.isBooleanResult(getNode("-1")));
  assertFalse(NodeUtil.isBooleanResult(getNode("-Infinity")));
  assertFalse(NodeUtil.isBooleanResult(getNode("Infinity")));
  assertFalse(NodeUtil.isBooleanResult(getNode("NaN")));
  assertFalse(NodeUtil.isBooleanResult(getNode("undefined")));
  assertFalse(NodeUtil.isBooleanResult(getNode("void 0")));
  assertFalse(NodeUtil.isBooleanResult(getNode("a << b")));
  assertFalse(NodeUtil.isBooleanResult(getNode("a >> b")));
  assertFalse(NodeUtil.isBooleanResult(getNode("a >>> b")));
  // Comparison and relational operators always produce booleans.
  assertTrue(NodeUtil.isBooleanResult(getNode("a == b")));
  assertTrue(NodeUtil.isBooleanResult(getNode("a != b")));
  assertTrue(NodeUtil.isBooleanResult(getNode("a === b")));
  assertTrue(NodeUtil.isBooleanResult(getNode("a !== b")));
  assertTrue(NodeUtil.isBooleanResult(getNode("a < b")));
  assertTrue(NodeUtil.isBooleanResult(getNode("a > b")));
  assertTrue(NodeUtil.isBooleanResult(getNode("a <= b")));
  assertTrue(NodeUtil.isBooleanResult(getNode("a >= b")));
  assertTrue(NodeUtil.isBooleanResult(getNode("a in b")));
  assertTrue(NodeUtil.isBooleanResult(getNode("a instanceof b")));
  assertFalse(NodeUtil.isBooleanResult(getNode("'a'")));
  assertFalse(NodeUtil.isBooleanResult(getNode("'a'+b")));
  assertFalse(NodeUtil.isBooleanResult(getNode("a+'b'")));
  assertFalse(NodeUtil.isBooleanResult(getNode("a+b")));
  assertFalse(NodeUtil.isBooleanResult(getNode("a()")));
  assertFalse(NodeUtil.isBooleanResult(getNode("''.a")));
  assertFalse(NodeUtil.isBooleanResult(getNode("a.b")));
  assertFalse(NodeUtil.isBooleanResult(getNode("a.b()")));
  assertFalse(NodeUtil.isBooleanResult(getNode("a().b()")));
  assertFalse(NodeUtil.isBooleanResult(getNode("new a()")));
  assertTrue(NodeUtil.isBooleanResult(getNode("delete a")));
  // Definitely not boolean
  assertFalse(NodeUtil.isBooleanResult(getNode("([true,false])")));
  assertFalse(NodeUtil.isBooleanResult(getNode("({a:true})")));
  // Compound expressions: isBooleanResult recurses into the branches.
  assertTrue(NodeUtil.isBooleanResult(getNode("true && false")));
  assertTrue(NodeUtil.isBooleanResult(getNode("true || false")));
  assertTrue(NodeUtil.isBooleanResult(getNode("a ? true : false")));
  assertTrue(NodeUtil.isBooleanResult(getNode("a,true")));
  assertTrue(NodeUtil.isBooleanResult(getNode("a=true")));
  assertFalse(NodeUtil.isBooleanResult(getNode("a=1")));
}
/**
 * Tests {@link NodeUtil#mayBeString}: whether an expression could evaluate
 * to a string. {@code true} is the conservative answer for anything whose
 * type cannot be proven non-string.
 */
public void testMayBeString() {
  assertFalse(NodeUtil.mayBeString(getNode("1")));
  assertFalse(NodeUtil.mayBeString(getNode("true")));
  assertFalse(NodeUtil.mayBeString(getNode("+true")));
  assertFalse(NodeUtil.mayBeString(getNode("+1")));
  assertFalse(NodeUtil.mayBeString(getNode("-1")));
  assertFalse(NodeUtil.mayBeString(getNode("-Infinity")));
  assertFalse(NodeUtil.mayBeString(getNode("Infinity")));
  assertFalse(NodeUtil.mayBeString(getNode("NaN")));
  assertFalse(NodeUtil.mayBeString(getNode("undefined")));
  assertFalse(NodeUtil.mayBeString(getNode("void 0")));
  assertFalse(NodeUtil.mayBeString(getNode("null")));
  // Shift, comparison and relational operators never produce strings.
  assertFalse(NodeUtil.mayBeString(getNode("a << b")));
  assertFalse(NodeUtil.mayBeString(getNode("a >> b")));
  assertFalse(NodeUtil.mayBeString(getNode("a >>> b")));
  assertFalse(NodeUtil.mayBeString(getNode("a == b")));
  assertFalse(NodeUtil.mayBeString(getNode("a != b")));
  assertFalse(NodeUtil.mayBeString(getNode("a === b")));
  assertFalse(NodeUtil.mayBeString(getNode("a !== b")));
  assertFalse(NodeUtil.mayBeString(getNode("a < b")));
  assertFalse(NodeUtil.mayBeString(getNode("a > b")));
  assertFalse(NodeUtil.mayBeString(getNode("a <= b")));
  assertFalse(NodeUtil.mayBeString(getNode("a >= b")));
  assertFalse(NodeUtil.mayBeString(getNode("a in b")));
  assertFalse(NodeUtil.mayBeString(getNode("a instanceof b")));
  // String literals, "+" with an unknown operand, and anything of unknown
  // type (calls, property reads, constructors) may be a string.
  assertTrue(NodeUtil.mayBeString(getNode("'a'")));
  assertTrue(NodeUtil.mayBeString(getNode("'a'+b")));
  assertTrue(NodeUtil.mayBeString(getNode("a+'b'")));
  assertTrue(NodeUtil.mayBeString(getNode("a+b")));
  assertTrue(NodeUtil.mayBeString(getNode("a()")));
  assertTrue(NodeUtil.mayBeString(getNode("''.a")));
  assertTrue(NodeUtil.mayBeString(getNode("a.b")));
  assertTrue(NodeUtil.mayBeString(getNode("a.b()")));
  assertTrue(NodeUtil.mayBeString(getNode("a().b()")));
  assertTrue(NodeUtil.mayBeString(getNode("new a()")));
  // These can't be strings but they aren't handled yet.
  assertFalse(NodeUtil.mayBeString(getNode("1 && 2")));
  assertFalse(NodeUtil.mayBeString(getNode("1 || 2")));
  assertFalse(NodeUtil.mayBeString(getNode("1 ? 2 : 3")));
  assertFalse(NodeUtil.mayBeString(getNode("1,2")));
  assertFalse(NodeUtil.mayBeString(getNode("a=1")));
  assertFalse(NodeUtil.mayBeString(getNode("1+1")));
  assertFalse(NodeUtil.mayBeString(getNode("true+true")));
  assertFalse(NodeUtil.mayBeString(getNode("null+null")));
  assertFalse(NodeUtil.mayBeString(getNode("NaN+NaN")));
  // These are not strings but they aren't primitives either
  assertTrue(NodeUtil.mayBeString(getNode("([1,2])")));
  assertTrue(NodeUtil.mayBeString(getNode("({a:1})")));
  assertTrue(NodeUtil.mayBeString(getNode("({}+1)")));
  assertTrue(NodeUtil.mayBeString(getNode("(1+{})")));
  assertTrue(NodeUtil.mayBeString(getNode("([]+1)")));
  assertTrue(NodeUtil.mayBeString(getNode("(1+[])")));
}
/**
 * Tests the three name validators side by side with the same inputs:
 * isValidPropertyName and isValidSimpleName reject dotted names, while
 * isValidQualifiedName accepts single-dot qualified names like "a.b".
 * Reserved words ("true") are rejected by all three.
 */
public void testValidNames() {
  // isValidPropertyName: a single, non-reserved identifier.
  assertTrue(NodeUtil.isValidPropertyName("a"));
  assertTrue(NodeUtil.isValidPropertyName("a3"));
  assertFalse(NodeUtil.isValidPropertyName("3a"));
  assertFalse(NodeUtil.isValidPropertyName("a."));
  assertFalse(NodeUtil.isValidPropertyName(".a"));
  assertFalse(NodeUtil.isValidPropertyName("a.b"));
  assertFalse(NodeUtil.isValidPropertyName("true"));
  assertFalse(NodeUtil.isValidPropertyName("a.true"));
  assertFalse(NodeUtil.isValidPropertyName("a..b"));
  // isValidSimpleName: same acceptance pattern on these inputs.
  assertTrue(NodeUtil.isValidSimpleName("a"));
  assertTrue(NodeUtil.isValidSimpleName("a3"));
  assertFalse(NodeUtil.isValidSimpleName("3a"));
  assertFalse(NodeUtil.isValidSimpleName("a."));
  assertFalse(NodeUtil.isValidSimpleName(".a"));
  assertFalse(NodeUtil.isValidSimpleName("a.b"));
  assertFalse(NodeUtil.isValidSimpleName("true"));
  assertFalse(NodeUtil.isValidSimpleName("a.true"));
  assertFalse(NodeUtil.isValidSimpleName("a..b"));
  // isValidQualifiedName: additionally accepts dotted chains such as "a.b".
  assertTrue(NodeUtil.isValidQualifiedName("a"));
  assertTrue(NodeUtil.isValidQualifiedName("a3"));
  assertFalse(NodeUtil.isValidQualifiedName("3a"));
  assertFalse(NodeUtil.isValidQualifiedName("a."));
  assertFalse(NodeUtil.isValidQualifiedName(".a"));
  assertTrue(NodeUtil.isValidQualifiedName("a.b"));
  assertFalse(NodeUtil.isValidQualifiedName("true"));
  assertFalse(NodeUtil.isValidQualifiedName("a.true"));
  assertFalse(NodeUtil.isValidQualifiedName("a..b"));
}
/**
 * Tests {@link NodeUtil#getNearestFunctionName} for the various syntactic
 * positions a function can appear in. A {@code null} expectation means the
 * function has no discernible name.
 */
public void testGetNearestFunctionName() {
  testFunctionName("(function() {})()", null);
  testFunctionName("function a() {}", "a");
  testFunctionName("(function a() {})", "a");
  testFunctionName("({a:function () {}})", "a");
  testFunctionName("({get a() {}})", "a");
  // Fixed: the setter case was accidentally listed twice.
  testFunctionName("({set a(b) {}})", "a");
  testFunctionName("({1:function () {}})", "1");
  testFunctionName("var a = function a() {}", "a");
  testFunctionName("var a;a = function a() {}", "a");
  testFunctionName("var o;o.a = function a() {}", "o.a");
  testFunctionName("this.a = function a() {}", "this.a");
}
/**
 * Tests {@link NodeUtil#getBestLValue} via {@link #getFunctionLValue}:
 * each case checks that the function expression is attributed to the
 * variable "x" even when it sits inside a conditional, logical-or,
 * logical-and, or comma expression.
 */
public void testGetBestLValue() {
  assertEquals("x", getFunctionLValue("var x = function() {};"));
  assertEquals("x", getFunctionLValue("x = function() {};"));
  assertEquals("x", getFunctionLValue("function x() {};"));
  assertEquals("x", getFunctionLValue("var x = y ? z : function() {};"));
  assertEquals("x", getFunctionLValue("var x = y ? function() {} : z;"));
  assertEquals("x", getFunctionLValue("var x = y && function() {};"));
  assertEquals("x", getFunctionLValue("var x = y || function() {};"));
  assertEquals("x", getFunctionLValue("var x = (y, function() {});"));
}
/**
 * Tests {@link NodeUtil#isNaN}: whether an expression is statically known
 * to be NaN (the NaN name or a 0/0 division).
 * Uses assertTrue/assertFalse instead of assertEquals(true/false, ...) for
 * clearer intent and failure messages.
 */
public void testIsNaN() {
  assertTrue(NodeUtil.isNaN(getNode("NaN")));
  assertFalse(NodeUtil.isNaN(getNode("Infinity")));
  assertFalse(NodeUtil.isNaN(getNode("x")));
  assertTrue(NodeUtil.isNaN(getNode("0/0")));
  assertFalse(NodeUtil.isNaN(getNode("1/0")));
  assertFalse(NodeUtil.isNaN(getNode("0/1")));
  assertFalse(NodeUtil.isNaN(IR.number(0.0)));
}
/**
 * Tests {@link NodeUtil#isExecutedExactlyOnce} via
 * {@link #executedOnceTestCase}: whether the reference to "x" in each
 * snippet runs exactly once when the enclosing statement executes.
 * Uses assertTrue/assertFalse instead of assertEquals(true/false, ...) for
 * clearer intent and failure messages.
 */
public void testIsExecutedExactlyOnce() {
  assertTrue(executedOnceTestCase("x;"));
  // Short-circuit operators: only the left operand always runs.
  assertTrue(executedOnceTestCase("x && 1;"));
  assertFalse(executedOnceTestCase("1 && x;"));
  assertFalse(executedOnceTestCase("1 && (x && 1);"));
  assertTrue(executedOnceTestCase("x || 1;"));
  assertFalse(executedOnceTestCase("1 || x;"));
  assertFalse(executedOnceTestCase("1 && (x || 1);"));
  // Conditionals: only the condition always runs.
  assertTrue(executedOnceTestCase("x ? 1 : 2;"));
  assertFalse(executedOnceTestCase("1 ? 1 : x;"));
  assertFalse(executedOnceTestCase("1 ? x : 2;"));
  assertFalse(executedOnceTestCase("1 && (x ? 1 : 2);"));
  assertTrue(executedOnceTestCase("if (x) {}"));
  assertFalse(executedOnceTestCase("if (true) {x;}"));
  assertFalse(executedOnceTestCase("if (true) {} else {x;}"));
  assertFalse(executedOnceTestCase("if (1) { if (x) {} }"));
  // Loops: only a for-loop initializer runs exactly once.
  assertTrue(executedOnceTestCase("for(x;;){}"));
  assertFalse(executedOnceTestCase("for(;x;){}"));
  assertFalse(executedOnceTestCase("for(;;x){}"));
  assertFalse(executedOnceTestCase("for(;;){x;}"));
  assertFalse(executedOnceTestCase("if (1) { for(x;;){} }"));
  assertFalse(executedOnceTestCase("for(x in {}){}"));
  assertTrue(executedOnceTestCase("for({}.a in x){}"));
  assertFalse(executedOnceTestCase("for({}.a in {}){x}"));
  assertFalse(executedOnceTestCase("if (1) { for(x in {}){} }"));
  // Switch: only the switch expression runs exactly once.
  assertTrue(executedOnceTestCase("switch (x) {}"));
  assertFalse(executedOnceTestCase("switch (1) {case x:}"));
  assertFalse(executedOnceTestCase("switch (1) {case 1: x}"));
  assertFalse(executedOnceTestCase("switch (1) {default: x}"));
  assertFalse(executedOnceTestCase("if (1) { switch (x) {} }"));
  assertFalse(executedOnceTestCase("while (x) {}"));
  assertFalse(executedOnceTestCase("while (1) {x}"));
  assertFalse(executedOnceTestCase("do {} while (x)"));
  assertFalse(executedOnceTestCase("do {x} while (1)"));
  // try/catch: only a finally block is guaranteed to run exactly once.
  assertFalse(executedOnceTestCase("try {x} catch (e) {}"));
  assertFalse(executedOnceTestCase("try {} catch (e) {x}"));
  assertTrue(executedOnceTestCase("try {} finally {x}"));
  assertFalse(executedOnceTestCase("if (1) { try {} finally {x} }"));
}
/**
 * Parses {@code code}, finds the reference to "x", and reports whether
 * {@link NodeUtil#isExecutedExactlyOnce} says it runs exactly once.
 */
private boolean executedOnceTestCase(String code) {
  Node xRef = getNameNode(parse(code), "x");
  return NodeUtil.isExecutedExactlyOnce(xRef);
}
/**
 * Returns the string of the best l-value for the first function in
 * {@code js}, or null if there is none.
 */
private String getFunctionLValue(String js) {
  Node bestLValue = NodeUtil.getBestLValue(getFunctionNode(js));
  if (bestLValue == null) {
    return null;
  }
  return bestLValue.getString();
}
/**
 * Asserts that the nearest function name for the first function in
 * {@code js} equals {@code expected} (which may be null).
 */
static void testFunctionName(String js, String expected) {
  Node functionNode = getFunctionNode(js);
  String actual = NodeUtil.getNearestFunctionName(functionNode);
  assertEquals(expected, actual);
}
/** Parses {@code js} and returns its first FUNCTION node, or null. */
static Node getFunctionNode(String js) {
  return getFunctionNode(parse(js));
}
/**
 * Depth-first search for the first FUNCTION node in the subtree rooted at
 * {@code n}, including {@code n} itself; returns null if none exists.
 */
static Node getFunctionNode(Node n) {
  if (n.isFunction()) {
    return n;
  }
  Node found = null;
  for (Node child : n.children()) {
    found = getFunctionNode(child);
    if (found != null) {
      break;
    }
  }
  return found;
}
/**
 * Depth-first search for the first NAME node whose string equals
 * {@code name} in the subtree rooted at {@code n}; returns null if absent.
 */
static Node getNameNode(Node n, String name) {
  if (n.isName() && n.getString().equals(name)) {
    return n;
  }
  Node found = null;
  for (Node child : n.children()) {
    found = getNameNode(child, name);
    if (found != null) {
      break;
    }
  }
  return found;
}
}
| |
// Copyright 2010 Google Inc. All Rights Reserved.
package com.google.devtools.simple.runtime.components.android;
import com.google.devtools.simple.common.ComponentCategory;
import com.google.devtools.simple.common.PropertyCategory;
import com.google.devtools.simple.common.YaVersion;
import com.google.devtools.simple.runtime.annotations.DesignerComponent;
import com.google.devtools.simple.runtime.annotations.DesignerProperty;
import com.google.devtools.simple.runtime.annotations.SimpleEvent;
import com.google.devtools.simple.runtime.annotations.SimpleFunction;
import com.google.devtools.simple.runtime.annotations.SimpleObject;
import com.google.devtools.simple.runtime.annotations.SimpleProperty;
import com.google.devtools.simple.runtime.events.EventDispatcher;
import android.os.Handler;
/**
 * A component that provides a high-level interface to a light sensor on a LEGO
 * MINDSTORMS NXT robot.
 *
 */
@DesignerComponent(version = YaVersion.NXT_LIGHTSENSOR_COMPONENT_VERSION,
    description = "A component that provides a high-level interface to a light sensor on a " +
    "LEGO MINDSTORMS NXT robot.",
    category = ComponentCategory.LEGOMINDSTORMS,
    nonVisible = true,
    iconName = "images/legoMindstormsNxt.png")
@SimpleObject
public class NxtLightSensor extends LegoMindstormsNxtSensor implements Deleteable {

  // Where the most recent reading fell relative to [bottomOfRange, topOfRange].
  private enum State { UNKNOWN, BELOW_RANGE, WITHIN_RANGE, ABOVE_RANGE }

  private static final String DEFAULT_SENSOR_PORT = "3";
  private static final int DEFAULT_BOTTOM_OF_RANGE = 256;
  private static final int DEFAULT_TOP_OF_RANGE = 767;

  private final Handler handler;  // assigned once in the constructor, so final
  // Polls the sensor and fires BelowRange/WithinRange/AboveRange events; it
  // re-posts itself for as long as at least one of those events is enabled.
  private final Runnable sensorReader;

  private State previousState;
  private int bottomOfRange;
  private int topOfRange;
  private boolean belowRangeEventEnabled;
  private boolean withinRangeEventEnabled;
  private boolean aboveRangeEventEnabled;
  private boolean generateLight;

  /**
   * Creates a new NxtLightSensor component.
   */
  public NxtLightSensor(ComponentContainer container) {
    super(container, "NxtLightSensor");
    handler = new Handler();
    previousState = State.UNKNOWN;
    sensorReader = new Runnable() {
      public void run() {
        if (bluetooth != null && bluetooth.IsConnected()) {
          SensorValue<Integer> sensorValue = getLightValue("");
          if (sensorValue.valid) {
            // Classify the reading against the configured range.
            State currentState;
            if (sensorValue.value < bottomOfRange) {
              currentState = State.BELOW_RANGE;
            } else if (sensorValue.value > topOfRange) {
              currentState = State.ABOVE_RANGE;
            } else {
              currentState = State.WITHIN_RANGE;
            }
            // Fire events only on a state transition, not on every reading.
            if (currentState != previousState) {
              if (currentState == State.BELOW_RANGE && belowRangeEventEnabled) {
                BelowRange();
              }
              if (currentState == State.WITHIN_RANGE && withinRangeEventEnabled) {
                WithinRange();
              }
              if (currentState == State.ABOVE_RANGE && aboveRangeEventEnabled) {
                AboveRange();
              }
            }
            previousState = currentState;
          }
        }
        // Keep polling while any range event remains enabled.
        if (isHandlerNeeded()) {
          handler.post(sensorReader);
        }
      }
    };

    // NOTE(review): these call public (overridable) setters from the
    // constructor, matching the established component pattern here.
    SensorPort(DEFAULT_SENSOR_PORT);
    BottomOfRange(DEFAULT_BOTTOM_OF_RANGE);
    TopOfRange(DEFAULT_TOP_OF_RANGE);
    BelowRangeEventEnabled(false);
    WithinRangeEventEnabled(false);
    AboveRangeEventEnabled(false);
    GenerateLight(false);
  }

  @Override
  protected void initializeSensor(String functionName) {
    setInputMode(functionName, port,
        generateLight ? SENSOR_TYPE_LIGHT_ACTIVE : SENSOR_TYPE_LIGHT_INACTIVE,
        SENSOR_MODE_PCTFULLSCALEMODE);
  }

  /**
   * Specifies the sensor port that the sensor is connected to.
   */
  @DesignerProperty(editorType = DesignerProperty.PROPERTY_TYPE_LEGO_NXT_SENSOR_PORT,
      defaultValue = DEFAULT_SENSOR_PORT)
  @SimpleProperty(userVisible = false)
  public void SensorPort(String sensorPortLetter) {
    setSensorPort(sensorPortLetter);
  }

  /**
   * Returns whether the light sensor should generate light.
   */
  @SimpleProperty(description = "Whether the light sensor should generate light.",
      category = PropertyCategory.BEHAVIOR)
  public boolean GenerateLight() {
    return generateLight;
  }

  /**
   * Specifies whether the light sensor should generate light.
   */
  @DesignerProperty(editorType = DesignerProperty.PROPERTY_TYPE_BOOLEAN,
      defaultValue = "False")
  @SimpleProperty
  public void GenerateLight(boolean generateLight) {
    this.generateLight = generateLight;
    // Re-initialize immediately so the new mode takes effect on the robot.
    if (bluetooth != null && bluetooth.IsConnected()) {
      initializeSensor("GenerateLight");
    }
  }

  @SimpleFunction(description = "Returns the current light level as a value between 0 and 1023, " +
      "or -1 if the light level can not be read.")
  public int GetLightLevel() {
    String functionName = "GetLightLevel";
    if (!checkBluetooth(functionName)) {
      return -1;
    }
    SensorValue<Integer> sensorValue = getLightValue(functionName);
    if (sensorValue.valid) {
      return sensorValue.value;
    }
    // invalid response
    return -1;
  }

  /**
   * Reads the current light value from the robot.
   *
   * @param functionName name reported in error messages by the NXT helpers
   * @return a valid SensorValue with the normalized reading, or an invalid
   *         one if the response could not be read
   */
  private SensorValue<Integer> getLightValue(String functionName) {
    byte[] returnPackage = getInputValues(functionName, port);
    if (returnPackage != null) {
      boolean valid = getBooleanValueFromBytes(returnPackage, 4);
      if (valid) {
        int normalizedValue = getUWORDValueFromBytes(returnPackage, 10);
        return new SensorValue<Integer>(true, normalizedValue);
      }
    }
    // invalid response
    return new SensorValue<Integer>(false, null);
  }

  /**
   * Returns the bottom of the range used for the BelowRange, WithinRange,
   * and AboveRange events.
   */
  @SimpleProperty(description = "The bottom of the range used for the BelowRange, WithinRange," +
      " and AboveRange events.",
      category = PropertyCategory.BEHAVIOR)
  public int BottomOfRange() {
    return bottomOfRange;
  }

  /**
   * Specifies the bottom of the range used for the BelowRange, WithinRange,
   * and AboveRange events.
   */
  @DesignerProperty(editorType = DesignerProperty.PROPERTY_TYPE_INTEGER,
      defaultValue = "" + DEFAULT_BOTTOM_OF_RANGE)
  @SimpleProperty
  public void BottomOfRange(int bottomOfRange) {
    this.bottomOfRange = bottomOfRange;
    // Force re-classification of the next reading against the new range.
    previousState = State.UNKNOWN;
  }

  /**
   * Returns the top of the range used for the BelowRange, WithinRange, and
   * AboveRange events.
   */
  @SimpleProperty(description = "The top of the range used for the BelowRange, WithinRange, and" +
      " AboveRange events.",
      category = PropertyCategory.BEHAVIOR)
  public int TopOfRange() {
    return topOfRange;
  }

  /**
   * Specifies the top of the range used for the BelowRange, WithinRange, and
   * AboveRange events.
   */
  @DesignerProperty(editorType = DesignerProperty.PROPERTY_TYPE_INTEGER,
      defaultValue = "" + DEFAULT_TOP_OF_RANGE)
  @SimpleProperty
  public void TopOfRange(int topOfRange) {
    this.topOfRange = topOfRange;
    // Force re-classification of the next reading against the new range.
    previousState = State.UNKNOWN;
  }

  /**
   * Returns whether the BelowRange event should fire when the light level
   * goes below the BottomOfRange.
   */
  @SimpleProperty(description = "Whether the BelowRange event should fire when the light level" +
      " goes below the BottomOfRange.",
      category = PropertyCategory.BEHAVIOR)
  public boolean BelowRangeEventEnabled() {
    return belowRangeEventEnabled;
  }

  /**
   * Specifies whether the BelowRange event should fire when the light level
   * goes below the BottomOfRange.
   */
  @DesignerProperty(editorType = DesignerProperty.PROPERTY_TYPE_BOOLEAN, defaultValue = "False")
  @SimpleProperty
  public void BelowRangeEventEnabled(boolean enabled) {
    boolean handlerWasNeeded = isHandlerNeeded();
    belowRangeEventEnabled = enabled;
    updateSensorReaderScheduling(handlerWasNeeded);
  }

  @SimpleEvent(description = "Light level has gone below the range.")
  public void BelowRange() {
    EventDispatcher.dispatchEvent(this, "BelowRange");
  }

  /**
   * Returns whether the WithinRange event should fire when the light level
   * goes between the BottomOfRange and the TopOfRange.
   */
  @SimpleProperty(description = "Whether the WithinRange event should fire when the light level" +
      " goes between the BottomOfRange and the TopOfRange.",
      category = PropertyCategory.BEHAVIOR)
  public boolean WithinRangeEventEnabled() {
    return withinRangeEventEnabled;
  }

  /**
   * Specifies whether the WithinRange event should fire when the light level
   * goes between the BottomOfRange and the TopOfRange.
   */
  @DesignerProperty(editorType = DesignerProperty.PROPERTY_TYPE_BOOLEAN, defaultValue = "False")
  @SimpleProperty
  public void WithinRangeEventEnabled(boolean enabled) {
    boolean handlerWasNeeded = isHandlerNeeded();
    withinRangeEventEnabled = enabled;
    updateSensorReaderScheduling(handlerWasNeeded);
  }

  @SimpleEvent(description = "Light level has gone within the range.")
  public void WithinRange() {
    EventDispatcher.dispatchEvent(this, "WithinRange");
  }

  /**
   * Returns whether the AboveRange event should fire when the light level
   * goes above the TopOfRange.
   */
  @SimpleProperty(description = "Whether the AboveRange event should fire when the light level" +
      " goes above the TopOfRange.",
      category = PropertyCategory.BEHAVIOR)
  public boolean AboveRangeEventEnabled() {
    return aboveRangeEventEnabled;
  }

  /**
   * Specifies whether the AboveRange event should fire when the light level
   * goes above the TopOfRange.
   */
  @DesignerProperty(editorType = DesignerProperty.PROPERTY_TYPE_BOOLEAN, defaultValue = "False")
  @SimpleProperty
  public void AboveRangeEventEnabled(boolean enabled) {
    boolean handlerWasNeeded = isHandlerNeeded();
    aboveRangeEventEnabled = enabled;
    updateSensorReaderScheduling(handlerWasNeeded);
  }

  @SimpleEvent(description = "Light level has gone above the range.")
  public void AboveRange() {
    EventDispatcher.dispatchEvent(this, "AboveRange");
  }

  private boolean isHandlerNeeded() {
    return belowRangeEventEnabled || withinRangeEventEnabled || aboveRangeEventEnabled;
  }

  /**
   * Starts or stops the sensorReader polling loop after one of the
   * event-enabled flags changed. Extracted because the three *EventEnabled
   * setters previously repeated this logic verbatim.
   *
   * @param handlerWasNeeded whether polling was needed before the change
   */
  private void updateSensorReaderScheduling(boolean handlerWasNeeded) {
    boolean handlerIsNeeded = isHandlerNeeded();
    if (handlerWasNeeded && !handlerIsNeeded) {
      // Last enabled event was turned off: stop polling.
      handler.removeCallbacks(sensorReader);
    } else if (!handlerWasNeeded && handlerIsNeeded) {
      // First event was turned on: reset state and start polling.
      previousState = State.UNKNOWN;
      handler.post(sensorReader);
    }
  }

  // Deleteable implementation

  @Override
  public void onDelete() {
    handler.removeCallbacks(sensorReader);
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.rel.stream;
import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.plan.RelOptRule;
import org.apache.calcite.plan.RelOptRuleCall;
import org.apache.calcite.plan.RelOptTable;
import org.apache.calcite.prepare.RelOptTableImpl;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.core.Aggregate;
import org.apache.calcite.rel.core.Filter;
import org.apache.calcite.rel.core.Join;
import org.apache.calcite.rel.core.Project;
import org.apache.calcite.rel.core.RelFactories;
import org.apache.calcite.rel.core.Sort;
import org.apache.calcite.rel.core.TableScan;
import org.apache.calcite.rel.core.Union;
import org.apache.calcite.rel.core.Values;
import org.apache.calcite.rel.logical.LogicalAggregate;
import org.apache.calcite.rel.logical.LogicalFilter;
import org.apache.calcite.rel.logical.LogicalJoin;
import org.apache.calcite.rel.logical.LogicalProject;
import org.apache.calcite.rel.logical.LogicalSort;
import org.apache.calcite.rel.logical.LogicalTableScan;
import org.apache.calcite.rel.logical.LogicalUnion;
import org.apache.calcite.schema.StreamableTable;
import org.apache.calcite.schema.Table;
import org.apache.calcite.tools.RelBuilder;
import org.apache.calcite.tools.RelBuilderFactory;
import org.apache.calcite.util.Util;
import com.google.common.collect.ImmutableList;
import java.util.ArrayList;
import java.util.List;
/**
* Rules and relational operators for streaming relational expressions.
*/
public class StreamRules {
/** Private constructor: this is a static holder of rules, never instantiated. */
private StreamRules() {}
/** The standard set of stream planner rules, built with the logical builder. */
public static final ImmutableList<RelOptRule> RULES =
    ImmutableList.of(
        new DeltaProjectTransposeRule(RelFactories.LOGICAL_BUILDER),
        new DeltaFilterTransposeRule(RelFactories.LOGICAL_BUILDER),
        new DeltaAggregateTransposeRule(RelFactories.LOGICAL_BUILDER),
        new DeltaSortTransposeRule(RelFactories.LOGICAL_BUILDER),
        new DeltaUnionTransposeRule(RelFactories.LOGICAL_BUILDER),
        new DeltaJoinTransposeRule(RelFactories.LOGICAL_BUILDER),
        new DeltaTableScanRule(RelFactories.LOGICAL_BUILDER),
        new DeltaTableScanToEmptyRule(RelFactories.LOGICAL_BUILDER));
/** Planner rule that pushes a {@link Delta} through a {@link Project}. */
public static class DeltaProjectTransposeRule extends RelOptRule {
  /**
   * Creates a DeltaProjectTransposeRule.
   *
   * @param relBuilderFactory Builder for relational expressions
   */
  public DeltaProjectTransposeRule(RelBuilderFactory relBuilderFactory) {
    super(operand(Delta.class, operand(Project.class, any())),
        relBuilderFactory, null);
  }

  @Override public void onMatch(RelOptRuleCall call) {
    final Delta matchedDelta = call.rel(0);
    Util.discard(matchedDelta);  // replaced, not inspected
    final Project project = call.rel(1);
    // Rewrite Delta(Project(input)) as Project(Delta(input)).
    final LogicalDelta pushedDelta = LogicalDelta.create(project.getInput());
    call.transformTo(
        LogicalProject.create(pushedDelta, project.getProjects(),
            project.getRowType().getFieldNames()));
  }
}
/** Planner rule that pushes a {@link Delta} through a {@link Filter}. */
public static class DeltaFilterTransposeRule extends RelOptRule {
  /**
   * Creates a DeltaFilterTransposeRule.
   *
   * @param relBuilderFactory Builder for relational expressions
   */
  public DeltaFilterTransposeRule(RelBuilderFactory relBuilderFactory) {
    super(operand(Delta.class, operand(Filter.class, any())),
        relBuilderFactory, null);
  }

  @Override public void onMatch(RelOptRuleCall call) {
    final Delta matchedDelta = call.rel(0);
    Util.discard(matchedDelta);  // replaced, not inspected
    final Filter filter = call.rel(1);
    // Rewrite Delta(Filter(input)) as Filter(Delta(input)).
    final LogicalDelta pushedDelta = LogicalDelta.create(filter.getInput());
    call.transformTo(
        LogicalFilter.create(pushedDelta, filter.getCondition()));
  }
}
/** Planner rule that pushes a {@link Delta} through an {@link Aggregate}. */
public static class DeltaAggregateTransposeRule extends RelOptRule {
  /**
   * Creates a DeltaAggregateTransposeRule.
   *
   * <p>Matches only simple aggregates (see {@link Aggregate#isSimple}),
   * as enforced by the {@code operandJ} predicate below.
   *
   * @param relBuilderFactory Builder for relational expressions
   */
  public DeltaAggregateTransposeRule(RelBuilderFactory relBuilderFactory) {
    super(
        operand(Delta.class,
            operandJ(Aggregate.class, null, Aggregate::isSimple,
                any())),
        relBuilderFactory, null);
  }

  @Override public void onMatch(RelOptRuleCall call) {
    final Delta delta = call.rel(0);
    // The Delta is replaced, not inspected.
    Util.discard(delta);
    final Aggregate aggregate = call.rel(1);
    // Rewrite Delta(Aggregate(input)) as Aggregate(Delta(input)),
    // preserving group sets and aggregate calls.
    final LogicalDelta newDelta =
        LogicalDelta.create(aggregate.getInput());
    final LogicalAggregate newAggregate =
        LogicalAggregate.create(newDelta, aggregate.getGroupSet(),
            aggregate.groupSets, aggregate.getAggCallList());
    call.transformTo(newAggregate);
  }
}
/** Planner rule that pushes a {@link Delta} through a {@link Sort}. */
public static class DeltaSortTransposeRule extends RelOptRule {
  /**
   * Creates a DeltaSortTransposeRule.
   *
   * @param relBuilderFactory Builder for relational expressions
   */
  public DeltaSortTransposeRule(RelBuilderFactory relBuilderFactory) {
    super(
        operand(Delta.class, operand(Sort.class, any())),
        relBuilderFactory, null);
  }

  @Override public void onMatch(RelOptRuleCall call) {
    final Delta delta = call.rel(0);
    Util.discard(delta);
    final Sort sort = call.rel(1);
    // Rebuild as Sort(Delta(input)), preserving collation, offset and fetch.
    call.transformTo(
        LogicalSort.create(LogicalDelta.create(sort.getInput()),
            sort.collation, sort.offset, sort.fetch));
  }
}
/** Planner rule that pushes a {@link Delta} through a {@link Union}. */
public static class DeltaUnionTransposeRule extends RelOptRule {
  /**
   * Creates a DeltaUnionTransposeRule.
   *
   * @param relBuilderFactory Builder for relational expressions
   */
  public DeltaUnionTransposeRule(RelBuilderFactory relBuilderFactory) {
    super(
        operand(Delta.class, operand(Union.class, any())),
        relBuilderFactory, null);
  }

  @Override public void onMatch(RelOptRuleCall call) {
    final Delta delta = call.rel(0);
    Util.discard(delta);
    final Union union = call.rel(1);
    // Wrap each branch of the union in its own Delta, then union the results
    // with the same duplicate-handling setting as the original.
    final List<RelNode> deltaInputs = new ArrayList<>();
    for (RelNode branch : union.getInputs()) {
      deltaInputs.add(LogicalDelta.create(branch));
    }
    call.transformTo(LogicalUnion.create(deltaInputs, union.all));
  }
}
/** Planner rule that pushes a {@link Delta} into a {@link TableScan} of a
 * {@link org.apache.calcite.schema.StreamableTable}.
 *
 * <p>Very likely, the stream was only represented as a table for uniformity
 * with the other relations in the system. The Delta disappears and the stream
 * can be implemented directly. */
public static class DeltaTableScanRule extends RelOptRule {
  /**
   * Creates a DeltaTableScanRule.
   *
   * @param relBuilderFactory Builder for relational expressions
   */
  public DeltaTableScanRule(RelBuilderFactory relBuilderFactory) {
    super(
        operand(Delta.class, operand(TableScan.class, none())),
        relBuilderFactory, null);
  }

  @Override public void onMatch(RelOptRuleCall call) {
    final Delta delta = call.rel(0);
    final TableScan scan = call.rel(1);
    final RelOptTable table = scan.getTable();
    final StreamableTable streamableTable = table.unwrap(StreamableTable.class);
    if (streamableTable == null) {
      // Table is not streamable; this rule does not apply.
      return;
    }
    // Scan the stream variant of the table directly; the Delta disappears.
    // The stream table is registered under "<qualified name>.(STREAM)".
    final Table streamTable = streamableTable.stream();
    final RelOptTable streamRelOptTable =
        RelOptTableImpl.create(table.getRelOptSchema(),
            table.getRowType(), streamTable,
            ImmutableList.<String>builder()
                .addAll(table.getQualifiedName())
                .add("(STREAM)").build());
    call.transformTo(
        LogicalTableScan.create(delta.getCluster(), streamRelOptTable));
  }
}
/**
 * Planner rule that converts {@link Delta} over a {@link TableScan} of
 * a table other than {@link org.apache.calcite.schema.StreamableTable} to
 * an empty {@link Values}.
 */
public static class DeltaTableScanToEmptyRule extends RelOptRule {
  /**
   * Creates a DeltaTableScanToEmptyRule.
   *
   * @param relBuilderFactory Builder for relational expressions
   */
  public DeltaTableScanToEmptyRule(RelBuilderFactory relBuilderFactory) {
    super(
        operand(Delta.class, operand(TableScan.class, none())),
        relBuilderFactory, null);
  }

  @Override public void onMatch(RelOptRuleCall call) {
    final Delta delta = call.rel(0);
    final TableScan scan = call.rel(1);
    // Only fires when the table is NOT streamable: the delta of a plain
    // (non-stream) table is empty, so replace it with empty Values.
    if (scan.getTable().unwrap(StreamableTable.class) == null) {
      final RelBuilder relBuilder = call.builder();
      call.transformTo(relBuilder.values(delta.getRowType()).build());
    }
  }
}
/**
* Planner rule that pushes a {@link Delta} through a {@link Join}.
*
* <p>We apply something analogous to the
* <a href="https://en.wikipedia.org/wiki/Product_rule">product rule of
* differential calculus</a> to implement the transpose:
*
* <blockquote><code>stream(x join y) →
* x join stream(y) union all stream(x) join y</code></blockquote>
*/
public static class DeltaJoinTransposeRule extends RelOptRule {
@Deprecated // to be removed before 2.0
public DeltaJoinTransposeRule() {
this(RelFactories.LOGICAL_BUILDER);
}
/**
* Creates a DeltaJoinTransposeRule.
*
* @param relBuilderFactory Builder for relational expressions
*/
public DeltaJoinTransposeRule(RelBuilderFactory relBuilderFactory) {
super(
operand(Delta.class,
operand(Join.class, any())),
relBuilderFactory, null);
}
public void onMatch(RelOptRuleCall call) {
final Delta delta = call.rel(0);
Util.discard(delta);
final Join join = call.rel(1);
final RelNode left = join.getLeft();
final RelNode right = join.getRight();
final LogicalDelta rightWithDelta = LogicalDelta.create(right);
final LogicalJoin joinL = LogicalJoin.create(left, rightWithDelta,
join.getCondition(), join.getVariablesSet(), join.getJoinType(),
join.isSemiJoinDone(),
ImmutableList.copyOf(join.getSystemFieldList()));
final LogicalDelta leftWithDelta = LogicalDelta.create(left);
final LogicalJoin joinR = LogicalJoin.create(leftWithDelta, right,
join.getCondition(), join.getVariablesSet(), join.getJoinType(),
join.isSemiJoinDone(),
ImmutableList.copyOf(join.getSystemFieldList()));
List<RelNode> inputsToUnion = new ArrayList<>();
inputsToUnion.add(joinL);
inputsToUnion.add(joinR);
final LogicalUnion newNode = LogicalUnion.create(inputsToUnion, true);
call.transformTo(newNode);
}
}
}
// End StreamRules.java
| |
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.initialization;
import groovy.lang.GroovyObject;
import groovy.lang.GroovySystem;
import groovy.lang.MetaClass;
import groovy.lang.MetaClassRegistry;
import org.apache.commons.lang.StringUtils;
import org.gradle.internal.classanalysis.AsmConstants;
import org.gradle.internal.classloader.TransformingClassLoader;
import org.gradle.internal.classloader.VisitableURLClassLoader;
import org.gradle.internal.classpath.ClassPath;
import org.gradle.internal.reflect.PropertyAccessorType;
import org.objectweb.asm.ClassReader;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.FieldVisitor;
import org.objectweb.asm.Label;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;
import javax.annotation.Nullable;
import java.net.URL;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * A ClassLoader that takes care of mixing-in some methods and types into various classes, for binary compatibility with older Gradle versions.
 *
 * <p>Mixes GroovyObject into certain types.</p>
 * <p>Generates empty interfaces for certain types that have been removed, but which are baked into the bytecode generated by the Groovy compiler.</p>
 */
public class MixInLegacyTypesClassLoader extends TransformingClassLoader {
    // ASM Type handles for the classes referenced by the generated bytecode below.
    private static final Type GROOVY_OBJECT_TYPE = Type.getType(GroovyObject.class);
    private static final Type META_CLASS_REGISTRY_TYPE = Type.getType(MetaClassRegistry.class);
    private static final Type GROOVY_SYSTEM_TYPE = Type.getType(GroovySystem.class);
    private static final Type META_CLASS_TYPE = Type.getType(MetaClass.class);
    private static final Type OBJECT_TYPE = Type.getType(Object.class);
    private static final Type CLASS_TYPE = Type.getType(Class.class);
    private static final Type STRING_TYPE = Type.getType(String.class);

    // Pre-computed JVM method descriptors for the GroovyObject plumbing that
    // TransformingAdapter generates (getProperty, setProperty, invokeMethod,
    // get/setMetaClass and friends).
    private static final String RETURN_OBJECT_FROM_OBJECT_STRING_OBJECT = Type.getMethodDescriptor(OBJECT_TYPE, OBJECT_TYPE, STRING_TYPE, OBJECT_TYPE);
    private static final String RETURN_OBJECT_FROM_STRING_OBJECT = Type.getMethodDescriptor(OBJECT_TYPE, STRING_TYPE, OBJECT_TYPE);
    private static final String RETURN_OBJECT_FROM_STRING = Type.getMethodDescriptor(OBJECT_TYPE, STRING_TYPE);
    private static final String RETURN_OBJECT_FROM_OBJECT_STRING = Type.getMethodDescriptor(OBJECT_TYPE, OBJECT_TYPE, STRING_TYPE);
    private static final String RETURN_VOID_FROM_OBJECT_STRING_OBJECT = Type.getMethodDescriptor(Type.VOID_TYPE, OBJECT_TYPE, STRING_TYPE, OBJECT_TYPE);
    private static final String RETURN_VOID_FROM_STRING_OBJECT = Type.getMethodDescriptor(Type.VOID_TYPE, STRING_TYPE, OBJECT_TYPE);
    private static final String RETURN_META_CLASS_REGISTRY = Type.getMethodDescriptor(META_CLASS_REGISTRY_TYPE);
    private static final String RETURN_META_CLASS_FROM_CLASS = Type.getMethodDescriptor(META_CLASS_TYPE, CLASS_TYPE);
    private static final String RETURN_META_CLASS = Type.getMethodDescriptor(META_CLASS_TYPE);
    private static final String RETURN_CLASS = Type.getMethodDescriptor(CLASS_TYPE);

    // Name of the synthetic instance field added to transformed classes to
    // cache the lazily resolved MetaClass (see addMetaClassField/addGetMetaClass).
    private static final String META_CLASS_FIELD = "__meta_class__";

    // Supplies the sets of class names to transform and the bytecode of
    // synthetic (removed legacy) classes.
    private LegacyTypesSupport legacyTypesSupport;

    static {
        try {
            // Allow concurrent class loading where the JVM supports it.
            ClassLoader.registerAsParallelCapable();
        } catch (NoSuchMethodError ignore) {
            // Not supported on Java 6
        }
    }

    /**
     * Creates the loader over a {@link ClassPath}.
     *
     * @param parent parent class loader
     * @param classPath classes to load and (where needed) transform
     * @param legacyTypesSupport source of legacy-type metadata and synthetic classes
     */
    public MixInLegacyTypesClassLoader(ClassLoader parent, ClassPath classPath, LegacyTypesSupport legacyTypesSupport) {
        super("legacy-mixin-loader", parent, classPath);
        this.legacyTypesSupport = legacyTypesSupport;
    }

    /**
     * Creates the loader over a collection of URLs.
     *
     * @param parent parent class loader
     * @param urls classpath entries to load from
     * @param legacyTypesSupport source of legacy-type metadata and synthetic classes
     */
    public MixInLegacyTypesClassLoader(ClassLoader parent, Collection<URL> urls, LegacyTypesSupport legacyTypesSupport) {
        super("legacy-mixin-loader", parent, urls);
        this.legacyTypesSupport = legacyTypesSupport;
    }

    /**
     * Generates the bytecode of a synthetic class for a removed legacy type,
     * or returns null when {@code name} is not one of the known synthetic classes.
     */
    @Nullable
    @Override
    protected byte[] generateMissingClass(String name) {
        if (!legacyTypesSupport.getSyntheticClasses().contains(name)) {
            return null;
        }
        return legacyTypesSupport.generateSyntheticClass(name);
    }

    /**
     * A class is transformed when it either needs GroovyObject mixed in or is
     * one of the synthetic legacy classes.
     */
    @Override
    protected boolean shouldTransform(String className) {
        return legacyTypesSupport.getClassesToMixInGroovyObject().contains(className) || legacyTypesSupport.getSyntheticClasses().contains(className);
    }

    /**
     * Rewrites the class bytes through {@link TransformingAdapter}, which adds
     * the GroovyObject plumbing and synthetic getters.
     */
    @Override
    protected byte[] transform(String className, byte[] bytes) {
        ClassReader classReader = new ClassReader(bytes);
        // ClassWriter(0): no automatic frame/max computation; the adapter emits
        // explicit visitMaxs/visitFrame calls for the methods it generates.
        ClassWriter classWriter = new ClassWriter(0);
        classReader.accept(new TransformingAdapter(classWriter), 0);
        bytes = classWriter.toByteArray();
        return bytes;
    }

    /**
     * ASM visitor that performs the actual rewrite of a single class:
     * implements GroovyObject (meta-class field + accessor methods), and adds
     * synthetic getters for public static final String constants and for
     * private boolean fields that only have an is-getter.
     */
    private static class TransformingAdapter extends ClassVisitor {
        private static final int PUBLIC_STATIC_FINAL = Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC | Opcodes.ACC_FINAL;

        // Internal name of the class being visited; captured in visit().
        private String className;

        /**
         * We only add getters for `public static final String` constants. This is because in
         * the converted classes only contain these kinds of constants.
         */
        // Maps getter name ("get" + field name) -> constant value; entries are
        // removed again in visitMethod when a method with that name already exists.
        private Map<String, String> missingStaticStringConstantGetters = new HashMap<String, String>();

        // Property names that already have a public non-static getX() : boolean getter.
        private Set<String> booleanGetGetters = new HashSet<String>();
        // Names of private non-static boolean fields seen in visitField.
        private Set<String> booleanFields = new HashSet<String>();
        // Property names that have a public non-static isX() : boolean getter.
        private Set<String> booleanIsGetters = new HashSet<String>();

        TransformingAdapter(ClassVisitor cv) {
            super(AsmConstants.ASM_LEVEL, cv);
        }

        @Override
        public void visit(int version, int access, String name, String signature, String superName, String[] interfaces) {
            this.className = name;
            // Add GroovyObject to the implemented interfaces (LinkedHashSet
            // keeps the original order and avoids a duplicate entry).
            Set<String> interfaceNames = new LinkedHashSet<String>(Arrays.asList(interfaces));
            interfaceNames.add(GROOVY_OBJECT_TYPE.getInternalName());
            cv.visit(version, access, name, signature, superName, interfaceNames.toArray(new String[0]));
        }

        @Override
        public FieldVisitor visitField(int access, String name, String desc, String signature, Object value) {
            // Record public static final String constants: a "get<Name>" getter
            // returning the constant value may be generated in visitEnd.
            if (((access & PUBLIC_STATIC_FINAL) == PUBLIC_STATIC_FINAL) && Type.getDescriptor(String.class).equals(desc)) {
                missingStaticStringConstantGetters.put("get" + name, (String) value);
            }
            // Record private instance boolean fields as candidates for a
            // synthetic get-getter (see addBooleanGetGetters).
            if (((access & Opcodes.ACC_PRIVATE) > 0) && !isStatic(access) && (Type.getDescriptor(boolean.class).equals(desc))) {
                booleanFields.add(name);
            }
            return super.visitField(access, name, desc, signature, value);
        }

        @Override
        public MethodVisitor visitMethod(int access, String name, String desc, String signature, String[] exceptions) {
            // A method with the same name as a would-be constant getter already
            // exists, so don't generate one. NOTE(review): matched by name only,
            // not by descriptor.
            if (missingStaticStringConstantGetters.containsKey(name)) {
                missingStaticStringConstantGetters.remove(name);
            }
            // Classify existing public no-arg boolean methods as is-/get-getters.
            if (((access & Opcodes.ACC_PUBLIC) > 0) && !isStatic(access) && Type.getMethodDescriptor(Type.BOOLEAN_TYPE).equals(desc)) {
                PropertyAccessorType accessorType = PropertyAccessorType.fromName(name);
                if (accessorType != null) {
                    String propertyName = accessorType.propertyNameFor(name);
                    if (accessorType == PropertyAccessorType.IS_GETTER) {
                        booleanIsGetters.add(propertyName);
                    } else if (accessorType == PropertyAccessorType.GET_GETTER) {
                        booleanGetGetters.add(propertyName);
                    }
                }
            }
            return super.visitMethod(access, name, desc, signature, exceptions);
        }

        @Override
        public void visitEnd() {
            // Emit all the synthetic members collected during the visit.
            addMetaClassField();
            addGetMetaClass();
            addSetMetaClass();
            addGetProperty();
            addSetProperty();
            addInvokeMethod();
            addStaticStringConstantGetters();
            addBooleanGetGetters();
            cv.visitEnd();
        }

        private boolean isStatic(int access) {
            return (access & Opcodes.ACC_STATIC) > 0;
        }

        /** Adds the private field that caches the MetaClass instance. */
        private void addMetaClassField() {
            cv.visitField(Opcodes.ACC_PRIVATE, META_CLASS_FIELD, META_CLASS_TYPE.getDescriptor(), null, null);
        }

        /** Generates {@code Object getProperty(String)} delegating to the MetaClass. */
        private void addGetProperty() {
            MethodVisitor methodVisitor = cv.visitMethod(Opcodes.ACC_PUBLIC | Opcodes.ACC_SYNTHETIC, "getProperty", RETURN_OBJECT_FROM_STRING, null, null);
            methodVisitor.visitCode();
            // this.getMetaClass()
            methodVisitor.visitVarInsn(Opcodes.ALOAD, 0);
            methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, className, "getMetaClass", RETURN_META_CLASS, false);
            // getProperty(this, name)
            methodVisitor.visitVarInsn(Opcodes.ALOAD, 0);
            methodVisitor.visitVarInsn(Opcodes.ALOAD, 1);
            methodVisitor.visitMethodInsn(Opcodes.INVOKEINTERFACE, META_CLASS_TYPE.getInternalName(), "getProperty", RETURN_OBJECT_FROM_OBJECT_STRING, true);
            // return
            methodVisitor.visitInsn(Opcodes.ARETURN);
            methodVisitor.visitMaxs(3, 2);
            methodVisitor.visitEnd();
        }

        /** Generates {@code void setProperty(String, Object)} delegating to the MetaClass. */
        private void addSetProperty() {
            MethodVisitor methodVisitor = cv.visitMethod(Opcodes.ACC_PUBLIC | Opcodes.ACC_SYNTHETIC, "setProperty", RETURN_VOID_FROM_STRING_OBJECT, null, null);
            methodVisitor.visitCode();
            // this.getMetaClass()
            methodVisitor.visitVarInsn(Opcodes.ALOAD, 0);
            methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, className, "getMetaClass", RETURN_META_CLASS, false);
            // setProperty(this, name, value)
            methodVisitor.visitVarInsn(Opcodes.ALOAD, 0);
            methodVisitor.visitVarInsn(Opcodes.ALOAD, 1);
            methodVisitor.visitVarInsn(Opcodes.ALOAD, 2);
            methodVisitor.visitMethodInsn(Opcodes.INVOKEINTERFACE, META_CLASS_TYPE.getInternalName(), "setProperty", RETURN_VOID_FROM_OBJECT_STRING_OBJECT, true);
            methodVisitor.visitInsn(Opcodes.RETURN);
            methodVisitor.visitMaxs(4, 3);
            methodVisitor.visitEnd();
        }

        /** Generates {@code Object invokeMethod(String, Object)} delegating to the MetaClass. */
        private void addInvokeMethod() {
            MethodVisitor methodVisitor = cv.visitMethod(Opcodes.ACC_PUBLIC | Opcodes.ACC_SYNTHETIC, "invokeMethod", RETURN_OBJECT_FROM_STRING_OBJECT, null, null);
            methodVisitor.visitCode();
            // this.getMetaClass()
            methodVisitor.visitVarInsn(Opcodes.ALOAD, 0);
            methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, className, "getMetaClass", RETURN_META_CLASS, false);
            // invokeMethod(this, name, args)
            methodVisitor.visitVarInsn(Opcodes.ALOAD, 0);
            methodVisitor.visitVarInsn(Opcodes.ALOAD, 1);
            methodVisitor.visitVarInsn(Opcodes.ALOAD, 2);
            methodVisitor.visitMethodInsn(Opcodes.INVOKEINTERFACE, META_CLASS_TYPE.getInternalName(), "invokeMethod", RETURN_OBJECT_FROM_OBJECT_STRING_OBJECT, true);
            // return
            methodVisitor.visitInsn(Opcodes.ARETURN);
            methodVisitor.visitMaxs(4, 3);
            methodVisitor.visitEnd();
        }

        /**
         * Generates {@code MetaClass getMetaClass()}: returns the cached field if
         * set, otherwise looks the MetaClass up via GroovySystem's registry,
         * stores it in the field and returns it.
         */
        private void addGetMetaClass() {
            Label lookup = new Label();
            MethodVisitor methodVisitor = cv.visitMethod(Opcodes.ACC_PUBLIC | Opcodes.ACC_SYNTHETIC, "getMetaClass", RETURN_META_CLASS, null, null);
            methodVisitor.visitCode();
            // if (this.metaClass != null) { return this.metaClass; }
            methodVisitor.visitVarInsn(Opcodes.ALOAD, 0);
            methodVisitor.visitFieldInsn(Opcodes.GETFIELD, className, META_CLASS_FIELD, META_CLASS_TYPE.getDescriptor());
            methodVisitor.visitInsn(Opcodes.DUP);
            methodVisitor.visitJumpInsn(Opcodes.IFNULL, lookup);
            methodVisitor.visitInsn(Opcodes.ARETURN);
            methodVisitor.visitLabel(lookup);
            // Explicit stack-map frame at the branch target (ClassWriter(0) does
            // not compute frames for us).
            methodVisitor.visitFrame(Opcodes.F_NEW, 1, new Object[]{className}, 1, new Object[]{META_CLASS_TYPE.getInternalName()});
            methodVisitor.visitVarInsn(Opcodes.ALOAD, 0); // for storing to field
            // GroovySystem.getMetaClassRegistry()
            methodVisitor.visitMethodInsn(Opcodes.INVOKESTATIC, GROOVY_SYSTEM_TYPE.getInternalName(), "getMetaClassRegistry", RETURN_META_CLASS_REGISTRY, false);
            // this.getClass()
            methodVisitor.visitVarInsn(Opcodes.ALOAD, 0);
            methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, OBJECT_TYPE.getInternalName(), "getClass", RETURN_CLASS, false);
            // getMetaClass(..)
            methodVisitor.visitMethodInsn(Opcodes.INVOKEINTERFACE, META_CLASS_REGISTRY_TYPE.getInternalName(), "getMetaClass", RETURN_META_CLASS_FROM_CLASS, true);
            // this.metaClass = <value>
            methodVisitor.visitFieldInsn(Opcodes.PUTFIELD, className, META_CLASS_FIELD, META_CLASS_TYPE.getDescriptor());
            // return this.metaClass
            methodVisitor.visitVarInsn(Opcodes.ALOAD, 0);
            methodVisitor.visitFieldInsn(Opcodes.GETFIELD, className, META_CLASS_FIELD, META_CLASS_TYPE.getDescriptor());
            methodVisitor.visitInsn(Opcodes.ARETURN);
            methodVisitor.visitMaxs(4, 1);
            methodVisitor.visitEnd();
        }

        /** Generates {@code void setMetaClass(MetaClass)} storing into the cache field. */
        private void addSetMetaClass() {
            MethodVisitor methodVisitor = cv.visitMethod(Opcodes.ACC_PUBLIC | Opcodes.ACC_SYNTHETIC, "setMetaClass", Type.getMethodDescriptor(Type.VOID_TYPE, META_CLASS_TYPE), null, null);
            methodVisitor.visitCode();
            // this.metaClass = <value>
            methodVisitor.visitVarInsn(Opcodes.ALOAD, 0);
            methodVisitor.visitVarInsn(Opcodes.ALOAD, 1);
            methodVisitor.visitFieldInsn(Opcodes.PUTFIELD, className, META_CLASS_FIELD, META_CLASS_TYPE.getDescriptor());
            methodVisitor.visitInsn(Opcodes.RETURN);
            methodVisitor.visitMaxs(2, 2);
            methodVisitor.visitEnd();
        }

        /**
         * Generates a static getter for every public static final String constant
         * that did not already have a method of the same name; the getter simply
         * returns the constant value via LDC.
         */
        private void addStaticStringConstantGetters() {
            for (Map.Entry<String, String> constant : missingStaticStringConstantGetters.entrySet()) {
                MethodVisitor mv = cv.visitMethod(Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC | Opcodes.ACC_SYNTHETIC,
                        constant.getKey(),
                        Type.getMethodDescriptor(Type.getType(String.class)), null, null);
                mv.visitCode();
                mv.visitLdcInsn(constant.getValue());
                mv.visitInsn(Opcodes.ARETURN);
                mv.visitMaxs(1, 0);
                mv.visitEnd();
            }
        }

        /**
         * Generates a {@code getX()} getter for each private boolean field that has
         * an {@code isX()} getter but no {@code getX()} getter.
         */
        private void addBooleanGetGetters() {
            Collection<String> accessibleBooleanFieldsWithoutGetGetters = new HashSet<String>();
            accessibleBooleanFieldsWithoutGetGetters.addAll(booleanFields);
            accessibleBooleanFieldsWithoutGetGetters.retainAll(booleanIsGetters);
            accessibleBooleanFieldsWithoutGetGetters.removeAll(booleanGetGetters);
            for (String booleanField : accessibleBooleanFieldsWithoutGetGetters) {
                addBooleanGetGetter(booleanField);
            }
        }

        /** Generates {@code boolean get<Field>()} that reads the field directly. */
        private void addBooleanGetGetter(String booleanField) {
            MethodVisitor mv = cv.visitMethod(Opcodes.ACC_PUBLIC | Opcodes.ACC_SYNTHETIC, "get" + StringUtils.capitalize(booleanField), "()Z", null, null);
            mv.visitCode();
            Label l0 = new Label();
            mv.visitLabel(l0);
            mv.visitVarInsn(Opcodes.ALOAD, 0);
            mv.visitFieldInsn(Opcodes.GETFIELD, className, booleanField, "Z");
            mv.visitInsn(Opcodes.IRETURN);
            Label l1 = new Label();
            mv.visitLabel(l1);
            mv.visitLocalVariable("this", "L" + className + ";", null, l0, l1, 0);
            mv.visitMaxs(1, 1);
            mv.visitEnd();
        }
    }

    /** ClassLoader spec used to identify/recreate this loader in the loader hierarchy. */
    public static class Spec extends VisitableURLClassLoader.Spec {
        public Spec(String name, List<URL> classpath) {
            super(name, classpath);
        }

        @Override
        public String toString() {
            return "{legacy-mixin-class-loader name:" + super.getName() + ", classpath:" + getClasspath() + "}";
        }
    }
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.python.psi.impl;
import com.intellij.psi.PsiElement;
import com.intellij.psi.util.PsiTreeUtil;
import com.jetbrains.python.PyNames;
import com.jetbrains.python.PyTokenTypes;
import com.jetbrains.python.PythonFQDNNames;
import com.jetbrains.python.psi.*;
import com.jetbrains.python.psi.resolve.PyResolveContext;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
/**
 * TODO: Merge PythonDataflowUtil, {@link com.jetbrains.python.psi.impl.PyConstantExpressionEvaluator} and {@link com.jetbrains.python.psi.impl.PyEvaluator} and all its inheritors and improve Abstract Interpretation
 *
 * @author yole
 */
public class PyEvaluator {
  // Expressions already entered; guards against cyclic references recursing forever.
  private final Set<PyExpression> myVisited = new HashSet<PyExpression>();
  // Optional name -> value bindings; when set, unqualified references are looked up
  // here INSTEAD of being resolved through PSI (see evaluateReferenceExpression).
  private Map<String, Object> myNamespace;
  // When false, collection/dict values are kept as raw PyExpression nodes.
  private boolean myEvaluateCollectionItems = true;
  // When false, dict keys are kept as raw PyExpression nodes.
  private boolean myEvaluateKeys = true;

  public void setNamespace(Map<String, Object> namespace) {
    myNamespace = namespace;
  }

  public void setEvaluateCollectionItems(boolean evaluateCollectionItems) {
    myEvaluateCollectionItems = evaluateCollectionItems;
  }

  /**
   * @param evaluateKeys evaluate keys for dicts or not (i.e. you wanna see string or StringLiteralExpressions as keys)
   */
  public void setEvaluateKeys(final boolean evaluateKeys) {
    myEvaluateKeys = evaluateKeys;
  }

  /**
   * Evaluates an expression to a constant value where possible.
   *
   * @param expr expression to evaluate (may be null)
   * @return the value (String, Boolean, List, Map, ...) or null when the expression is
   *         null, currently being evaluated (cycle), or not reducible to a constant
   */
  @Nullable
  public Object evaluate(@Nullable PyExpression expr) {
    if (expr == null || myVisited.contains(expr)) {
      return null;
    }
    myVisited.add(expr);
    if (expr instanceof PyParenthesizedExpression) {
      return evaluate(((PyParenthesizedExpression)expr).getContainedExpression());
    }
    if (expr instanceof PySequenceExpression) {
      return evaluateSequenceExpression((PySequenceExpression)expr);
    }
    final Boolean booleanExpression = getBooleanExpression(expr);
    if (booleanExpression != null) { // support bool
      return booleanExpression;
    }
    if (expr instanceof PyCallExpression) {
      return evaluateCall((PyCallExpression)expr);
    }
    else if (expr instanceof PyReferenceExpression) {
      return evaluateReferenceExpression((PyReferenceExpression)expr);
    }
    else if (expr instanceof PyStringLiteralExpression) {
      return ((PyStringLiteralExpression)expr).getStringValue();
    }
    else if (expr instanceof PyBinaryExpression) {
      // Only '+' is handled: string and list concatenation (see concatenate()).
      PyBinaryExpression binaryExpr = (PyBinaryExpression)expr;
      PyElementType op = binaryExpr.getOperator();
      if (op == PyTokenTypes.PLUS) {
        Object lhs = evaluate(binaryExpr.getLeftExpression());
        Object rhs = evaluate(binaryExpr.getRightExpression());
        if (lhs != null && rhs != null) {
          return concatenate(lhs, rhs);
        }
      }
    }
    return null;
  }

  /**
   * TODO: Move to PyExpression? PyUtil?
   * True/False is bool literal in Py3K, but reference in Python2.
   *
   * @param expression expression to check
   * @return the boolean value, or null if the expression is not a boolean constant
   */
  @Nullable
  private static Boolean getBooleanExpression(@NotNull final PyExpression expression) {
    final boolean py3K = LanguageLevel.forElement(expression).isPy3K();
    if (py3K && expression instanceof PyBoolLiteralExpression) {
      // Py3: True/False are genuine literals.
      return ((PyBoolLiteralExpression)expression).getValue();
    }
    if (!py3K && expression instanceof PyReferenceExpression) {
      // Py2: True/False are plain references; match by referenced name.
      final String text = ((PyReferenceExpression)expression).getReferencedName();
      if (PyNames.TRUE.equals(text)) {
        return true;
      }
      if (PyNames.FALSE.equals(text)) {
        return false;
      }
    }
    return null;
  }

  /**
   * Evaluates some sequence (tuple, list)
   *
   * @param expr seq expression
   * @return evaluated seq: a Map for dict literals, a List otherwise
   */
  protected Object evaluateSequenceExpression(PySequenceExpression expr) {
    PyExpression[] elements = expr.getElements();
    if (expr instanceof PyDictLiteralExpression) {
      Map<Object, Object> result = new HashMap<Object, Object>();
      for (final PyKeyValueExpression keyValueExpression : ((PyDictLiteralExpression)expr).getElements()) {
        addRecordFromDict(result, keyValueExpression.getKey(), keyValueExpression.getValue());
      }
      return result;
    }
    else {
      List<Object> result = new ArrayList<Object>();
      for (PyExpression element : elements) {
        result.add(myEvaluateCollectionItems ? evaluate(element) : element);
      }
      return result;
    }
  }

  /**
   * Concatenates two already-evaluated values the way Python '+' does for the
   * supported types.
   *
   * @param lhs left operand
   * @param rhs right operand
   * @return concatenated String or List, or null when the types are unsupported
   */
  public Object concatenate(Object lhs, Object rhs) {
    if (lhs instanceof String && rhs instanceof String) {
      return (String)lhs + (String)rhs;
    }
    if (lhs instanceof List && rhs instanceof List) {
      // Fix: wildcard casts instead of raw 'List' casts (avoids unchecked warnings).
      List<Object> result = new ArrayList<Object>();
      result.addAll((List<?>)lhs);
      result.addAll((List<?>)rhs);
      return result;
    }
    return null;
  }

  /**
   * Evaluates an unqualified reference: first via the configured namespace (when
   * set, PSI resolution is skipped entirely), otherwise by resolving the
   * reference and evaluating its assigned value.
   */
  protected Object evaluateReferenceExpression(PyReferenceExpression expr) {
    if (!expr.isQualified()) {
      if (myNamespace != null) {
        return myNamespace.get(expr.getReferencedName());
      }
      PsiElement result = expr.getReference(PyResolveContext.noImplicits()).resolve();
      if (result instanceof PyTargetExpression) {
        result = ((PyTargetExpression)result).findAssignedValue();
      }
      if (result instanceof PyExpression) {
        return evaluate((PyExpression)result);
      }
    }
    return null;
  }

  /**
   * Evaluates the two supported call forms: {@code str.replace(a, b)} on an
   * evaluable string qualifier, and {@code dict(...)} built from 2-tuples.
   */
  @Nullable
  protected Object evaluateCall(PyCallExpression call) {
    final PyExpression[] args = call.getArguments();
    if (call.isCalleeText(PyNames.REPLACE) && args.length == 2) {
      final PyExpression callee = call.getCallee();
      if (!(callee instanceof PyQualifiedExpression)) return null;
      final PyExpression qualifier = ((PyQualifiedExpression)callee).getQualifier();
      Object result = evaluate(qualifier);
      if (result instanceof String) {
        Object arg1 = evaluate(args[0]);
        Object arg2 = evaluate(args[1]);
        if (arg1 instanceof String && arg2 instanceof String) {
          return ((String)result).replace((String)arg1, (String)arg2);
        }
      }
    }
    // Support dict([("k", "v")]) syntax (fixed comment: tuples use commas, not colons)
    if (call.isCallee(PythonFQDNNames.DICT_CLASS)) {
      final Collection<PyTupleExpression> tuples = PsiTreeUtil.findChildrenOfType(call, PyTupleExpression.class);
      if (!tuples.isEmpty()) {
        final Map<Object, Object> result = new HashMap<Object, Object>();
        for (final PyTupleExpression tuple : tuples) {
          final PsiElement[] tupleElements = tuple.getChildren();
          if (tupleElements.length != 2) {
            return null;
          }
          final PyExpression key = PyUtil.as(tupleElements[0], PyExpression.class);
          final PyExpression value = PyUtil.as(tupleElements[1], PyExpression.class);
          if (key != null) {
            addRecordFromDict(result, key, value);
          }
        }
        return result;
      }
    }
    return null;
  }

  /**
   * Adds record for map when working with dict
   *
   * @param result map to return to user
   * @param key    dict key
   * @param value  dict value
   */
  private void addRecordFromDict(@NotNull final Map<Object, Object> result,
                                 @NotNull final PyExpression key,
                                 @Nullable final PyExpression value) {
    result.put(myEvaluateKeys ? evaluate(key) : key, myEvaluateCollectionItems ? evaluate(value) : value);
  }

  /**
   * Shortcut that evaluates expression with default params and casts it to particular type (if possible)
   *
   * @param expression exp to evaluate
   * @param resultType expected type
   * @param <T>        expected type
   * @return value if expression is evaluated to this type, null otherwise
   */
  @Nullable
  public static <T> T evaluate(@Nullable final PyExpression expression, @NotNull final Class<T> resultType) {
    return PyUtil.as(new PyEvaluator().evaluate(expression), resultType);
  }
}
| |
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.services.resources.admin;
import org.jboss.resteasy.annotations.cache.NoCache;
import org.jboss.resteasy.spi.NotFoundException;
import org.jboss.resteasy.spi.ResteasyProviderFactory;
import org.keycloak.common.ClientConnection;
import org.keycloak.models.AdminRoles;
import org.keycloak.models.ClientModel;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.ModelDuplicateException;
import org.keycloak.models.RealmModel;
import org.keycloak.models.RoleModel;
import org.keycloak.models.utils.ModelToRepresentation;
import org.keycloak.protocol.oidc.TokenManager;
import org.keycloak.representations.idm.RealmRepresentation;
import org.keycloak.services.ForbiddenException;
import org.keycloak.services.managers.RealmManager;
import org.keycloak.services.resources.KeycloakApplication;
import org.keycloak.services.ErrorResponse;
import org.keycloak.services.ServicesLogger;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.CacheControl;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
/**
 * Top level resource for Admin REST API
 *
 * @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
 * @version $Revision: 1 $
 */
public class RealmsAdminResource {
    protected static final ServicesLogger logger = ServicesLogger.ROOT_LOGGER;

    // Authenticated admin performing the request; used for every permission check below.
    protected AdminAuth auth;
    protected TokenManager tokenManager;

    // Injected by the JAX-RS runtime.
    @Context
    protected KeycloakSession session;

    @Context
    protected KeycloakApplication keycloak;

    @Context
    protected ClientConnection clientConnection;

    public RealmsAdminResource(AdminAuth auth, TokenManager tokenManager) {
        this.auth = auth;
        this.tokenManager = tokenManager;
    }

    // Shared Cache-Control value that disables response caching.
    public static final CacheControl noCache = new CacheControl();
    static {
        noCache.setNoCache(true);
    }
/**
 * Get accessible realms
 *
 * Returns a list of accessible realms. The list is filtered based on what realms the caller is allowed to view.
 *
 * @return realm representations visible to the caller
 */
@GET
@NoCache
@Produces(MediaType.APPLICATION_JSON)
public List<RealmRepresentation> getRealms() {
    RealmManager realmManager = new RealmManager(session);
    List<RealmRepresentation> result = new ArrayList<RealmRepresentation>();
    if (auth.getRealm().equals(realmManager.getKeycloakAdminstrationRealm())) {
        // Caller is in the master admin realm: consider every realm.
        for (RealmModel realm : session.realms().getRealms()) {
            addRealmRep(result, realm, realm.getMasterAdminClient());
        }
    } else {
        // Otherwise only the caller's own realm is considered, via its realm-admin client.
        ClientModel adminApp = auth.getRealm().getClientByClientId(realmManager.getRealmAdminClientId(auth.getRealm()));
        addRealmRep(result, auth.getRealm(), adminApp);
    }
    logger.debug("getRealms()");
    return result;
}
/**
 * Appends a representation of {@code realm} to {@code reps} according to the
 * caller's roles on the realm-management client: full representation for
 * MANAGE_REALM, a name-only stub for any other realm role, nothing otherwise.
 */
protected void addRealmRep(List<RealmRepresentation> reps, RealmModel realm, ClientModel realmManagementClient) {
    if (auth.hasAppRole(realmManagementClient, AdminRoles.MANAGE_REALM)) {
        reps.add(ModelToRepresentation.toRepresentation(realm, false));
        return;
    }
    if (auth.hasOneOfAppRole(realmManagementClient, AdminRoles.ALL_REALM_ROLES)) {
        RealmRepresentation stub = new RealmRepresentation();
        stub.setRealm(realm.getName());
        reps.add(stub);
    }
}
/**
 * Import a realm
 *
 * Imports a realm from a full representation of that realm. Realm name must be unique.
 *
 * @param uriInfo request URI info, used to build the Location header
 * @param rep JSON representation of the realm
 * @return 201 Created with the new realm's URI, or an error response on duplicate name
 */
@POST
@Consumes(MediaType.APPLICATION_JSON)
public Response importRealm(@Context final UriInfo uriInfo, final RealmRepresentation rep) {
    RealmManager realmManager = new RealmManager(session);
    realmManager.setContextPath(keycloak.getContextPath());
    // Only admins of the master realm holding CREATE_REALM may import realms.
    if (!auth.getRealm().equals(realmManager.getKeycloakAdminstrationRealm())
            || !auth.hasRealmRole(AdminRoles.CREATE_REALM)) {
        throw new ForbiddenException();
    }
    logger.debugv("importRealm: {0}", rep.getRealm());
    try {
        RealmModel created = realmManager.importRealm(rep);
        grantPermissionsToRealmCreator(created);
        URI location = AdminRoot.realmsUrl(uriInfo).path(created.getName()).build();
        logger.debugv("imported realm success, sending back: {0}", location.toString());
        return Response.created(location).build();
    } catch (ModelDuplicateException e) {
        return ErrorResponse.exists("Realm " + rep.getRealm() + " already exists");
    }
}
private void grantPermissionsToRealmCreator(RealmModel realm) {
if (auth.hasRealmRole(AdminRoles.ADMIN)) {
return;
}
RealmModel adminRealm = new RealmManager(session).getKeycloakAdminstrationRealm();
ClientModel realmAdminApp = realm.getMasterAdminClient();
for (String r : AdminRoles.ALL_REALM_ROLES) {
RoleModel role = realmAdminApp.getRole(r);
auth.getUser().grantRole(role);
}
}
/**
* Base path for the admin REST API for one particular realm.
*
* @param headers
* @param name realm name (not id!)
* @return
*/
@Path("{realm}")
public RealmAdminResource getRealmAdmin(@Context final HttpHeaders headers,
@PathParam("realm") final String name) {
RealmManager realmManager = new RealmManager(session);
RealmModel realm = realmManager.getRealmByName(name);
if (realm == null) throw new NotFoundException("Realm not found.");
if (!auth.getRealm().equals(realmManager.getKeycloakAdminstrationRealm())
&& !auth.getRealm().equals(realm)) {
throw new ForbiddenException();
}
RealmAuth realmAuth;
if (auth.getRealm().equals(realmManager.getKeycloakAdminstrationRealm())) {
realmAuth = new RealmAuth(auth, realm.getMasterAdminClient());
} else {
realmAuth = new RealmAuth(auth, realm.getClientByClientId(realmManager.getRealmAdminClientId(auth.getRealm())));
}
AdminEventBuilder adminEvent = new AdminEventBuilder(realm, auth, session, clientConnection);
session.getContext().setRealm(realm);
RealmAdminResource adminResource = new RealmAdminResource(realmAuth, realm, tokenManager, adminEvent);
ResteasyProviderFactory.getInstance().injectProperties(adminResource);
//resourceContext.initResource(adminResource);
return adminResource;
}
}
| |
/*
* #%L
* This file is part of a universal JAAS library, providing common functionality
* for a username / password style JAAS module.
* %%
* Copyright (C) 2014 - 2016 Michael Beiter <michael@beiter.org>
* %%
* All rights reserved.
* .
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the copyright holder nor the names of the
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
* .
* .
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package org.beiter.michael.authn.jaas.loginmodules.password.common;
import org.apache.commons.lang3.Validate;
import org.beiter.michael.array.Cleanser;
import org.beiter.michael.authn.jaas.common.CommonProperties;
import org.beiter.michael.authn.jaas.common.Events;
import org.beiter.michael.authn.jaas.common.FactoryException;
import org.beiter.michael.authn.jaas.common.Util;
import org.beiter.michael.authn.jaas.common.audit.Audit;
import org.beiter.michael.authn.jaas.common.audit.AuditFactory;
import org.beiter.michael.authn.jaas.common.audit.AuditHelper;
import org.beiter.michael.authn.jaas.common.messageq.MessageHelper;
import org.beiter.michael.authn.jaas.common.messageq.MessageQFactory;
import org.beiter.michael.authn.jaas.common.messageq.MessageQ;
import org.beiter.michael.authn.jaas.common.propsbuilder.JaasBasedCommonPropsBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.security.auth.Subject;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.callback.NameCallback;
import javax.security.auth.callback.PasswordCallback;
import javax.security.auth.callback.TextInputCallback;
import javax.security.auth.callback.UnsupportedCallbackException;
import javax.security.auth.login.LoginException;
import javax.security.auth.spi.LoginModule;
import java.security.Principal;
import java.util.Map;
/**
* This class implements a JAAS login module for username / password based authentication.
*/
// The JAAS workflow has an inherently high level of complexity - ignoring these warnings, as we cannot fix JAAS
// CHECKSTYLE:OFF
@SuppressWarnings({"PMD.CyclomaticComplexity", "PMD.StdCyclomaticComplexity", "PMD.ModifiedCyclomaticComplexity"})
// CHECKSTYLE:ON
public class PasswordLoginModule
        implements LoginModule {

    /**
     * The logger object for this class
     */
    private static final Logger LOG = LoggerFactory.getLogger(PasswordLoginModule.class);

    /**
     * The JAAS pSubject, which is part of the initial state and one of the provided arguments when the module is called
     */
    private Subject pSubject;

    /**
     * The JAAS callback handler, which is part of the initial state and one of the provided arguments when the module
     * is called
     */
    private CallbackHandler pCallbackHandler;

    /**
     * The username is provided during the login process, and we store a copy here in case of cascaded invocation
     */
    private String username;

    /**
     * The password is provided during the login process, and we store a copy here in case of cascaded invocation
     */
    private char[] password;

    /**
     * The white label domain is provided during the login process, and we store a copy here in case of cascaded
     * invocation
     */
    private String domain;

    /**
     * The uncommitted principals (this is != null once the login succeeded)
     */
    private Subject pendingSubject;

    /**
     * The committed principals (this is != null once the commit succeeded)
     */
    private Subject committedSubject;

    /**
     * The audit object is initialized based on the JAAS module configuration
     */
    private Audit audit;

    /**
     * The messageQ object is initialized based on the JAAS module configuration
     */
    private MessageQ messageQ;

    /**
     * The pwValidator object is initialized based on the JAAS module configuration
     */
    private PasswordValidator pwValidator;

    /**
     * The pwAuthenticator object is initialized based on the JAAS module configuration
     */
    private PasswordAuthenticator pwAuthenticator;

    /**
     * {@inheritDoc}
     *
     * @throws NullPointerException When the {@code subject}, {@code callbackHandler}, {@code sharedState}, or
     *                              {@code options} are {@code null}
     */
    @Override
    public final void initialize(final Subject subject, final CallbackHandler callbackHandler,
                                 final Map<String, ?> sharedState, final Map<String, ?> options) {

        LOG.debug("Initializing");

        Validate.notNull(subject, "The validated object 'subject' is null");
        Validate.notNull(callbackHandler, "The validated object 'callbackHandler' is null");
        Validate.notNull(sharedState, "The validated object 'sharedState' is null");
        Validate.notNull(options, "The validated object 'options' is null");

        // keep a reference to the originally provided arguments (no defensive copy)
        this.pSubject = subject;
        this.pCallbackHandler = callbackHandler;

        // It would be nice to parse the configuration only once, and store it for later use. However, we are
        // deliberately NOT caching the parsed configuration, as JAAS does not offer a standard way to to reset the
        // cached variable, and allow users of the login module to reset the parsed config in case an app does need to
        // re-read its configuration.
        final CommonProperties commonProps = JaasBasedCommonPropsBuilder.build(options);

        // initialize the audit object
        initAudit(commonProps);

        // initialize the message object
        initMessageQueue(commonProps);

        // initialize the validator object
        initPwValidator(commonProps);

        // initialize the authenticator object
        initPwAuthenticator(commonProps);

        LOG.info("Initialization complete");
    }

    /**
     * {@inheritDoc}
     *
     * @return {@code true} if authentication succeeds
     * @throws LoginException if this {@code LoginModule} is unable to perform the authentication. Catch a
     *                        {@link javax.security.auth.login.FailedLoginException} to determine if the authentication
     *                        failed (wrong username or password)
     */
    @Override
    // Check is broken [LOG.info()]: PMD reports issues although log stmt is guarded. @todo revisit when upgrading PMD.
    @SuppressWarnings("PMD.GuardLogStatementJavaUtil")
    public final boolean login()
            throws LoginException {

        LOG.debug("Attempting login");

        if (pCallbackHandler == null) {
            final String error = "No CallbackHandler available to garner authentication information from the user";
            LOG.error(error);
            throw new LoginException(error);
        }

        Callback[] callbacks = new Callback[3];
        callbacks[0] = new TextInputCallback("j_domain");
        callbacks[1] = new NameCallback("j_username");
        callbacks[2] = new PasswordCallback("j_password", false);

        try {
            pCallbackHandler.handle(callbacks);

            // store the domain
            domain = ((TextInputCallback) callbacks[0]).getText();

            // store the username
            username = ((NameCallback) callbacks[1]).getName();

            // store the password (i.e. a copy of the password)
            final char[] tempPassword = ((PasswordCallback) callbacks[2]).getPassword();

            // Fixed: PasswordCallback.getPassword() may return null (e.g. the handler never set a
            // password). Previously this caused a NullPointerException on clone(); fail with a
            // proper LoginException instead, after clearing the module state.
            if (tempPassword == null) {
                cleanState();
                final String error = "No password was provided during login";
                LOG.warn(error);
                throw new LoginException(error);
            }
            password = tempPassword.clone();

            // clear the password in the callback
            ((PasswordCallback) callbacks[2]).clearPassword();
        } catch (java.io.IOException e) {
            cleanState();
            final String error = "Encountered an I/O exception during login";
            LOG.warn(error, e);
            throw Util.newLoginException(error, e);
        } catch (UnsupportedCallbackException e) {
            cleanState();
            final String error =
                    e.getCallback().toString() + " not available to garner authentication information from the user";
            LOG.warn(error, e);
            throw Util.newLoginException(error, e);
        }

        LOG.debug("Attempting login - discovered user '" + username + "@" + domain + "'");

        // Using a try/catch construct for managing control flows is really a bad idea.
        // Unfortunately, this is how JAAS works :-(
        try {
            // authenticate, and update state and pending subject if successful
            pendingSubject = pwAuthenticator.authenticate(domain, username, password, pwValidator);

            // then clear the password
            Cleanser.wipe(password);

            final String baseError = new StringBuilder().
                    append("Login successful for '").
                    append(username).
                    append("@").
                    append(domain).
                    toString();
            AuditHelper.auditEvent(audit, domain, username, Events.AUTHN_ATTEMPT,
                    baseError + "', but cannot audit login attempt, and hence fail the operation");
            MessageHelper.postMessage(messageQ, domain, username, Events.AUTHN_ATTEMPT,
                    baseError + "', but cannot post MQ login attempt event, and hence fail the operation");

            // string concatenation is only executed if log level is actually enabled
            if (LOG.isInfoEnabled()) {
                LOG.info("Login complete for '" + username + "@" + domain + "'");
            }

            return true;
        } catch (LoginException e) {
            // the login failed

            // cache the username and domain, for they will be purged by "cleanState()"
            final String tempUsername = username;
            final String tempDomain = domain;

            cleanState();

            final String baseError = new StringBuilder().
                    append("Login failed for '").
                    append(tempUsername).
                    append("@").
                    append(tempDomain).
                    toString();
            AuditHelper.auditEvent(audit, tempDomain, tempUsername, Events.AUTHN_FAILURE,
                    baseError + "', but cannot audit login attempt");
            MessageHelper.postMessage(messageQ, tempDomain, tempUsername, Events.AUTHN_FAILURE,
                    baseError + "', but cannot post MQ login attempt event");

            final String error = "Login failed for '" + tempUsername + "@" + tempDomain + "'";
            LOG.info(error, e);
            throw e;
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    // Check is broken [LOG.info()]: PMD reports issues although log stmt is guarded. @todo revisit when upgrading PMD.
    @SuppressWarnings("PMD.GuardLogStatementJavaUtil")
    public final boolean commit()
            throws LoginException {

        LOG.debug("Committing authentication");

        if (pendingSubject == null) {
            // the login method of this module has failed earlier, hence we do not need to clean up anything
            // return 'false' to indicate that this module's login and/or commit method failed
            // As the login method failed, the state of the module has already been cleared and we do not know
            // the username / domain anymore. Hence no auditing / message queue notification, and not verbose logging.
            LOG.debug("Not committing authentication, as the authentication has failed earlier (login method)");
            return false;
        } else {
            // The login has succeeded!
            // Store the principals from the pending subject both in the 'subject' object (because this is what JAAS
            // will use later on), but also create a new 'committedSubject' object that this module uses to a) keep
            // state and b) being able to remove the principals later
            LOG.debug("Committing authentication: '" + username + "@" + domain + "'");

            if (committedSubject == null) {
                committedSubject = new Subject();
            } else {
                // A non-null committedSubject at this point means commit() ran twice on the same
                // instance, which is a contract violation: clean up, audit, and fail hard.

                // cache the username and domain, for they will be purged by "cleanState()"
                final String tempUsername = username;
                final String tempDomain = domain;

                cleanState();

                final String baseError = new StringBuilder().
                        append("Login post-processing failed for '").
                        append(tempUsername).
                        append("@").
                        append(tempDomain).
                        toString();
                AuditHelper.auditEvent(audit, tempDomain, tempUsername, Events.AUTHN_ERROR,
                        baseError + "', but cannot audit login attempt");
                MessageHelper.postMessage(messageQ, tempDomain, tempUsername, Events.AUTHN_ERROR,
                        baseError + "', but cannot post MQ login attempt event");

                final String error = "Expected the committed subject to be 'null' (yes, really <null>), but this was "
                        + "not the case! Has the commit method been called multiple times on the same object instance?";
                LOG.warn(error);
                throw new LoginException(error);
            }

            for (final Principal p : pendingSubject.getPrincipals()) {
                // 1. Add the principals to the 'subject' object
                if (!pSubject.getPrincipals().contains(p)) {
                    LOG.debug("Added principal " + p.getName() + " to subject");
                    pSubject.getPrincipals().add(p);
                }

                // 2. Add the principals to the 'committedSubject' object
                if (!committedSubject.getPrincipals().contains(p)) {
                    LOG.debug("Added principal " + p.getName() + " to committed subject");
                    committedSubject.getPrincipals().add(p);
                }
            }

            final String baseError = new StringBuilder().
                    append("Login succeeded for '").
                    append(username).
                    append("@").
                    append(domain).
                    toString();
            AuditHelper.auditEvent(audit, domain, username, Events.AUTHN_SUCCESS,
                    baseError + "', but cannot audit login success, and hence fail the operation");
            MessageHelper.postMessage(messageQ, domain, username, Events.AUTHN_SUCCESS,
                    baseError + "', but cannot post MQ login success event, and hence fail the operation");

            // string concatenation is only executed if log level is actually enabled
            if (LOG.isInfoEnabled()) {
                LOG.info("Authentication committed for '" + username + "@" + domain + "'");
            }

            // do not clean the state here, as we may still need it in case of an abort()
            return true;
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    // Check is broken [LOG.info()]: PMD reports issues although log stmt is guarded. @todo revisit when upgrading PMD.
    @SuppressWarnings("PMD.GuardLogStatementJavaUtil")
    public final boolean abort()
            throws LoginException {

        if (pendingSubject == null) {
            // the login method of this module has failed earlier, hence we do not need to clean up anything
            // return 'false' to indicate that this module's login and/or commit method failed
            // As the login method failed, the state of the module has already been cleared and we do not know
            // the username / domain anymore. Hence no auditing / message queue notification, and not verbose logging.
            LOG.debug("Aborting authentication, as the authentication has failed earlier (login method)");
            return false;
        } else if (committedSubject == null) {
            // the login method of this module succeeded, but the overall authentication failed

            // string concatenation is only executed if log level is actually enabled
            if (LOG.isDebugEnabled()) {
                LOG.debug("Aborting authentication: '" + username + "@" + domain + "'");
            }

            // cache the username and domain, for they will be purged by "cleanState()"
            final String tempUsername = username;
            final String tempDomain = domain;

            cleanState();

            final String baseError = new StringBuilder().
                    append("Login post-processing failed after abort for '").
                    append(tempUsername).
                    append("@").
                    append(tempDomain).
                    toString();
            AuditHelper.auditEvent(audit, tempDomain, tempUsername, Events.AUTHN_ABORT_COMMIT,
                    baseError + "', but cannot audit login attempt");
            MessageHelper.postMessage(messageQ, tempDomain, tempUsername, Events.AUTHN_ABORT_COMMIT,
                    baseError + "', but cannot post MQ login attempt event");

            // string concatenation is only executed if log level is actually enabled
            if (LOG.isInfoEnabled()) {
                LOG.info("Authentication aborted for '" + tempUsername + "@" + tempDomain + "'");
            }

            return true;
        } else {
            // overall authentication succeeded and commit succeeded, but someone else's commit failed

            final String baseError = new StringBuilder().
                    append("Login post-processing failed after abort for '").
                    append(username).
                    append("@").
                    append(domain).
                    toString();
            AuditHelper.auditEvent(audit, domain, username, Events.AUTHN_ABORT_CHAIN,
                    baseError + "', but cannot audit login attempt");
            MessageHelper.postMessage(messageQ, domain, username, Events.AUTHN_ABORT_CHAIN,
                    baseError + "', but cannot post MQ login attempt event");

            // cache the username and domain, for they will be purged by "logout()"
            final String tempUsername = username;
            final String tempDomain = domain;

            logout();

            // string concatenation is only executed if log level is actually enabled
            if (LOG.isInfoEnabled()) {
                LOG.info("Authentication aborted for '" + tempUsername + "@" + tempDomain + "'");
            }

            return true;
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    // Check is broken [LOG.info()]: PMD reports issues although log stmt is guarded. @todo revisit when upgrading PMD.
    @SuppressWarnings("PMD.GuardLogStatementJavaUtil")
    public final boolean logout()
            throws LoginException {

        final StringBuilder principals = new StringBuilder(":");

        // remove all the principals that we added in the commit() method from the 'subject' object
        // (that's why we stored our principals in the 'committedSubject' object...)
        if (committedSubject != null && committedSubject.getPrincipals() != null) {

            final StringBuilder stringBuilder = new StringBuilder();
            for (final Principal p : committedSubject.getPrincipals()) {
                pSubject.getPrincipals().remove(p);

                // string concatenation is only executed if log level is actually enabled
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Logging out subject: '" + p.getName() + "'");
                }
                principals.append(p.getName()).append(':');

                stringBuilder.delete(0, stringBuilder.length());
                final String baseError = stringBuilder.
                        append("Logout successful for '").
                        append(username).
                        append("@").
                        append(domain).
                        toString();
                AuditHelper.auditEvent(audit, domain, username, Events.AUTHN_LOGOUT,
                        baseError + "', but cannot audit logout attempt");
                MessageHelper.postMessage(messageQ, domain, username, Events.AUTHN_LOGOUT,
                        baseError + "', but cannot post MQ logout attempt event");
            }
        }

        cleanState();

        // string concatenation is only executed if log level is actually enabled
        if (LOG.isInfoEnabled()) {
            LOG.info("Principals logged out: '" + principals + "'");
        }

        return true;
    }

    /**
     * Clean up any state associated with the current login attempt.
     */
    @SuppressWarnings("PMD.NullAssignment")
    private void cleanState() {

        // null-assignments for de-referencing objects are okay
        domain = null;
        username = null;
        Cleanser.wipe(password);
        pendingSubject = null;
        committedSubject = null;
    }

    /**
     * Initialize the instance-global audit object
     *
     * @param commonProps The parsed JAAS configuration, plus additional raw values
     */
    @SuppressWarnings("PMD.ConfusingTernary")
    private void initAudit(final CommonProperties commonProps) {

        try {
            final String auditClassName = commonProps.getAuditClassName();

            // this would be harder to read when following PMD's advice - ignoring the PMD warning
            if (!commonProps.isAuditEnabled()) {
                final String error = "Auditing has been disabled in the JAAS configuration";
                LOG.info(error);
            } else if (auditClassName == null) {
                final String error =
                        "Auditing has been enabled in the JAAS configuration, but no audit class has been configured";
                LOG.error(error);
                throw new IllegalStateException(error);
            } else {
                if (commonProps.isAuditSingleton()) {
                    LOG.debug("Requesting singleton audit class instance of '" + auditClassName
                            + "' from the audit factory");
                    this.audit = AuditFactory.getSingleton(auditClassName, commonProps);
                } else {
                    LOG.debug("Requesting non-singleton audit class instance of '" + auditClassName
                            + "' from the audit factory");
                    this.audit = AuditFactory.getInstance(auditClassName, commonProps);
                }
            }
        } catch (FactoryException e) {
            final String error = "The audit class cannot be instantiated. This is most likely a configuration"
                    + " problem. Is the configured class available in the classpath?";
            LOG.error(error, e);
            throw new IllegalStateException(error, e);
        }
    }

    /**
     * Initialize the instance-global message queue object
     *
     * @param commonProps The parsed JAAS configuration, plus additional raw values
     */
    @SuppressWarnings("PMD.ConfusingTernary")
    private void initMessageQueue(final CommonProperties commonProps) {

        try {
            final String messageClassName = commonProps.getMessageQueueClassName();

            // this would be harder to read when following PMD's advice - ignoring the PMD warning
            if (!commonProps.isMessageQueueEnabled()) {
                final String error = "Message queue has been disabled in the JAAS configuration";
                LOG.info(error);
            } else if (messageClassName == null) {
                final String error = "Message queue has been enabled in the JAAS configuration, "
                        + "but no message queue class has been configured";
                LOG.error(error);
                throw new IllegalStateException(error);
            } else {
                if (commonProps.isMessageQueueSingleton()) {
                    LOG.debug("Requesting singleton message class instance of '" + messageClassName
                            + "' from the message factory");
                    this.messageQ = MessageQFactory.getSingleton(messageClassName, commonProps);
                } else {
                    LOG.debug("Requesting non-singleton message class instance of '" + messageClassName
                            + "' from the message factory");
                    this.messageQ = MessageQFactory.getInstance(messageClassName, commonProps);
                }
            }
        } catch (FactoryException e) {
            final String error = "The message class cannot be instantiated. This is most likely a configuration"
                    + " problem. Is the configured class available in the classpath?";
            LOG.error(error, e);
            throw new IllegalStateException(error, e);
        }
    }

    /**
     * Initialize the instance-global password validator object
     *
     * @param commonProps The parsed JAAS configuration, plus additional raw values
     */
    private void initPwValidator(final CommonProperties commonProps) {

        try {
            final String validatorClass = commonProps.getPasswordValidatorClassName();

            if (validatorClass == null) {
                final String error = "No password validator class has been configured in the JAAS configuration";
                LOG.error(error);
                throw new IllegalStateException(error);
            } else {
                if (commonProps.isPasswordValidatorSingleton()) {
                    // Fortify will report a violation here because of disclosure of potentially confidential
                    // information. However, the class name is not confidential, which makes this a non-issue / false
                    // positive.
                    LOG.debug("Requesting singleton validator class instance of '" + validatorClass
                            + "' from the validator factory");
                    this.pwValidator = PasswordValidatorFactory.getSingleton(validatorClass, commonProps);
                } else {
                    // Fortify will report a violation here because of disclosure of potentially confidential
                    // information. However, the class name is not confidential, which makes this a non-issue / false
                    // positive.
                    LOG.debug("Requesting non-singleton validator class instance of '" + validatorClass
                            + "' from the validator factory");
                    this.pwValidator = PasswordValidatorFactory.getInstance(validatorClass, commonProps);
                }
            }
        } catch (FactoryException e) {
            final String error = "The validator class cannot be instantiated. This is most likely a configuration"
                    + " problem. Is the configured class available in the classpath?";
            LOG.error(error, e);
            throw new IllegalStateException(error, e);
        }
    }

    /**
     * Initialize the instance-global password authenticator object
     *
     * @param commonProps The parsed JAAS configuration, plus additional raw values
     */
    private void initPwAuthenticator(final CommonProperties commonProps) {

        try {
            final String authNticatorClass = commonProps.getPasswordAuthenticatorClassName();

            if (authNticatorClass == null) {
                final String error = "No password authenticator class has been configured in the JAAS configuration";
                LOG.error(error);
                throw new IllegalStateException(error);
            } else {
                if (commonProps.isPasswordAuthenticatorSingleton()) {
                    // Fortify will report a violation here because of disclosure of potentially confidential
                    // information. However, the class name is not confidential, which makes this a non-issue / false
                    // positive.
                    LOG.debug("Requesting singleton authenticator class instance of '" + authNticatorClass
                            + "' from the authenticator factory");
                    this.pwAuthenticator = PasswordAuthenticatorFactory.getSingleton(authNticatorClass, commonProps);
                } else {
                    // Fortify will report a violation here because of disclosure of potentially confidential
                    // information. However, the class name is not confidential, which makes this a non-issue / false
                    // positive.
                    LOG.debug("Requesting non-singleton authenticator class instance of '" + authNticatorClass
                            + "' from the authenticator factory");
                    this.pwAuthenticator = PasswordAuthenticatorFactory.getInstance(authNticatorClass, commonProps);
                }
            }
        } catch (FactoryException e) {
            // Fixed: the error message previously said "validator class" (copy-paste from
            // initPwValidator); it now correctly names the authenticator class.
            final String error = "The authenticator class cannot be instantiated. This is most likely a configuration"
                    + " problem. Is the configured class available in the classpath?";
            LOG.error(error, e);
            throw new IllegalStateException(error, e);
        }
    }
}
| |
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.forms.login.freemarker;
import org.jboss.logging.Logger;
import org.keycloak.OAuth2Constants;
import org.keycloak.authentication.AuthenticationFlowContext;
import org.keycloak.authentication.authenticators.browser.OTPFormAuthenticator;
import org.keycloak.authentication.requiredactions.util.UpdateProfileContext;
import org.keycloak.authentication.requiredactions.util.UserUpdateProfileContext;
import org.keycloak.broker.provider.BrokeredIdentityContext;
import org.keycloak.common.util.ObjectUtil;
import org.keycloak.forms.login.LoginFormsPages;
import org.keycloak.forms.login.LoginFormsProvider;
import org.keycloak.forms.login.freemarker.model.AuthenticationContextBean;
import org.keycloak.forms.login.freemarker.model.ClientBean;
import org.keycloak.forms.login.freemarker.model.CodeBean;
import org.keycloak.forms.login.freemarker.model.IdentityProviderBean;
import org.keycloak.forms.login.freemarker.model.LoginBean;
import org.keycloak.forms.login.freemarker.model.OAuthGrantBean;
import org.keycloak.forms.login.freemarker.model.ProfileBean;
import org.keycloak.forms.login.freemarker.model.RealmBean;
import org.keycloak.forms.login.freemarker.model.RegisterBean;
import org.keycloak.forms.login.freemarker.model.RequiredActionUrlFormatterMethod;
import org.keycloak.forms.login.freemarker.model.SAMLPostFormBean;
import org.keycloak.forms.login.freemarker.model.TotpBean;
import org.keycloak.forms.login.freemarker.model.TotpLoginBean;
import org.keycloak.forms.login.freemarker.model.UrlBean;
import org.keycloak.forms.login.freemarker.model.X509ConfirmBean;
import org.keycloak.models.ClientModel;
import org.keycloak.models.ClientScopeModel;
import org.keycloak.models.Constants;
import org.keycloak.models.IdentityProviderModel;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.RealmModel;
import org.keycloak.models.UserModel;
import org.keycloak.models.utils.FormMessage;
import org.keycloak.services.Urls;
import org.keycloak.services.messages.Messages;
import org.keycloak.services.resources.LoginActionsService;
import org.keycloak.sessions.AuthenticationSessionModel;
import org.keycloak.theme.FreeMarkerException;
import org.keycloak.theme.FreeMarkerUtil;
import org.keycloak.theme.Theme;
import org.keycloak.theme.beans.AdvancedMessageFormatterMethod;
import org.keycloak.theme.beans.LocaleBean;
import org.keycloak.theme.beans.MessageBean;
import org.keycloak.theme.beans.MessageFormatterMethod;
import org.keycloak.theme.beans.MessageType;
import org.keycloak.theme.beans.MessagesPerFieldBean;
import org.keycloak.utils.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriBuilder;
import javax.ws.rs.core.UriInfo;
import java.io.IOException;
import java.net.URI;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;
import static org.keycloak.models.UserModel.RequiredAction.UPDATE_PASSWORD;
/**
* @author <a href="mailto:sthorger@redhat.com">Stian Thorgersen</a>
*/
/**
 * FreeMarker-based implementation of {@link LoginFormsProvider}: collects template
 * attributes (realm, client, user, messages, URLs, ...) into {@link #attributes} and
 * renders the matching login-theme FreeMarker template into a JAX-RS {@link Response}.
 *
 * <p>Instances are request-scoped builders: the fluent {@code setXxx} methods populate
 * state and the {@code createXxx} methods render a page from that state.
 *
 * @author <a href="mailto:sthorger@redhat.com">Stian Thorgersen</a>
 */
public class FreeMarkerLoginFormsProvider implements LoginFormsProvider {

    private static final Logger logger = Logger.getLogger(FreeMarkerLoginFormsProvider.class);

    protected String accessCode;
    protected Response.Status status;
    protected javax.ws.rs.core.MediaType contentType;
    protected List<ClientScopeModel> clientScopesRequested;
    protected Map<String, String> httpResponseHeaders = new HashMap<>();
    protected URI actionUri;
    protected String execution;

    protected AuthenticationFlowContext context;

    // Messages shown on the rendered page; null means "no message set yet".
    protected List<FormMessage> messages = null;
    protected MessageType messageType = MessageType.ERROR;

    protected MultivaluedMap<String, String> formData;

    protected KeycloakSession session;
    /** authenticationSession can be null for some renderings, mainly error pages */
    protected AuthenticationSessionModel authenticationSession;
    protected RealmModel realm;
    protected ClientModel client;
    protected UriInfo uriInfo;

    protected FreeMarkerUtil freeMarker;

    protected UserModel user;

    // Attribute map handed to the FreeMarker engine when a template is processed.
    protected final Map<String, Object> attributes = new HashMap<>();

    /**
     * Creates a provider bound to the current session context (realm, client, URI).
     *
     * @param session    active Keycloak session; realm/client/uri are read from its context
     * @param freeMarker template-processing helper used by {@link #processTemplate}
     */
    public FreeMarkerLoginFormsProvider(KeycloakSession session, FreeMarkerUtil freeMarker) {
        this.session = session;
        this.freeMarker = freeMarker;
        this.attributes.put("scripts", new LinkedList<>());
        this.realm = session.getContext().getRealm();
        this.client = session.getContext().getClient();
        this.uriInfo = session.getContext().getUri();
    }

    /** Registers an additional script URL to be included by the rendered template. */
    @SuppressWarnings("unchecked")
    @Override
    public void addScript(String scriptUrl) {
        List<String> scripts = (List<String>) this.attributes.get("scripts");
        scripts.add(scriptUrl);
    }

    /**
     * Renders the page associated with the given required action, setting a default
     * warning message when none has been set explicitly.
     *
     * @param action required action to render a page for; unknown actions yield a 500
     * @return rendered response, or a server-error response for unsupported actions
     */
    @Override
    public Response createResponse(UserModel.RequiredAction action) {
        String actionMessage;
        LoginFormsPages page;

        switch (action) {
            case CONFIGURE_TOTP:
                actionMessage = Messages.CONFIGURE_TOTP;
                page = LoginFormsPages.LOGIN_CONFIG_TOTP;
                break;
            case UPDATE_PROFILE:
                UpdateProfileContext userBasedContext = new UserUpdateProfileContext(realm, user);
                this.attributes.put(UPDATE_PROFILE_CONTEXT_ATTR, userBasedContext);

                actionMessage = Messages.UPDATE_PROFILE;
                page = LoginFormsPages.LOGIN_UPDATE_PROFILE;
                break;
            case UPDATE_PASSWORD:
                // Distinguish "admin forced a password update" (required action stored on the
                // user) from "user-initiated reset". Compare action names exactly: the previous
                // UPDATE_PASSWORD.toString()::contains predicate would also match any custom
                // required-action name that is a substring of "UPDATE_PASSWORD".
                boolean isRequestedByAdmin = user.getRequiredActions().stream()
                        .filter(Objects::nonNull)
                        .anyMatch(UPDATE_PASSWORD.toString()::equals);
                actionMessage = isRequestedByAdmin ? Messages.UPDATE_PASSWORD : Messages.RESET_PASSWORD;
                page = LoginFormsPages.LOGIN_UPDATE_PASSWORD;
                break;
            case VERIFY_EMAIL:
                actionMessage = Messages.VERIFY_EMAIL;
                page = LoginFormsPages.LOGIN_VERIFY_EMAIL;
                break;
            default:
                return Response.serverError().build();
        }

        // Only add the default informational message if nothing was set by the caller.
        if (messages == null) {
            setMessage(MessageType.WARNING, actionMessage);
        }

        return createResponse(page);
    }

    /**
     * Renders one of the known login pages: resolves theme and locale, prepares common
     * and page-specific template attributes, then processes the template.
     *
     * @param page page to render
     * @return rendered response, or a server-error response if the theme cannot be loaded
     */
    @SuppressWarnings("incomplete-switch")
    protected Response createResponse(LoginFormsPages page) {
        Theme theme;
        try {
            theme = getTheme();
        } catch (IOException e) {
            logger.error("Failed to create theme", e);
            return Response.serverError().build();
        }

        Locale locale = session.getContext().resolveLocale(user);
        Properties messagesBundle = handleThemeResources(theme, locale);

        handleMessages(locale, messagesBundle);

        // for some reason Resteasy 2.3.7 doesn't like query params and form params with the same name and will null out the code form param
        UriBuilder uriBuilder = prepareBaseUriBuilder(page == LoginFormsPages.OAUTH_GRANT);
        createCommonAttributes(theme, locale, messagesBundle, uriBuilder, page);

        attributes.put("login", new LoginBean(formData));
        if (status != null) {
            attributes.put("statusCode", status.getStatusCode());
        }

        // Page-specific attributes; pages not listed here need only the common ones.
        switch (page) {
            case LOGIN_CONFIG_TOTP:
                attributes.put("totp", new TotpBean(session, realm, user, uriInfo.getRequestUriBuilder()));
                break;
            case LOGIN_UPDATE_PROFILE:
                UpdateProfileContext userCtx = (UpdateProfileContext) attributes.get(LoginFormsProvider.UPDATE_PROFILE_CONTEXT_ATTR);
                attributes.put("user", new ProfileBean(userCtx, formData));
                break;
            case LOGIN_IDP_LINK_CONFIRM:
            case LOGIN_IDP_LINK_EMAIL:
                BrokeredIdentityContext brokerContext = (BrokeredIdentityContext) this.attributes.get(IDENTITY_PROVIDER_BROKER_CONTEXT);
                String idpAlias = brokerContext.getIdpConfig().getAlias();
                idpAlias = ObjectUtil.capitalize(idpAlias);

                attributes.put("brokerContext", brokerContext);
                attributes.put("idpAlias", idpAlias);
                break;
            case LOGIN_TOTP:
                attributes.put("otpLogin", new TotpLoginBean(session, realm, user, (String) this.attributes.get(OTPFormAuthenticator.SELECTED_OTP_CREDENTIAL_ID)));
                break;
            case REGISTER:
                attributes.put("register", new RegisterBean(formData));
                break;
            case OAUTH_GRANT:
                attributes.put("oauth",
                        new OAuthGrantBean(accessCode, client, clientScopesRequested));
                attributes.put("advancedMsg", new AdvancedMessageFormatterMethod(locale, messagesBundle));
                break;
            case CODE:
                attributes.put(OAuth2Constants.CODE, new CodeBean(accessCode, messageType == MessageType.ERROR ? getFirstMessageUnformatted() : null));
                break;
            case X509_CONFIRM:
                attributes.put("x509", new X509ConfirmBean(formData));
                break;
            case SAML_POST_FORM:
                attributes.put("samlPost", new SAMLPostFormBean(formData));
                break;
        }

        return processTemplate(theme, Templates.getTemplate(page), locale);
    }

    /**
     * Renders an arbitrary (free-form) template from the login theme, with only the
     * common attributes populated.
     *
     * @param form template file name
     * @return rendered response, or a server-error response if the theme cannot be loaded
     */
    @Override
    public Response createForm(String form) {
        Theme theme;
        try {
            theme = getTheme();
        } catch (IOException e) {
            logger.error("Failed to create theme", e);
            return Response.serverError().build();
        }

        Locale locale = session.getContext().resolveLocale(user);
        Properties messagesBundle = handleThemeResources(theme, locale);

        handleMessages(locale, messagesBundle);

        UriBuilder uriBuilder = prepareBaseUriBuilder(false);
        createCommonAttributes(theme, locale, messagesBundle, uriBuilder, null);

        return processTemplate(theme, form, locale);
    }

    /**
     * Prepare base uri builder for later use
     *
     * @param resetRequestUriParams - for some reason Resteasy 2.3.7 doesn't like query params and form params with the same name and will null out the code form param, so we have to reset them for some pages
     * @return base uri builder
     */
    protected UriBuilder prepareBaseUriBuilder(boolean resetRequestUriParams) {
        String requestURI = uriInfo.getBaseUri().getPath();
        UriBuilder uriBuilder = UriBuilder.fromUri(requestURI);
        if (resetRequestUriParams) {
            uriBuilder.replaceQuery(null);
        }
        if (client != null) {
            uriBuilder.queryParam(Constants.CLIENT_ID, client.getClientId());
        }
        if (authenticationSession != null) {
            uriBuilder.queryParam(Constants.TAB_ID, authenticationSession.getTabId());
        }
        return uriBuilder;
    }

    /**
     * Get Theme used for page rendering.
     *
     * @return theme for page rendering, never null
     * @throws IOException in case of Theme loading problem
     */
    protected Theme getTheme() throws IOException {
        return session.theme().getTheme(Theme.Type.LOGIN);
    }

    /**
     * Load message bundle and place it into <code>msg</code> template attribute. Also load Theme properties and place them into <code>properties</code> template attribute.
     *
     * @param theme actual Theme to load bundle from
     * @param locale to load bundle for
     * @return message bundle for other use
     */
    protected Properties handleThemeResources(Theme theme, Locale locale) {
        Properties messagesBundle;
        try {
            messagesBundle = theme.getMessages(locale);
            attributes.put("msg", new MessageFormatterMethod(locale, messagesBundle));
        } catch (IOException e) {
            // Fall back to an empty bundle rather than failing the whole rendering.
            logger.warn("Failed to load messages", e);
            messagesBundle = new Properties();
        }
        try {
            attributes.put("properties", theme.getProperties());
        } catch (IOException e) {
            logger.warn("Failed to load properties", e);
        }
        return messagesBundle;
    }

    /**
     * Handle messages to be shown on the page - set them to template attributes
     *
     * @param locale to be used for message text loading
     * @param messagesBundle to be used for message text loading
     * @see #messageType
     * @see #messages
     */
    protected void handleMessages(Locale locale, Properties messagesBundle) {
        MessagesPerFieldBean messagesPerField = new MessagesPerFieldBean();
        if (messages != null) {
            // Aggregate all messages into one summary bean plus per-field entries.
            MessageBean wholeMessage = new MessageBean(null, messageType);
            for (FormMessage message : this.messages) {
                String formattedMessageText = formatMessage(message, messagesBundle, locale);
                if (formattedMessageText != null) {
                    wholeMessage.appendSummaryLine(formattedMessageText);
                    messagesPerField.addMessage(message.getField(), formattedMessageText, messageType);
                }
            }
            attributes.put("message", wholeMessage);
        } else {
            attributes.put("message", null);
        }
        attributes.put("messagesPerField", messagesPerField);
    }

    /**
     * Resolves a message key against the theme bundle for the user's locale.
     *
     * @param message bundle key (or literal text when the key is not in the bundle)
     * @return localized message text
     * @throws RuntimeException if the theme cannot be loaded
     */
    @Override
    public String getMessage(String message) {
        Theme theme;
        try {
            theme = getTheme();
        } catch (IOException e) {
            logger.error("Failed to create theme", e);
            throw new RuntimeException("Failed to create theme");
        }

        Locale locale = session.getContext().resolveLocale(user);
        Properties messagesBundle = handleThemeResources(theme, locale);
        FormMessage msg = new FormMessage(null, message);
        return formatMessage(msg, messagesBundle, locale);
    }

    /**
     * Resolves a parameterized message key against the theme bundle for the user's locale.
     *
     * @param message    bundle key (or literal text when the key is not in the bundle)
     * @param parameters MessageFormat arguments substituted into the message
     * @return localized, formatted message text
     * @throws RuntimeException if the theme cannot be loaded
     */
    @Override
    public String getMessage(String message, String... parameters) {
        Theme theme;
        try {
            theme = getTheme();
        } catch (IOException e) {
            logger.error("Failed to create theme", e);
            throw new RuntimeException("Failed to create theme");
        }

        Locale locale = session.getContext().resolveLocale(user);
        Properties messagesBundle = handleThemeResources(theme, locale);
        FormMessage msg = new FormMessage(message, (Object[]) parameters);
        return formatMessage(msg, messagesBundle, locale);
    }

    /**
     * Create common attributes used in all templates.
     *
     * @param theme actual Theme used (provided by <code>getTheme()</code>)
     * @param locale actual locale
     * @param messagesBundle actual message bundle (provided by <code>handleThemeResources()</code>)
     * @param baseUriBuilder actual base uri builder (provided by <code>prepareBaseUriBuilder()</code>)
     * @param page in case if common page is rendered, is null if called from <code>createForm()</code>
     *
     */
    protected void createCommonAttributes(Theme theme, Locale locale, Properties messagesBundle, UriBuilder baseUriBuilder, LoginFormsPages page) {
        URI baseUri = baseUriBuilder.build();
        if (accessCode != null) {
            baseUriBuilder.queryParam(LoginActionsService.SESSION_CODE, accessCode);
        }
        URI baseUriWithCodeAndClientId = baseUriBuilder.build();

        if (client != null) {
            attributes.put("client", new ClientBean(session, client));
        }

        if (realm != null) {
            attributes.put("realm", new RealmBean(realm));

            List<IdentityProviderModel> identityProviders = realm.getIdentityProviders();
            identityProviders = LoginFormsUtil.filterIdentityProviders(identityProviders, session, realm, attributes, formData, context);
            attributes.put("social", new IdentityProviderBean(realm, session, identityProviders, baseUriWithCodeAndClientId));

            attributes.put("url", new UrlBean(realm, theme, baseUri, this.actionUri));
            attributes.put("requiredActionUrl", new RequiredActionUrlFormatterMethod(realm, baseUri));
            attributes.put("auth", new AuthenticationContextBean(context, page));
            attributes.put(Constants.EXECUTION, execution);

            if (realm.isInternationalizationEnabled()) {
                // Build the URL used by the locale-switcher links; some pages have
                // dedicated well-known URLs, the rest reuse the current request path.
                UriBuilder b;
                if (page != null) {
                    switch (page) {
                        case LOGIN:
                        case LOGIN_USERNAME:
                        case X509_CONFIRM:
                            b = UriBuilder.fromUri(Urls.realmLoginPage(baseUri, realm.getName()));
                            break;
                        case REGISTER:
                            b = UriBuilder.fromUri(Urls.realmRegisterPage(baseUri, realm.getName()));
                            break;
                        default:
                            b = UriBuilder.fromUri(baseUri).path(uriInfo.getPath());
                            break;
                    }
                } else {
                    b = UriBuilder.fromUri(baseUri)
                            .path(uriInfo.getPath());
                }

                if (execution != null) {
                    b.queryParam(Constants.EXECUTION, execution);
                }

                if (authenticationSession != null && authenticationSession.getAuthNote(Constants.KEY) != null) {
                    b.queryParam(Constants.KEY, authenticationSession.getAuthNote(Constants.KEY));
                }

                attributes.put("locale", new LocaleBean(realm, locale, b, messagesBundle));
            }
        }
        if (realm != null && user != null && session != null) {
            attributes.put("authenticatorConfigured", new AuthenticatorConfiguredMethod(realm, user, session));
        }

        if (authenticationSession != null && authenticationSession.getClientNote(Constants.KC_ACTION_EXECUTING) != null) {
            attributes.put("isAppInitiatedAction", true);
        }
    }

    /**
     * Process FreeMarker template and prepare Response. Some fields are used for rendering also.
     *
     * @param theme to be used (provided by <code>getTheme()</code>)
     * @param templateName name of the template to be rendered
     * @param locale to be used
     * @return Response object to be returned to the browser, never null
     */
    protected Response processTemplate(Theme theme, String templateName, Locale locale) {
        try {
            String result = freeMarker.processTemplate(attributes, templateName, theme);
            javax.ws.rs.core.MediaType mediaType = contentType == null ? MediaType.TEXT_HTML_UTF_8_TYPE : contentType;
            Response.ResponseBuilder builder = Response.status(status == null ? Response.Status.OK : status).type(mediaType).language(locale).entity(result);
            for (Map.Entry<String, String> entry : httpResponseHeaders.entrySet()) {
                builder.header(entry.getKey(), entry.getValue());
            }
            return builder.build();
        } catch (FreeMarkerException e) {
            logger.error("Failed to process template", e);
            return Response.serverError().build();
        }
    }

    /** Renders the combined username/password login page. */
    @Override
    public Response createLoginUsernamePassword() {
        return createResponse(LoginFormsPages.LOGIN);
    }

    /** Renders the username-only login page. */
    public Response createLoginUsername() {
        return createResponse(LoginFormsPages.LOGIN_USERNAME);
    }

    /** Renders the password-only login page. */
    public Response createLoginPassword() {
        return createResponse(LoginFormsPages.LOGIN_PASSWORD);
    }

    /** Renders the "reset password" page. */
    @Override
    public Response createPasswordReset() {
        return createResponse(LoginFormsPages.LOGIN_RESET_PASSWORD);
    }

    /** Renders the OTP login page. */
    @Override
    public Response createLoginTotp() {
        return createResponse(LoginFormsPages.LOGIN_TOTP);
    }

    /** Renders the WebAuthn login page. */
    @Override
    public Response createLoginWebAuthn() {
        return createResponse(LoginFormsPages.LOGIN_WEBAUTHN);
    }

    /** Renders the registration page. */
    @Override
    public Response createRegistration() {
        return createResponse(LoginFormsPages.REGISTER);
    }

    /** Renders the generic informational page. */
    @Override
    public Response createInfoPage() {
        return createResponse(LoginFormsPages.INFO);
    }

    /** Renders the "update profile" page, with a default warning unless errors are already set. */
    @Override
    public Response createUpdateProfilePage() {
        // Don't display initial message if we already have some errors
        if (messageType != MessageType.ERROR) {
            setMessage(MessageType.WARNING, Messages.UPDATE_PROFILE);
        }

        return createResponse(LoginFormsPages.LOGIN_UPDATE_PROFILE);
    }

    /** Renders the identity-provider link confirmation page. */
    @Override
    public Response createIdpLinkConfirmLinkPage() {
        return createResponse(LoginFormsPages.LOGIN_IDP_LINK_CONFIRM);
    }

    /** Renders the "login page expired" page. */
    @Override
    public Response createLoginExpiredPage() {
        return createResponse(LoginFormsPages.LOGIN_PAGE_EXPIRED);
    }

    /**
     * Renders the identity-provider link e-mail page, pre-setting a warning that names
     * the (capitalized) broker alias.
     */
    @Override
    public Response createIdpLinkEmailPage() {
        BrokeredIdentityContext brokerContext = (BrokeredIdentityContext) this.attributes.get(IDENTITY_PROVIDER_BROKER_CONTEXT);
        String idpAlias = brokerContext.getIdpConfig().getAlias();
        idpAlias = ObjectUtil.capitalize(idpAlias);
        setMessage(MessageType.WARNING, Messages.LINK_IDP, idpAlias);

        return createResponse(LoginFormsPages.LOGIN_IDP_LINK_EMAIL);
    }

    /** Renders the error page with the given HTTP status. */
    @Override
    public Response createErrorPage(Response.Status status) {
        this.status = status;
        return createResponse(LoginFormsPages.ERROR);
    }

    /** Renders the WebAuthn-specific error page. */
    @Override
    public Response createWebAuthnErrorPage() {
        return createResponse(LoginFormsPages.ERROR_WEBAUTHN);
    }

    /** Renders the OAuth consent (grant) page. */
    @Override
    public Response createOAuthGrant() {
        return createResponse(LoginFormsPages.OAUTH_GRANT);
    }

    /** Renders the authenticator-selection page. */
    @Override
    public Response createSelectAuthenticator() {
        return createResponse(LoginFormsPages.LOGIN_SELECT_AUTHENTICATOR);
    }

    /** Renders the page showing an OAuth code (out-of-band flow). */
    @Override
    public Response createCode() {
        return createResponse(LoginFormsPages.CODE);
    }

    /** Renders the X.509 certificate confirmation page. */
    @Override
    public Response createX509ConfirmPage() {
        return createResponse(LoginFormsPages.X509_CONFIRM);
    }

    /** Renders the auto-submitting SAML POST-binding form. */
    @Override
    public Response createSamlPostForm() {
        return createResponse(LoginFormsPages.SAML_POST_FORM);
    }

    /**
     * Replaces any previously set messages with a single message of the given type.
     *
     * @param type       message severity
     * @param message    bundle key or literal text
     * @param parameters optional MessageFormat arguments
     */
    protected void setMessage(MessageType type, String message, Object... parameters) {
        messageType = type;
        messages = new ArrayList<>();
        messages.add(new FormMessage(null, message, parameters));
    }

    /** Returns the raw (unformatted) text of the first message, or null when none is set. */
    protected String getFirstMessageUnformatted() {
        if (messages != null && !messages.isEmpty()) {
            return messages.get(0).getMessage();
        }
        return null;
    }

    /**
     * Formats a single message: if the bundle contains the message key, runs it through
     * {@link MessageFormat} with the message's parameters; otherwise returns the key as-is.
     *
     * @return formatted text, or null when {@code message} is null
     */
    protected String formatMessage(FormMessage message, Properties messagesBundle, Locale locale) {
        if (message == null)
            return null;
        if (messagesBundle.containsKey(message.getMessage())) {
            return new MessageFormat(messagesBundle.getProperty(message.getMessage()), locale).format(message.getParameters());
        } else {
            return message.getMessage();
        }
    }

    /** Replaces the current messages with a single error message. */
    @Override
    public FreeMarkerLoginFormsProvider setError(String message, Object... parameters) {
        setMessage(MessageType.ERROR, message, parameters);
        return this;
    }

    /** Replaces the current messages with the given error messages (no-op on null). */
    @Override
    public LoginFormsProvider setErrors(List<FormMessage> messages) {
        if (messages == null)
            return this;
        this.messageType = MessageType.ERROR;
        this.messages = new ArrayList<>(messages);
        return this;
    }

    /**
     * Appends an error message; any previously accumulated messages of a different type
     * are discarded first so the list stays homogeneous.
     */
    @Override
    public LoginFormsProvider addError(FormMessage errorMessage) {
        if (this.messageType != MessageType.ERROR) {
            this.messageType = null;
            this.messages = null;
        }
        if (messages == null) {
            this.messageType = MessageType.ERROR;
            this.messages = new LinkedList<>();
        }
        this.messages.add(errorMessage);
        return this;
    }

    /**
     * Appends a success message; any previously accumulated messages of a different type
     * are discarded first so the list stays homogeneous.
     */
    @Override
    public LoginFormsProvider addSuccess(FormMessage errorMessage) {
        if (this.messageType != MessageType.SUCCESS) {
            this.messageType = null;
            this.messages = null;
        }
        if (messages == null) {
            this.messageType = MessageType.SUCCESS;
            this.messages = new LinkedList<>();
        }
        this.messages.add(errorMessage);
        return this;
    }

    /** Replaces the current messages with a single success message. */
    @Override
    public FreeMarkerLoginFormsProvider setSuccess(String message, Object... parameters) {
        setMessage(MessageType.SUCCESS, message, parameters);
        return this;
    }

    /** Replaces the current messages with a single informational message. */
    @Override
    public FreeMarkerLoginFormsProvider setInfo(String message, Object... parameters) {
        setMessage(MessageType.INFO, message, parameters);
        return this;
    }

    /** Sets the authentication session used for tab-id and auth-note lookups. */
    @Override
    public LoginFormsProvider setAuthenticationSession(AuthenticationSessionModel authenticationSession) {
        this.authenticationSession = authenticationSession;
        return this;
    }

    /** Sets the user whose locale and profile the rendered pages refer to. */
    @Override
    public FreeMarkerLoginFormsProvider setUser(UserModel user) {
        this.user = user;
        return this;
    }

    /** Sets previously submitted form data, re-displayed by form beans. */
    @Override
    public FreeMarkerLoginFormsProvider setFormData(MultivaluedMap<String, String> formData) {
        this.formData = formData;
        return this;
    }

    /** Sets the client-session access code appended to generated action URLs. */
    @Override
    public LoginFormsProvider setClientSessionCode(String accessCode) {
        this.accessCode = accessCode;
        return this;
    }

    /** Sets the client scopes listed on the OAuth consent page. */
    @Override
    public LoginFormsProvider setAccessRequest(List<ClientScopeModel> clientScopesRequested) {
        this.clientScopesRequested = clientScopesRequested;
        return this;
    }

    /** Sets an arbitrary template attribute. */
    @Override
    public LoginFormsProvider setAttribute(String name, Object value) {
        this.attributes.put(name, value);
        return this;
    }

    /** Sets the HTTP status of the rendered response (defaults to 200 OK). */
    @Override
    public LoginFormsProvider setStatus(Response.Status status) {
        this.status = status;
        return this;
    }

    /** Sets the response media type (defaults to text/html; charset=utf-8). */
    @Override
    public LoginFormsProvider setMediaType(javax.ws.rs.core.MediaType type) {
        this.contentType = type;
        return this;
    }

    /** Sets the form action URI used by the URL bean. */
    @Override
    public LoginFormsProvider setActionUri(URI actionUri) {
        this.actionUri = actionUri;
        return this;
    }

    /** Sets the authentication-flow execution id appended to generated URLs. */
    @Override
    public LoginFormsProvider setExecution(String execution) {
        this.execution = execution;
        return this;
    }

    /** Adds an extra HTTP header to the rendered response. */
    @Override
    public LoginFormsProvider setResponseHeader(String headerName, String headerValue) {
        this.httpResponseHeaders.put(headerName, headerValue);
        return this;
    }

    /** Sets the authentication-flow context used for identity-provider filtering. */
    public LoginFormsProvider setAuthContext(AuthenticationFlowContext context) {
        this.context = context;
        return this;
    }

    /** No resources to release. */
    @Override
    public void close() {
    }
}
| |
/*
* Copyright DataStax, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datastax.oss.driver.internal.core.channel;
import static com.datastax.oss.driver.Assertions.assertThat;
import static com.datastax.oss.driver.Assertions.assertThatStage;
import static org.mockito.Mockito.when;
import com.datastax.oss.driver.api.core.DefaultProtocolVersion;
import com.datastax.oss.driver.api.core.UnsupportedProtocolVersionException;
import com.datastax.oss.driver.api.core.config.DefaultDriverOption;
import com.datastax.oss.driver.internal.core.TestResponses;
import com.datastax.oss.driver.internal.core.metrics.NoopNodeMetricUpdater;
import com.datastax.oss.protocol.internal.Frame;
import com.datastax.oss.protocol.internal.ProtocolConstants;
import com.datastax.oss.protocol.internal.request.Options;
import com.datastax.oss.protocol.internal.response.Error;
import com.datastax.oss.protocol.internal.response.Ready;
import com.tngtech.java.junit.dataprovider.DataProvider;
import com.tngtech.java.junit.dataprovider.UseDataProvider;
import java.util.Optional;
import java.util.concurrent.CompletionStage;
import org.junit.Test;
/**
 * Tests for the protocol-version negotiation performed by {@code ChannelFactory} when
 * opening a channel: each test scripts the server side by reading the frames the factory
 * writes outbound and feeding mock responses back inbound.
 *
 * <p>NOTE(review): these tests depend on the exact outbound/inbound frame ordering of the
 * init handshake (OPTIONS -> SUPPORTED -> STARTUP -> READY -> cluster-name query).
 */
public class ChannelFactoryProtocolNegotiationTest extends ChannelFactoryTestBase {

    /** Explicitly configured version (V4) accepted by the server: init succeeds with V4. */
    @Test
    public void should_succeed_if_version_specified_and_supported_by_server() {
        // Given
        when(defaultProfile.isDefined(DefaultDriverOption.PROTOCOL_VERSION)).thenReturn(true);
        when(defaultProfile.getString(DefaultDriverOption.PROTOCOL_VERSION)).thenReturn("V4");
        when(protocolVersionRegistry.fromName("V4")).thenReturn(DefaultProtocolVersion.V4);
        ChannelFactory factory = newChannelFactory();

        // When
        CompletionStage<DriverChannel> channelFuture =
                factory.connect(
                        SERVER_ADDRESS, DriverChannelOptions.DEFAULT, NoopNodeMetricUpdater.INSTANCE);
        completeSimpleChannelInit();

        // Then
        assertThatStage(channelFuture)
                .isSuccess(channel -> assertThat(channel.getClusterName()).isEqualTo("mockClusterName"));
        assertThat(factory.protocolVersion).isEqualTo(DefaultProtocolVersion.V4);
    }

    /**
     * Explicitly configured version rejected by the server: no downgrade is attempted and
     * the future fails with {@link UnsupportedProtocolVersionException}.
     *
     * @param errorCode server error code signalling "unsupported protocol" (see data provider)
     */
    @Test
    @UseDataProvider("unsupportedProtocolCodes")
    public void should_fail_if_version_specified_and_not_supported_by_server(int errorCode) {
        // Given
        when(defaultProfile.isDefined(DefaultDriverOption.PROTOCOL_VERSION)).thenReturn(true);
        when(defaultProfile.getString(DefaultDriverOption.PROTOCOL_VERSION)).thenReturn("V4");
        when(protocolVersionRegistry.fromName("V4")).thenReturn(DefaultProtocolVersion.V4);
        ChannelFactory factory = newChannelFactory();

        // When
        CompletionStage<DriverChannel> channelFuture =
                factory.connect(
                        SERVER_ADDRESS, DriverChannelOptions.DEFAULT, NoopNodeMetricUpdater.INSTANCE);
        Frame requestFrame = readOutboundFrame();
        assertThat(requestFrame.message).isInstanceOf(Options.class);
        writeInboundFrame(requestFrame, TestResponses.supportedResponse("mock_key", "mock_value"));
        requestFrame = readOutboundFrame();
        assertThat(requestFrame.protocolVersion).isEqualTo(DefaultProtocolVersion.V4.getCode());
        // Server does not support v4
        writeInboundFrame(
                requestFrame, new Error(errorCode, "Invalid or unsupported protocol version"));

        // Then
        assertThatStage(channelFuture)
                .isFailed(
                        e -> {
                            assertThat(e)
                                    .isInstanceOf(UnsupportedProtocolVersionException.class)
                                    .hasMessageContaining("Host does not support protocol version V4");
                            assertThat(((UnsupportedProtocolVersionException) e).getAttemptedVersions())
                                    .containsExactly(DefaultProtocolVersion.V4);
                        });
    }

    /**
     * Explicitly configured beta version (V5) rejected by a server that treats it as beta:
     * the failure is surfaced as an {@link UnsupportedProtocolVersionException}.
     */
    @Test
    public void should_fail_if_version_specified_and_considered_beta_by_server() {
        // Given
        when(defaultProfile.isDefined(DefaultDriverOption.PROTOCOL_VERSION)).thenReturn(true);
        when(defaultProfile.getString(DefaultDriverOption.PROTOCOL_VERSION)).thenReturn("V5");
        when(protocolVersionRegistry.fromName("V5")).thenReturn(DefaultProtocolVersion.V5);
        ChannelFactory factory = newChannelFactory();

        // When
        CompletionStage<DriverChannel> channelFuture =
                factory.connect(
                        SERVER_ADDRESS, DriverChannelOptions.DEFAULT, NoopNodeMetricUpdater.INSTANCE);
        Frame requestFrame = readOutboundFrame();
        assertThat(requestFrame.message).isInstanceOf(Options.class);
        writeInboundFrame(requestFrame, TestResponses.supportedResponse("mock_key", "mock_value"));
        requestFrame = readOutboundFrame();
        assertThat(requestFrame.protocolVersion).isEqualTo(DefaultProtocolVersion.V5.getCode());
        // Server considers v5 beta, e.g. C* 3.10 or 3.11
        writeInboundFrame(
                requestFrame,
                new Error(
                        ProtocolConstants.ErrorCode.PROTOCOL_ERROR,
                        "Beta version of the protocol used (5/v5-beta), but USE_BETA flag is unset"));

        // Then
        assertThatStage(channelFuture)
                .isFailed(
                        e -> {
                            assertThat(e)
                                    .isInstanceOf(UnsupportedProtocolVersionException.class)
                                    .hasMessageContaining("Host does not support protocol version V5");
                            assertThat(((UnsupportedProtocolVersionException) e).getAttemptedVersions())
                                    .containsExactly(DefaultProtocolVersion.V5);
                        });
    }

    /** No version configured: the factory starts with the highest non-beta and the server accepts it. */
    @Test
    public void should_succeed_if_version_not_specified_and_server_supports_latest_supported() {
        // Given
        when(defaultProfile.isDefined(DefaultDriverOption.PROTOCOL_VERSION)).thenReturn(false);
        when(protocolVersionRegistry.highestNonBeta()).thenReturn(DefaultProtocolVersion.V4);
        ChannelFactory factory = newChannelFactory();

        // When
        CompletionStage<DriverChannel> channelFuture =
                factory.connect(
                        SERVER_ADDRESS, DriverChannelOptions.DEFAULT, NoopNodeMetricUpdater.INSTANCE);
        Frame requestFrame = readOutboundFrame();
        assertThat(requestFrame.message).isInstanceOf(Options.class);
        writeInboundFrame(requestFrame, TestResponses.supportedResponse("mock_key", "mock_value"));
        requestFrame = readOutboundFrame();
        assertThat(requestFrame.protocolVersion).isEqualTo(DefaultProtocolVersion.V4.getCode());
        writeInboundFrame(requestFrame, new Ready());
        requestFrame = readOutboundFrame();
        writeInboundFrame(requestFrame, TestResponses.clusterNameResponse("mockClusterName"));

        // Then
        assertThatStage(channelFuture)
                .isSuccess(channel -> assertThat(channel.getClusterName()).isEqualTo("mockClusterName"));
        assertThat(factory.protocolVersion).isEqualTo(DefaultProtocolVersion.V4);
    }

    /**
     * No version configured and the server rejects the first attempt (V4): the factory
     * downgrades via the registry and re-initializes successfully with V3.
     *
     * @param errorCode server error code signalling "unsupported protocol" (see data provider)
     */
    @Test
    @UseDataProvider("unsupportedProtocolCodes")
    public void should_negotiate_if_version_not_specified_and_server_supports_legacy(int errorCode) {
        // Given
        when(defaultProfile.isDefined(DefaultDriverOption.PROTOCOL_VERSION)).thenReturn(false);
        when(protocolVersionRegistry.highestNonBeta()).thenReturn(DefaultProtocolVersion.V4);
        when(protocolVersionRegistry.downgrade(DefaultProtocolVersion.V4))
                .thenReturn(Optional.of(DefaultProtocolVersion.V3));
        ChannelFactory factory = newChannelFactory();

        // When
        CompletionStage<DriverChannel> channelFuture =
                factory.connect(
                        SERVER_ADDRESS, DriverChannelOptions.DEFAULT, NoopNodeMetricUpdater.INSTANCE);
        Frame requestFrame = readOutboundFrame();
        assertThat(requestFrame.message).isInstanceOf(Options.class);
        writeInboundFrame(requestFrame, TestResponses.supportedResponse("mock_key", "mock_value"));
        requestFrame = readOutboundFrame();
        assertThat(requestFrame.protocolVersion).isEqualTo(DefaultProtocolVersion.V4.getCode());
        // Server does not support v4
        writeInboundFrame(
                requestFrame, new Error(errorCode, "Invalid or unsupported protocol version"));

        // Then
        // Factory should initialize a new connection, that retries with the lower version
        requestFrame = readOutboundFrame();
        assertThat(requestFrame.message).isInstanceOf(Options.class);
        writeInboundFrame(requestFrame, TestResponses.supportedResponse("mock_key", "mock_value"));
        requestFrame = readOutboundFrame();
        assertThat(requestFrame.protocolVersion).isEqualTo(DefaultProtocolVersion.V3.getCode());
        writeInboundFrame(requestFrame, new Ready());
        requestFrame = readOutboundFrame();
        writeInboundFrame(requestFrame, TestResponses.clusterNameResponse("mockClusterName"));
        assertThatStage(channelFuture)
                .isSuccess(channel -> assertThat(channel.getClusterName()).isEqualTo("mockClusterName"));
        assertThat(factory.protocolVersion).isEqualTo(DefaultProtocolVersion.V3);
    }

    /**
     * No version configured and the server rejects every candidate (V4 then V3, with no
     * further downgrade available): the future fails listing all attempted versions.
     *
     * @param errorCode server error code signalling "unsupported protocol" (see data provider)
     */
    @Test
    @UseDataProvider("unsupportedProtocolCodes")
    public void should_fail_if_negotiation_finds_no_matching_version(int errorCode) {
        // Given
        when(defaultProfile.isDefined(DefaultDriverOption.PROTOCOL_VERSION)).thenReturn(false);
        when(protocolVersionRegistry.highestNonBeta()).thenReturn(DefaultProtocolVersion.V4);
        when(protocolVersionRegistry.downgrade(DefaultProtocolVersion.V4))
                .thenReturn(Optional.of(DefaultProtocolVersion.V3));
        when(protocolVersionRegistry.downgrade(DefaultProtocolVersion.V3)).thenReturn(Optional.empty());
        ChannelFactory factory = newChannelFactory();

        // When
        CompletionStage<DriverChannel> channelFuture =
                factory.connect(
                        SERVER_ADDRESS, DriverChannelOptions.DEFAULT, NoopNodeMetricUpdater.INSTANCE);
        Frame requestFrame = readOutboundFrame();
        assertThat(requestFrame.message).isInstanceOf(Options.class);
        writeInboundFrame(requestFrame, TestResponses.supportedResponse("mock_key", "mock_value"));
        requestFrame = readOutboundFrame();
        assertThat(requestFrame.protocolVersion).isEqualTo(DefaultProtocolVersion.V4.getCode());
        // Server does not support v4
        writeInboundFrame(
                requestFrame, new Error(errorCode, "Invalid or unsupported protocol version"));
        // Client retries with v3
        requestFrame = readOutboundFrame();
        assertThat(requestFrame.message).isInstanceOf(Options.class);
        writeInboundFrame(requestFrame, TestResponses.supportedResponse("mock_key", "mock_value"));
        requestFrame = readOutboundFrame();
        assertThat(requestFrame.protocolVersion).isEqualTo(DefaultProtocolVersion.V3.getCode());
        // Server does not support v3
        writeInboundFrame(
                requestFrame, new Error(errorCode, "Invalid or unsupported protocol version"));

        // Then
        assertThatStage(channelFuture)
                .isFailed(
                        e -> {
                            assertThat(e)
                                    .isInstanceOf(UnsupportedProtocolVersionException.class)
                                    .hasMessageContaining(
                                            "Protocol negotiation failed: could not find a common version "
                                                    + "(attempted: [V4, V3])");
                            assertThat(((UnsupportedProtocolVersionException) e).getAttemptedVersions())
                                    .containsExactly(DefaultProtocolVersion.V4, DefaultProtocolVersion.V3);
                        });
    }

    /**
     * Depending on the Cassandra version, an "unsupported protocol" response can use different error
     * codes, so we test all of them.
     */
    @DataProvider
    public static Object[][] unsupportedProtocolCodes() {
        return new Object[][] {
            new Object[] {ProtocolConstants.ErrorCode.PROTOCOL_ERROR},
            // C* 2.1 reports a server error instead of protocol error, see CASSANDRA-9451.
            new Object[] {ProtocolConstants.ErrorCode.SERVER_ERROR}
        };
    }
}
| |
/*
* ProGuard -- shrinking, optimization, obfuscation, and preverification
* of Java bytecode.
*
* Copyright (c) 2002-2011 Eric Lafortune (eric@graphics.cornell.edu)
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package proguard.gui;
import proguard.util.ListUtil;
import javax.swing.*;
import javax.swing.border.*;
import java.awt.*;
import java.awt.event.*;
import java.util.List;
/**
* This <code>JDialog</code> allows the user to enter a String.
*
* @author Eric Lafortune
*/
public class FilterDialog extends JDialog
{
/**
* Return value if the dialog is canceled (with the Cancel button or by
* closing the dialog window).
*/
public static final int CANCEL_OPTION = 1;
/**
* Return value if the dialog is approved (with the Ok button).
*/
public static final int APPROVE_OPTION = 0;
private static final String DEFAULT_FILTER = "**";
private static final String DEFAULT_JAR_FILTER = "**.jar";
private static final String DEFAULT_WAR_FILTER = "**.war";
private static final String DEFAULT_EAR_FILTER = "**.ear";
private static final String DEFAULT_ZIP_FILTER = "**.zip";
private final JTextField filterTextField = new JTextField(40);
private final JTextField jarFilterTextField = new JTextField(40);
private final JTextField warFilterTextField = new JTextField(40);
private final JTextField earFilterTextField = new JTextField(40);
private final JTextField zipFilterTextField = new JTextField(40);
private int returnValue;
/**
 * Creates a new modal filter dialog.
 * <p>
 * The dialog contains an explanation text, one text field per name filter
 * (general, jar, war, ear and zip) and OK/Cancel buttons that close the
 * dialog and record the user's choice in {@code returnValue}.
 *
 * @param owner the frame that owns this modal dialog
 * @param explanation the explanatory text shown above the filter fields
 */
public FilterDialog(JFrame owner,
                    String explanation)
{
    super(owner, true); // modal dialog
    setResizable(true);
    // Create some constraints that can be reused.
    GridBagConstraints textConstraints = new GridBagConstraints();
    textConstraints.gridwidth = GridBagConstraints.REMAINDER;
    textConstraints.fill = GridBagConstraints.HORIZONTAL;
    textConstraints.weightx = 1.0;
    textConstraints.weighty = 1.0;
    textConstraints.anchor = GridBagConstraints.NORTHWEST;
    textConstraints.insets = new Insets(10, 10, 10, 10);
    GridBagConstraints labelConstraints = new GridBagConstraints();
    labelConstraints.anchor = GridBagConstraints.WEST;
    labelConstraints.insets = new Insets(1, 2, 1, 2);
    GridBagConstraints textFieldConstraints = new GridBagConstraints();
    textFieldConstraints.gridwidth = GridBagConstraints.REMAINDER;
    textFieldConstraints.fill = GridBagConstraints.HORIZONTAL;
    textFieldConstraints.weightx = 1.0;
    textFieldConstraints.anchor = GridBagConstraints.WEST;
    textFieldConstraints.insets = labelConstraints.insets;
    GridBagConstraints panelConstraints = new GridBagConstraints();
    panelConstraints.gridwidth = GridBagConstraints.REMAINDER;
    panelConstraints.fill = GridBagConstraints.HORIZONTAL;
    panelConstraints.weightx = 1.0;
    panelConstraints.weighty = 0.0;
    panelConstraints.anchor = GridBagConstraints.NORTHWEST;
    panelConstraints.insets = labelConstraints.insets;
    GridBagConstraints okButtonConstraints = new GridBagConstraints();
    okButtonConstraints.weightx = 1.0;
    okButtonConstraints.weighty = 1.0;
    okButtonConstraints.anchor = GridBagConstraints.SOUTHEAST;
    okButtonConstraints.insets = new Insets(4, 4, 8, 4);
    GridBagConstraints cancelButtonConstraints = new GridBagConstraints();
    cancelButtonConstraints.gridwidth = GridBagConstraints.REMAINDER;
    cancelButtonConstraints.weighty = 1.0;
    cancelButtonConstraints.anchor = GridBagConstraints.SOUTHEAST;
    cancelButtonConstraints.insets = okButtonConstraints.insets;
    GridBagLayout layout = new GridBagLayout();
    Border etchedBorder = BorderFactory.createEtchedBorder(EtchedBorder.RAISED);
    // Create the read-only, transparent text area with the explanation.
    JTextArea explanationTextArea = new JTextArea(explanation, 3, 0);
    explanationTextArea.setOpaque(false);
    explanationTextArea.setEditable(false);
    explanationTextArea.setLineWrap(true);
    explanationTextArea.setWrapStyleWord(true);
    // Create the filter labels.
    JLabel filterLabel = new JLabel(msg("nameFilter"));
    JLabel jarFilterLabel = new JLabel(msg("jarNameFilter"));
    JLabel warFilterLabel = new JLabel(msg("warNameFilter"));
    JLabel earFilterLabel = new JLabel(msg("earNameFilter"));
    JLabel zipFilterLabel = new JLabel(msg("zipNameFilter"));
    // Create the filter panel: one label/text-field row per filter kind.
    JPanel filterPanel = new JPanel(layout);
    filterPanel.setBorder(BorderFactory.createTitledBorder(etchedBorder,
        msg("filters")));
    filterPanel.add(explanationTextArea, textConstraints);
    filterPanel.add(tip(filterLabel, "nameFilterTip"), labelConstraints);
    filterPanel.add(tip(filterTextField, "fileNameFilterTip"), textFieldConstraints);
    filterPanel.add(tip(jarFilterLabel, "jarNameFilterTip"), labelConstraints);
    filterPanel.add(tip(jarFilterTextField, "fileNameFilterTip"), textFieldConstraints);
    filterPanel.add(tip(warFilterLabel, "warNameFilterTip"), labelConstraints);
    filterPanel.add(tip(warFilterTextField, "fileNameFilterTip"), textFieldConstraints);
    filterPanel.add(tip(earFilterLabel, "earNameFilterTip"), labelConstraints);
    filterPanel.add(tip(earFilterTextField, "fileNameFilterTip"), textFieldConstraints);
    filterPanel.add(tip(zipFilterLabel, "zipNameFilterTip"), labelConstraints);
    filterPanel.add(tip(zipFilterTextField, "fileNameFilterTip"), textFieldConstraints);
    // OK records the approval and closes the dialog.
    JButton okButton = new JButton(msg("ok"));
    okButton.addActionListener(new ActionListener()
    {
        public void actionPerformed(ActionEvent e)
        {
            returnValue = APPROVE_OPTION;
            // setVisible(false) replaces the deprecated Component.hide().
            setVisible(false);
        }
    });
    // Cancel just closes the dialog; returnValue keeps its previous value.
    JButton cancelButton = new JButton(msg("cancel"));
    cancelButton.addActionListener(new ActionListener()
    {
        public void actionPerformed(ActionEvent e)
        {
            setVisible(false);
        }
    });
    // Add all panels to the main panel.
    JPanel mainPanel = new JPanel(layout);
    mainPanel.add(filterPanel, panelConstraints);
    mainPanel.add(okButton, okButtonConstraints);
    mainPanel.add(cancelButton, cancelButtonConstraints);
    getContentPane().add(mainPanel);
}
/**
 * Sets the filter to be represented in this dialog; {@code null} selects
 * the default filter.
 */
public void setFilter(List filter)
{
    String text;
    if (filter == null) {
        text = DEFAULT_FILTER;
    } else {
        text = ListUtil.commaSeparatedString(filter, true);
    }
    filterTextField.setText(text);
}
/**
 * Returns the filter currently represented in this dialog, or {@code null}
 * when the field still holds the default filter.
 */
public List getFilter()
{
    String text = filterTextField.getText();
    if (text.equals(DEFAULT_FILTER)) {
        return null;
    }
    return ListUtil.commaSeparatedList(text);
}
/**
 * Sets the jar filter to be represented in this dialog; {@code null}
 * selects the default jar filter.
 */
public void setJarFilter(List filter)
{
    String text;
    if (filter == null) {
        text = DEFAULT_JAR_FILTER;
    } else {
        text = ListUtil.commaSeparatedString(filter, true);
    }
    jarFilterTextField.setText(text);
}
/**
 * Returns the jar filter currently represented in this dialog, or
 * {@code null} when the field still holds the default jar filter.
 */
public List getJarFilter()
{
    String text = jarFilterTextField.getText();
    if (text.equals(DEFAULT_JAR_FILTER)) {
        return null;
    }
    return ListUtil.commaSeparatedList(text);
}
/**
 * Sets the war filter to be represented in this dialog; {@code null}
 * selects the default war filter.
 */
public void setWarFilter(List filter)
{
    String text;
    if (filter == null) {
        text = DEFAULT_WAR_FILTER;
    } else {
        text = ListUtil.commaSeparatedString(filter, true);
    }
    warFilterTextField.setText(text);
}
/**
 * Returns the war filter currently represented in this dialog, or
 * {@code null} when the field still holds the default war filter.
 */
public List getWarFilter()
{
    String text = warFilterTextField.getText();
    if (text.equals(DEFAULT_WAR_FILTER)) {
        return null;
    }
    return ListUtil.commaSeparatedList(text);
}
/**
 * Sets the ear filter to be represented in this dialog; {@code null}
 * selects the default ear filter.
 */
public void setEarFilter(List filter)
{
    String text;
    if (filter == null) {
        text = DEFAULT_EAR_FILTER;
    } else {
        text = ListUtil.commaSeparatedString(filter, true);
    }
    earFilterTextField.setText(text);
}
/**
 * Returns the ear filter currently represented in this dialog, or
 * {@code null} when the field still holds the default ear filter.
 */
public List getEarFilter()
{
    String text = earFilterTextField.getText();
    if (text.equals(DEFAULT_EAR_FILTER)) {
        return null;
    }
    return ListUtil.commaSeparatedList(text);
}
/**
 * Sets the zip filter to be represented in this dialog; {@code null}
 * selects the default zip filter.
 */
public void setZipFilter(List filter)
{
    String text;
    if (filter == null) {
        text = DEFAULT_ZIP_FILTER;
    } else {
        text = ListUtil.commaSeparatedString(filter, true);
    }
    zipFilterTextField.setText(text);
}
/**
 * Returns the zip filter currently represented in this dialog, or
 * {@code null} when the field still holds the default zip filter.
 */
public List getZipFilter()
{
    String text = zipFilterTextField.getText();
    if (text.equals(DEFAULT_ZIP_FILTER)) {
        return null;
    }
    return ListUtil.commaSeparatedList(text);
}
/**
 * Shows this dialog. This method only returns when the dialog is closed.
 *
 * @return <code>CANCEL_OPTION</code> or <code>APPROVE_OPTION</code>,
 *         depending on the choice of the user.
 */
public int showDialog()
{
    // Assume cancellation until the OK listener overwrites the value.
    returnValue = CANCEL_OPTION;
    // Open the dialog in the right place, then wait for it to be closed,
    // one way or another.
    pack();
    setLocationRelativeTo(getOwner());
    // setVisible(true) blocks for modal dialogs and replaces the
    // deprecated Dialog.show().
    setVisible(true);
    return returnValue;
}
/**
 * Attaches the tool tip from the GUI resources that corresponds to the
 * given key to the given component, and returns that same component so the
 * call can be used inline.
 */
private static JComponent tip(JComponent component, String messageKey)
{
    String toolTip = msg(messageKey);
    component.setToolTipText(toolTip);
    return component;
}
/**
 * Looks up the message for the given key in the GUI resources.
 */
private static String msg(String messageKey)
{
    final String message = GUIResources.getMessage(messageKey);
    return message;
}
}
| |
/*
* Copyright (c) 2011-2014 Gilbert Peffer, Barbara Llacay
*
* The source code and software releases are available at http://code.google.com/p/systemic-risk/
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package info.financialecology.finance.utilities.output;
import info.financialecology.finance.utilities.datastruct.DoubleTimeSeries;
import info.financialecology.finance.utilities.datastruct.DoubleTimeSeriesList;
import info.financialecology.finance.utilities.datastruct.VersatileDataTable;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import cern.colt.list.DoubleArrayList;
import cern.colt.list.IntArrayList;
import cern.colt.matrix.*;
import cern.colt.matrix.impl.DenseDoubleMatrix2D;
import au.com.bytecode.opencsv.CSVReader;
/**
 * Reads the values from a CSV (comma-separated values) file and stores them in
 * objects of different types. Data can be written to objects of the following
 * types:
 * <ul>
 * <li> {@link DoubleArrayList}
 * <li> {@link DoubleTimeSeries}
 * <li> {@link DoubleTimeSeriesList}
 * <li> {@link VersatileDataTable}
 * </ul>
 *
 * @author Gilbert Peffer
 *
 */
public class CsvResultReader {

    /** Expected header of the tick column (currently unused by this class). */
    private static final String TICK_HEADER = "tick";

    /** Default separator for the CSV values. */
    private static final char SEPARATOR = ',';

    /** Reader over the underlying CSV file; stays null if the file could not be opened. */
    private CSVReader w;

    /** Orientation of the data in the CSV file. */
    public enum Format {ROW, COL}

    /**
     * Constructor. Uses the default separator {@link #SEPARATOR} for the CSV values.
     *
     * @param fileName name of the CSV file
     */
    public CsvResultReader(String fileName) {
        this(fileName, SEPARATOR);   // delegate to avoid duplicating the open logic
    }

    /**
     * Constructor. The separator is provided as an argument.
     *
     * @param fileName name of the input CSV file
     * @param separator the separator of the CSV file
     */
    public CsvResultReader(String fileName, char separator) {
        try {
            w = new CSVReader(new FileReader(fileName), separator);
        } catch (IOException e) {
            // NOTE(review): on failure the reader stays null and every read*()
            // call below will fail with a NullPointerException; consider
            // propagating the error to the caller instead.
            e.printStackTrace();
        }
    }

    /**
     * Reads the CSV file and stores the values in an {@code ArrayList<IntArrayList>}.
     * Row and column names in the CSV are ignored, but it needs to be indicated
     * whether they are present using {@code isRowNames} and {@code isColNames}.
     * Each value column is stored in one {@code IntArrayList}.
     *
     * @param isRowNames true if the CSV file contains row names (it is not safe to detect this
     *        automatically)
     * @param isColNames true if the CSV file contains column names (it is not safe to detect this
     *        automatically)
     */
    public ArrayList<IntArrayList> readIntMatrix2D(Boolean isRowNames, Boolean isColNames) {
        // csvTable layout: list(0) = row names, list(1) = column names,
        // list(2..n) = value columns.
        ArrayList<ArrayList<String>> csvTable = readCSVTable(isRowNames, isColNames);
        int numValueRows = csvTable.get(2).size();   // every value column has the same length
        int numValueCols = csvTable.size() - 2;      // value columns start at index 2
        ArrayList<IntArrayList> matrix = new ArrayList<IntArrayList>();
        for (int c = 0; c < numValueCols; c++) {
            ArrayList<String> col = csvTable.get(c + 2);
            IntArrayList intCol = new IntArrayList();
            for (int r = 0; r < numValueRows; r++)
                intCol.add(Integer.valueOf(col.get(r)));
            matrix.add(intCol);
        }
        return matrix;
    }

    /**
     * Reads the CSV file and stores the values in a {@code DoubleMatrix2D} object. Row and
     * column names in the CSV are ignored, but it needs to be indicated whether they are
     * present using {@code isRowNames} and {@code isColNames}.
     *
     * @param isRowNames true if the CSV file contains row names (it is not safe to detect this
     *        automatically)
     * @param isColNames true if the CSV file contains column names (it is not safe to detect this
     *        automatically)
     */
    public DoubleMatrix2D readDoubleMatrix2D(Boolean isRowNames, Boolean isColNames) {
        ArrayList<ArrayList<String>> csvTable = readCSVTable(isRowNames, isColNames);
        int numValueRows = csvTable.get(2).size();
        int numValueCols = csvTable.size() - 2;
        DoubleMatrix2D matrix = new DenseDoubleMatrix2D(numValueRows, numValueCols);
        for (int c = 0; c < numValueCols; c++) {
            ArrayList<String> col = csvTable.get(c + 2);
            for (int r = 0; r < numValueRows; r++)
                matrix.set(r, c, Double.valueOf(col.get(r)));
        }
        return matrix;
    }

    /**
     * Reads the CSV file and stores the values in a {@code DoubleTimeSeries} object.
     *
     * We expect the first column of the CSV values to contain the tick and the second column
     * the values. Row names are ignored. If a column name is provided, the name of the second
     * column is assigned to the identifier of the time series.
     *
     * @param isRowNames true if the CSV file contains row names (it is not safe to detect this
     *        automatically)
     * @param isColNames true if the CSV file contains column names (it is not safe to detect this
     *        automatically)
     */
    public DoubleTimeSeries readDoubleTimeSeries(Boolean isRowNames, Boolean isColNames) {
        ArrayList<ArrayList<String>> csvTable = readCSVTable(isRowNames, isColNames);
        int numValueRows = csvTable.get(2).size();
        DoubleTimeSeries dts = new DoubleTimeSeries();
        ArrayList<String> ticks = csvTable.get(2);   // first value column holds the ticks
        ArrayList<String> values = csvTable.get(3);  // second value column holds the data
        for (int r = 0; r < numValueRows; r++)
            dts.add(Integer.valueOf(ticks.get(r)), Double.valueOf(values.get(r)));
        // NOTE(review): index 0 of the column-name list corresponds to the tick
        // column header, not the value column — confirm against the CSV writer.
        if (isColNames) dts.setId(csvTable.get(1).get(0));
        return dts;
    }

    /**
     * Reads the CSV file and stores the values in a {@link DoubleTimeSeriesList} object. Every
     * column of CSV values is stored in a {@link DoubleTimeSeries}.
     *
     * We expect the first column of the CSV values (excluding row names) to contain the tick
     * and the following columns the values. Row names are ignored. If column names are
     * provided, they are assigned to the identifiers of the time series.
     *
     * @param isRowNames true if the CSV file contains row names (it is not safe to detect this
     *        automatically)
     * @param isColNames true if the CSV file contains column names (it is not safe to detect this
     *        automatically)
     */
    public DoubleTimeSeriesList readDoubleTimeSeriesList(Boolean isRowNames, Boolean isColNames) {
        ArrayList<ArrayList<String>> csvTable = readCSVTable(isRowNames, isColNames);
        int numValueRows = csvTable.get(2).size();
        int numValueCols = csvTable.size() - 3;      // exclude the tick column
        DoubleTimeSeriesList dtsl = new DoubleTimeSeriesList();
        ArrayList<String> ticks = csvTable.get(2);   // the ticks are in array list '2'
        for (int c = 0; c < numValueCols; c++) {
            ArrayList<String> values = csvTable.get(c + 3);   // the first variable is in array list '3'
            DoubleTimeSeries dts = new DoubleTimeSeries();
            for (int r = 0; r < numValueRows; r++)
                dts.add(Integer.valueOf(ticks.get(r)), Double.valueOf(values.get(r)));
            // Set the identifier once per series; the original code set it on
            // every row with the same end result. NOTE(review): using index 'c'
            // picks the tick column's header for the first series — confirm
            // whether 'c + 1' was intended.
            if (isColNames) dts.setId(csvTable.get(1).get(c));
            dtsl.add(dts);
        }
        return dtsl;
    }

    /**
     * Reads the CSV file and stores the values in a {@link VersatileDataTable} object. Row and
     * column names are both stored in the object.
     *
     * @param isRowNames true if the CSV file contains row names (it is not safe to detect this
     *        automatically)
     * @param isColNames true if the CSV file contains column names (it is not safe to detect this
     *        automatically)
     */
    public VersatileDataTable readVersatileDataTable(Boolean isRowNames, Boolean isColNames) {
        ArrayList<ArrayList<String>> csvTable = readCSVTable(isRowNames, isColNames);
        int numValueRows = csvTable.get(2).size();
        int numValueCols = csvTable.size() - 2;
        VersatileDataTable table = new VersatileDataTable("unlabeled");
        for (int c = 0; c < numValueCols; c++)
            for (int r = 0; r < numValueRows; r++)
                table.addValue(Double.valueOf(csvTable.get(c + 2).get(r)), csvTable.get(0).get(r), csvTable.get(1).get(c));
        return table;
    }

    /**
     * Reads the values of a CSV file, including row and column names, if provided. The strings
     * are stored in an array list of array lists: the first array list contains the row names,
     * the second the column names, and the remaining array lists contain the value columns of
     * the CSV table.
     *
     * @param isRowNames true if the CSV file contains row names
     * @param isColNames true if the CSV file contains column names
     * @return list(0) = row names, list(1) = column names, list(2..n) = value columns
     *
     * @author Gilbert Peffer
     */
    private ArrayList<ArrayList<String>> readCSVTable(Boolean isRowNames, Boolean isColNames) {
        ArrayList<ArrayList<String>> rowsAndColsAndValues = new ArrayList<ArrayList<String>>();
        rowsAndColsAndValues.add(new ArrayList<String>());   // index 0: row names
        rowsAndColsAndValues.add(new ArrayList<String>());   // index 1: column names
        List<String []> csvLines = null;   // all lines of the CSV file
        try {
            csvLines = w.readAll();
        } catch (IOException e) {
            // NOTE(review): csvLines stays null here and the code below throws
            // a NullPointerException; consider propagating the error.
            e.printStackTrace();
        }
        int numRows = csvLines.size();           // number of rows in the CSV file
        int numCols = csvLines.get(0).length;    // number of columns in the CSV file
        int startIndexRows = 0;   // if col names are provided the CSV values start in row '1'
        int startIndexCols = 0;   // if row names are provided the CSV values start in column '1'
        if (isRowNames) startIndexCols = 1;
        if (isColNames) startIndexRows = 1;
        if (isRowNames) {   // store the row names if they are given
            for (int i = startIndexRows; i < numRows; i++)
                rowsAndColsAndValues.get(0).add(csvLines.get(i)[0]);
        }
        if (isColNames) {   // store the column names if they are given
            for (int i = startIndexCols; i < numCols; i++)
                rowsAndColsAndValues.get(1).add(csvLines.get(0)[i]);
        }
        // Generate one empty array per *value* column. (Fix: the previous code
        // created 'numCols' arrays, which left a spurious empty column at the
        // end when row names are present and broke the column arithmetic of
        // the callers, causing IndexOutOfBoundsException.)
        for (int i = startIndexCols; i < numCols; i++)
            rowsAndColsAndValues.add(new ArrayList<String>());
        // Fill the value arrays, column by column, starting at array index '2'.
        for (int i = startIndexCols; i < numCols; i++)
            for (int j = startIndexRows; j < numRows; j++)
                rowsAndColsAndValues.get(i - startIndexCols + 2).add(csvLines.get(j)[i]);
        return rowsAndColsAndValues;
    }
}
| |
/*
* Copyright (c) 2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.developerstudio.eclipse.carbonserver44.util;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import javax.xml.namespace.NamespaceContext;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.Result;
import javax.xml.transform.Source;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.TransformerFactoryConfigurationError;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpression;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.core.runtime.Platform;
import org.eclipse.jst.server.generic.core.internal.GenericServer;
import org.eclipse.jst.server.generic.servertype.definition.ServerRuntime;
import org.eclipse.wst.server.core.IServer;
import org.eclipse.wst.server.core.ServerPort;
import org.eclipse.wst.server.core.model.ServerDelegate;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.wso2.developerstudio.eclipse.carbonserver.base.manager.CarbonServerManager;
import org.wso2.developerstudio.eclipse.carbonserver44.Activator;
import org.wso2.developerstudio.eclipse.carbonserver44.internal.CarbonServer44;
import org.wso2.developerstudio.eclipse.logging.core.IDeveloperStudioLog;
import org.wso2.developerstudio.eclipse.logging.core.Logger;
import org.wso2.developerstudio.eclipse.utils.file.FileUtils;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
@SuppressWarnings("restriction")
public class CarbonServer44Utils {
private static IDeveloperStudioLog log=Logger.getLog(Activator.PLUGIN_ID);
/**
 * Reads the HTTP and HTTPS servlet transport ports from {@code conf/carbon.xml}
 * under the given server home.
 *
 * @param serverHome root directory of the carbon server installation
 * @return a two-element array: [0] = HTTP port, [1] = HTTPS port; entries stay
 *         null when the file is missing or the XPath evaluation fails
 */
public static ServerPort[] getServerPorts(String serverHome){
	String transportsXml = FileUtils.addNodesToPath(serverHome, new String[]{"conf","carbon.xml"});
	XPathFactory factory = XPathFactory.newInstance();
	File xmlDocument = new File(transportsXml);
	ServerPort[] serverPorts=new ServerPort[2];
	FileInputStream httpIn = null;
	FileInputStream httpsIn = null;
	try {
		XPath xPath=factory.newXPath();
		// Read the HTTP servlet transport port.
		httpIn = new FileInputStream(xmlDocument);
		XPathExpression xPathExpression=xPath.compile("/:Server/:Ports/:ServletTransports/:HTTP");
		String evaluate = xPathExpression.evaluate(new InputSource(httpIn));
		serverPorts[0]=new ServerPort("server","",Integer.parseInt(evaluate),"http");
		// The stream is consumed by the evaluation, so open the file again
		// for the HTTPS port.
		httpsIn = new FileInputStream(xmlDocument);
		xPathExpression=xPath.compile("/:Server/:Ports/:ServletTransports/:HTTPS");
		evaluate = xPathExpression.evaluate(new InputSource(httpsIn));
		serverPorts[1]=new ServerPort("server","",Integer.parseInt(evaluate),"https");
	} catch (FileNotFoundException e) {
		log.error(e);
	} catch (XPathExpressionException e) {
		log.error(e);
	} finally {
		// Close the streams; the previous code leaked both file handles.
		if (httpIn != null) {
			try { httpIn.close(); } catch (IOException ignored) { /* best-effort close */ }
		}
		if (httpsIn != null) {
			try { httpsIn.close(); } catch (IOException ignored) { /* best-effort close */ }
		}
	}
	return serverPorts;
}
/**
 * Returns a namespace context that resolves every prefix to the carbon.xml
 * namespace URI. Reverse lookups (prefix from URI) are not supported and
 * return null.
 */
public static NamespaceContext getCarbonNamespace(){
	return new NamespaceContext() {
		public String getNamespaceURI(String prefix) {
			// Every prefix maps to the carbon configuration namespace.
			return "http://wso2.org/projects/carbon/carbon.xml";
		}
		public String getPrefix(String namespaceURI) {
			return null; // reverse lookup not supported
		}
		public Iterator getPrefixes(String namespaceURI) {
			return null; // reverse lookup not supported
		}
	};
}
/**
 * Reads the WebContextRoot element from {@code repository/conf/carbon.xml}
 * of the given server.
 *
 * @param server the carbon server instance
 * @return the web context root, the empty string for the root context ("/"),
 *         or null when the file is missing or the evaluation fails
 */
public static String getWebContextRoot(IServer server){
	String transportsXml = FileUtils.addNodesToPath(CarbonServerManager.getServerHome(server).toOSString(), new String[]{"repository","conf","carbon.xml"});
	XPathFactory factory = XPathFactory.newInstance();
	File xmlDocument = new File(transportsXml);
	String webContextRoot =null;
	FileInputStream in = null;
	try {
		in = new FileInputStream(xmlDocument);
		XPath xPath=factory.newXPath();
		// Reuse the shared carbon namespace context instead of duplicating it.
		xPath.setNamespaceContext(getCarbonNamespace());
		XPathExpression xPathExpression=xPath.compile("/:Server/:WebContextRoot");
		webContextRoot = xPathExpression.evaluate(new InputSource(in));
		// "/" denotes the root context; normalise it to the empty string.
		webContextRoot = webContextRoot.equals("/")?"":webContextRoot;
	} catch (FileNotFoundException e) {
		log.error(e);
	} catch (XPathExpressionException e) {
		log.error(e);
	} finally {
		// Close the stream; the previous code leaked the file handle.
		if (in != null) {
			try { in.close(); } catch (IOException ignored) { /* best-effort close */ }
		}
	}
	return webContextRoot;
}
/**
 * Reads the key store location and password from {@code conf/server.xml} of
 * the given server, resolves ${...} placeholders, and publishes them via the
 * standard {@code javax.net.ssl.trustStore}/{@code javax.net.ssl.trustStorePassword}
 * system properties.
 * <p>
 * NOTE(review): the values come from the KeyStore element but are applied as
 * trust store settings — confirm this is intentional.
 *
 * @param server the carbon server whose configuration is read
 */
public static void setTrustoreProperties(IServer server){
	String transportsXml = FileUtils.addNodesToPath(CarbonServerManager.getServerHome(server).toOSString(), new String[]{"conf","server.xml"});
	XPathFactory factory = XPathFactory.newInstance();
	File xmlDocument = new File(transportsXml);
	FileInputStream locationIn = null;
	FileInputStream passwordIn = null;
	try {
		XPath xPath=factory.newXPath();
		// Key store location (may contain ${...} placeholders).
		locationIn = new FileInputStream(xmlDocument);
		XPathExpression xPathExpression=xPath.compile("/Server/Security/KeyStore/Location");
		String evaluate = xPathExpression.evaluate(new InputSource(locationIn));
		String trustoreLocation = resolveProperties(server,evaluate);
		// Key store password; the stream cannot be reused, so reopen the file.
		passwordIn = new FileInputStream(xmlDocument);
		xPathExpression=xPath.compile("/Server/Security/KeyStore/Password");
		evaluate = xPathExpression.evaluate(new InputSource(passwordIn));
		String trustStorePassword=resolveProperties(server,evaluate);
		System.setProperty("javax.net.ssl.trustStore", trustoreLocation);
		System.setProperty("javax.net.ssl.trustStorePassword", trustStorePassword);
	} catch (FileNotFoundException e) {
		log.error(e);
	} catch (XPathExpressionException e) {
		log.error(e);
	} finally {
		// Close the streams; the previous code leaked both file handles.
		if (locationIn != null) {
			try { locationIn.close(); } catch (IOException ignored) { /* best-effort close */ }
		}
		if (passwordIn != null) {
			try { passwordIn.close(); } catch (IOException ignored) { /* best-effort close */ }
		}
	}
}
/**
 * Returns the ports configured for the given server instance.
 */
public static ServerPort[] getServerPorts(IServer server){
	// Delegate to the server object itself rather than parsing configuration files.
	return server.getServerPorts(new NullProgressMonitor());
}
/**
 * Reads the RepositoryLocation element from the given carbon configuration file.
 *
 * @param serverXmlPath path of the carbon configuration XML file
 * @return the repository location, or the empty string when the element is
 *         missing or the file cannot be parsed
 */
public static String getRepositoryPath(String serverXmlPath){
	DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance();
	DocumentBuilder docBuilder;
	String nodeValue="";
	try {
		docBuilder = docFactory.newDocumentBuilder();
		Document doc = docBuilder.parse(serverXmlPath);
		NodeList nodeList = doc.getElementsByTagName("RepositoryLocation");
		Node node = nodeList.item(0);
		// Guard against a missing element: the previous code threw a
		// NullPointerException when RepositoryLocation was absent.
		if (node != null && node.getFirstChild() != null) {
			nodeValue = node.getFirstChild().getNodeValue();
		}
	} catch (ParserConfigurationException e) {
		log.error(e);
	} catch (SAXException e) {
		log.error(e);
	} catch (IOException e) {
		log.error(e);
	}
	// (The dead TransformerFactoryConfigurationError catch was removed: no
	// transformer is used in this method.)
	return nodeValue;
}
/**
 * Rewrites the RepositoryLocation element of the given carbon configuration
 * file with the supplied repository path and saves the file in place.
 *
 * @param serverXmlPath path of the carbon configuration XML file
 * @param repoPath the new repository location to store
 * @param server the server instance (currently unused; kept for API compatibility)
 * @return true when the file was updated and written, false on any failure
 */
public static boolean updateAndSaveCarbonXml(String serverXmlPath, String repoPath, IServer server){
	DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance();
	DocumentBuilder docBuilder;
	try {
		docBuilder = docFactory.newDocumentBuilder();
		Document doc = docBuilder.parse(serverXmlPath);
		NodeList nodeList = doc.getElementsByTagName("RepositoryLocation");
		Node node = nodeList.item(0);
		// Guard against a missing element: the previous code threw a
		// NullPointerException when RepositoryLocation was absent.
		if (node == null || node.getFirstChild() == null) {
			return false;
		}
		node.getFirstChild().setNodeValue(repoPath);
		Transformer t = TransformerFactory.newInstance().newTransformer();
		// Make sure the parent directory exists before writing the file back.
		File confPath = new File((new File(serverXmlPath)).getParent());
		if (!confPath.exists()) confPath.mkdirs();
		Result result = new StreamResult(new File(serverXmlPath));
		Source source = new DOMSource(doc);
		t.transform(source, result);
		return true;
	} catch (ParserConfigurationException e) {
		log.error(e);
	} catch (SAXException e) {
		log.error(e);
	} catch (IOException e) {
		log.error(e);
	} catch (TransformerConfigurationException e) {
		log.error(e);
	} catch (TransformerFactoryConfigurationError e) {
		log.error(e);
	} catch (TransformerException e) {
		log.error(e);
	}
	return false;
}
/**
 * Returns the path of carbon.xml under the conf directory of the given
 * local workspace repository.
 */
public static String getServerXmlPathFromLocalWorkspaceRepo(String workspaceRepo){
	String confPath = getConfPathFromLocalWorkspaceRepo(workspaceRepo);
	return FileUtils.addNodesToPath(confPath, new String[]{"carbon.xml"});
}
/**
 * Returns the path of tomcat/catalina-server.xml under the conf directory of
 * the given local workspace repository.
 */
public static String getCatelinaXmlPathFromLocalWorkspaceRepo(String workspaceRepo){
	String confPath = getConfPathFromLocalWorkspaceRepo(workspaceRepo);
	return FileUtils.addNodesToPath(confPath, new String[]{"tomcat","catalina-server.xml"});
}
/**
 * Returns the repository/conf directory under the given local workspace
 * repository.
 */
public static String getConfPathFromLocalWorkspaceRepo(String workspaceRepo){
	String[] nodes = new String[]{"repository","conf"};
	return FileUtils.addNodesToPath(workspaceRepo, nodes);
}
/**
 * Returns the repository/deployment/server directory under the given local
 * workspace repository.
 */
public static String getRepositoryPathFromLocalWorkspaceRepo(String workspaceRepo){
	String[] nodes = new String[]{"repository","deployment","server"};
	return FileUtils.addNodesToPath(workspaceRepo, nodes);
}
/**
 * Returns the path of mgt-transports.xml under the conf directory of the
 * given local workspace repository.
 */
public static String getTransportsXmlPathFromLocalWorkspaceRepo(String workspaceRepo){
	String confPath = getConfPathFromLocalWorkspaceRepo(workspaceRepo);
	return FileUtils.addNodesToPath(confPath, new String[]{"mgt-transports.xml"});
}
/**
 * Returns the path of carbon.xml under the conf directory of the given local
 * workspace repository.
 */
public static String getCarbonXmlPathFromLocalWorkspaceRepo(String workspaceRepo){
	String confPath = getConfPathFromLocalWorkspaceRepo(workspaceRepo);
	return FileUtils.addNodesToPath(confPath, new String[]{"carbon.xml"});
}
/**
 * Returns the path of axis2/axis2.xml under the conf directory of the given
 * local workspace repository.
 */
public static String getAxis2XmlPathFromLocalWorkspaceRepo(String workspaceRepo){
	String confPath = getConfPathFromLocalWorkspaceRepo(workspaceRepo);
	return FileUtils.addNodesToPath(confPath, new String[]{"axis2","axis2.xml"});
}
/**
 * Resolves a server property: values stored in the server configuration map
 * take precedence; otherwise the property is resolved through the generic
 * server's property resolver (which expects the ${...} form).
 *
 * @param server the server whose configuration is consulted
 * @param property the property name or ${...} placeholder to resolve
 * @return the resolved value, or null when no resolver is available
 */
public static String resolveProperties(IServer server, String property){
	// Look the value up once; the previous code performed the same (possibly
	// expensive) lookup twice.
	Object configValue = CarbonServer44Utils.getServerConfigMapValue(server, property);
	if (configValue != null){
		return configValue.toString();
	}
	GenericServer gserver = (GenericServer)server.loadAdapter(ServerDelegate.class, null);
	if (gserver==null || gserver.getServerDefinition()==null || gserver.getServerDefinition().getResolver()==null) return null;
	// The generic-server resolver expects properties wrapped as ${...}.
	if (!property.startsWith("${"))
		property="${"+property+"}";
	ServerRuntime serverDefinition = gserver.getServerDefinition();
	return serverDefinition.getResolver().resolveProperties(property);
}
/**
 * Rewrites the HTTP and HTTPS transportReceiver port parameters in the given
 * axis2.xml file so that they match the ports configured for the server, and
 * saves the file in place.
 *
 * @param axis2Xml path of the axis2.xml file to update
 * @param server the server whose configured ports are applied
 * @return true when the file existed and was rewritten, false otherwise
 */
public static boolean updateAndSaveAxis2Ports(String axis2Xml,IServer server){
//loadServerInstanceProperties(server);
XPathFactory factory = XPathFactory.newInstance();
ServerPort[] serverPorts=CarbonServerManager.getInstance().getServerPorts(server);
try {
File xmlDocument = new File(axis2Xml);
if (xmlDocument.exists()) {
DocumentBuilder builder = DocumentBuilderFactory.newInstance()
.newDocumentBuilder();
Document document = builder.parse(xmlDocument);
XPath xPath = factory.newXPath();
// Locate the port parameter nodes of the http and https transport receivers.
// NOTE(review): if either element is missing these are null and the loop
// below throws a NullPointerException — confirm the files always contain them.
Node httpNode = (Node) xPath
.evaluate(
"/axisconfig/transportReceiver[@name='http']/parameter[@name='port']",
document, XPathConstants.NODE);
Node httpsNode = (Node) xPath
.evaluate(
"/axisconfig/transportReceiver[@name='https']/parameter[@name='port']",
document, XPathConstants.NODE);
for (ServerPort serverPort : serverPorts) {
ServerPort port = serverPort;
// Map the display caption of the port to its internal port id, when known.
int i = CarbonServerConstants.portCaptions
.indexOf(serverPort.getName());
if (i != -1) {
port = new ServerPort(CarbonServerConstants.portIds
.get(i), serverPort.getName(), serverPort
.getPort(), serverPort.getProtocol());
}
// Apply the configured synapse transport ports to the document.
if (port.getId().equalsIgnoreCase("synapse.transport.http"))
httpNode.setTextContent(Integer.toString(serverPort
.getPort()));
if (port.getId()
.equalsIgnoreCase("synapse.transport.https"))
httpsNode.setTextContent(Integer.toString(serverPort
.getPort()));
}
// Write the modified document back to the same file.
Transformer t = TransformerFactory.newInstance()
.newTransformer();
Result result = new StreamResult(new File(axis2Xml));
Source source = new DOMSource(document);
t.transform(source, result);
return true;
}
} catch (FileNotFoundException e) {
log.error(e);
} catch (XPathExpressionException e) {
log.error(e);
} catch (ParserConfigurationException e) {
log.error(e);
} catch (SAXException e) {
log.error(e);
} catch (IOException e) {
log.error(e);
} catch (TransformerConfigurationException e) {
log.error(e);
} catch (TransformerFactoryConfigurationError e) {
log.error(e);
} catch (TransformerException e) {
log.error(e);
}
// Reached when the file does not exist or any of the above errors occurred.
return false;
}
/**
 * Rewrites the HTTP/HTTPS connector ports in catalina-server.xml and the port
 * offset in carbon.xml so that they match the ports configured for the server,
 * then saves both files in place.
 *
 * @param carbonXml path of the carbon.xml file to update (port offset)
 * @param catelinaXml path of the catalina-server.xml file to update (connector ports)
 * @param server the server whose configured ports are applied
 * @return true when both files existed and were rewritten, false otherwise
 */
public static boolean updateAndSaveTransportsPorts(String carbonXml, String catelinaXml, IServer server){
// return true;
//loadServerInstanceProperties(server);
NamespaceContext cntx = CarbonServer44Utils.getCarbonNamespace();
XPathFactory factory = XPathFactory.newInstance();
ServerPort[] serverPorts=CarbonServerManager.getInstance().getServerPorts(server);
try {
File xmlDocument = new File(carbonXml);
File catelinaXmlDocument = new File(catelinaXml);
if (xmlDocument.exists() && catelinaXmlDocument.exists()) {
DocumentBuilder builder = DocumentBuilderFactory.newInstance()
.newDocumentBuilder();
DocumentBuilder catelinaBuilder = DocumentBuilderFactory.newInstance()
.newDocumentBuilder();
Document document = builder.parse(xmlDocument);
Document catelinaDocument = catelinaBuilder.parse(catelinaXmlDocument);
XPath xPath = factory.newXPath();
XPath catelinaXPath = factory.newXPath();
// Only carbon.xml queries are namespace-aware; catalina-server.xml has none.
xPath.setNamespaceContext(cntx);
// Locate the port attribute of the first (HTTP) connector and of the
// TLS (HTTPS) connector, plus the carbon port Offset element.
// NOTE(review): these are null when the elements are missing, and the loop
// below then throws a NullPointerException — confirm the files always contain them.
Node httpNode = (Node) catelinaXPath.evaluate(
"/Server/Service/Connector[1]/@port", catelinaDocument,
XPathConstants.NODE);
Node httpsNode = (Node) catelinaXPath.evaluate(
"/Server/Service/Connector[@sslProtocol=\"TLS\"]/@port", catelinaDocument,
XPathConstants.NODE);
Node offSet=(Node) xPath.evaluate("/Server/Ports/Offset", document,XPathConstants.NODE);
for (ServerPort serverPort : serverPorts) {
ServerPort port = serverPort;
// Map the display caption of the port to its internal port id, when known.
int i = CarbonServerConstants.portCaptions
.indexOf(serverPort.getName());
if (i != -1) {
port = new ServerPort(CarbonServerConstants.portIds
.get(i), serverPort.getName(), serverPort
.getPort(), serverPort.getProtocol());
}
if (port.getId().equalsIgnoreCase("carbon.http"))
httpNode.setTextContent(Integer.toString(serverPort
.getPort()));
if (port.getId().equalsIgnoreCase("carbon.https"))
httpsNode.setTextContent(Integer.toString(serverPort
.getPort()));
if (port.getId().equalsIgnoreCase("carbon.offset")){
int port2 = serverPort.getPort();
String strPort = Integer.toString(port2);
offSet.setTextContent(strPort);
/*offSet.setTextContent(Integer.toString(serverPort
.getPort()));*/
}
}
// Write both modified documents back to their files, creating the
// parent directories first if necessary.
Transformer t = TransformerFactory.newInstance()
.newTransformer();
Transformer t1 = TransformerFactory.newInstance()
.newTransformer();
File confPath = new File((new File(carbonXml)).getParent());
File catelinaConfPath = new File((new File(catelinaXml)).getParent());
if (!confPath.exists()){
confPath.mkdirs();
}
if (!catelinaConfPath.exists()){
catelinaConfPath.mkdirs();
}
Result result = new StreamResult(new File(carbonXml));
Result result1 = new StreamResult(new File(catelinaXml));
Source source = new DOMSource(document);
Source source1 = new DOMSource(catelinaDocument);
t.transform(source, result);
t1.transform(source1, result1);
return true;
}
} catch (FileNotFoundException e) {
log.error(e);
} catch (XPathExpressionException e) {
log.error(e);
} catch (ParserConfigurationException e) {
log.error(e);
} catch (SAXException e) {
log.error(e);
} catch (IOException e) {
log.error(e);
} catch (TransformerConfigurationException e) {
log.error(e);
} catch (TransformerFactoryConfigurationError e) {
log.error(e);
} catch (TransformerException e) {
log.error(e);
}
// Reached when either file does not exist or any of the above errors occurred.
return false;
}
/**
 * Updates the transport receiver ports in the axis2.xml of the server's local
 * workspace so that they match the configured server ports.
 */
public static boolean updateAxis2XML(IServer server){
	String workspacePath = CarbonServerManager.getServerLocalWorkspacePath(server);
	String axis2Xml = CarbonServer44Utils.getAxis2XmlPathFromLocalWorkspaceRepo(workspacePath);
	return CarbonServer44Utils.updateAndSaveAxis2Ports(axis2Xml, server);
}
/**
 * Rewrites the transport ports in the server's local-workspace carbon.xml and
 * catalina-server.xml files.
 *
 * @param server the Carbon server whose workspace configuration should be updated
 * @return {@code true} if both files were updated and saved successfully
 */
public static boolean updateTransportPorts(IServer server) {
    String workspacePath = CarbonServerManager.getServerLocalWorkspacePath(server);
    String carbonXmlPath = CarbonServer44Utils.getCarbonXmlPathFromLocalWorkspaceRepo(workspacePath);
    String catelinaXmlPath = CarbonServer44Utils.getCatelinaXmlPathFromLocalWorkspaceRepo(workspacePath);
    return CarbonServer44Utils.updateAndSaveTransportsPorts(carbonXmlPath, catelinaXmlPath, server);
}
/**
 * Reads the server's axis2.xml and reports whether its "hotupdate" parameter is
 * set to {@code true}.
 *
 * @param server the Carbon server whose axis2.xml is inspected
 * @return {@code true} only when the file exists, contains the parameter, and the
 *         parameter's text is "true" (case-insensitive); {@code false} otherwise,
 *         including on any parse/IO error (which is logged)
 */
private static boolean isHotUpdateEnabled(IServer server) {
    String axis2Xml = getAxis2FilePath(server);
    XPathFactory factory = XPathFactory.newInstance();
    try {
        File xmlDocument = new File(axis2Xml);
        if (!xmlDocument.exists()) {
            return false;
        }
        DocumentBuilder builder = DocumentBuilderFactory.newInstance()
                .newDocumentBuilder();
        Document document = builder.parse(xmlDocument);
        XPath xPath = factory.newXPath();
        Node hotUpdateNode = (Node) xPath.evaluate(
                "/axisconfig/parameter[@name='hotupdate']", document,
                XPathConstants.NODE);
        // FIX: the original dereferenced the node unconditionally and threw an
        // uncaught NullPointerException when axis2.xml has no hotupdate parameter.
        if (hotUpdateNode == null) {
            return false;
        }
        // FIX: dropped the redundant toString() on getTextContent() (already a String).
        return "true".equalsIgnoreCase(hotUpdateNode.getTextContent());
    } catch (FileNotFoundException e) {
        log.error(e);
    } catch (XPathExpressionException e) {
        log.error(e);
    } catch (ParserConfigurationException e) {
        log.error(e);
    } catch (SAXException e) {
        log.error(e);
    } catch (IOException e) {
        log.error(e);
    } catch (TransformerFactoryConfigurationError e) {
        log.error(e);
    }
    return false;
}
/**
 * Builds the absolute OS-specific path of the server's axis2.xml
 * ({@code <serverHome>/repository/conf/axis2/axis2.xml}).
 *
 * @param server the Carbon server whose home directory is used as the root
 * @return the axis2.xml path
 */
public static String getAxis2FilePath(IServer server) {
    IPath serverHome = CarbonServerManager.getServerHome(server);
    String[] nodes = new String[] { "repository", "conf", "axis2", "axis2.xml" };
    return FileUtils.addNodesToPath(serverHome.toOSString(), nodes);
}
/**
 * Records the hot-update preference in the server's config map and pushes the
 * same value into the server's axis2.xml.
 *
 * @param server the Carbon server to configure
 * @param enable {@code true} to enable hot update, {@code false} to disable it
 */
public static void setServerHotUpdate(IServer server, boolean enable) {
    setServerConfigMapValue(server, "carbon.hotupdate", enable ? "true" : "false");
    setHotUpdateEnabled(server, enable);
}
/**
 * Returns the stored "carbon.hotupdate" flag for the server, or {@code null}
 * when it was never set. As a side effect, re-synchronizes axis2.xml when its
 * on-disk value disagrees with the stored flag.
 *
 * @param server the Carbon server to query
 * @return the flag value, or {@code null} when unset
 */
public static Boolean isServerHotUpdate(IServer server) {
    String value = CarbonServer44Utils.getServerConfigMapValue(server, "carbon.hotupdate");
    if (value == null) {
        return null;
    }
    // FIX: dropped the redundant toString() call on a String.
    boolean enabled = value.equalsIgnoreCase("true");
    if (enabled != isHotUpdateEnabled(server)) {
        setHotUpdateEnabled(server, enabled);
    }
    return enabled;
}
/**
 * Writes the "hotupdate" parameter in the server's axis2.xml so it matches
 * {@code enabled}, then saves the document back in place.
 *
 * @param server  the Carbon server whose axis2.xml is edited
 * @param enabled the desired hot-update state
 * @return {@code true} when the file already matched or was updated and saved;
 *         {@code false} on any failure (logged) or when the parameter is absent
 */
private static boolean setHotUpdateEnabled(IServer server, boolean enabled) {
    // Nothing to do when the file already reflects the requested state.
    if (isHotUpdateEnabled(server) == enabled) {
        return true;
    }
    String axis2Xml = getAxis2FilePath(server);
    XPathFactory factory = XPathFactory.newInstance();
    try {
        File xmlDocument = new File(axis2Xml);
        DocumentBuilder builder =
                DocumentBuilderFactory.newInstance().newDocumentBuilder();
        Document document = builder.parse(xmlDocument);
        XPath xPath = factory.newXPath();
        Node hotUpdateNode = (Node) xPath.evaluate(
                "/axisconfig/parameter[@name='hotupdate']", document, XPathConstants.NODE);
        // FIX: the original dereferenced the node unconditionally and threw an
        // uncaught NullPointerException when axis2.xml has no hotupdate parameter.
        if (hotUpdateNode == null) {
            return false;
        }
        hotUpdateNode.setTextContent(enabled ? "true" : "false");
        Transformer t = TransformerFactory.newInstance().newTransformer();
        File confPath = new File((new File(axis2Xml)).getParent());
        if (!confPath.exists()) {
            confPath.mkdirs();
        }
        Result result = new StreamResult(new File(axis2Xml));
        Source source = new DOMSource(document);
        t.transform(source, result);
        return true;
    } catch (FileNotFoundException e) {
        log.error(e);
    } catch (XPathExpressionException e) {
        log.error(e);
    } catch (ParserConfigurationException e) {
        log.error(e);
    } catch (SAXException e) {
        log.error(e);
    } catch (IOException e) {
        log.error(e);
    } catch (TransformerConfigurationException e) {
        log.error(e);
    } catch (TransformerFactoryConfigurationError e) {
        log.error(e);
    } catch (TransformerException e) {
        log.error(e);
    }
    return false;
}
/**
 * Looks up a value in the server's instance-properties map.
 *
 * @param server the server whose configuration map is consulted
 * @param key    property key to look up
 * @return the stored value as a String, or {@code null} when the server has no
 *         GenericServer adapter, no properties map, or no entry for the key
 */
public static String getServerConfigMapValue(IServer server, String key) {
    GenericServer gserver = (GenericServer) server.getAdapter(GenericServer.class);
    if (gserver == null || gserver.getServerInstanceProperties() == null) {
        return null;
    }
    // FIX: removed the unused "loaded" local and the stale commented-out
    // loadServerInstanceProperties() call.
    Object value = gserver.getServerInstanceProperties().get(key);
    return value != null ? value.toString() : null;
}
/**
 * Records whether the server should be started with the OSGi console enabled.
 *
 * @param server      the Carbon server to configure
 * @param showConsole {@code true} to start with the OSGi console
 */
public static void setServerStartWithOSGiConsole(IServer server, boolean showConsole) {
    setServerConfigMapValue(server, "osgi.console", showConsole ? "true" : "false");
}
/**
 * Returns the stored "osgi.console" flag for the server.
 *
 * @param server the Carbon server to query
 * @return {@code true}/{@code false} per the stored value, or {@code null} when unset
 */
public static Boolean isServerStartWithOSGiConsole(IServer server) {
    String value = CarbonServer44Utils.getServerConfigMapValue(server, "osgi.console");
    // FIX: dropped the redundant toString() call on a String.
    return value == null ? null : value.equalsIgnoreCase("true");
}
/**
 * Records whether the management-console browser should pop up when the server starts.
 *
 * @param server the Carbon server to configure
 * @param popup  {@code true} to open the browser on start
 */
public static void setServerStartBrowserPopup(IServer server, boolean popup) {
    setServerConfigMapValue(server, "carbon.browser", popup ? "true" : "false");
}
/**
 * Returns the stored "carbon.browser" flag for the server.
 *
 * @param server the Carbon server to query
 * @return {@code true}/{@code false} per the stored value, or {@code null} when unset
 */
public static Boolean isServerStartBrowserPopup(IServer server) {
    String value = CarbonServer44Utils.getServerConfigMapValue(server, "carbon.browser");
    // FIX: dropped the redundant toString() call on a String.
    return value == null ? null : value.equalsIgnoreCase("true");
}
/**
 * Stores a key/value pair in the server's instance-properties map, notifies the
 * generic server that its configuration changed, and persists the map to disk.
 * Does nothing when the server has no GenericServer adapter.
 *
 * @param server the Carbon server to configure
 * @param key    property key
 * @param value  property value
 */
@SuppressWarnings("unchecked")
public static void setServerConfigMapValue(IServer server, String key, String value) {
    GenericServer gserver = (GenericServer) server.getAdapter(GenericServer.class);
    if (gserver != null) {
        // FIX: removed the large block of stale commented-out save/resolver code.
        Map<String, String> serverInstanceProperties = gserver.getServerInstanceProperties();
        serverInstanceProperties.put(key, value);
        gserver.configurationChanged();
        saveServerInstanceProperties(server);
    }
}
/**
 * Loads the persisted instance-properties map from
 * {@code <workspace>/repository/conf/config} and merges it into the server's
 * in-memory properties. Missing files are silently skipped; IO and class errors
 * are logged and swallowed.
 * <p>
 * NOTE(review): this uses Java native deserialization on a file inside the
 * server's own workspace; presumably trusted, but verify no untrusted party can
 * write that path.
 *
 * @param server the Carbon server whose properties are loaded
 */
@SuppressWarnings("unchecked")
private static void loadServerInstanceProperties(IServer server) {
    GenericServer gserver = (GenericServer) server.getAdapter(GenericServer.class);
    if (gserver == null) {
        return;
    }
    FileInputStream fileIn = null;
    ObjectInputStream objIn = null;
    try {
        String serverLocalWorkspacePath = CarbonServerManager.getServerLocalWorkspacePath(server);
        String objConfigPath = FileUtils.addNodesToPath(serverLocalWorkspacePath,
                new String[] { "repository", "conf", "config" });
        if (new File(objConfigPath).exists()) {
            fileIn = new FileInputStream(objConfigPath);
            objIn = new ObjectInputStream(fileIn);
            Map<String, String> properties = (Map<String, String>) objIn.readObject();
            gserver.getServerInstanceProperties().putAll(properties);
        }
    } catch (IOException e) {
        log.error(e);
    } catch (ClassNotFoundException e) {
        log.error(e);
    } finally {
        if (objIn != null) {
            // Closing the ObjectInputStream also closes the underlying FileInputStream.
            try {
                objIn.close();
            } catch (IOException e) {
                log.error("Error while closing stream", e);
            }
        } else if (fileIn != null) {
            // FIX: the original leaked the FileInputStream when the
            // ObjectInputStream constructor threw.
            try {
                fileIn.close();
            } catch (IOException e) {
                log.error("Error while closing stream", e);
            }
        }
    }
}
/**
 * Serializes the server's instance-properties map to
 * {@code <workspace>/repository/conf/config}. IO errors are logged and swallowed.
 *
 * @param server the Carbon server whose properties are persisted
 */
private static void saveServerInstanceProperties(IServer server) {
    GenericServer gserver = (GenericServer) server.getAdapter(GenericServer.class);
    if (gserver == null) {
        return;
    }
    FileOutputStream fileOut = null;
    ObjectOutputStream objOut = null;
    try {
        String serverLocalWorkspacePath = CarbonServerManager.getServerLocalWorkspacePath(server);
        String objConfigPath = FileUtils.addNodesToPath(serverLocalWorkspacePath,
                new String[] { "repository", "conf", "config" });
        fileOut = new FileOutputStream(objConfigPath);
        objOut = new ObjectOutputStream(fileOut);
        objOut.writeObject(gserver.getServerInstanceProperties());
        objOut.flush();
    } catch (IOException e) {
        log.error(e);
    } finally {
        // FIX: the original closed the FileOutputStream BEFORE the
        // ObjectOutputStream, so the ObjectOutputStream's buffered data could
        // never be flushed (close-on-closed-stream fails) and the saved file
        // could be truncated. Close the outermost stream first; it flushes and
        // closes the underlying file stream.
        if (objOut != null) {
            try {
                objOut.close();
            } catch (IOException e) {
                log.error("Error while closing stream", e);
            }
        } else if (fileOut != null) {
            // Only needed when the ObjectOutputStream constructor itself threw.
            try {
                fileOut.close();
            } catch (IOException e) {
                log.error("Error while closing stream", e);
            }
        }
    }
}
/**
 * Returns the server's creation timestamp (epoch millis rendered as a String),
 * generating and persisting one on first access.
 *
 * @param server the Carbon server to query
 * @return the timestamp string, never {@code null}
 */
public static String getServerTimestamp(IServer server) {
    final String key = "carbon.timestamp";
    String timestamp = getServerConfigMapValue(server, key);
    if (timestamp == null) {
        timestamp = String.valueOf(new Date().getTime());
        setServerConfigMapValue(server, key, timestamp);
    }
    return timestamp;
}
/**
 * Builds the per-server registry root path, namespaced by the server's
 * creation timestamp (e.g. {@code /eclipse/server_1234567890}).
 *
 * @param server the Carbon server to query
 * @return the registry root path
 */
public static String getRegistryRoot(IServer server) {
    return "/eclipse/server_" + getServerTimestamp(server);
}
/**
 * Returns the stored admin credentials for the server, seeding both username
 * and password with the default {@code "admin"} (and persisting them) when
 * either has never been set.
 *
 * @param server the Carbon server to query
 * @return a map keyed by {@code ESB_USERNAME} and {@code ESB_PASSWORD}
 */
public static Map<String, String> getServerCredentials(IServer server) {
    Map<String, String> credentials = new HashMap<String, String>();

    String username = getServerConfigMapValue(server, CarbonServerConstants.ESB_USERNAME);
    if (username == null) {
        username = "admin";
        setServerConfigMapValue(server, CarbonServerConstants.ESB_USERNAME, username);
    }

    String password = getServerConfigMapValue(server, CarbonServerConstants.ESB_PASSWORD);
    if (password == null) {
        password = "admin";
        setServerConfigMapValue(server, CarbonServerConstants.ESB_PASSWORD, password);
    }

    credentials.put(CarbonServerConstants.ESB_USERNAME, username);
    credentials.put(CarbonServerConstants.ESB_PASSWORD, password);
    return credentials;
}
/**
 * Builds the management-console URL ({@code https://localhost:<port>}) for the
 * server, applying the configured Carbon port offset to the HTTPS console port.
 *
 * @param server the Carbon server to query
 * @return the console URL, or {@code null} if it could not be built
 */
public static URL getServerURL(IServer server) {
    // FIX: removed the unused CarbonServer44 adapter lookup the original performed.
    ServerPort[] serverPorts = getServerPorts(server);
    int httpsPort = 9443; // default Carbon console port, used when no port entry matches
    int offset = 0;
    for (ServerPort serverPort : serverPorts) {
        if (serverPort.getName().equals("Carbon web console port (HTTPS)")
                && serverPort.getProtocol().equals("https")) {
            httpsPort = serverPort.getPort();
        } else if (serverPort.getName().equals("Carbon Server Offset")) {
            offset = serverPort.getPort();
        }
    }
    try {
        return new URL("https://localhost:" + (httpsPort + offset));
    } catch (MalformedURLException e) {
        // FIX: route through the class logger instead of printStackTrace().
        log.error(e);
    }
    return null;
}
/**
 * Obtains an admin session cookie for the given server.
 * <p>
 * NOTE(review): the AuthenticationAdmin-based login was removed (it only
 * survived as commented-out code); this currently always returns an empty
 * cookie. Behavior is unchanged from the original.
 *
 * @param server    the Carbon server to authenticate against (currently unused)
 * @param httpsPort the HTTPS port to connect to (currently unused)
 * @return the session cookie; currently always the empty string
 * @throws Exception reserved for the future login implementation
 */
public static String getServerCookie(IServer server, String httpsPort) throws Exception {
    // TODO: authenticate via the server's AuthenticationAdmin service
    // (login with the credentials from getServerCredentials) and return the
    // real session cookie.
    String sessionCookie = "";
    return sessionCookie;
}
/**
 * Stores the admin username used to connect to this Carbon server.
 *
 * @param server   the Carbon server to configure
 * @param username the admin username to store
 */
public static void setServerUsername(IServer server, String username) {
    setServerConfigMapValue(server, CarbonServerConstants.ESB_USERNAME, username);
}
/**
 * Stores the admin password used to connect to this Carbon server.
 *
 * @param server   the Carbon server to configure
 * @param password the admin password to store
 */
public static void setServerPassword(IServer server, String password) {
    setServerConfigMapValue(server, CarbonServerConstants.ESB_PASSWORD, password);
}
/**
 * Maps a port's display name to its internal port id, defaulting to
 * {@code "carbon.https"} when the name matches none of the known descriptions.
 * Matching is case-insensitive; when several descriptions match, the last
 * match wins (preserves original behavior).
 *
 * @param name the port display name
 * @return the corresponding port id
 */
public static String getPortId(String name) {
    String id = "carbon.https"; // default when no description matches
    if (name.equalsIgnoreCase(CarbonServerConstants.ESB_CONSOLE_HTTPS_DESC)) {
        id = CarbonServerConstants.ESB_CONSOLE_HTTPS;
    }
    if (name.equalsIgnoreCase(CarbonServerConstants.ESB_TRANSPORT_HTTP_DESC)) {
        id = CarbonServerConstants.ESB_TRANSPORT_HTTP;
    }
    if (name.equalsIgnoreCase(CarbonServerConstants.ESB_TRANSPORT_HTTPS_DESC)) {
        id = CarbonServerConstants.ESB_TRANSPORT_HTTPS;
    }
    return id;
}
// private static void setEasySSLProtocolSocketFactory(int port){
// Protocol.unregisterProtocol("https");
// try {
// Protocol.registerProtocol("https", new Protocol("https", new EasySSLProtocolSocketFactory(), port));
// } catch (GeneralSecurityException e2) {
// e2.printStackTrace();
// } catch (IOException e2) {
// e2.printStackTrace();
// }
// }
/**
 * Extracts the bundled capp-monitor jar from this plug-in into the plug-in's
 * state location and returns it, reusing a previously extracted copy when present.
 *
 * @return the extracted jar file, or {@code null} when the copy failed (logged)
 */
public static File getCappMonitorBundle() {
    // FIX: bundle resource paths always use '/' regardless of platform; the
    // original used File.separator both here and as a split() regex, which
    // breaks on Windows ("\\" is an invalid regex and the resource lookup
    // would use backslashes).
    URL resource = Platform.getBundle(Activator.PLUGIN_ID)
            .getResource("lib/org.wso2.carbon.capp.monitor-3.0.0.jar");
    IPath libFolder = Activator.getDefault().getStateLocation().append("lib");
    String resourceFile = resource.getFile();
    // URL paths are always '/'-separated; take the last segment as the jar name.
    String jarName = resourceFile.substring(resourceFile.lastIndexOf('/') + 1);
    File libraryFile = new File(libFolder.append(jarName).toOSString());
    if (libraryFile.exists()) {
        return libraryFile;
    }
    try {
        writeToFile(libraryFile, resource.openStream());
    } catch (IOException e) {
        log.error(e);
        return null;
    }
    return libraryFile;
}
/**
 * Copies the given stream into {@code file}, creating parent directories as
 * needed, and closes both the output file and the input stream.
 *
 * @param file   destination file (parent directories are created)
 * @param stream source stream; always closed on return
 * @throws IOException if reading or writing fails
 */
private static void writeToFile(File file, InputStream stream) throws IOException {
    file.getParentFile().mkdirs();
    OutputStream out = new FileOutputStream(file);
    try {
        byte[] buf = new byte[1024];
        int len;
        while ((len = stream.read(buf)) > 0) {
            out.write(buf, 0, len);
        }
    } finally {
        // FIX: the original leaked both streams when read/write threw; close
        // them unconditionally, and close the input even if closing the output fails.
        try {
            out.close();
        } finally {
            stream.close();
        }
    }
}
/**
 * Returns the Carbon server version string configured for this adapter.
 *
 * @return the serverVersion constant, {@code CarbonServerConstants.CARBON_VERSION}
 */
public static String getServerVersion() {
    return CarbonServerConstants.CARBON_VERSION;
}
}
| |
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.ads.googleads.v9.services.stub;
import com.google.ads.googleads.v9.resources.AdGroupAsset;
import com.google.ads.googleads.v9.services.GetAdGroupAssetRequest;
import com.google.ads.googleads.v9.services.MutateAdGroupAssetsRequest;
import com.google.ads.googleads.v9.services.MutateAdGroupAssetsResponse;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.grpc.GrpcCallSettings;
import com.google.api.gax.grpc.GrpcStubCallableFactory;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.common.collect.ImmutableMap;
import com.google.longrunning.stub.GrpcOperationsStub;
import io.grpc.MethodDescriptor;
import io.grpc.protobuf.ProtoUtils;
import java.io.IOException;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* gRPC stub implementation for the AdGroupAssetService service API.
*
* <p>This class is for advanced usage and reflects the underlying API directly.
*/
@Generated("by gapic-generator-java")
public class GrpcAdGroupAssetServiceStub extends AdGroupAssetServiceStub {

  // gRPC method descriptor for the unary GetAdGroupAsset RPC; request and
  // response are marshalled as protobuf messages.
  private static final MethodDescriptor<GetAdGroupAssetRequest, AdGroupAsset>
      getAdGroupAssetMethodDescriptor =
          MethodDescriptor.<GetAdGroupAssetRequest, AdGroupAsset>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.ads.googleads.v9.services.AdGroupAssetService/GetAdGroupAsset")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(GetAdGroupAssetRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(AdGroupAsset.getDefaultInstance()))
              .build();

  // gRPC method descriptor for the unary MutateAdGroupAssets RPC.
  private static final MethodDescriptor<MutateAdGroupAssetsRequest, MutateAdGroupAssetsResponse>
      mutateAdGroupAssetsMethodDescriptor =
          MethodDescriptor.<MutateAdGroupAssetsRequest, MutateAdGroupAssetsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.ads.googleads.v9.services.AdGroupAssetService/MutateAdGroupAssets")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(MutateAdGroupAssetsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(MutateAdGroupAssetsResponse.getDefaultInstance()))
              .build();

  // Per-RPC callables, wired up once in the constructor.
  private final UnaryCallable<GetAdGroupAssetRequest, AdGroupAsset> getAdGroupAssetCallable;
  private final UnaryCallable<MutateAdGroupAssetsRequest, MutateAdGroupAssetsResponse>
      mutateAdGroupAssetsCallable;

  // Aggregates every resource that must be released when this stub is closed.
  private final BackgroundResource backgroundResources;
  private final GrpcOperationsStub operationsStub;
  private final GrpcStubCallableFactory callableFactory;

  // Creates a stub from the given settings, building a fresh ClientContext.
  public static final GrpcAdGroupAssetServiceStub create(AdGroupAssetServiceStubSettings settings)
      throws IOException {
    return new GrpcAdGroupAssetServiceStub(settings, ClientContext.create(settings));
  }

  // Creates a stub with default settings over an existing ClientContext.
  public static final GrpcAdGroupAssetServiceStub create(ClientContext clientContext)
      throws IOException {
    return new GrpcAdGroupAssetServiceStub(
        AdGroupAssetServiceStubSettings.newBuilder().build(), clientContext);
  }

  // Creates a stub with default settings, an existing context, and a custom
  // callable factory.
  public static final GrpcAdGroupAssetServiceStub create(
      ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException {
    return new GrpcAdGroupAssetServiceStub(
        AdGroupAssetServiceStubSettings.newBuilder().build(), clientContext, callableFactory);
  }

  /**
   * Constructs an instance of GrpcAdGroupAssetServiceStub, using the given settings. This is
   * protected so that it is easy to make a subclass, but otherwise, the static factory methods
   * should be preferred.
   */
  protected GrpcAdGroupAssetServiceStub(
      AdGroupAssetServiceStubSettings settings, ClientContext clientContext) throws IOException {
    this(settings, clientContext, new GrpcAdGroupAssetServiceCallableFactory());
  }

  /**
   * Constructs an instance of GrpcAdGroupAssetServiceStub, using the given settings. This is
   * protected so that it is easy to make a subclass, but otherwise, the static factory methods
   * should be preferred.
   */
  protected GrpcAdGroupAssetServiceStub(
      AdGroupAssetServiceStubSettings settings,
      ClientContext clientContext,
      GrpcStubCallableFactory callableFactory)
      throws IOException {
    this.callableFactory = callableFactory;
    this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory);

    // Transport settings pair each method descriptor with a params extractor;
    // the extracted values become request routing headers.
    GrpcCallSettings<GetAdGroupAssetRequest, AdGroupAsset> getAdGroupAssetTransportSettings =
        GrpcCallSettings.<GetAdGroupAssetRequest, AdGroupAsset>newBuilder()
            .setMethodDescriptor(getAdGroupAssetMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                  params.put("resource_name", String.valueOf(request.getResourceName()));
                  return params.build();
                })
            .build();
    GrpcCallSettings<MutateAdGroupAssetsRequest, MutateAdGroupAssetsResponse>
        mutateAdGroupAssetsTransportSettings =
            GrpcCallSettings.<MutateAdGroupAssetsRequest, MutateAdGroupAssetsResponse>newBuilder()
                .setMethodDescriptor(mutateAdGroupAssetsMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                      params.put("customer_id", String.valueOf(request.getCustomerId()));
                      return params.build();
                    })
                .build();

    // Combine transport settings with the retry/timeout settings from the stub
    // settings to produce the public callables.
    this.getAdGroupAssetCallable =
        callableFactory.createUnaryCallable(
            getAdGroupAssetTransportSettings, settings.getAdGroupAssetSettings(), clientContext);
    this.mutateAdGroupAssetsCallable =
        callableFactory.createUnaryCallable(
            mutateAdGroupAssetsTransportSettings,
            settings.mutateAdGroupAssetsSettings(),
            clientContext);

    this.backgroundResources =
        new BackgroundResourceAggregation(clientContext.getBackgroundResources());
  }

  // Returns the long-running-operations stub that shares this stub's channel
  // and callable factory.
  public GrpcOperationsStub getOperationsStub() {
    return operationsStub;
  }

  @Override
  public UnaryCallable<GetAdGroupAssetRequest, AdGroupAsset> getAdGroupAssetCallable() {
    return getAdGroupAssetCallable;
  }

  @Override
  public UnaryCallable<MutateAdGroupAssetsRequest, MutateAdGroupAssetsResponse>
      mutateAdGroupAssetsCallable() {
    return mutateAdGroupAssetsCallable;
  }

  @Override
  public final void close() {
    try {
      backgroundResources.close();
    } catch (RuntimeException e) {
      throw e;
    } catch (Exception e) {
      // Wrap checked close failures; callers treat close() as unchecked.
      throw new IllegalStateException("Failed to close resource", e);
    }
  }

  @Override
  public void shutdown() {
    backgroundResources.shutdown();
  }

  @Override
  public boolean isShutdown() {
    return backgroundResources.isShutdown();
  }

  @Override
  public boolean isTerminated() {
    return backgroundResources.isTerminated();
  }

  @Override
  public void shutdownNow() {
    backgroundResources.shutdownNow();
  }

  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return backgroundResources.awaitTermination(duration, unit);
  }
}
| |
import javax.swing.*;
import java.awt.*;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
/**
* @author Konstantin Bulenkov
*/
/**
 * Swing implementation of the 2048 sliding-tile game on a fixed 4x4 board.
 * Arrow keys slide and merge tiles; ESC restarts; reaching a 2048 tile wins.
 *
 * @author Konstantin Bulenkov
 */
public class Game2048 extends JPanel {
    private static final Color BG_COLOR = new Color(0xbbada0);
    private static final String FONT_NAME = "Arial";
    private static final int TILE_SIZE = 64;
    private static final int TILES_MARGIN = 16;

    /** Board tiles in row-major order: index = x + y * 4. */
    private Tile[] myTiles;
    boolean myWin = false;
    boolean myLose = false;
    int myScore = 0;

    public Game2048() {
        setFocusable(true);
        addKeyListener(new KeyAdapter() {
            @Override
            public void keyPressed(KeyEvent e) {
                if (e.getKeyCode() == KeyEvent.VK_ESCAPE) {
                    resetGame();
                }
                if (!canMove()) {
                    myLose = true;
                }
                if (!myWin && !myLose) {
                    switch (e.getKeyCode()) {
                        case KeyEvent.VK_LEFT:
                            left();
                            break;
                        case KeyEvent.VK_RIGHT:
                            right();
                            break;
                        case KeyEvent.VK_DOWN:
                            down();
                            break;
                        case KeyEvent.VK_UP:
                            up();
                            break;
                    }
                }
                // The move may have filled the board with no merges left.
                if (!myWin && !canMove()) {
                    myLose = true;
                }
                repaint();
            }
        });
        resetGame();
    }

    /** Clears score and state and starts a new board with two random tiles. */
    public void resetGame() {
        myScore = 0;
        myWin = false;
        myLose = false;
        myTiles = new Tile[4 * 4];
        for (int i = 0; i < myTiles.length; i++) {
            myTiles[i] = new Tile();
        }
        addTile();
        addTile();
    }

    /**
     * Slides every row to the left, merging equal neighbors; a random tile is
     * added only when the move actually changed the board. The other three
     * directions are implemented by rotating the board and reusing this method.
     */
    public void left() {
        boolean needAddTile = false;
        for (int i = 0; i < 4; i++) {
            Tile[] line = getLine(i);
            Tile[] merged = mergeLine(moveLine(line));
            setLine(i, merged);
            if (!needAddTile && !compare(line, merged)) {
                needAddTile = true;
            }
        }
        if (needAddTile) {
            addTile();
        }
    }

    public void right() {
        myTiles = rotate(180);
        left();
        myTiles = rotate(180);
    }

    public void up() {
        myTiles = rotate(270);
        left();
        myTiles = rotate(90);
    }

    public void down() {
        myTiles = rotate(90);
        left();
        myTiles = rotate(270);
    }

    private Tile tileAt(int x, int y) {
        return myTiles[x + y * 4];
    }

    /** Places a 2 (90%) or 4 (10%) on a random empty cell, if one exists. */
    private void addTile() {
        List<Tile> list = availableSpace();
        // FIX: the original called availableSpace() a second time for the
        // emptiness check, scanning the board twice; reuse the list.
        if (!list.isEmpty()) {
            int index = (int) (Math.random() * list.size()) % list.size();
            Tile emptyTile = list.get(index); // FIX: renamed typo local "emptyTime"
            emptyTile.value = Math.random() < 0.9 ? 2 : 4;
        }
    }

    /** Returns the (possibly empty) list of empty tiles on the board. */
    private List<Tile> availableSpace() {
        final List<Tile> list = new ArrayList<Tile>(16);
        for (Tile t : myTiles) {
            if (t.isEmpty()) {
                list.add(t);
            }
        }
        return list;
    }

    private boolean isFull() {
        // FIX: isEmpty() instead of size() == 0.
        return availableSpace().isEmpty();
    }

    /** A move exists when the board has an empty cell or two equal neighbors. */
    boolean canMove() {
        if (!isFull()) {
            return true;
        }
        for (int x = 0; x < 4; x++) {
            for (int y = 0; y < 4; y++) {
                Tile t = tileAt(x, y);
                if ((x < 3 && t.value == tileAt(x + 1, y).value)
                        || ((y < 3) && t.value == tileAt(x, y + 1).value)) {
                    return true;
                }
            }
        }
        return false;
    }

    /** Value-wise equality of two lines (used to detect whether a move changed anything). */
    private boolean compare(Tile[] line1, Tile[] line2) {
        if (line1 == line2) {
            return true;
        } else if (line1.length != line2.length) {
            return false;
        }
        for (int i = 0; i < line1.length; i++) {
            if (line1[i].value != line2[i].value) {
                return false;
            }
        }
        return true;
    }

    /**
     * Returns a copy of the board rotated by {@code angle} degrees (90/180/270),
     * computed with the integer rotation matrix plus an offset that keeps
     * coordinates in 0..3.
     */
    private Tile[] rotate(int angle) {
        Tile[] newTiles = new Tile[4 * 4];
        int offsetX = 3, offsetY = 3;
        if (angle == 90) {
            offsetY = 0;
        } else if (angle == 270) {
            offsetX = 0;
        }
        double rad = Math.toRadians(angle);
        int cos = (int) Math.cos(rad);
        int sin = (int) Math.sin(rad);
        for (int x = 0; x < 4; x++) {
            for (int y = 0; y < 4; y++) {
                int newX = (x * cos) - (y * sin) + offsetX;
                int newY = (x * sin) + (y * cos) + offsetY;
                newTiles[(newX) + (newY) * 4] = tileAt(x, y);
            }
        }
        return newTiles;
    }

    /** Compacts a line's non-empty tiles to the left, padding with empty tiles. */
    private Tile[] moveLine(Tile[] oldLine) {
        LinkedList<Tile> l = new LinkedList<Tile>();
        for (int i = 0; i < 4; i++) {
            if (!oldLine[i].isEmpty())
                l.addLast(oldLine[i]);
        }
        // FIX: isEmpty() instead of size() == 0.
        if (l.isEmpty()) {
            return oldLine;
        } else {
            Tile[] newLine = new Tile[4];
            ensureSize(l, 4);
            for (int i = 0; i < 4; i++) {
                newLine[i] = l.removeFirst();
            }
            return newLine;
        }
    }

    /**
     * Merges adjacent equal tiles of an already-compacted line (left to right),
     * updating the score and the win flag when a 2048 tile is produced.
     */
    private Tile[] mergeLine(Tile[] oldLine) {
        LinkedList<Tile> list = new LinkedList<Tile>();
        for (int i = 0; i < 4 && !oldLine[i].isEmpty(); i++) {
            int num = oldLine[i].value;
            if (i < 3 && oldLine[i].value == oldLine[i + 1].value) {
                num *= 2;
                myScore += num;
                int ourTarget = 2048;
                if (num == ourTarget) {
                    myWin = true;
                }
                i++; // skip the tile that was merged into this one
            }
            list.add(new Tile(num));
        }
        // FIX: isEmpty() instead of size() == 0.
        if (list.isEmpty()) {
            return oldLine;
        } else {
            ensureSize(list, 4);
            return list.toArray(new Tile[4]);
        }
    }

    /** Pads the list with empty tiles up to size {@code s}. */
    private static void ensureSize(java.util.List<Tile> l, int s) {
        while (l.size() != s) {
            l.add(new Tile());
        }
    }

    private Tile[] getLine(int index) {
        Tile[] result = new Tile[4];
        for (int i = 0; i < 4; i++) {
            result[i] = tileAt(i, index);
        }
        return result;
    }

    private void setLine(int index, Tile[] re) {
        System.arraycopy(re, 0, myTiles, index * 4, 4);
    }

    @Override
    public void paint(Graphics g) {
        super.paint(g);
        g.setColor(BG_COLOR);
        g.fillRect(0, 0, this.getSize().width, this.getSize().height);
        for (int y = 0; y < 4; y++) {
            for (int x = 0; x < 4; x++) {
                drawTile(g, myTiles[x + y * 4], x, y);
            }
        }
    }

    private void drawTile(Graphics g2, Tile tile, int x, int y) {
        Graphics2D g = ((Graphics2D) g2);
        g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
        g.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, RenderingHints.VALUE_STROKE_NORMALIZE);
        int value = tile.value;
        int xOffset = offsetCoors(x);
        int yOffset = offsetCoors(y);
        g.setColor(tile.getBackground());
        g.fillRoundRect(xOffset, yOffset, TILE_SIZE, TILE_SIZE, 14, 14);
        g.setColor(tile.getForeground());
        final int size = value < 100 ? 36 : value < 1000 ? 32 : 24;
        final Font font = new Font(FONT_NAME, Font.BOLD, size);
        g.setFont(font);
        String s = String.valueOf(value);
        final FontMetrics fm = getFontMetrics(font);
        final int w = fm.stringWidth(s);
        final int h = -(int) fm.getLineMetrics(s, g).getBaselineOffsets()[2];
        if (value != 0)
            g.drawString(s, xOffset + (TILE_SIZE - w) / 2, yOffset + TILE_SIZE - (TILE_SIZE - h) / 2 - 2);
        // NOTE(review): the win/lose overlay and the score are deliberately left
        // inside drawTile (drawn once per tile, 16x per repaint) because the
        // translucent white fill accumulates over the passes and produces the
        // original game's whitened look; hoisting it would change the rendering.
        if (myWin || myLose) {
            g.setColor(new Color(255, 255, 255, 30));
            g.fillRect(0, 0, getWidth(), getHeight());
            g.setColor(new Color(78, 139, 202));
            g.setFont(new Font(FONT_NAME, Font.BOLD, 48));
            if (myWin) {
                g.drawString("You won!", 68, 150);
            }
            if (myLose) {
                g.drawString("Game over!", 50, 130);
                g.drawString("You lose!", 64, 200);
            }
            if (myWin || myLose) {
                g.setFont(new Font(FONT_NAME, Font.PLAIN, 16));
                g.setColor(new Color(128, 128, 128, 128));
                g.drawString("Press ESC to play again", 80, getHeight() - 40);
            }
        }
        g.setFont(new Font(FONT_NAME, Font.PLAIN, 18));
        g.drawString("Score: " + myScore, 200, 365);
    }

    /** Converts a board coordinate (0..3) to a pixel offset. */
    private static int offsetCoors(int arg) {
        return arg * (TILES_MARGIN + TILE_SIZE) + TILES_MARGIN;
    }

    /** One board cell; value 0 means empty. Colors follow the classic 2048 palette. */
    static class Tile {
        int value;

        public Tile() {
            this(0);
        }

        public Tile(int num) {
            value = num;
        }

        public boolean isEmpty() {
            return value == 0;
        }

        public Color getForeground() {
            return value < 16 ? new Color(0x776e65) : new Color(0xf9f6f2);
        }

        public Color getBackground() {
            switch (value) {
                case 2: return new Color(0xeee4da);
                case 4: return new Color(0xede0c8);
                case 8: return new Color(0xf2b179);
                case 16: return new Color(0xf59563);
                case 32: return new Color(0xf67c5f);
                case 64: return new Color(0xf65e3b);
                case 128: return new Color(0xedcf72);
                case 256: return new Color(0xedcc61);
                case 512: return new Color(0xedc850);
                case 1024: return new Color(0xedc53f);
                case 2048: return new Color(0xedc22e);
            }
            return new Color(0xcdc1b4);
        }
    }

    public static void main(String[] args) {
        // FIX: build and show the Swing UI on the Event Dispatch Thread; the
        // original constructed the frame directly on the main thread.
        SwingUtilities.invokeLater(new Runnable() {
            public void run() {
                JFrame game = new JFrame();
                game.setTitle("2048 Game");
                game.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
                game.setSize(340, 400);
                game.setResizable(false);
                game.add(new Game2048());
                game.setLocationRelativeTo(null);
                game.setVisible(true);
            }
        });
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.