gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*
 * Copyright 2017 IBM Corp. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package com.ibm.watson.developer_cloud.discovery.v1.model;

import java.util.ArrayList;
import java.util.List;

import com.ibm.watson.developer_cloud.service.model.GenericModel;
import com.ibm.watson.developer_cloud.util.Validator;

/**
 * The createConfiguration options.
 */
public class CreateConfigurationOptions extends GenericModel {

  /** The ID of your environment. */
  private String environmentId;
  /** The document conversion settings for the configuration. */
  private Conversions conversions;
  /** The name of the configuration. */
  private String name;
  /** The description of the configuration, if available. */
  private String description;
  /**
   * Defines operations that can be used to transform the final output JSON into a normalized form. Operations are
   * executed in the order that they appear in the array.
   */
  private List<NormalizationOperation> normalizations;
  /** An array of document enrichment settings for the configuration. */
  private List<Enrichment> enrichments;

  /**
   * Builder.
   */
  public static class Builder {
    private String environmentId;
    private Conversions conversions;
    private String name;
    private String description;
    private List<NormalizationOperation> normalizations;
    private List<Enrichment> enrichments;

    private Builder(CreateConfigurationOptions createConfigurationOptions) {
      environmentId = createConfigurationOptions.environmentId;
      conversions = createConfigurationOptions.conversions;
      name = createConfigurationOptions.name;
      description = createConfigurationOptions.description;
      // Defensive copies: without them, addNormalization()/addEnrichment() on a builder obtained via
      // newBuilder() would mutate the lists held by the source CreateConfigurationOptions instance.
      normalizations = (createConfigurationOptions.normalizations == null)
          ? null : new ArrayList<NormalizationOperation>(createConfigurationOptions.normalizations);
      enrichments = (createConfigurationOptions.enrichments == null)
          ? null : new ArrayList<Enrichment>(createConfigurationOptions.enrichments);
    }

    /**
     * Instantiates a new builder.
     */
    public Builder() {
    }

    /**
     * Instantiates a new builder with required properties.
     *
     * @param environmentId the environmentId
     */
    public Builder(String environmentId) {
      this.environmentId = environmentId;
    }

    /**
     * Builds a CreateConfigurationOptions.
     *
     * @return the createConfigurationOptions
     */
    public CreateConfigurationOptions build() {
      return new CreateConfigurationOptions(this);
    }

    /**
     * Adds a normalization to normalizations.
     *
     * @param normalization the new normalization
     * @return the CreateConfigurationOptions builder
     */
    public Builder addNormalization(NormalizationOperation normalization) {
      Validator.notNull(normalization, "normalization cannot be null");
      if (this.normalizations == null) {
        this.normalizations = new ArrayList<NormalizationOperation>();
      }
      this.normalizations.add(normalization);
      return this;
    }

    /**
     * Adds an enrichment to enrichments.
     *
     * @param enrichment the new enrichment
     * @return the CreateConfigurationOptions builder
     */
    public Builder addEnrichment(Enrichment enrichment) {
      Validator.notNull(enrichment, "enrichment cannot be null");
      if (this.enrichments == null) {
        this.enrichments = new ArrayList<Enrichment>();
      }
      this.enrichments.add(enrichment);
      return this;
    }

    /**
     * Set the environmentId.
     *
     * @param environmentId the environmentId
     * @return the CreateConfigurationOptions builder
     */
    public Builder environmentId(String environmentId) {
      this.environmentId = environmentId;
      return this;
    }

    /**
     * Set the conversions.
     *
     * @param conversions the conversions
     * @return the CreateConfigurationOptions builder
     */
    public Builder conversions(Conversions conversions) {
      this.conversions = conversions;
      return this;
    }

    /**
     * Set the name.
     *
     * @param name the name
     * @return the CreateConfigurationOptions builder
     */
    public Builder name(String name) {
      this.name = name;
      return this;
    }

    /**
     * Set the description.
     *
     * @param description the description
     * @return the CreateConfigurationOptions builder
     */
    public Builder description(String description) {
      this.description = description;
      return this;
    }

    /**
     * Set the normalizations.
     * Existing normalizations will be replaced.
     *
     * @param normalizations the normalizations
     * @return the CreateConfigurationOptions builder
     */
    public Builder normalizations(List<NormalizationOperation> normalizations) {
      this.normalizations = normalizations;
      return this;
    }

    /**
     * Set the enrichments.
     * Existing enrichments will be replaced.
     *
     * @param enrichments the enrichments
     * @return the CreateConfigurationOptions builder
     */
    public Builder enrichments(List<Enrichment> enrichments) {
      this.enrichments = enrichments;
      return this;
    }

    /**
     * Set the configuration.
     * Copies the conversions, name, description, normalizations and enrichments from an existing
     * Configuration into this builder.
     *
     * @param configuration the configuration
     * @return the CreateConfigurationOptions builder
     */
    public Builder configuration(Configuration configuration) {
      this.conversions = configuration.getConversions();
      this.name = configuration.getName();
      this.description = configuration.getDescription();
      this.normalizations = configuration.getNormalizations();
      this.enrichments = configuration.getEnrichments();
      return this;
    }
  }

  private CreateConfigurationOptions(Builder builder) {
    Validator.notNull(builder.environmentId, "environmentId cannot be null");
    environmentId = builder.environmentId;
    conversions = builder.conversions;
    name = builder.name;
    description = builder.description;
    normalizations = builder.normalizations;
    enrichments = builder.enrichments;
  }

  /**
   * New builder.
   *
   * @return a CreateConfigurationOptions builder
   */
  public Builder newBuilder() {
    return new Builder(this);
  }

  /**
   * Gets the environmentId.
   *
   * @return the environmentId
   */
  public String environmentId() {
    return environmentId;
  }

  /**
   * Gets the conversions.
   *
   * @return the conversions
   */
  public Conversions conversions() {
    return conversions;
  }

  /**
   * Gets the name.
   *
   * @return the name
   */
  public String name() {
    return name;
  }

  /**
   * Gets the description.
   *
   * @return the description
   */
  public String description() {
    return description;
  }

  /**
   * Gets the normalizations.
   *
   * @return the normalizations
   */
  public List<NormalizationOperation> normalizations() {
    return normalizations;
  }

  /**
   * Gets the enrichments.
   *
   * @return the enrichments
   */
  public List<Enrichment> enrichments() {
    return enrichments;
  }
}
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.vfs;

import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.FileTypeRegistry;
import com.intellij.openapi.util.*;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.encoding.EncodingRegistry;
import com.intellij.testFramework.LightVirtualFile;
import com.intellij.util.LineSeparator;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.Charset;

/**
 * Represents a file in <code>{@link VirtualFileSystem}</code>. A particular file is represented by equal
 * {@code VirtualFile} instances for the entire lifetime of the IntelliJ IDEA process, unless the file
 * is deleted, in which case {@link #isValid()} will return {@code false}.
 * <p/>
 * VirtualFile instances are created on request, so there can be several instances corresponding to the same file.
 * All of them are equal, have the same hashCode and use shared storage for all related data, including user data
 * (see {@link UserDataHolder}).
 * <p/>
 * If an in-memory implementation of VirtualFile is required, {@link LightVirtualFile}
 * can be used.
 * <p/>
 * Please see <a href="http://confluence.jetbrains.net/display/IDEADEV/IntelliJ+IDEA+Virtual+File+System">IntelliJ IDEA Virtual File System</a>
 * for high-level overview.
 *
 * @see VirtualFileSystem
 * @see VirtualFileManager
 */
public abstract class VirtualFile extends UserDataHolderBase implements ModificationTracker {
  /** Marker key attached to requestor objects; used to identify the originator of VFS operations. */
  public static final Key<Object> REQUESTOR_MARKER = Key.create("REQUESTOR_MARKER");
  /** Shared empty array; prefer over allocating {@code new VirtualFile[0]}. */
  public static final VirtualFile[] EMPTY_ARRAY = new VirtualFile[0];

  /**
   * Used as a property name in the {@link VirtualFilePropertyEvent} fired when the name of a
   * {@link VirtualFile} changes.
   *
   * @see VirtualFileListener#propertyChanged
   * @see VirtualFilePropertyEvent#getPropertyName
   */
  public static final String PROP_NAME = "name";

  /**
   * Used as a property name in the {@link VirtualFilePropertyEvent} fired when the encoding of a
   * {@link VirtualFile} changes.
   *
   * @see VirtualFileListener#propertyChanged
   * @see VirtualFilePropertyEvent#getPropertyName
   */
  public static final String PROP_ENCODING = "encoding";

  /**
   * Used as a property name in the {@link VirtualFilePropertyEvent} fired when the write permission of a
   * {@link VirtualFile} changes.
   *
   * @see VirtualFileListener#propertyChanged
   * @see VirtualFilePropertyEvent#getPropertyName
   */
  public static final String PROP_WRITABLE = "writable";

  /**
   * Used as a property name in the {@link VirtualFilePropertyEvent} fired when a visibility of a
   * {@link VirtualFile} changes.
   *
   * @see VirtualFileListener#propertyChanged
   * @see VirtualFilePropertyEvent#getPropertyName
   */
  public static final String PROP_HIDDEN = VFileProperty.HIDDEN.getName();

  /**
   * Used as a property name in the {@link VirtualFilePropertyEvent} fired when a symlink target of a
   * {@link VirtualFile} changes.
   *
   * @see VirtualFileListener#propertyChanged
   * @see VirtualFilePropertyEvent#getPropertyName
   */
  public static final String PROP_SYMLINK_TARGET = "symlink";

  private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.vfs.VirtualFile");
  // BOM and charset are stored in the shared user-data map, so all equal instances see the same values.
  private static final Key<byte[]> BOM_KEY = Key.create("BOM");
  private static final Key<Charset> CHARSET_KEY = Key.create("CHARSET");

  protected VirtualFile() {
  }

  /**
   * Gets the name of this file.
   *
   * @see #getNameSequence()
   */
  @NotNull
  public abstract String getName();

  /**
   * Gets the name of this file as a {@link CharSequence}; default implementation delegates to {@link #getName()}.
   * Implementations may override to avoid a String allocation.
   */
  @NotNull
  public CharSequence getNameSequence() {
    return getName();
  }

  /**
   * Gets the {@link VirtualFileSystem} this file belongs to.
   *
   * @return the {@link VirtualFileSystem}
   */
  @NotNull
  public abstract VirtualFileSystem getFileSystem();

  /**
   * Gets the path of this file. Path is a string which uniquely identifies file within given
   * <code>{@link VirtualFileSystem}</code>. Format of the path depends on the concrete file system.
   * For <code>{@link com.intellij.openapi.vfs.LocalFileSystem}</code> it is an absolute file path with file separator
   * characters (File.separatorChar) replaced to the forward slash ('/').
   *
   * @return the path
   */
  @SuppressWarnings("JavadocReference")
  @NotNull
  public abstract String getPath();

  /**
   * Gets the URL of this file. The URL is a string which uniquely identifies file in all file systems.
   * It has the following format: {@code <protocol>://<path>}.
   * <p>
   * File can be found by its URL using {@link VirtualFileManager#findFileByUrl} method.
   * <p>
   * Please note these URLs are intended for use within VFS - meaning they are not necessarily RFC-compliant.
   *
   * @return the URL consisting of protocol and path
   * @see VirtualFileManager#findFileByUrl
   * @see VirtualFile#getPath
   * @see VirtualFileSystem#getProtocol
   */
  @NotNull
  public String getUrl() {
    return VirtualFileManager.constructUrl(getFileSystem().getProtocol(), getPath());
  }

  /**
   * Fetches "presentable URL" of this file. "Presentable URL" is a string to be used for displaying this
   * file in the UI.
   *
   * @return the presentable URL.
   * @see VirtualFileSystem#extractPresentableUrl
   */
  @NotNull
  public final String getPresentableUrl() {
    return getFileSystem().extractPresentableUrl(getPath());
  }

  /**
   * Gets the extension of this file. If file name contains '.' extension is the substring from the last '.'
   * to the end of the name, otherwise extension is null.
   *
   * @return the extension or null if file name doesn't contain '.'
   */
  @Nullable
  public String getExtension() {
    String name = getName();
    int index = name.lastIndexOf('.');
    if (index < 0) return null;
    return name.substring(index + 1);
  }

  /**
   * Gets the file name without the extension. If file name contains '.' the substring till the last '.' is returned.
   * Otherwise the same value as <code>{@link #getName}</code> method returns is returned.
   *
   * @return the name without extension, or the full name if there is no '.' in it
   */
  @NotNull
  public String getNameWithoutExtension() {
    return StringUtil.trimExtension(getName());
  }

  /**
   * Renames this file to the {@code newName}.<p>
   * This method should be only called within write-action.
   * See {@link Application#runWriteAction(Runnable)}.
   *
   * @param requestor any object to control who called this method. Note that
   *                  it is considered to be an external change if {@code requestor} is {@code null}.
   *                  See {@link VirtualFileEvent#getRequestor}
   * @param newName   the new file name
   * @throws IOException if file failed to be renamed
   */
  public void rename(Object requestor, @NotNull String newName) throws IOException {
    // No-op when the name is unchanged: avoids firing a spurious rename event.
    if (getName().equals(newName)) return;
    if (!getFileSystem().isValidName(newName)) {
      throw new IOException(VfsBundle.message("file.invalid.name.error", newName));
    }
    getFileSystem().renameFile(requestor, this, newName);
  }

  /**
   * Checks whether this file has write permission. Note that this value may be cached and may differ from
   * the write permission of the physical file.
   *
   * @return {@code true} if this file is writable, {@code false} otherwise
   */
  public abstract boolean isWritable();

  /**
   * Changes the write permission of this file; default implementation always fails.
   *
   * @throws IOException if the file system does not support changing writability
   */
  public void setWritable(boolean writable) throws IOException {
    throw new IOException("Not supported");
  }

  /**
   * Checks whether this file is a directory.
   *
   * @return {@code true} if this file is a directory, {@code false} otherwise
   */
  public abstract boolean isDirectory();

  /**
   * Checks whether this file has a specific property.
   *
   * @return {@code true} if the file has a specific property, {@code false} otherwise
   * @since 13.0
   */
  public boolean is(@NotNull VFileProperty property) {
    return false;
  }

  /**
   * Resolves all symbolic links contained in a path to this file and returns a path to a link target
   * (in platform-independent format).
   * <p/>
   * <b>Note</b>: please use this method judiciously. In most cases VFS clients don't need to resolve links in paths
   * and should work with those provided by a user.
   *
   * @return {@code getPath()} if there are no symbolic links in a file's path;
   *         {@code getCanonicalFile().getPath()} if the link was successfully resolved;
   *         {@code null} otherwise
   * @since 11.1
   */
  @Nullable
  public String getCanonicalPath() {
    return getPath();
  }

  /**
   * Resolves all symbolic links contained in a path to this file and returns a link target.
   * <p/>
   * <b>Note</b>: please use this method judiciously. In most cases VFS clients don't need to resolve links in paths
   * and should work with those provided by a user.
   *
   * @return {@code this} if there are no symbolic links in a file's path;
   *         instance of {@code VirtualFile} if the link was successfully resolved;
   *         {@code null} otherwise
   * @since 11.1
   */
  @Nullable
  public VirtualFile getCanonicalFile() {
    return this;
  }

  /**
   * Checks whether this {@code VirtualFile} is valid. File can be invalidated either by deleting it or one of its
   * parents with {@link #delete} method or by an external change.
   * If file is not valid only {@link #equals}, {@link #hashCode} and methods from
   * {@link UserDataHolder} can be called for it. Using any other methods for an invalid {@link VirtualFile} instance
   * produce unpredictable results.
   *
   * @return {@code true} if this is a valid file, {@code false} otherwise
   */
  public abstract boolean isValid();

  /**
   * Gets the parent {@code VirtualFile}.
   *
   * @return the parent file or {@code null} if this file is a root directory
   */
  public abstract VirtualFile getParent();

  /**
   * Gets the child files.
   *
   * @return array of the child files or {@code null} if this file is not a directory
   */
  public abstract VirtualFile[] getChildren();

  /**
   * Finds child of this file with the given name.
   *
   * @param name the file name to search by
   * @return the file if found any, {@code null} otherwise
   */
  @Nullable
  public VirtualFile findChild(@NotNull String name) {
    VirtualFile[] children = getChildren();
    if (children == null) return null;
    for (VirtualFile child : children) {
      // Name comparison is delegated to nameEquals() so file systems can plug in
      // their own semantics (e.g. case-insensitivity).
      if (child.nameEquals(name)) {
        return child;
      }
    }
    return null;
  }

  /**
   * Returns the existing child with the given name, or creates a new data file with that name.
   *
   * @param requestor any object to control who called this method (see {@link VirtualFileEvent#getRequestor})
   * @param name      the child file name
   * @return the existing or newly created child
   * @throws IOException if the child had to be created and creation failed
   */
  @NotNull
  public VirtualFile findOrCreateChildData(Object requestor, @NotNull String name) throws IOException {
    final VirtualFile child = findChild(name);
    if (child != null) return child;
    return createChildData(requestor, name);
  }

  /**
   * @return the {@link FileType} of this file.
   *         When IDEA has no idea what the file type is (i.e. file type is not registered via {@link FileTypeRegistry}),
   *         it returns {@link com.intellij.openapi.fileTypes.FileTypes#UNKNOWN}
   */
  @SuppressWarnings("JavadocReference")
  @NotNull
  public FileType getFileType() {
    return FileTypeRegistry.getInstance().getFileTypeByFile(this);
  }

  /**
   * Finds file by path relative to this file.
   *
   * @param relPath the relative path with / used as separators
   * @return the file if found any, {@code null} otherwise
   */
  @Nullable
  public VirtualFile findFileByRelativePath(@NotNull String relPath) {
    if (relPath.isEmpty()) return this;
    relPath = StringUtil.trimStart(relPath, "/");
    // Resolve one path segment per call; the tail is handled by the recursive call below.
    int index = relPath.indexOf('/');
    if (index < 0) index = relPath.length();
    String name = relPath.substring(0, index);
    VirtualFile child;
    if (name.equals(".")) {
      child = this;
    }
    else if (name.equals("..")) {
      if (is(VFileProperty.SYMLINK)) {
        // ".." of a symlink is the parent of its *target*, not of the link itself.
        final VirtualFile canonicalFile = getCanonicalFile();
        child = canonicalFile != null ? canonicalFile.getParent() : null;
      }
      else {
        child = getParent();
      }
    }
    else {
      child = findChild(name);
    }
    if (child == null) return null;
    if (index < relPath.length()) {
      return child.findFileByRelativePath(relPath.substring(index + 1));
    }
    return child;
  }

  /**
   * Creates a subdirectory in this directory. This method should be only called within write-action.
   * See {@link Application#runWriteAction}.
   *
   * @param requestor any object to control who called this method. Note that
   *                  it is considered to be an external change if {@code requestor} is {@code null}.
   *                  See {@link VirtualFileEvent#getRequestor}
   * @param name      directory name
   * @return {@code VirtualFile} representing the created directory
   * @throws IOException if directory failed to be created
   */
  @NotNull
  public VirtualFile createChildDirectory(Object requestor, @NotNull String name) throws IOException {
    if (!isDirectory()) {
      throw new IOException(VfsBundle.message("directory.create.wrong.parent.error"));
    }
    if (!isValid()) {
      throw new IOException(VfsBundle.message("invalid.directory.create.files"));
    }
    if (!getFileSystem().isValidName(name)) {
      throw new IOException(VfsBundle.message("directory.invalid.name.error", name));
    }
    if (findChild(name) != null) {
      throw new IOException(VfsBundle.message("file.create.already.exists.error", getUrl(), name));
    }
    return getFileSystem().createChildDirectory(requestor, this, name);
  }

  /**
   * Creates a new file in this directory. This method should be only called within write-action.
   * See {@link Application#runWriteAction}.
   *
   * @param requestor any object to control who called this method. Note that
   *                  it is considered to be an external change if {@code requestor} is {@code null}.
   *                  See {@link VirtualFileEvent#getRequestor}
   * @return {@code VirtualFile} representing the created file
   * @throws IOException if file failed to be created
   */
  @NotNull
  public VirtualFile createChildData(Object requestor, @NotNull String name) throws IOException {
    if (!isDirectory()) {
      throw new IOException(VfsBundle.message("file.create.wrong.parent.error"));
    }
    if (!isValid()) {
      throw new IOException(VfsBundle.message("invalid.directory.create.files"));
    }
    if (!getFileSystem().isValidName(name)) {
      throw new IOException(VfsBundle.message("file.invalid.name.error", name));
    }
    if (findChild(name) != null) {
      throw new IOException(VfsBundle.message("file.create.already.exists.error", getUrl(), name));
    }
    return getFileSystem().createChildFile(requestor, this, name);
  }

  /**
   * Deletes this file. This method should be only called within write-action.
   * See {@link Application#runWriteAction}.
   *
   * @param requestor any object to control who called this method. Note that
   *                  it is considered to be an external change if {@code requestor} is {@code null}.
   *                  See {@link VirtualFileEvent#getRequestor}
   * @throws IOException if file failed to be deleted
   */
  public void delete(Object requestor) throws IOException {
    LOG.assertTrue(isValid(), "Deleting invalid file");
    getFileSystem().deleteFile(requestor, this);
  }

  /**
   * Moves this file to another directory. This method should be only called within write-action.
   * See {@link Application#runWriteAction}.
   *
   * @param requestor any object to control who called this method. Note that
   *                  it is considered to be an external change if {@code requestor} is {@code null}.
   *                  See {@link VirtualFileEvent#getRequestor}
   * @param newParent the directory to move this file to
   * @throws IOException if file failed to be moved
   */
  public void move(final Object requestor, @NotNull final VirtualFile newParent) throws IOException {
    if (getFileSystem() != newParent.getFileSystem()) {
      throw new IOException(VfsBundle.message("file.move.error", newParent.getPresentableUrl()));
    }
    // The explicitly chosen encoding must survive the move; doActionAndRestoreEncoding re-applies it afterwards.
    EncodingRegistry.doActionAndRestoreEncoding(this, new ThrowableComputable<VirtualFile, IOException>() {
      @Override
      public VirtualFile compute() throws IOException {
        getFileSystem().moveFile(requestor, VirtualFile.this, newParent);
        return VirtualFile.this;
      }
    });
  }

  /**
   * Copies this file to the given directory under the given name, preserving the explicitly chosen encoding.
   *
   * @param requestor any object to control who called this method (see {@link VirtualFileEvent#getRequestor})
   * @param newParent the target directory (must be a directory on the same file system)
   * @param copyName  the name for the copy
   * @return the created copy
   * @throws IOException if the target is on another file system, is not a directory, or the copy fails
   */
  public VirtualFile copy(final Object requestor, @NotNull final VirtualFile newParent, @NotNull final String copyName)
    throws IOException {
    if (getFileSystem() != newParent.getFileSystem()) {
      throw new IOException(VfsBundle.message("file.copy.error", newParent.getPresentableUrl()));
    }
    if (!newParent.isDirectory()) {
      throw new IOException(VfsBundle.message("file.copy.target.must.be.directory"));
    }
    return EncodingRegistry.doActionAndRestoreEncoding(this, new ThrowableComputable<VirtualFile, IOException>() {
      @Override
      public VirtualFile compute() throws IOException {
        return getFileSystem().copyFile(requestor, VirtualFile.this, newParent, copyName);
      }
    });
  }

  /**
   * @return Retrieve the charset file has been loaded with (if loaded) and would be saved with (if would).
   */
  @NotNull
  public Charset getCharset() {
    Charset charset = getStoredCharset();
    if (charset == null) {
      // Lazily fall back to the IDE-wide default and remember it, so subsequent calls are stable.
      charset = EncodingRegistry.getInstance().getDefaultCharset();
      setCharset(charset);
    }
    return charset;
  }

  /** @return the charset previously stored via {@link #storeCharset}, or {@code null} if none was set. */
  @Nullable
  protected Charset getStoredCharset() {
    return getUserData(CHARSET_KEY);
  }

  /** Stores the charset in the shared user-data map without firing any events. */
  protected void storeCharset(Charset charset) {
    putUserData(CHARSET_KEY, charset);
  }

  /** Sets the charset; fires a {@link #PROP_ENCODING} change event if the value actually changed. */
  public void setCharset(final Charset charset) {
    setCharset(charset, null);
  }

  /** Sets the charset and runs {@code whenChanged} if the value actually changed. */
  public void setCharset(final Charset charset, @Nullable Runnable whenChanged) {
    setCharset(charset, whenChanged, true);
  }

  /**
   * Sets the charset, reconciles the BOM with the new charset, and (optionally) fires a
   * {@link #PROP_ENCODING} property-change event. The callback and event are skipped when the old
   * charset was {@code null} (i.e. this call is an initial detection rather than a change).
   */
  public void setCharset(final Charset charset, @Nullable Runnable whenChanged, boolean fireEventsWhenChanged) {
    final Charset old = getStoredCharset();
    storeCharset(charset);
    if (Comparing.equal(charset, old)) return;
    byte[] bom = charset == null ? null : CharsetToolkit.getMandatoryBom(charset);
    byte[] existingBOM = getBOM();
    if (bom == null && charset != null && existingBOM != null) {
      // Keep the existing BOM only if it is compatible with the new charset.
      bom = CharsetToolkit.canHaveBom(charset, existingBOM) ? existingBOM : null;
    }
    setBOM(bom);
    if (old != null) { //do not send on detect
      if (whenChanged != null) whenChanged.run();
      if (fireEventsWhenChanged) {
        VirtualFileManager.getInstance().notifyPropertyChanged(this, PROP_ENCODING, old, charset);
      }
    }
  }

  /** @return {@code true} if a charset was explicitly stored for this file. */
  public boolean isCharsetSet() {
    return getStoredCharset() != null;
  }

  /** Replaces the file content with {@code content}, leaving modification/time stamps to the file system. */
  public final void setBinaryContent(@NotNull byte[] content) throws IOException {
    setBinaryContent(content, -1, -1);
  }

  /** Replaces the file content, using this file itself as the requestor. */
  public void setBinaryContent(@NotNull byte[] content, long newModificationStamp, long newTimeStamp) throws IOException {
    setBinaryContent(content, newModificationStamp, newTimeStamp, this);
  }

  /**
   * Replaces the file content with {@code content}. Must be called with write access; the output stream
   * is obtained via {@link #getOutputStream(Object, long, long)} and always closed, even on failure.
   */
  public void setBinaryContent(@NotNull byte[] content, long newModificationStamp, long newTimeStamp, Object requestor)
    throws IOException {
    ApplicationManager.getApplication().assertWriteAccessAllowed();
    final OutputStream outputStream = getOutputStream(requestor, newModificationStamp, newTimeStamp);
    try {
      outputStream.write(content);
      outputStream.flush();
    }
    finally {
      outputStream.close();
    }
  }

  /**
   * Creates the {@code OutputStream} for this file.
   * Writes BOM first, if there is any. See <a href=http://unicode.org/faq/utf_bom.html>Unicode Byte Order Mark FAQ</a>
   * for an explanation.
   *
   * @param requestor any object to control who called this method. Note that
   *                  it is considered to be an external change if {@code requestor} is {@code null}.
   *                  See {@link VirtualFileEvent#getRequestor}
   * @return {@code OutputStream}
   * @throws IOException if an I/O error occurs
   */
  public final OutputStream getOutputStream(Object requestor) throws IOException {
    return getOutputStream(requestor, -1, -1);
  }

  /**
   * Gets the {@code OutputStream} for this file and sets modification stamp and time stamp to the specified values
   * after closing the stream.<p>
   * <p/>
   * Normally you should not use this method.
   *
   * Writes BOM first, if there is any. See <a href=http://unicode.org/faq/utf_bom.html>Unicode Byte Order Mark FAQ</a>
   * for an explanation.
   *
   * @param requestor            any object to control who called this method. Note that
   *                             it is considered to be an external change if {@code requestor} is {@code null}.
   *                             See {@link VirtualFileEvent#getRequestor}
   * @param newModificationStamp new modification stamp or -1 if no special value should be set
   * @param newTimeStamp         new time stamp or -1 if no special value should be set
   * @return {@code OutputStream}
   * @throws IOException if an I/O error occurs
   * @see #getModificationStamp()
   */
  @NotNull
  public abstract OutputStream getOutputStream(Object requestor, long newModificationStamp, long newTimeStamp)
    throws IOException;

  /**
   * Returns file content as an array of bytes.
   * Has the same effect as contentsToByteArray(true).
   *
   * @return file content
   * @throws IOException if an I/O error occurs
   * @see #contentsToByteArray(boolean)
   * @see #getInputStream()
   */
  @NotNull
  public abstract byte[] contentsToByteArray() throws IOException;

  /**
   * Returns file content as an array of bytes.
   *
   * @param cacheContent set true to cache the loaded content (default implementation ignores this hint)
   * @return file content
   * @throws IOException if an I/O error occurs
   * @see #contentsToByteArray()
   */
  @NotNull
  public byte[] contentsToByteArray(boolean cacheContent) throws IOException {
    return contentsToByteArray();
  }

  /**
   * Gets modification stamp value. Modification stamp is a value changed by any modification
   * of the content of the file. Note that it is not related to the file modification time.
   *
   * @return modification stamp
   * @see #getTimeStamp()
   */
  public long getModificationStamp() {
    throw new UnsupportedOperationException(getClass().getName());
  }

  /**
   * Gets the timestamp for this file. Note that this value may be cached and may differ from
   * the timestamp of the physical file.
   *
   * @return timestamp
   * @see File#lastModified
   */
  public abstract long getTimeStamp();

  /**
   * File length in bytes.
   *
   * @return the length of this file.
   */
  public abstract long getLength();

  /**
   * Refreshes the cached file information from the physical file system. If this file is not a directory
   * the timestamp value is refreshed and {@code contentsChanged} event is fired if it is changed.<p>
   * If this file is a directory the set of its children is refreshed. If recursive value is {@code true} all
   * children are refreshed recursively.
   * <p/>
   * When invoking synchronous refresh from a thread other than the event dispatch thread, the current thread must
   * NOT be in a read action, otherwise a deadlock may occur.
   *
   * @param asynchronous if {@code true}, the method will return immediately and the refresh will be processed
   *                     in the background. If {@code false}, the method will return only after the refresh
   *                     is done and the VFS change events caused by the refresh have been fired and processed
   *                     in the event dispatch thread. Instead of synchronous refreshes, it's recommended to use
   *                     asynchronous refreshes with a {@code postRunnable} whenever possible.
   * @param recursive    whether to refresh all the files in this directory recursively
   */
  public void refresh(boolean asynchronous, boolean recursive) {
    refresh(asynchronous, recursive, null);
  }

  /**
   * The same as {@link #refresh(boolean, boolean)} but also runs {@code postRunnable}
   * after the operation is completed.
   */
  public abstract void refresh(boolean asynchronous, boolean recursive, @Nullable Runnable postRunnable);

  /** @return the name to show in the UI; default implementation is the plain file name. */
  public String getPresentableName() {
    return getName();
  }

  @Override
  public long getModificationCount() {
    // Invalid files report -1 so trackers see them as "changed"/unusable.
    return isValid() ? getTimeStamp() : -1;
  }

  /**
   * @return whether file name equals to this name;
   *         result depends on the filesystem specifics (e.g. case sensitivity)
   */
  protected boolean nameEquals(@NotNull String name) {
    return getName().equals(name);
  }

  /**
   * Gets the {@code InputStream} for this file.
   * Skips BOM if there is any. See <a href=http://unicode.org/faq/utf_bom.html>Unicode Byte Order Mark FAQ</a>
   * for an explanation.
   *
   * @return {@code InputStream}
   * @throws IOException if an I/O error occurs
   * @see #contentsToByteArray
   */
  public abstract InputStream getInputStream() throws IOException;

  /** @return the byte-order mark recorded for this file, or {@code null} if none. */
  @Nullable
  public byte[] getBOM() {
    return getUserData(BOM_KEY);
  }

  /** Records the byte-order mark for this file ({@code null} clears it). */
  public void setBOM(@Nullable byte[] BOM) {
    putUserData(BOM_KEY, BOM);
  }

  @Override
  public String toString() {
    return "VirtualFile: " + getPresentableUrl();
  }

  /** @return whether the file exists; default implementation equates existence with validity. */
  public boolean exists() {
    return isValid();
  }

  /** @return {@code true} if this file lives in the local file system; default is {@code false}. */
  public boolean isInLocalFileSystem() {
    return false;
  }

  /** @deprecated use {@link VirtualFileSystem#isValidName(String)} (to be removed in IDEA 18) */
  public static boolean isValidName(@NotNull String name) {
    return !name.isEmpty() && name.indexOf('\\') < 0 && name.indexOf('/') < 0;
  }

  private static final Key<String> DETECTED_LINE_SEPARATOR_KEY = Key.create("DETECTED_LINE_SEPARATOR_KEY");

  /**
   * @return Line separator for this file.
   *         It is always null for directories and binaries, and possibly null if a separator isn't yet known.
   * @see LineSeparator
   */
  public String getDetectedLineSeparator() {
    return getUserData(DETECTED_LINE_SEPARATOR_KEY);
  }

  /** Records the detected line separator ({@code null} clears it). */
  public void setDetectedLineSeparator(@Nullable String separator) {
    putUserData(DETECTED_LINE_SEPARATOR_KEY, separator);
  }

  /**
   * Hint that content is already available in memory; default implementation ignores it.
   * Subclasses may override to avoid re-reading from disk.
   */
  public void setPreloadedContentHint(byte[] preloadedContentHint) {
  }
}
/*
 * Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.hazelcast.internal.ascii;

import com.hazelcast.cluster.ClusterState;
import com.hazelcast.config.Config;
import com.hazelcast.config.RestApiConfig;
import com.hazelcast.config.RestServerEndpointConfig;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.LifecycleEvent;
import com.hazelcast.core.LifecycleListener;
import com.hazelcast.instance.BuildInfoProvider;
import com.hazelcast.internal.ascii.HTTPCommunicator.ConnectionResponse;
import com.hazelcast.internal.json.Json;
import com.hazelcast.internal.json.JsonObject;
import com.hazelcast.test.HazelcastParallelClassRunner;
import com.hazelcast.test.HazelcastTestSupport;
import com.hazelcast.test.TestAwareInstanceFactory;
import com.hazelcast.test.annotation.QuickTest;
import org.apache.http.NoHttpResponseException;
import org.junit.After;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.SocketException;
import java.util.concurrent.CountDownLatch;

import static com.hazelcast.test.HazelcastTestSupport.assertClusterStateEventually;
import static com.hazelcast.test.HazelcastTestSupport.assertContains;
import static com.hazelcast.test.HazelcastTestSupport.assertOpenEventually;
import static com.hazelcast.test.HazelcastTestSupport.assertTrueEventually;
import static com.hazelcast.test.HazelcastTestSupport.smallInstanceConfig;
import static java.net.HttpURLConnection.HTTP_NOT_FOUND;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/**
 * Integration tests for the cluster-management REST endpoints (cluster info/state/version,
 * shutdown, hot backup, health check, HEAD requests, license update).
 * <p>
 * Each test starts real Hazelcast member(s) via {@link TestAwareInstanceFactory} and talks
 * to them over HTTP through {@link HTTPCommunicator}.
 */
@RunWith(HazelcastParallelClassRunner.class)
@Category(QuickTest.class)
public class RestClusterTest {

    protected final TestAwareInstanceFactory factory = new TestAwareInstanceFactory();

    @BeforeClass
    public static void beforeClass() {
        // Make sure no instances from other test classes leak into these tests.
        Hazelcast.shutdownAll();
    }

    @After
    public void tearDown() {
        factory.terminateAll();
    }

    protected Config createConfig() {
        return smallInstanceConfig();
    }

    /** Base config plus REST enabled with every endpoint group allowed. */
    protected Config createConfigWithRestEnabled() {
        Config config = createConfig();
        RestApiConfig restApiConfig = new RestApiConfig().setEnabled(true).enableAllGroups();
        config.getNetworkConfig().setRestApiConfig(restApiConfig);
        return config;
    }

    protected String getPassword() {
        // Community version doesn't check the password.
        return "";
    }

    @Test
    public void testDisabledRest() throws Exception {
        // REST should be disabled by default
        HazelcastInstance instance = factory.newHazelcastInstance(createConfig());
        HTTPCommunicator communicator = new HTTPCommunicator(instance);

        try {
            communicator.getClusterInfo();
            fail("Rest is disabled. Not expected to reach here!");
        } catch (IOException ignored) {
            // ignored: connection failure is the expected outcome when REST is off
        }
    }

    @Test
    public void testClusterInfo_whenAdvancedNetworkWithoutClientEndpoint() throws Exception {
        // when advanced network config is enabled and no client endpoint is defined
        // then client connections are reported as 0
        Config config = createConfig();
        config.getAdvancedNetworkConfig().setEnabled(true)
                .setRestEndpointConfig(new RestServerEndpointConfig()
                        .setPort(9999)
                        .enableAllGroups());
        HazelcastInstance instance = factory.newHazelcastInstance(config);
        HTTPCommunicator communicator = new HTTPCommunicator(instance);
        String response = communicator.getClusterInfo();
        JsonObject json = Json.parse(response).asObject();
        assertEquals(0, json.getInt("connectionCount", -1));
    }

    @Test
    public void testClusterShutdown() throws Exception {
        Config config = createConfigWithRestEnabled();
        final HazelcastInstance instance1 = factory.newHazelcastInstance(config);
        final HazelcastInstance instance2 = factory.newHazelcastInstance(config);
        HTTPCommunicator communicator = new HTTPCommunicator(instance2);

        ConnectionResponse response = communicator.shutdownCluster(config.getClusterName(), getPassword());
        assertSuccessJson(response);
        // Shutdown is asynchronous from the REST client's point of view.
        assertTrueEventually(() -> {
            assertFalse(instance1.getLifecycleService().isRunning());
            assertFalse(instance2.getLifecycleService().isRunning());
        });
    }

    @Test
    public void testGetClusterState() throws Exception {
        Config config = createConfigWithRestEnabled();
        HazelcastInstance instance1 = factory.newHazelcastInstance(config);
        HazelcastInstance instance2 = factory.newHazelcastInstance(config);
        String clusterName = config.getClusterName();
        HTTPCommunicator communicator1 = new HTTPCommunicator(instance1);
        HTTPCommunicator communicator2 = new HTTPCommunicator(instance2);

        instance1.getCluster().changeClusterState(ClusterState.FROZEN);
        ConnectionResponse resp1 = communicator1.getClusterState(clusterName, getPassword());
        assertSuccessJson(resp1, "state", "frozen");

        instance1.getCluster().changeClusterState(ClusterState.PASSIVE);
        ConnectionResponse resp2 = communicator2.getClusterState(clusterName, getPassword());
        assertSuccessJson(resp2, "state", "passive");
    }

    @Test
    public void testChangeClusterState() throws Exception {
        Config config = createConfigWithRestEnabled();
        final HazelcastInstance instance1 = factory.newHazelcastInstance(config);
        final HazelcastInstance instance2 = factory.newHazelcastInstance(config);
        HTTPCommunicator communicator = new HTTPCommunicator(instance1);
        String clusterName = config.getClusterName();

        ConnectionResponse resp = communicator.changeClusterState(clusterName, getPassword(), "frozen");
        assertSuccessJson(resp, "state", "frozen");
        // The state change must be visible on both members, not just the one addressed.
        assertClusterStateEventually(ClusterState.FROZEN, instance1);
        assertClusterStateEventually(ClusterState.FROZEN, instance2);
    }

    @Test
    public void testGetClusterVersion() throws IOException {
        final HazelcastInstance instance = factory.newHazelcastInstance(createConfigWithRestEnabled());
        final HTTPCommunicator communicator = new HTTPCommunicator(instance);
        assertJsonContains(communicator.getClusterVersion(),
                "status", "success",
                "version", instance.getCluster().getClusterVersion().toString());
    }

    @Test
    public void testChangeClusterVersion() throws IOException {
        Config config = createConfigWithRestEnabled();
        final HazelcastInstance instance = factory.newHazelcastInstance(config);
        final HTTPCommunicator communicator = new HTTPCommunicator(instance);
        String clusterName = config.getClusterName();
        // "Changing" to the current version is a no-op that must still succeed.
        ConnectionResponse resp = communicator.changeClusterVersion(clusterName, getPassword(),
                instance.getCluster().getClusterVersion().toString());
        assertSuccessJson(resp, "version", instance.getCluster().getClusterVersion().toString());
    }

    @Test
    public void testHotBackup() throws IOException {
        Config config = createConfigWithRestEnabled();
        final HazelcastInstance instance = factory.newHazelcastInstance(config);
        final HTTPCommunicator communicator = new HTTPCommunicator(instance);
        String clusterName = config.getClusterName();
        ConnectionResponse resp = communicator.hotBackup(clusterName, getPassword());
        assertSuccessJson(resp);

        ConnectionResponse resp1 = communicator.hotBackupInterrupt(clusterName, getPassword());
        assertSuccessJson(resp1);
    }

    @Test
    public void testForceAndPartialStart() throws IOException {
        Config config = createConfigWithRestEnabled();
        final HazelcastInstance instance = factory.newHazelcastInstance(config);
        final HTTPCommunicator communicator = new HTTPCommunicator(instance);
        String clusterName = config.getClusterName();
        // Without persistence enabled both operations answer HTTP 200 with a "fail" status body.
        ConnectionResponse resp1 = communicator.forceStart(clusterName, getPassword());
        assertEquals(HttpURLConnection.HTTP_OK, resp1.responseCode);
        assertJsonContains(resp1.response, "status", "fail");

        ConnectionResponse resp2 = communicator.partialStart(clusterName, getPassword());
        assertEquals(HttpURLConnection.HTTP_OK, resp2.responseCode);
        assertJsonContains(resp2.response, "status", "fail");
    }

    @Test
    public void testListNodes() throws Exception {
        Config config = createConfigWithRestEnabled();
        HazelcastInstance instance = factory.newHazelcastInstance(config);
        HTTPCommunicator communicator = new HTTPCommunicator(instance);
        HazelcastTestSupport.waitInstanceForSafeState(instance);
        String clusterName = config.getClusterName();
        ConnectionResponse resp = communicator.listClusterNodes(clusterName, getPassword());
        assertSuccessJson(resp, "response",
                String.format("[%s]\n%s\n%s", instance.getCluster().getLocalMember().toString(),
                        BuildInfoProvider.getBuildInfo().getVersion(),
                        System.getProperty("java.version")));
    }

    @Test
    public void testShutdownNode() throws Exception {
        Config config = createConfigWithRestEnabled();
        HazelcastInstance instance = factory.newHazelcastInstance(config);
        HTTPCommunicator communicator = new HTTPCommunicator(instance);

        final CountDownLatch shutdownLatch = new CountDownLatch(1);
        instance.getLifecycleService().addLifecycleListener(event -> {
            if (event.getState() == LifecycleEvent.LifecycleState.SHUTDOWN) {
                shutdownLatch.countDown();
            }
        });
        String clusterName = config.getClusterName();
        try {
            assertJsonContains(communicator.shutdownMember(clusterName, getPassword()).response, "status", "success");
        } catch (SocketException ignored) {
            // if the node shuts down before response is received, a `SocketException` (or instance of its subclass)
            // is expected
        } catch (NoHttpResponseException ignored) {
            // `NoHttpResponseException` is also a possible outcome when a node shut down before it has a chance
            // to send a response back to a client.
        }

        assertOpenEventually(shutdownLatch);
        assertFalse(instance.getLifecycleService().isRunning());
    }

    @Test
    public void simpleHealthCheck() throws Exception {
        HazelcastInstance instance = factory.newHazelcastInstance(createConfigWithRestEnabled());
        HTTPCommunicator communicator = new HTTPCommunicator(instance);

        String result = communicator.getClusterHealth();
        JsonObject jsonResult = assertJsonContains(result, "nodeState", "ACTIVE", "clusterState", "ACTIVE");
        assertTrue(jsonResult.getBoolean("clusterSafe", false));
        assertEquals(0, jsonResult.getInt("migrationQueueSize", -1));
        assertEquals(1, jsonResult.getInt("clusterSize", -1));
    }

    @Test
    public void healthCheckWithPathParameters() throws Exception {
        HazelcastInstance instance = factory.newHazelcastInstance(createConfigWithRestEnabled());
        HTTPCommunicator communicator = new HTTPCommunicator(instance);

        assertEquals("\"ACTIVE\"", communicator.getClusterHealth("/node-state"));
        assertEquals("\"ACTIVE\"", communicator.getClusterHealth("/cluster-state"));
        assertEquals(HttpURLConnection.HTTP_OK, communicator.getClusterHealthResponseCode("/cluster-safe"));
        assertEquals("0", communicator.getClusterHealth("/migration-queue-size"));
        assertEquals("1", communicator.getClusterHealth("/cluster-size"));
    }

    @Test
    public void healthCheckWithUnknownPathParameter() throws Exception {
        HazelcastInstance instance = factory.newHazelcastInstance(createConfigWithRestEnabled());
        HTTPCommunicator communicator = new HTTPCommunicator(instance);
        assertEquals(HttpURLConnection.HTTP_BAD_REQUEST,
                communicator.getClusterHealthResponseCode("/unknown-parameter"));
    }

    @Test(expected = IOException.class)
    public void fail_with_deactivatedHealthCheck() throws Exception {
        // Healthcheck REST URL is deactivated by default - no passed config on purpose
        HazelcastInstance instance = factory.newHazelcastInstance(null);
        HTTPCommunicator communicator = new HTTPCommunicator(instance);
        communicator.getClusterHealth();
    }

    @Test
    public void fail_on_healthcheck_url_with_garbage() throws Exception {
        HazelcastInstance instance = factory.newHazelcastInstance(createConfigWithRestEnabled());
        HTTPCommunicator communicator = new HTTPCommunicator(instance);
        assertEquals(HttpURLConnection.HTTP_BAD_REQUEST, communicator.getFailingClusterHealthWithTrailingGarbage());
    }

    @Test
    public void testHeadRequest_ClusterVersion() throws Exception {
        HazelcastInstance instance = factory.newHazelcastInstance(createConfigWithRestEnabled());
        HTTPCommunicator communicator = new HTTPCommunicator(instance);
        assertEquals(HttpURLConnection.HTTP_OK, communicator.headRequestToClusterVersionURI().responseCode);
    }

    @Test
    public void testHeadRequest_ClusterInfo() throws Exception {
        HazelcastInstance instance = factory.newHazelcastInstance(createConfigWithRestEnabled());
        HTTPCommunicator communicator = new HTTPCommunicator(instance);
        assertEquals(HttpURLConnection.HTTP_OK, communicator.headRequestToClusterInfoURI().responseCode);
    }

    @Test
    public void testHeadRequest_ClusterHealth() throws Exception {
        HazelcastInstance instance = factory.newHazelcastInstance(createConfigWithRestEnabled());
        factory.newHazelcastInstance(createConfigWithRestEnabled());
        HTTPCommunicator communicator = new HTTPCommunicator(instance);
        ConnectionResponse response = communicator.headRequestToClusterHealthURI();
        assertEquals(HttpURLConnection.HTTP_OK, response.responseCode);
        // BUGFIX: assertEquals takes (expected, actual); the original had the arguments swapped,
        // which produced misleading "expected X but was Y" messages on failure.
        assertEquals(1, response.responseHeaders.get("Hazelcast-NodeState").size());
        assertContains(response.responseHeaders.get("Hazelcast-NodeState"), "ACTIVE");
        assertEquals(1, response.responseHeaders.get("Hazelcast-ClusterState").size());
        assertContains(response.responseHeaders.get("Hazelcast-ClusterState"), "ACTIVE");
        assertEquals(1, response.responseHeaders.get("Hazelcast-ClusterSize").size());
        assertContains(response.responseHeaders.get("Hazelcast-ClusterSize"), "2");
        assertEquals(1, response.responseHeaders.get("Hazelcast-MigrationQueueSize").size());
        assertContains(response.responseHeaders.get("Hazelcast-MigrationQueueSize"), "0");
    }

    @Test
    public void testHeadRequest_GarbageClusterHealth() throws Exception {
        HazelcastInstance instance = factory.newHazelcastInstance(createConfigWithRestEnabled());
        HTTPCommunicator communicator = new HTTPCommunicator(instance);
        assertEquals(HTTP_NOT_FOUND, communicator.headRequestToGarbageClusterHealthURI().responseCode);
    }

    @Test
    public void http_get_returns_response_code_200_when_member_is_ready_to_use() throws Exception {
        HazelcastInstance instance = factory.newHazelcastInstance(createConfigWithRestEnabled());
        HTTPCommunicator communicator = new HTTPCommunicator(instance);
        int healthReadyResponseCode = communicator.getHealthReadyResponseCode();
        assertEquals(HttpURLConnection.HTTP_OK, healthReadyResponseCode);
    }

    @Test
    public void testSetLicenseKey() throws Exception {
        Config config = createConfigWithRestEnabled();
        final HazelcastInstance instance = factory.newHazelcastInstance(config);
        HTTPCommunicator communicator = new HTTPCommunicator(instance);
        ConnectionResponse response = communicator.setLicense(config.getClusterName(), getPassword(), "whatever");
        assertSuccessJson(response);
    }

    /**
     * Asserts that {@code json} contains each given attribute with the given value.
     *
     * @param attributesAndValues alternating key/value pairs (must have even length)
     * @return the parsed JSON object, for further assertions
     */
    private JsonObject assertJsonContains(String json, String... attributesAndValues) {
        JsonObject object = Json.parse(json).asObject();
        for (int i = 0; i < attributesAndValues.length; ) {
            String key = attributesAndValues[i++];
            String expectedValue = attributesAndValues[i++];
            assertEquals(expectedValue, object.getString(key, null));
        }
        return object;
    }

    /** Asserts an HTTP 200 response whose JSON body has status=success plus any extra key/value pairs. */
    private void assertSuccessJson(ConnectionResponse resp, String... attributesAndValues) {
        assertEquals(HttpURLConnection.HTTP_OK, resp.responseCode);
        assertJsonContains(resp.response, "status", "success");
        if (attributesAndValues.length > 0) {
            assertJsonContains(resp.response, attributesAndValues);
        }
    }
}
package com.betfair.marketing.affiliates.core;

import com.betfair.marketing.affiliates.config.Settings;
import com.betfair.marketing.affiliates.util.Utils;
import org.apache.commons.io.IOUtils;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
import org.owasp.esapi.ESAPI;
import org.owasp.esapi.Encoder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.*;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.util.HashMap;
import java.util.Map;
import java.util.StringJoiner;

import static com.betfair.marketing.affiliates.config.PropertyNames.*;
import static com.betfair.marketing.affiliates.config.Settings.getSportsbookKey;
import static com.betfair.marketing.affiliates.util.Utils.*;

/**
 * JAX-RS resource that proxies a fixed set of Sportsbook API operations (event/market listing,
 * bet implication/placement, account funds) for the Betfair and Paddypower brands.
 * <p>
 * The brand and operation come from the URL path; the caller's product token comes from a
 * brand-specific cookie. Requests and responses are JSON (json-simple objects).
 */
@Path("sportsbook/{brand}/{type}")
@Produces(MediaType.APPLICATION_JSON)
public class SportsbookApiHandler {

    private static final Logger LOG = LoggerFactory.getLogger(SportsbookApiHandler.class);

    // Supported operation names, matched against the {type} path segment.
    private final static String LIST_EVENTS = "listEvents";
    private final static String LIST_MARKET_PRICES = "listMarketPrices";
    private final static String LIST_MARKET_CATALOGUE = "listMarketCatalogue";
    private final static String PLACE_BETS = "placeBets";
    private final static String IMPLY_BETS = "implyBets";
    private final static String GET_ACCOUNT_FUNDS = "getAccountFunds";
    private final static String GET_MARKET_PRICES_BY_EVENT = "getMarketPricesByEvent";

    // Error classifiers understood by badRequest().
    private static final String INVALID_JSON_REQUEST_BODY = "INVALID_JSON_REQUEST_BODY";
    private static final String INVALID_REQUEST_PATH = "INVALID_REQUEST_PATH";
    private static final String PRODUCT_TOKEN_NOT_FOUND = "PRODUCT_TOKEN_NOT_FOUND";
    private static final String INVALID_BRAND_PATH = "INVALID_BRAND_PATH";
    private static final String INVALID_PRODUCT_TOKEN = "INVALID_PRODUCT_TOKEN";
    private static final String INVALID_API_REQUEST = "INVALID_API_REQUEST";

    private final Encoder encoder = ESAPI.encoder();
    // Template request bodies per operation, keyed by operation name.
    private final HashMap<String, String> bodies = new HashMap<>();
    // Per-request state: the caller's token and brand, set by setBrandProductToken().
    private String productToken;
    private String brand;

    public SportsbookApiHandler() {
        bodies.put(LIST_EVENTS, "{\"listEventsRequestParams\":{\"marketFilter\":{}}}");
        bodies.put(LIST_MARKET_CATALOGUE,
                "{\"listMarketCatalogueRequestParams\":{\"marketFilter\":{},\"maxResults\":100}}");
        bodies.put(LIST_MARKET_PRICES, "{\"listMarketPricesRequestParams\":{}}");
        bodies.put(PLACE_BETS,
                "{\"placeBetsRequestParams\":{\"betDefinitions\":[{\"stakePerLine\":0,\"betNumber\":1,\"betType\":\"SINGLE\",\"betLegs\":[{\"legType\":\"SIMPLE_SELECTION\",\"betRunners\":[{\"runner\":{\"marketId\":\"\",\"selectionId\":\"\"}}]}]}]}}");
        bodies.put(IMPLY_BETS, "{\"implyBetsRequestParams\":{}}");
        bodies.put(GET_ACCOUNT_FUNDS, "{}");
    }

    /**
     * Entry point: parses the JSON body, validates brand/token, and dispatches to the
     * requested operation.
     *
     * @return the downstream response wrapped in a status JSON object, or an error object
     */
    @POST
    @Produces(MediaType.APPLICATION_JSON)
    public JSONObject handleRequest(
            @Context HttpServletRequest request,
            @PathParam("brand") String brandValue,
            @PathParam("type") String requestType,
            @CookieParam("productTokenBetfair") String productTokenBetfair,
            @CookieParam("productTokenPaddypower") String productTokenPaddypower)
            throws IOException, ParseException {
        JSONObject requestJsonObject;
        // Path segments are attacker-controlled; encode before any further use.
        brandValue = encoder.encodeForJavaScript(brandValue);
        requestType = encoder.encodeForJavaScript(requestType);
        try {
            requestJsonObject = (JSONObject) new JSONParser().parse(new InputStreamReader(request.getInputStream()));
        } catch (IOException | ParseException e) {
            LOG.error("Invalid JSON parsed");
            return badRequest(INVALID_JSON_REQUEST_BODY);
        }
        if (!setBrandProductToken(brandValue, productTokenBetfair, productTokenPaddypower)) {
            // BUGFIX: the error response used to be built and then discarded (no `return`),
            // so invalid-token requests fell through to executeRequest(). Return it instead.
            return badRequest(INVALID_PRODUCT_TOKEN);
        }
        return executeRequest(brand, requestType, requestJsonObject);
    }

    /** Dispatches to the handler matching {@code requestType}; unknown types yield an error object. */
    private JSONObject executeRequest(String brandValue, String requestType, JSONObject requestJsonObject)
            throws ParseException, IOException {
        switch (requestType) {
            case LIST_EVENTS:
                return retrieveAllEvents(requestJsonObject);
            case LIST_MARKET_CATALOGUE:
                return retrieveMarketCatalogue(requestJsonObject);
            case LIST_MARKET_PRICES:
                return retrieveMarketPrices(requestJsonObject);
            case PLACE_BETS:
                return placeBet(requestJsonObject);
            case IMPLY_BETS:
                return implyBet(requestJsonObject);
            case GET_ACCOUNT_FUNDS:
                return requestFunds(brandValue);
            case GET_MARKET_PRICES_BY_EVENT:
                return retrieveMarketPricesByEvent(requestJsonObject);
            default:
                return badRequest(INVALID_API_REQUEST);
        }
    }

    /**
     * Records the brand and (when present and valid) the matching product-token cookie.
     *
     * @return false only when the brand path itself is unknown; a known brand without a
     *         valid token is still accepted (token stays null)
     */
    private boolean setBrandProductToken(String brandValue, String productTokenBetfair,
                                         String productTokenPaddypower) {
        if (brandValue.equals(BETFAIR) && productTokenBetfair != null
                && Utils.productTokenValid(productTokenBetfair)) {
            brand = brandValue;
            productToken = productTokenBetfair;
            return true;
        } else if (brandValue.equals(PADDYPOWER) && productTokenPaddypower != null
                && Utils.productTokenValid(productTokenPaddypower)) {
            brand = brandValue;
            productToken = productTokenPaddypower;
            return true;
        } else if (brandValue.equals(BETFAIR) || brandValue.equals(PADDYPOWER)) {
            brand = brandValue;
            return true;
        }
        return false;
    }

    /** Lists the market catalogue for an event, then fetches prices for every returned market. */
    private JSONObject retrieveMarketPricesByEvent(JSONObject requestJsonObject)
            throws IOException, ParseException {
        JSONObject marketsCatalogueResponse = retrieveMarketCatalogue(requestJsonObject);
        JSONObject marketPricesRequestObject = new JSONObject();
        JSONArray markets = (JSONArray) new JSONParser().parse(marketsCatalogueResponse.get("response").toString());
        JSONArray requestArray = new JSONArray();
        for (Object object : markets) {
            JSONObject jsonObject = (JSONObject) object;
            String marketId = jsonObject.get("marketId").toString();
            requestArray.add(marketId);
        }
        marketPricesRequestObject.put("marketIds", requestArray);
        return retrieveMarketPrices(marketPricesRequestObject);
    }

    /** Lists all events using the canned listEvents body; the request JSON is not consulted. */
    private JSONObject retrieveAllEvents(JSONObject requestJsonObject) {
        String requestBody = bodies.get(LIST_EVENTS);
        String affiliateProductToken = getAffiliateSession(brand);
        Map<Integer, String> sportsbookResponse =
                sportsbookAPIRequest(LIST_EVENTS, requestBody, affiliateProductToken, false);
        return buildResponseJson(sportsbookResponse);
    }

    /** Lists the market catalogue filtered by the request's "eventId". */
    private JSONObject retrieveMarketCatalogue(JSONObject requestJsonObject) throws ParseException {
        String affiliateProductToken = getAffiliateSession(brand);
        String eventId = (String) requestJsonObject.get("eventId");
        JSONArray eventIds = new JSONArray();
        eventIds.add(eventId);
        JSONObject marketFilter = new JSONObject();
        marketFilter.put("eventIds", eventIds);
        JSONObject requestBody = (JSONObject) new JSONParser().parse(bodies.get(LIST_MARKET_CATALOGUE));
        JSONObject listMarketCatalogueRequestParams =
                (JSONObject) requestBody.get("listMarketCatalogueRequestParams");
        listMarketCatalogueRequestParams.put("marketFilter", marketFilter);
        requestBody.put("listMarketCatalogueRequestParams", listMarketCatalogueRequestParams);
        Map<Integer, String> sportsbookResponse =
                sportsbookAPIRequest(LIST_MARKET_CATALOGUE, requestBody.toJSONString(), affiliateProductToken, false);
        return buildResponseJson(sportsbookResponse);
    }

    /** Fetches prices for the market ids listed in the request's "marketIds" array. */
    private JSONObject retrieveMarketPrices(JSONObject requestJsonObject) throws IOException, ParseException {
        JSONObject requestBody = (JSONObject) new JSONParser().parse(bodies.get(LIST_MARKET_PRICES));
        JSONArray marketsIds = (JSONArray) requestJsonObject.get("marketIds");
        JSONObject listMarketPrices = new JSONObject();
        listMarketPrices.put("marketIds", marketsIds);
        requestBody.put("listMarketPricesRequestParams", listMarketPrices);
        String affiliateProductToken = getAffiliateSession(brand);
        Map<Integer, String> sportsbookResponse =
                sportsbookAPIRequest(LIST_MARKET_PRICES, requestBody.toJSONString(), affiliateProductToken, false);
        return buildResponseJson(sportsbookResponse);
    }

    /** Fetches the caller's account funds; uses the user's own token (transactional operation). */
    private JSONObject requestFunds(String brand) throws IOException {
        String requestBody = bodies.get(GET_ACCOUNT_FUNDS);
        Map<Integer, String> sportsbookResponse =
                sportsbookAPIRequest(GET_ACCOUNT_FUNDS, requestBody, null, true);
        return buildResponseJson(sportsbookResponse);
    }

    /** Asks the API to imply a bet for the given market/selection (no stake committed). */
    private JSONObject implyBet(JSONObject requestJsonObject) throws IOException, ParseException {
        JSONObject requestBody = new JSONObject();
        JSONObject betLegs = new JSONObject();
        String marketId = requestJsonObject.get("marketId").toString();
        String selectionId = requestJsonObject.get("selectionId").toString();
        betLegs.put("betLegs", buildBetLegs(marketId, selectionId));
        requestBody.put("implyBetsRequestParams", betLegs);
        Map<Integer, String> sportsbookResponse =
                sportsbookAPIRequest(IMPLY_BETS, requestBody.toJSONString(), null, true);
        return buildResponseJson(sportsbookResponse);
    }

    /** Places a single bet built from marketId/selectionId/stake/useBonus in the request. */
    private JSONObject placeBet(JSONObject requestJsonObject) throws IOException {
        JSONObject requestBody = new JSONObject();
        String marketId = requestJsonObject.get("marketId").toString();
        String selectionId = requestJsonObject.get("selectionId").toString();
        String stake = requestJsonObject.get("stake").toString();
        boolean useBonus = Boolean.parseBoolean(requestJsonObject.get("useBonus").toString());
        requestBody.put("placeBetsRequestParams", buildBetDefinitions(marketId, selectionId, stake, useBonus));
        Map<Integer, String> sportsbookResponse =
                sportsbookAPIRequest(PLACE_BETS, requestBody.toJSONString(), null, true);
        return buildResponseJson(sportsbookResponse);
    }

    /** Builds the error response JSON for a given error classifier. */
    private JSONObject badRequest(String type) {
        JSONObject response = new JSONObject();
        switch (type) {
            case INVALID_JSON_REQUEST_BODY:
                response.put("status", "invalid_json");
                response.put("message", "Invalid body request!");
                break;
            case INVALID_REQUEST_PATH:
                response.put("status", "invalid_request_path");
                response.put("message", "Invalid request path!");
                break;
            case PRODUCT_TOKEN_NOT_FOUND:
                response.put("status", "token_not_found");
                response.put("message", "Product token not found!!");
                break;
            case INVALID_BRAND_PATH:
                response.put("status", "invalid_brand_path");
                response.put("message", "The brand you are trying to access is not available!");
                break;
            case INVALID_PRODUCT_TOKEN:
                response.put("status", "invalid_product_token");
                response.put("message", "The productToken used is invalid!");
                break;
            case INVALID_API_REQUEST:
                response.put("status", "invalid_api_request");
                response.put("message", "Unknown error!");
                break;
            default:
                response.put("status", "broken");
                response.put("message", "Unknown error!");
                break;
        }
        return response;
    }

    /**
     * POSTs {@code body} to the Sportsbook endpoint for {@code type}.
     *
     * @param productToken overrides the field-held token when non-null
     * @return single-entry map of HTTP status code to response body (or status message on failure)
     */
    private Map<Integer, String> sportsbookAPIRequest(String type, String body, String productToken,
                                                      boolean transactionalOperation) {
        Map<Integer, String> result;
        String requestPath = Settings.getSportsbookEndpoint(brand) + type + '/';
        HttpURLConnection connection = createNewHttpConnection(requestPath, getSportsbookKey(brand),
                (productToken != null ? productToken : this.productToken), "json", transactionalOperation);
        try (OutputStream outputStream = connection.getOutputStream()) {
            outputStream.write(body.getBytes());
            result = extractResponseIfSuccess(connection);
        } catch (IOException e) {
            // BUGFIX: preserve the underlying IOException as the cause instead of discarding it.
            throw new RuntimeException("Sportsbook Request Error!", e);
        }
        return result;
    }

    /** Maps the response: body text on HTTP 200, otherwise the HTTP status message. */
    private Map<Integer, String> extractResponseIfSuccess(HttpURLConnection connection) throws IOException {
        Map<Integer, String> result = new HashMap<>();
        if (connection.getResponseCode() == 200) {
            try (InputStream inputStream = connection.getInputStream()) {
                result.put(connection.getResponseCode(), IOUtils.toString(inputStream));
            }
        } else {
            result.put(connection.getResponseCode(), connection.getResponseMessage());
        }
        return result;
    }

    /** Wraps the downstream result in {status, response} on 200 or {status, error} otherwise. */
    private JSONObject buildResponseJson(Map<Integer, String> sportsbookResponse) {
        JSONObject response = new JSONObject();
        if (sportsbookResponse.containsKey(200)) {
            response.put("status", "success");
            response.put("response", sportsbookResponse.get(200));
        } else {
            response.put("status", "fail");
            response.put("error", sportsbookResponse.entrySet().iterator().next().getValue());
        }
        return response;
    }

    /** Builds the single SIMPLE_SELECTION bet leg array used by imply/place requests. */
    private JSONArray buildBetLegs(String marketId, String selectionId) {
        JSONArray betLegs = new JSONArray();
        JSONObject betLeg = new JSONObject();
        JSONArray betRunners = buildBetRunners(marketId, selectionId);
        betLeg.put("legType", "SIMPLE_SELECTION");
        betLeg.put("betRunners", betRunners);
        betLegs.add(betLeg);
        return betLegs;
    }

    /** Builds the one-element betRunners array for a market/selection pair. */
    private JSONArray buildBetRunners(String marketId, String selectionId) {
        JSONObject runnerInformation = new JSONObject();
        runnerInformation.put("marketId", marketId);
        runnerInformation.put("selectionId", selectionId);
        JSONObject runner = new JSONObject();
        runner.put("runner", runnerInformation);
        JSONArray betRunners = new JSONArray();
        betRunners.add(runner);
        return betRunners;
    }

    /** Builds one SINGLE bet definition with the given stake per line. */
    private JSONObject buildBetDefinition(String marketId, String selectionId, String stake) {
        JSONObject betDefinition = new JSONObject();
        JSONArray betLegsArray = buildBetLegs(marketId, selectionId);
        betDefinition.put("stakePerLine", Float.valueOf(stake));
        betDefinition.put("betNumber", 1);
        betDefinition.put("betType", "SINGLE");
        betDefinition.put("betLegs", betLegsArray);
        return betDefinition;
    }

    /** Wraps a single bet definition in the placeBets request structure. */
    private JSONObject buildBetDefinitions(String marketId, String selectionId, String stake, boolean useBonus) {
        JSONObject betDefinition = new JSONObject();
        JSONObject betDefinitionObj = buildBetDefinition(marketId, selectionId, stake);
        JSONArray betDefinitionsArray = new JSONArray();
        betDefinitionsArray.add(betDefinitionObj);
        betDefinition.put("betDefinitions", betDefinitionsArray);
        betDefinition.put("useAvailableBonus", useBonus);
        return betDefinition;
    }

    /**
     * Logs in against the identity API and returns the affiliate's product token.
     *
     * @throws RuntimeException (with cause) when the login call or response parsing fails
     * @throws IllegalStateException when the identity API returns an invalid token
     */
    private static String getAffiliateSession(String brand) {
        String urlString = Settings.getIdentityApiLoginUrl(brand);
        HttpURLConnection connection =
                createNewHttpConnection(urlString, getSportsbookKey(brand), null, "www", false);
        Map<String, String> requestBody = createRequestBody(brand);
        StringJoiner params = createParamsUrl(requestBody);
        // NOTE(review): getBytes() uses the platform default charset here; confirm whether the
        // identity API expects UTF-8 before changing it.
        try (OutputStream outputStream = connection.getOutputStream()) {
            outputStream.write(params.toString().getBytes());
            String identityResponse;
            // BUGFIX: the input stream was closed manually, leaking on an IOUtils.toString failure;
            // try-with-resources closes it on every path.
            try (InputStream inputStream = connection.getInputStream()) {
                identityResponse = IOUtils.toString(inputStream);
            }
            JSONObject jsonResponse = (JSONObject) new JSONParser().parse(identityResponse);
            String affiliateProductToken = (String) jsonResponse.get(TOKEN_PARAM);
            if (Utils.productTokenValid(affiliateProductToken)) {
                return affiliateProductToken;
            } else {
                throw new IllegalStateException("Invalid affliate product token");
            }
        } catch (ParseException | IOException e) {
            LOG.error("Unable to get Affiliate Main Session");
            // BUGFIX: keep the original exception as the cause for diagnosability.
            throw new RuntimeException("Unable to get Affiliate Main Session", e);
        }
    }
}
package com.diamondq.common.utils.parsing.properties;

import java.util.Dictionary;
import java.util.Map;

import org.checkerframework.checker.nullness.qual.NonNull;
import org.checkerframework.checker.nullness.qual.Nullable;

/**
 * Static helpers for extracting typed values from {@link Map} and {@link Dictionary} property
 * sets. Each getter comes in a Map and a Dictionary flavor; "NonNull" variants return a caller
 * supplied default when the key is absent, "Nullable" variants return {@code null}.
 * <p>
 * Values are converted via {@code toString()} followed by the matching {@code parseXxx} call, so
 * the numeric getters propagate {@link NumberFormatException} for malformed values.
 */
public class PropertiesParsing {

  // Utility class: all members are static, so prevent instantiation (Effective Java Item 4).
  private PropertiesParsing() {
  }

  /**
   * Returns the boolean value for {@code pKey}, or {@code pDefault} when absent.
   * Note: {@code pDefault} is unboxed into the {@code boolean} return, so passing a
   * {@code null} default for a missing key throws {@link NullPointerException}.
   */
  public static boolean getNonNullBoolean(Map<String, Object> pProps, String pKey, Boolean pDefault) {
    Object raw = pProps.get(pKey);
    return raw == null ? pDefault : Boolean.parseBoolean(raw.toString());
  }

  /** Dictionary flavor of {@link #getNonNullBoolean(Map, String, Boolean)}. */
  public static boolean getNonNullBoolean(Dictionary<String, Object> pProps, String pKey, Boolean pDefault) {
    Object raw = pProps.get(pKey);
    return raw == null ? pDefault : Boolean.parseBoolean(raw.toString());
  }

  /** Returns the boolean value for {@code pKey}, or {@code null} when absent. */
  public static @Nullable Boolean getNullableBoolean(Map<String, Object> pProps, String pKey) {
    Object raw = pProps.get(pKey);
    return raw == null ? null : Boolean.parseBoolean(raw.toString());
  }

  /** Dictionary flavor of {@link #getNullableBoolean(Map, String)}. */
  public static @Nullable Boolean getNullableBoolean(Dictionary<String, Object> pProps, String pKey) {
    Object raw = pProps.get(pKey);
    return raw == null ? null : Boolean.parseBoolean(raw.toString());
  }

  /** Returns the string value for {@code pKey}, or {@code pDefault} when absent. */
  public static String getNonNullString(Map<String, Object> pProps, String pKey, String pDefault) {
    Object raw = pProps.get(pKey);
    return raw == null ? pDefault : raw.toString();
  }

  /** Dictionary flavor of {@link #getNonNullString(Map, String, String)}. */
  public static String getNonNullString(Dictionary<String, Object> pProps, String pKey, String pDefault) {
    Object raw = pProps.get(pKey);
    return raw == null ? pDefault : raw.toString();
  }

  /** Returns the string value for {@code pKey}, or {@code null} when absent. */
  public static @Nullable String getNullableString(Map<String, Object> pProps, String pKey) {
    Object raw = pProps.get(pKey);
    return raw == null ? null : raw.toString();
  }

  /** Dictionary flavor of {@link #getNullableString(Map, String)}. */
  public static @Nullable String getNullableString(Dictionary<String, Object> pProps, String pKey) {
    Object raw = pProps.get(pKey);
    return raw == null ? null : raw.toString();
  }

  /** Returns the int value for {@code pKey}, or {@code pDefault} when absent. */
  public static int getNonNullInt(Map<String, Object> pProps, String pKey, int pDefault) {
    Object raw = pProps.get(pKey);
    return raw == null ? pDefault : Integer.parseInt(raw.toString());
  }

  /** Dictionary flavor of {@link #getNonNullInt(Map, String, int)}. */
  public static int getNonNullInt(Dictionary<String, Object> pProps, String pKey, int pDefault) {
    Object raw = pProps.get(pKey);
    return raw == null ? pDefault : Integer.parseInt(raw.toString());
  }

  /** Returns the int value for {@code pKey}, or {@code null} when absent. */
  public static @Nullable Integer getNullableInt(Map<String, Object> pProps, String pKey) {
    Object raw = pProps.get(pKey);
    return raw == null ? null : Integer.parseInt(raw.toString());
  }

  /** Dictionary flavor of {@link #getNullableInt(Map, String)}. */
  public static @Nullable Integer getNullableInt(Dictionary<String, Object> pProps, String pKey) {
    Object raw = pProps.get(pKey);
    return raw == null ? null : Integer.parseInt(raw.toString());
  }

  /** Returns the long value for {@code pKey}, or {@code pDefault} when absent. */
  public static long getNonNullLong(Map<String, Object> pProps, String pKey, long pDefault) {
    Object raw = pProps.get(pKey);
    return raw == null ? pDefault : Long.parseLong(raw.toString());
  }

  /** Dictionary flavor of {@link #getNonNullLong(Map, String, long)}. */
  public static long getNonNullLong(Dictionary<String, Object> pProps, String pKey, long pDefault) {
    Object raw = pProps.get(pKey);
    return raw == null ? pDefault : Long.parseLong(raw.toString());
  }

  /** Returns the long value for {@code pKey}, or {@code null} when absent. */
  public static @Nullable Long getNullableLong(Map<String, Object> pProps, String pKey) {
    Object raw = pProps.get(pKey);
    return raw == null ? null : Long.parseLong(raw.toString());
  }

  /** Dictionary flavor of {@link #getNullableLong(Map, String)}. */
  public static @Nullable Long getNullableLong(Dictionary<String, Object> pProps, String pKey) {
    Object raw = pProps.get(pKey);
    return raw == null ? null : Long.parseLong(raw.toString());
  }

  /**
   * Splits the value for {@code pKey} on commas; an absent key yields an empty array.
   * (Per {@link String#split(String)}, a value with no commas yields a one-element array.)
   */
  public static @NonNull String[] getStringArray(Map<String, Object> pProps, String pKey) {
    Object raw = pProps.get(pKey);
    if (raw == null)
      return new String[0];
    @NonNull String[] results = raw.toString().split(",");
    return results;
  }

  /** Dictionary flavor of {@link #getStringArray(Map, String)}. */
  public static @NonNull String[] getStringArray(Dictionary<String, Object> pProps, String pKey) {
    Object raw = pProps.get(pKey);
    if (raw == null)
      return new String[0];
    @NonNull String[] results = raw.toString().split(",");
    return results;
  }

  /** Removes every listed key from the map; absent keys are ignored. */
  public static void removeAll(Map<String, Object> pProps, String... pKeys) {
    for (String key : pKeys)
      pProps.remove(key);
  }

  /** Removes every listed key from the dictionary; absent keys are ignored. */
  public static void removeAll(Dictionary<String, Object> pProps, String... pKeys) {
    for (String key : pKeys)
      pProps.remove(key);
  }
}
package test;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;

import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.beans.VetoableChangeListener;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import org.junit.Before;
import org.junit.Test;

import com.scg.beans.BenefitEvent;
import com.scg.beans.BenefitListener;
import com.scg.beans.BenefitManager;
import com.scg.beans.CompensationManager;
import com.scg.beans.Eeoc;
import com.scg.beans.HumanResourceManager;
import com.scg.beans.StaffConsultant;
import com.scg.domain.Consultant;
import com.scg.util.PersonalName;

/**
 * JUnit test for the change events and listeners.
 *
 * Exercises the JavaBeans event plumbing of {@code StaffConsultant}: bound properties
 * (sickLeaveHours, vacationHours), the constrained/vetoable payRate property, termination
 * events observed by {@code Eeoc}, and benefit enrollment/cancellation events.
 *
 * @author Russ Moul
 */
public final class Assignment06Test {
    /** Initial pay rate for coder. */
    private static final int CODER_INITIAL_PAY_RATE = 9524;

    /** Initial pay rate for architect. */
    private static final int ARCHITECT_INITIAL_PAY_RATE = 10000;

    /** Initial pay rate for tester. */
    private static final int TESTER_INITIAL_PAY_RATE = 5000;

    /** Initial pay rate for engineer. */
    private static final int ENGINEER_INITIAL_PAY_RATE = 7500;

    /** Initial value assigned to sick leave hours for all consultants. */
    private static final int INITIAL_SICK_LEAVE_HOURS = 80;

    /** Test value for sick leave hours update. */
    private static final int TEST_SICK_LEAVE_HOURS = 320;

    /** Initial value assigned to vacation hours for all consultants. */
    private static final int INITIAL_VACATION_HOURS = 40;

    /** Test value for vacation hours update. */
    private static final int TEST_VACATION_HOURS = 240;

    /** Test value for valid pay rate adjustment. */
    private static final int VALID_RAISE = 10000;

    /** Test value for invalid pay rate adjustment (expected to be vetoed). */
    private static final int INVALID_RAISE = 10501;

    /** Property name for sickLeaveHours. */
    private static final String SICK_LEAVE_HOURS_PROP = "sickLeaveHours";

    /** Property name for vacationHours. */
    private static final String VACATION_HOURS_PROP = "vacationHours";

    /** Property name for payRate. */
    private static final String PAY_RATE_PROP = "payRate";

    /** Index into consultantList: consultant enrolling in medical. */
    private static final int MEDICAL_ENROLLEE = 0;

    /** Index into consultantList: consultant canceling medical. */
    private static final int MEDICAL_CANCEL = 1;

    /** Index into consultantList: consultant enrolling in dental. */
    private static final int DENTAL_ENROLLEE = 2;

    /** Index into consultantList: consultant canceling dental. */
    private static final int DENTAL_CANCEL = 3;

    /** Consultants for testing. */
    private List<StaffConsultant> consultantList;

    /** A specific consultant for testing (first entry of consultantList). */
    private StaffConsultant staffConsultant;

    /** HR object for testing. */
    private HumanResourceManager hrServer;

    /** EEOC object for testing. */
    private Eeoc watchDog;

    /** Property listener for testing. */
    private TestPropertyListener testPropertyListener;

    /** Benefit listener for testing. */
    private TestBenefitListener testBenefitListener;

    /**
     * Property listener for testing purposes, keeps track of the last event.
     * Registered both as a regular PropertyChangeListener and as a
     * VetoableChangeListener so either kind of notification is captured.
     */
    private class TestPropertyListener implements PropertyChangeListener, VetoableChangeListener {
        /** The last event. */
        private PropertyChangeEvent lastEvent;

        /**
         * Gets the last event and clears it.
         *
         * @return the last event, null if an event has not arrived since last called.
         */
        public PropertyChangeEvent lastEvent() {
            // Read-and-reset so each test assertion consumes exactly one event.
            final PropertyChangeEvent tmp = lastEvent;
            lastEvent = null;
            return tmp;
        }

        /**
         * Simply records the event.
         *
         * @param event the change event
         */
        @Override
        public void propertyChange(final PropertyChangeEvent event) {
            this.lastEvent = event;
        }

        /**
         * Simply records the event.
         *
         * @param event the change event
         */
        @Override
        public void vetoableChange(final PropertyChangeEvent event) {
            this.lastEvent = event;
        }
    }

    /**
     * Benefit listener for testing purposes, keeps track of benefit enrollments.
     * Each callback records the consultant from the event so tests can verify the
     * right consultant was routed to the right notification.
     */
    private class TestBenefitListener implements BenefitListener {
        // Last consultant seen by each of the four benefit callbacks.
        Consultant medicalEnrollConsultant = null;
        Consultant medicalCancelConsultant = null;
        Consultant dentalEnrollConsultant = null;
        Consultant dentalCancelConsultant = null;

        @Override
        public void medicalEnrollment(BenefitEvent evnt) {
            medicalEnrollConsultant = evnt.getConsultant();
        }

        @Override
        public void medicalCancellation(BenefitEvent evnt) {
            medicalCancelConsultant = evnt.getConsultant();
        }

        @Override
        public void dentalEnrollment(BenefitEvent evnt) {
            dentalEnrollConsultant = evnt.getConsultant();
        }

        @Override
        public void dentalCancellation(BenefitEvent evnt) {
            dentalCancelConsultant = evnt.getConsultant();
        }
    }

    /**
     * Initialize all the objects used for testing.
     */
    @Before
    public void setUp() {
        // Create some Consultants
        consultantList = new ArrayList<StaffConsultant>();
        staffConsultant = new StaffConsultant(
            new PersonalName("Coder", "Kalvin"),
            CODER_INITIAL_PAY_RATE, INITIAL_SICK_LEAVE_HOURS, INITIAL_VACATION_HOURS);
        consultantList.add(staffConsultant);
        consultantList.add(new StaffConsultant(
            new PersonalName("Architect", "Amber", "K."),
            ARCHITECT_INITIAL_PAY_RATE, INITIAL_SICK_LEAVE_HOURS, INITIAL_VACATION_HOURS));
        consultantList.add(new StaffConsultant(
            new PersonalName("Tester", "Teddy", "B."),
            TESTER_INITIAL_PAY_RATE, INITIAL_SICK_LEAVE_HOURS, INITIAL_VACATION_HOURS));
        consultantList.add(new StaffConsultant(
            new PersonalName("Engineer", "Ernie"),
            ENGINEER_INITIAL_PAY_RATE, INITIAL_SICK_LEAVE_HOURS, INITIAL_VACATION_HOURS));

        // create the server and wire the termination watchdog
        hrServer = new HumanResourceManager();
        watchDog = new Eeoc();
        hrServer.addTerminationListener(watchDog);

        // Managers that react to pay-rate vetoes and benefit-hour changes
        final CompensationManager compMgr = new CompensationManager();
        final BenefitManager bm = new BenefitManager();
        hrServer.addBenefitListener(bm);
        testBenefitListener = new TestBenefitListener();
        hrServer.addBenefitListener(testBenefitListener);
        testPropertyListener = new TestPropertyListener();

        // Register the listeners on every consultant
        for (StaffConsultant sc : consultantList) {
            sc.addVetoableChangeListener(compMgr);
            sc.addPayRateListener(compMgr);
            sc.addSickLeaveHoursListener(bm);
            sc.addVacationHoursListener(bm);
            sc.addPropertyChangeListener(testPropertyListener);
        }
    }

    /** Test the vetoable property (payRate) */
    @Test
    public void testVeto() {
        // A valid raise goes through and fires a payRate change event
        hrServer.adjustPayRate(staffConsultant, VALID_RAISE);
        assertEquals(VALID_RAISE, staffConsultant.getPayRate());
        PropertyChangeEvent event = testPropertyListener.lastEvent();
        assertEquals(PAY_RATE_PROP, event.getPropertyName());
        assertEquals(VALID_RAISE, event.getNewValue());
        assertEquals(staffConsultant, event.getSource());

        // this should be vetoed, so the pay rate won't be set
        hrServer.adjustPayRate(staffConsultant, INVALID_RAISE);
        assertEquals(VALID_RAISE, staffConsultant.getPayRate());
        // A vetoed change must not produce a property change event
        event = testPropertyListener.lastEvent();
        assertNull(event);
    }

    /** Test the termination events */
    @Test
    public void testTerminations() {
        assertEquals(0, watchDog.voluntaryTerminationCount());
        assertEquals(0, watchDog.forcedTerminationCount());

        // Terminate two employees
        final Iterator<StaffConsultant> iter = consultantList.iterator();
        if (iter.hasNext()) {
            final StaffConsultant consultant = iter.next();
            // Resignation counts as a voluntary termination
            hrServer.acceptResignation(consultant);
        }
        assertEquals(1, watchDog.voluntaryTerminationCount());
        if (iter.hasNext()) {
            final StaffConsultant consultant = iter.next();
            // Firing counts as a forced termination
            hrServer.terminate(consultant);
        }
        assertEquals(1, watchDog.forcedTerminationCount());
    }

    /** Test the simple properties (sickLeaveHours and vacationHours) */
    @Test
    public void testBenefits() {
        // Sick leave adjustment fires a bound-property event
        hrServer.adjustSickLeaveHours(staffConsultant, TEST_SICK_LEAVE_HOURS);
        PropertyChangeEvent event = testPropertyListener.lastEvent();
        assertEquals(SICK_LEAVE_HOURS_PROP, event.getPropertyName());
        assertEquals(TEST_SICK_LEAVE_HOURS, event.getNewValue());
        assertEquals(staffConsultant, event.getSource());

        // Vacation adjustment fires a bound-property event
        hrServer.adjustVacationHours(staffConsultant, TEST_VACATION_HOURS);
        event = testPropertyListener.lastEvent();
        assertEquals(VACATION_HOURS_PROP, event.getPropertyName());
        assertEquals(TEST_VACATION_HOURS, event.getNewValue());
        assertEquals(staffConsultant, event.getSource());

        // Fire one of each benefit event against a distinct consultant, then verify
        // each callback recorded the consultant it was invoked for.
        hrServer.enrollMedical(consultantList.get(MEDICAL_ENROLLEE));
        hrServer.enrollDental(consultantList.get(MEDICAL_CANCEL));
        hrServer.cancelMedical(consultantList.get(DENTAL_ENROLLEE));
        hrServer.cancelDental(consultantList.get(DENTAL_CANCEL));
        assertEquals(consultantList.get(MEDICAL_ENROLLEE), testBenefitListener.medicalEnrollConsultant);
        assertEquals(consultantList.get(MEDICAL_CANCEL), testBenefitListener.dentalEnrollConsultant);
        assertEquals(consultantList.get(DENTAL_ENROLLEE), testBenefitListener.medicalCancelConsultant);
        assertEquals(consultantList.get(DENTAL_CANCEL), testBenefitListener.dentalCancelConsultant);
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.as2.api; import java.io.IOException; import java.security.KeyPair; import java.security.KeyPairGenerator; import java.security.SecureRandom; import java.security.Security; import java.security.cert.Certificate; import java.security.cert.X509Certificate; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.camel.component.as2.api.entity.AS2DispositionModifier; import org.apache.camel.component.as2.api.entity.AS2DispositionType; import org.apache.camel.component.as2.api.entity.AS2MessageDispositionNotificationEntity; import org.apache.camel.component.as2.api.entity.ApplicationEDIEntity; import org.apache.camel.component.as2.api.entity.ApplicationEDIFACTEntity; import org.apache.camel.component.as2.api.entity.ApplicationPkcs7MimeEntity; import org.apache.camel.component.as2.api.entity.ApplicationPkcs7SignatureEntity; import org.apache.camel.component.as2.api.entity.DispositionMode; import org.apache.camel.component.as2.api.entity.DispositionNotificationMultipartReportEntity; import org.apache.camel.component.as2.api.entity.MimeEntity; import 
org.apache.camel.component.as2.api.entity.MultipartSignedEntity; import org.apache.camel.component.as2.api.entity.TextPlainEntity; import org.apache.camel.component.as2.api.util.AS2Utils; import org.apache.camel.component.as2.api.util.EntityUtils; import org.apache.camel.component.as2.api.util.HttpMessageUtils; import org.apache.camel.component.as2.api.util.MicUtils; import org.apache.camel.component.as2.api.util.MicUtils.ReceivedContentMic; import org.apache.camel.test.AvailablePortFinder; import org.apache.http.HttpEntity; import org.apache.http.HttpEntityEnclosingRequest; import org.apache.http.HttpException; import org.apache.http.HttpRequest; import org.apache.http.HttpResponse; import org.apache.http.HttpStatus; import org.apache.http.HttpVersion; import org.apache.http.entity.ContentType; import org.apache.http.impl.EnglishReasonPhraseCatalog; import org.apache.http.message.BasicHttpEntityEnclosingRequest; import org.apache.http.message.BasicHttpResponse; import org.apache.http.protocol.HttpContext; import org.apache.http.protocol.HttpCoreContext; import org.apache.http.protocol.HttpDateGenerator; import org.apache.http.protocol.HttpRequestHandler; import org.bouncycastle.asn1.ASN1EncodableVector; import org.bouncycastle.asn1.cms.AttributeTable; import org.bouncycastle.asn1.cms.IssuerAndSerialNumber; import org.bouncycastle.asn1.smime.SMIMECapabilitiesAttribute; import org.bouncycastle.asn1.smime.SMIMECapability; import org.bouncycastle.asn1.smime.SMIMECapabilityVector; import org.bouncycastle.asn1.smime.SMIMEEncryptionKeyPreferenceAttribute; import org.bouncycastle.asn1.x500.X500Name; import org.bouncycastle.cert.jcajce.JcaCertStore; import org.bouncycastle.cms.jcajce.JcaSimpleSignerInfoGeneratorBuilder; import org.bouncycastle.jce.provider.BouncyCastleProvider; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static 
org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; public class AS2MessageTest { public static final String EDI_MESSAGE = "UNB+UNOA:1+005435656:1+006415160:1+060515:1434+00000000000778'\n" + "UNH+00000000000117+INVOIC:D:97B:UN'\n" + "BGM+380+342459+9'\n" + "DTM+3:20060515:102'\n" + "RFF+ON:521052'\n" + "NAD+BY+792820524::16++CUMMINS MID-RANGE ENGINE PLANT'\n" + "NAD+SE+005435656::16++GENERAL WIDGET COMPANY'\n" + "CUX+1:USD'\n" + "LIN+1++157870:IN'\n" + "IMD+F++:::WIDGET'\n" + "QTY+47:1020:EA'\n" + "ALI+US'\n" + "MOA+203:1202.58'\n" + "PRI+INV:1.179'\n" + "LIN+2++157871:IN'\n" + "IMD+F++:::DIFFERENT WIDGET'\n" + "QTY+47:20:EA'\n" + "ALI+JP'\n" + "MOA+203:410'\n" + "PRI+INV:20.5'\n" + "UNS+S'\n" + "MOA+39:2137.58'\n" + "ALC+C+ABG'\n" + "MOA+8:525'\n" + "UNT+23+00000000000117'\n" + "UNZ+1+00000000000778'"; private static final Logger LOG = LoggerFactory.getLogger(AS2MessageTest.class); private static final String METHOD = "POST"; private static final String TARGET_HOST = "localhost"; private static final int TARGET_PORT = AvailablePortFinder.getNextAvailable(8080); private static final String RECIPIENT_DELIVERY_ADDRESS = "http://localhost:" + TARGET_PORT + "/handle-receipts"; private static final String AS2_VERSION = "1.1"; private static final String USER_AGENT = "Camel AS2 Endpoint"; private static final String REQUEST_URI = "/"; private static final String AS2_NAME = "878051556"; private static final String SUBJECT = "Test Case"; private static final String FROM = "mrAS@example.org"; private static final String CLIENT_FQDN = "client.example.org"; private static final String SERVER_FQDN = "server.example.org"; private static final String REPORTING_UA = "Server Responding with MDN"; private static final String DISPOSITION_NOTIFICATION_TO = "mrAS@example.org"; private static final String 
DISPOSITION_NOTIFICATION_OPTIONS = "signed-receipt-protocol=optional,pkcs7-signature; signed-receipt-micalg=optional,sha1"; private static final String[] SIGNED_RECEIPT_MIC_ALGORITHMS = new String[] {"sha1", "md5"}; private static final HttpDateGenerator DATE_GENERATOR = new HttpDateGenerator(); private static AS2ServerConnection testServer; private AS2SignedDataGenerator gen; private KeyPair issueKP; private X509Certificate issueCert; private KeyPair signingKP; private X509Certificate signingCert; private List<X509Certificate> certList; private void setupKeysAndCertificates() throws Exception { // // set up our certificates // KeyPairGenerator kpg = KeyPairGenerator.getInstance("RSA", "BC"); kpg.initialize(1024, new SecureRandom()); String issueDN = "O=Punkhorn Software, C=US"; issueKP = kpg.generateKeyPair(); issueCert = Utils.makeCertificate(issueKP, issueDN, issueKP, issueDN); // // certificate we sign against // String signingDN = "CN=William J. Collins, E=punkhornsw@gmail.com, O=Punkhorn Software, C=US"; signingKP = kpg.generateKeyPair(); signingCert = Utils.makeCertificate(signingKP, signingDN, issueKP, issueDN); certList = new ArrayList<>(); certList.add(signingCert); certList.add(issueCert); } @BeforeClass public static void setUpOnce() throws Exception { Security.addProvider(new BouncyCastleProvider()); // // set up our certificates // KeyPairGenerator kpg = KeyPairGenerator.getInstance("RSA", "BC"); kpg.initialize(1024, new SecureRandom()); String issueDN = "O=Punkhorn Software, C=US"; KeyPair issueKP = kpg.generateKeyPair(); X509Certificate issueCert = Utils.makeCertificate(issueKP, issueDN, issueKP, issueDN); // // certificate we sign against // String signingDN = "CN=William J. 
Collins, E=punkhornsw@gmail.com, O=Punkhorn Software, C=US"; KeyPair signingKP = kpg.generateKeyPair(); X509Certificate signingCert = Utils.makeCertificate(signingKP, signingDN, issueKP, issueDN); List<X509Certificate> certList = new ArrayList<>(); certList.add(signingCert); certList.add(issueCert); testServer = new AS2ServerConnection(AS2_VERSION, "MyServer-HTTP/1.1", SERVER_FQDN, TARGET_PORT, AS2SignatureAlgorithm.SHA256WITHRSA, certList.toArray(new Certificate[0]), signingKP.getPrivate()); testServer.listen("*", new HttpRequestHandler() { @Override public void handle(HttpRequest request, HttpResponse response, HttpContext context) throws HttpException, IOException { try { org.apache.camel.component.as2.api.entity.EntityParser.parseAS2MessageEntity(request); context.setAttribute(AS2ServerManager.SUBJECT, SUBJECT); context.setAttribute(AS2ServerManager.FROM, AS2_NAME); LOG.debug(AS2Utils.printMessage(request)); } catch (Exception e) { throw new HttpException("Failed to parse AS2 Message Entity", e); } } }); } @AfterClass public static void tearDownOnce() throws Exception { testServer.close(); } @Before public void setUp() throws Exception { Security.addProvider(new BouncyCastleProvider()); setupKeysAndCertificates(); // Create and populate certificate store. 
JcaCertStore certs = new JcaCertStore(certList); // Create capabilities vector SMIMECapabilityVector capabilities = new SMIMECapabilityVector(); capabilities.addCapability(SMIMECapability.dES_EDE3_CBC); capabilities.addCapability(SMIMECapability.rC2_CBC, 128); capabilities.addCapability(SMIMECapability.dES_CBC); // Create signing attributes ASN1EncodableVector attributes = new ASN1EncodableVector(); attributes.add(new SMIMEEncryptionKeyPreferenceAttribute(new IssuerAndSerialNumber( new X500Name(signingCert.getIssuerDN().getName()), signingCert.getSerialNumber()))); attributes.add(new SMIMECapabilitiesAttribute(capabilities)); for (String signingAlgorithmName : AS2SignedDataGenerator .getSupportedSignatureAlgorithmNamesForKey(signingKP.getPrivate())) { try { this.gen = new AS2SignedDataGenerator(); this.gen.addSignerInfoGenerator(new JcaSimpleSignerInfoGeneratorBuilder().setProvider("BC") .setSignedAttributeGenerator(new AttributeTable(attributes)) .build(signingAlgorithmName, signingKP.getPrivate(), signingCert)); this.gen.addCertificates(certs); break; } catch (Exception e) { this.gen = null; continue; } } if (this.gen == null) { throw new Exception("failed to create signing generator"); } } @Test public void plainEDIMessageTest() throws Exception { AS2ClientConnection clientConnection = new AS2ClientConnection(AS2_VERSION, USER_AGENT, CLIENT_FQDN, TARGET_HOST, TARGET_PORT); AS2ClientManager clientManager = new AS2ClientManager(clientConnection); HttpCoreContext httpContext = clientManager.send(EDI_MESSAGE, REQUEST_URI, SUBJECT, FROM, AS2_NAME, AS2_NAME, AS2MessageStructure.PLAIN, ContentType.create(AS2MediaType.APPLICATION_EDIFACT, AS2Charset.US_ASCII), null, null, null, null, DISPOSITION_NOTIFICATION_TO, SIGNED_RECEIPT_MIC_ALGORITHMS, null, null, null); HttpRequest request = httpContext.getRequest(); assertEquals("Unexpected method value", METHOD, request.getRequestLine().getMethod()); assertEquals("Unexpected request URI value", REQUEST_URI, 
request.getRequestLine().getUri()); assertEquals("Unexpected HTTP version value", HttpVersion.HTTP_1_1, request.getRequestLine().getProtocolVersion()); assertEquals("Unexpected subject value", SUBJECT, request.getFirstHeader(AS2Header.SUBJECT).getValue()); assertEquals("Unexpected from value", FROM, request.getFirstHeader(AS2Header.FROM).getValue()); assertEquals("Unexpected AS2 version value", AS2_VERSION, request.getFirstHeader(AS2Header.AS2_VERSION).getValue()); assertEquals("Unexpected AS2 from value", AS2_NAME, request.getFirstHeader(AS2Header.AS2_FROM).getValue()); assertEquals("Unexpected AS2 to value", AS2_NAME, request.getFirstHeader(AS2Header.AS2_TO).getValue()); assertTrue("Unexpected message id value", request.getFirstHeader(AS2Header.MESSAGE_ID).getValue().endsWith(CLIENT_FQDN + ">")); assertEquals("Unexpected target host value", TARGET_HOST + ":" + TARGET_PORT, request.getFirstHeader(AS2Header.TARGET_HOST).getValue()); assertEquals("Unexpected user agent value", USER_AGENT, request.getFirstHeader(AS2Header.USER_AGENT).getValue()); assertNotNull("Date value missing", request.getFirstHeader(AS2Header.DATE)); assertNotNull("Content length value missing", request.getFirstHeader(AS2Header.CONTENT_LENGTH)); assertTrue("Unexpected content type for message", request.getFirstHeader(AS2Header.CONTENT_TYPE).getValue().startsWith(AS2MediaType.APPLICATION_EDIFACT)); assertTrue("Request does not contain entity", request instanceof BasicHttpEntityEnclosingRequest); HttpEntity entity = ((BasicHttpEntityEnclosingRequest) request).getEntity(); assertNotNull("Request does not contain entity", entity); assertTrue("Unexpected request entity type", entity instanceof ApplicationEDIFACTEntity); ApplicationEDIFACTEntity ediEntity = (ApplicationEDIFACTEntity) entity; assertTrue("Unexpected content type for entity", ediEntity.getContentType().getValue().startsWith(AS2MediaType.APPLICATION_EDIFACT)); assertTrue("Entity not set as main body of request", ediEntity.isMainBody()); } 
@Test public void multipartSignedMessageTest() throws Exception { AS2ClientConnection clientConnection = new AS2ClientConnection(AS2_VERSION, USER_AGENT, CLIENT_FQDN, TARGET_HOST, TARGET_PORT); AS2ClientManager clientManager = new AS2ClientManager(clientConnection); HttpCoreContext httpContext = clientManager.send(EDI_MESSAGE, REQUEST_URI, SUBJECT, FROM, AS2_NAME, AS2_NAME, AS2MessageStructure.SIGNED, ContentType.create(AS2MediaType.APPLICATION_EDIFACT, AS2Charset.US_ASCII), null, AS2SignatureAlgorithm.SHA256WITHRSA, certList.toArray(new Certificate[0]), signingKP.getPrivate(), DISPOSITION_NOTIFICATION_TO, SIGNED_RECEIPT_MIC_ALGORITHMS, null, null, null); HttpRequest request = httpContext.getRequest(); assertEquals("Unexpected method value", METHOD, request.getRequestLine().getMethod()); assertEquals("Unexpected request URI value", REQUEST_URI, request.getRequestLine().getUri()); assertEquals("Unexpected HTTP version value", HttpVersion.HTTP_1_1, request.getRequestLine().getProtocolVersion()); assertEquals("Unexpected subject value", SUBJECT, request.getFirstHeader(AS2Header.SUBJECT).getValue()); assertEquals("Unexpected from value", FROM, request.getFirstHeader(AS2Header.FROM).getValue()); assertEquals("Unexpected AS2 version value", AS2_VERSION, request.getFirstHeader(AS2Header.AS2_VERSION).getValue()); assertEquals("Unexpected AS2 from value", AS2_NAME, request.getFirstHeader(AS2Header.AS2_FROM).getValue()); assertEquals("Unexpected AS2 to value", AS2_NAME, request.getFirstHeader(AS2Header.AS2_TO).getValue()); assertTrue("Unexpected message id value", request.getFirstHeader(AS2Header.MESSAGE_ID).getValue().endsWith(CLIENT_FQDN + ">")); assertEquals("Unexpected target host value", TARGET_HOST + ":" + TARGET_PORT, request.getFirstHeader(AS2Header.TARGET_HOST).getValue()); assertEquals("Unexpected user agent value", USER_AGENT, request.getFirstHeader(AS2Header.USER_AGENT).getValue()); assertNotNull("Date value missing", request.getFirstHeader(AS2Header.DATE)); 
assertNotNull("Content length value missing", request.getFirstHeader(AS2Header.CONTENT_LENGTH)); assertTrue("Unexpected content type for message", request.getFirstHeader(AS2Header.CONTENT_TYPE).getValue().startsWith(AS2MediaType.MULTIPART_SIGNED)); assertTrue("Request does not contain entity", request instanceof BasicHttpEntityEnclosingRequest); HttpEntity entity = ((BasicHttpEntityEnclosingRequest) request).getEntity(); assertNotNull("Request does not contain entity", entity); assertTrue("Unexpected request entity type", entity instanceof MultipartSignedEntity); MultipartSignedEntity signedEntity = (MultipartSignedEntity) entity; assertTrue("Entity not set as main body of request", signedEntity.isMainBody()); assertTrue("Request contains invalid number of mime parts", signedEntity.getPartCount() == 2); // Validated first mime part. assertTrue("First mime part incorrect type ", signedEntity.getPart(0) instanceof ApplicationEDIFACTEntity); ApplicationEDIFACTEntity ediEntity = (ApplicationEDIFACTEntity) signedEntity.getPart(0); assertTrue("Unexpected content type for first mime part", ediEntity.getContentType().getValue().startsWith(AS2MediaType.APPLICATION_EDIFACT)); assertFalse("First mime type set as main body of request", ediEntity.isMainBody()); // Validate second mime part. 
assertTrue("Second mime part incorrect type ", signedEntity.getPart(1) instanceof ApplicationPkcs7SignatureEntity); ApplicationPkcs7SignatureEntity signatureEntity = (ApplicationPkcs7SignatureEntity) signedEntity.getPart(1); assertTrue("Unexpected content type for second mime part", signatureEntity.getContentType().getValue().startsWith(AS2MediaType.APPLICATION_PKCS7_SIGNATURE)); assertFalse("First mime type set as main body of request", signatureEntity.isMainBody()); } @Test public void aes128CbcEnvelopedMessageTest() throws Exception { envelopedMessageTest(AS2EncryptionAlgorithm.AES128_CBC); } @Test public void aes192CbcEnvelopedMessageTest() throws Exception { envelopedMessageTest(AS2EncryptionAlgorithm.AES192_CBC); } @Test public void aes256CbcEnvelopedMessageTest() throws Exception { envelopedMessageTest(AS2EncryptionAlgorithm.AES256_CBC); } @Test public void aes128CcmEnvelopedMessageTest() throws Exception { envelopedMessageTest(AS2EncryptionAlgorithm.AES128_CCM); } @Test public void aes192CcmEnvelopedMessageTest() throws Exception { envelopedMessageTest(AS2EncryptionAlgorithm.AES192_CCM); } @Test public void aes256CcmEnvelopedMessageTest() throws Exception { envelopedMessageTest(AS2EncryptionAlgorithm.AES256_CCM); } @Test public void aes128GcmEnvelopedMessageTest() throws Exception { envelopedMessageTest(AS2EncryptionAlgorithm.AES128_GCM); } @Test public void aes192GcmEnvelopedMessageTest() throws Exception { envelopedMessageTest(AS2EncryptionAlgorithm.AES192_GCM); } @Test public void aes256GcmEnvelopedMessageTest() throws Exception { envelopedMessageTest(AS2EncryptionAlgorithm.AES256_GCM); } @Test public void camellia128CbcEnvelopedMessageTest() throws Exception { envelopedMessageTest(AS2EncryptionAlgorithm.CAMELLIA128_CBC); } @Test public void camellia192CbcEnvelopedMessageTest() throws Exception { envelopedMessageTest(AS2EncryptionAlgorithm.CAMELLIA192_CBC); } @Test public void camellia256CbcEnvelopedMessageTest() throws Exception { 
// NOTE(review): this statement and closing brace complete camellia256CbcEnvelopedMessageTest(),
// whose @Test signature lies above this chunk of the file.
envelopedMessageTest(AS2EncryptionAlgorithm.CAMELLIA256_CBC); }

// One enveloped-message round trip per supported symmetric encryption algorithm.
@Test
public void cast5CbcEnvelopedMessageTest() throws Exception {
    envelopedMessageTest(AS2EncryptionAlgorithm.CAST5_CBC);
}

@Test
public void desCbcEnvelopedMessageTest() throws Exception {
    envelopedMessageTest(AS2EncryptionAlgorithm.DES_CBC);
}

@Test
public void desEde3CbcEnvelopedMessageTest() throws Exception {
    envelopedMessageTest(AS2EncryptionAlgorithm.DES_EDE3_CBC);
}

@Test
public void cost28147GcfbEnvelopedMessageTest() throws Exception {
    envelopedMessageTest(AS2EncryptionAlgorithm.GOST28147_GCFB);
}

@Test
public void ideaCbcEnvelopedMessageTest() throws Exception {
    envelopedMessageTest(AS2EncryptionAlgorithm.IDEA_CBC);
}

@Test
public void rc2CbcEnvelopedMessageTest() throws Exception {
    envelopedMessageTest(AS2EncryptionAlgorithm.RC2_CBC);
}

@Test
public void rc4EnvelopedMessageTest() throws Exception {
    envelopedMessageTest(AS2EncryptionAlgorithm.RC4);
}

@Test
public void seedCbcEnvelopedMessageTest() throws Exception {
    envelopedMessageTest(AS2EncryptionAlgorithm.SEED_CBC);
}

/**
 * Sends an ENCRYPTED (enveloped-only) AS2 message with the given encryption algorithm and
 * verifies the outgoing HTTP request: the request line, the AS2/HTTP headers, that the body
 * is a pkcs7-mime entity, and that decrypting it yields the original EDIFACT message.
 *
 * @param encryptionAlgorithm algorithm used to envelope the message body
 * @throws Exception on any send or decryption failure
 */
public void envelopedMessageTest(AS2EncryptionAlgorithm encryptionAlgorithm) throws Exception {
    AS2ClientConnection clientConnection =
            new AS2ClientConnection(AS2_VERSION, USER_AGENT, CLIENT_FQDN, TARGET_HOST, TARGET_PORT);
    AS2ClientManager clientManager = new AS2ClientManager(clientConnection);

    // Log message kept verbatim (typo "Algoritm" is in the original string).
    LOG.info("Key Algoritm: " + signingKP.getPrivate().getAlgorithm());

    // NOTE(review): signing material is supplied although the structure is ENCRYPTED only —
    // presumably ignored by the client manager for this structure; confirm in AS2ClientManager.
    HttpCoreContext httpContext = clientManager.send(EDI_MESSAGE, REQUEST_URI, SUBJECT, FROM, AS2_NAME,
            AS2_NAME, AS2MessageStructure.ENCRYPTED,
            ContentType.create(AS2MediaType.APPLICATION_EDIFACT, AS2Charset.US_ASCII), null,
            AS2SignatureAlgorithm.SHA256WITHRSA, certList.toArray(new Certificate[0]),
            signingKP.getPrivate(), DISPOSITION_NOTIFICATION_TO, SIGNED_RECEIPT_MIC_ALGORITHMS,
            encryptionAlgorithm, certList.toArray(new Certificate[0]), signingKP.getPrivate());

    HttpRequest request = httpContext.getRequest();

    // Request-line checks.
    assertEquals("Unexpected method value", METHOD, request.getRequestLine().getMethod());
    assertEquals("Unexpected request URI value", REQUEST_URI, request.getRequestLine().getUri());
    assertEquals("Unexpected HTTP version value", HttpVersion.HTTP_1_1,
            request.getRequestLine().getProtocolVersion());

    // AS2 / HTTP header checks.
    assertEquals("Unexpected subject value", SUBJECT,
            request.getFirstHeader(AS2Header.SUBJECT).getValue());
    assertEquals("Unexpected from value", FROM, request.getFirstHeader(AS2Header.FROM).getValue());
    assertEquals("Unexpected AS2 version value", AS2_VERSION,
            request.getFirstHeader(AS2Header.AS2_VERSION).getValue());
    assertEquals("Unexpected AS2 from value", AS2_NAME,
            request.getFirstHeader(AS2Header.AS2_FROM).getValue());
    assertEquals("Unexpected AS2 to value", AS2_NAME,
            request.getFirstHeader(AS2Header.AS2_TO).getValue());
    assertTrue("Unexpected message id value",
            request.getFirstHeader(AS2Header.MESSAGE_ID).getValue().endsWith(CLIENT_FQDN + ">"));
    assertEquals("Unexpected target host value", TARGET_HOST + ":" + TARGET_PORT,
            request.getFirstHeader(AS2Header.TARGET_HOST).getValue());
    assertEquals("Unexpected user agent value", USER_AGENT,
            request.getFirstHeader(AS2Header.USER_AGENT).getValue());
    assertNotNull("Date value missing", request.getFirstHeader(AS2Header.DATE));
    assertNotNull("Content length value missing", request.getFirstHeader(AS2Header.CONTENT_LENGTH));
    assertTrue("Unexpected content type for message",
            request.getFirstHeader(AS2Header.CONTENT_TYPE).getValue()
                    .startsWith(AS2MimeType.APPLICATION_PKCS7_MIME));

    // Entity checks: body must be a pkcs7-mime entity marked as the request's main body.
    assertTrue("Request does not contain entity", request instanceof BasicHttpEntityEnclosingRequest);
    HttpEntity entity = ((BasicHttpEntityEnclosingRequest) request).getEntity();
    assertNotNull("Request does not contain entity", entity);
    assertTrue("Unexpected request entity type", entity instanceof ApplicationPkcs7MimeEntity);
    ApplicationPkcs7MimeEntity envelopedEntity = (ApplicationPkcs7MimeEntity) entity;
    assertTrue("Entity not set as main body of request", envelopedEntity.isMainBody());

    // Validated enveloped part.
    MimeEntity encryptedEntity = envelopedEntity.getEncryptedEntity(signingKP.getPrivate());
    assertTrue("Enveloped mime part incorrect type ", encryptedEntity instanceof ApplicationEDIFACTEntity);
    ApplicationEDIFACTEntity ediEntity = (ApplicationEDIFACTEntity) encryptedEntity;
    assertTrue("Unexpected content type for enveloped mime part",
            ediEntity.getContentType().getValue().startsWith(AS2MediaType.APPLICATION_EDIFACT));
    assertFalse("Enveloped mime type set as main body of request", ediEntity.isMainBody());
    // Compare payloads ignoring line-ending differences.
    assertEquals("Unexpected content for enveloped mime part",
            EDI_MESSAGE.replaceAll("[\n\r]", ""), ediEntity.getEdiMessage().replaceAll("[\n\r]", ""));
}

@Test
public void aes128CbcEnvelopedAndSignedMessageTest() throws Exception {
    envelopedAndSignedMessageTest(AS2EncryptionAlgorithm.AES128_CBC);
}

/**
 * Sends an ENCRYPTED_SIGNED AS2 message with the given encryption algorithm and verifies the
 * outgoing request. After decrypting, the payload must be a two-part multipart/signed entity:
 * part 0 the EDIFACT message, part 1 the pkcs7-signature.
 *
 * @param encryptionAlgorithm algorithm used to envelope the signed message
 * @throws Exception on any send or decryption failure
 */
public void envelopedAndSignedMessageTest(AS2EncryptionAlgorithm encryptionAlgorithm) throws Exception {
    AS2ClientConnection clientConnection =
            new AS2ClientConnection(AS2_VERSION, USER_AGENT, CLIENT_FQDN, TARGET_HOST, TARGET_PORT);
    AS2ClientManager clientManager = new AS2ClientManager(clientConnection);

    // Log message kept verbatim (typo "Algoritm" is in the original string).
    LOG.info("Key Algoritm: " + signingKP.getPrivate().getAlgorithm());

    HttpCoreContext httpContext = clientManager.send(EDI_MESSAGE, REQUEST_URI, SUBJECT, FROM, AS2_NAME,
            AS2_NAME, AS2MessageStructure.ENCRYPTED_SIGNED,
            ContentType.create(AS2MediaType.APPLICATION_EDIFACT, AS2Charset.US_ASCII), null,
            AS2SignatureAlgorithm.SHA256WITHRSA, certList.toArray(new Certificate[0]),
            signingKP.getPrivate(), DISPOSITION_NOTIFICATION_TO, SIGNED_RECEIPT_MIC_ALGORITHMS,
            encryptionAlgorithm, certList.toArray(new Certificate[0]), signingKP.getPrivate());

    HttpRequest request = httpContext.getRequest();

    // Request-line checks.
    assertEquals("Unexpected method value", METHOD, request.getRequestLine().getMethod());
    assertEquals("Unexpected request URI value", REQUEST_URI, request.getRequestLine().getUri());
    assertEquals("Unexpected HTTP version value", HttpVersion.HTTP_1_1,
            request.getRequestLine().getProtocolVersion());

    // AS2 / HTTP header checks (same set as envelopedMessageTest).
    assertEquals("Unexpected subject value", SUBJECT,
            request.getFirstHeader(AS2Header.SUBJECT).getValue());
    assertEquals("Unexpected from value", FROM, request.getFirstHeader(AS2Header.FROM).getValue());
    assertEquals("Unexpected AS2 version value", AS2_VERSION,
            request.getFirstHeader(AS2Header.AS2_VERSION).getValue());
    assertEquals("Unexpected AS2 from value", AS2_NAME,
            request.getFirstHeader(AS2Header.AS2_FROM).getValue());
    assertEquals("Unexpected AS2 to value", AS2_NAME,
            request.getFirstHeader(AS2Header.AS2_TO).getValue());
    assertTrue("Unexpected message id value",
            request.getFirstHeader(AS2Header.MESSAGE_ID).getValue().endsWith(CLIENT_FQDN + ">"));
    assertEquals("Unexpected target host value", TARGET_HOST + ":" + TARGET_PORT,
            request.getFirstHeader(AS2Header.TARGET_HOST).getValue());
    assertEquals("Unexpected user agent value", USER_AGENT,
            request.getFirstHeader(AS2Header.USER_AGENT).getValue());
    assertNotNull("Date value missing", request.getFirstHeader(AS2Header.DATE));
    assertNotNull("Content length value missing", request.getFirstHeader(AS2Header.CONTENT_LENGTH));
    assertTrue("Unexpected content type for message",
            request.getFirstHeader(AS2Header.CONTENT_TYPE).getValue()
                    .startsWith(AS2MimeType.APPLICATION_PKCS7_MIME));

    // Entity checks: body must be a pkcs7-mime entity marked as the request's main body.
    assertTrue("Request does not contain entity", request instanceof BasicHttpEntityEnclosingRequest);
    HttpEntity entity = ((BasicHttpEntityEnclosingRequest) request).getEntity();
    assertNotNull("Request does not contain entity", entity);
    assertTrue("Unexpected request entity type", entity instanceof ApplicationPkcs7MimeEntity);
    ApplicationPkcs7MimeEntity envelopedEntity = (ApplicationPkcs7MimeEntity) entity;
    assertTrue("Entity not set as main body of request", envelopedEntity.isMainBody());

    // Validated enveloped part.
    MimeEntity encryptedEntity = envelopedEntity.getEncryptedEntity(signingKP.getPrivate());
    assertTrue("Enveloped mime part incorrect type ", encryptedEntity instanceof MultipartSignedEntity);
    MultipartSignedEntity multipartSignedEntity = (MultipartSignedEntity) encryptedEntity;
    assertTrue("Unexpected content type for enveloped mime part",
            multipartSignedEntity.getContentType().getValue().startsWith(AS2MediaType.MULTIPART_SIGNED));
    assertFalse("Enveloped mime type set as main body of request", multipartSignedEntity.isMainBody());
    assertTrue("Request contains invalid number of mime parts", multipartSignedEntity.getPartCount() == 2);

    // Validated first mime part.
    assertTrue("First mime part incorrect type ",
            multipartSignedEntity.getPart(0) instanceof ApplicationEDIFACTEntity);
    ApplicationEDIFACTEntity ediEntity = (ApplicationEDIFACTEntity) multipartSignedEntity.getPart(0);
    assertTrue("Unexpected content type for first mime part",
            ediEntity.getContentType().getValue().startsWith(AS2MediaType.APPLICATION_EDIFACT));
    assertFalse("First mime type set as main body of request", ediEntity.isMainBody());

    // Validate second mime part.
    assertTrue("Second mime part incorrect type ",
            multipartSignedEntity.getPart(1) instanceof ApplicationPkcs7SignatureEntity);
    ApplicationPkcs7SignatureEntity signatureEntity =
            (ApplicationPkcs7SignatureEntity) multipartSignedEntity.getPart(1);
    assertTrue("Unexpected content type for second mime part",
            signatureEntity.getContentType().getValue()
                    .startsWith(AS2MediaType.APPLICATION_PKCS7_SIGNATURE));
    // NOTE(review): message below says "First" but this asserts the second part — looks copy-pasted;
    // left verbatim since assertion messages are runtime strings.
    assertFalse("First mime type set as main body of request", signatureEntity.isMainBody());
}

/**
 * Sends a SIGNED AS2 message and verifies that the resulting multipart/signed request entity
 * carries the EDI message, a signature entity, and a signature that validates.
 */
@Test
public void signatureVerificationTest() throws Exception {
    AS2ClientConnection clientConnection =
            new AS2ClientConnection(AS2_VERSION, USER_AGENT, CLIENT_FQDN, TARGET_HOST, TARGET_PORT);
    AS2ClientManager clientManager = new AS2ClientManager(clientConnection);

    HttpCoreContext httpContext = clientManager.send(EDI_MESSAGE, REQUEST_URI, SUBJECT, FROM, AS2_NAME,
            AS2_NAME, AS2MessageStructure.SIGNED,
            ContentType.create(AS2MediaType.APPLICATION_EDIFACT, AS2Charset.US_ASCII), null,
            AS2SignatureAlgorithm.SHA256WITHRSA, certList.toArray(new Certificate[0]),
            signingKP.getPrivate(), DISPOSITION_NOTIFICATION_TO, SIGNED_RECEIPT_MIC_ALGORITHMS,
            null, null, null);

    HttpRequest request = httpContext.getRequest();
    assertTrue("Request does not contain entity", request instanceof BasicHttpEntityEnclosingRequest);
    HttpEntity entity = ((BasicHttpEntityEnclosingRequest) request).getEntity();
    assertNotNull("Request does not contain entity", entity);
    assertTrue("Unexpected request entity type", entity instanceof MultipartSignedEntity);
    MultipartSignedEntity multipartSignedEntity = (MultipartSignedEntity) entity;
    MimeEntity signedEntity = multipartSignedEntity.getSignedDataEntity();
    assertTrue("Signed entity wrong type", signedEntity instanceof ApplicationEDIEntity);
    ApplicationEDIEntity ediMessageEntity = (ApplicationEDIEntity) signedEntity;
    assertNotNull("Multipart signed entity does not contain EDI message entity", ediMessageEntity);
    ApplicationPkcs7SignatureEntity signatureEntity = multipartSignedEntity.getSignatureEntity();
    assertNotNull("Multipart signed entity does not contain signature entity", signatureEntity);

    // Validate Signature
    assertTrue("Signature is invalid", multipartSignedEntity.isValid());
}

/**
 * Sends a PLAIN AS2 message requesting a receipt and verifies the synchronous MDN response:
 * a signed two-part disposition-notification report whose signature validates.
 */
@Test
public void mdnMessageTest() throws Exception {
    AS2ClientConnection clientConnection =
            new AS2ClientConnection(AS2_VERSION, USER_AGENT, CLIENT_FQDN, TARGET_HOST, TARGET_PORT);
    AS2ClientManager clientManager = new AS2ClientManager(clientConnection);

    HttpCoreContext httpContext = clientManager.send(EDI_MESSAGE, REQUEST_URI, SUBJECT, FROM, AS2_NAME,
            AS2_NAME, AS2MessageStructure.PLAIN,
            ContentType.create(AS2MediaType.APPLICATION_EDIFACT, AS2Charset.US_ASCII), null, null, null,
            null, DISPOSITION_NOTIFICATION_TO, SIGNED_RECEIPT_MIC_ALGORITHMS, null, null, null);

    HttpResponse response = httpContext.getResponse();
    // NOTE(review): the three messages below all say "method value" though they check protocol
    // version, status code, and reason phrase — left verbatim (runtime strings).
    assertEquals("Unexpected method value", HttpVersion.HTTP_1_1,
            response.getStatusLine().getProtocolVersion());
    assertEquals("Unexpected method value", HttpStatus.SC_OK, response.getStatusLine().getStatusCode());
    assertEquals("Unexpected method value", EnglishReasonPhraseCatalog.INSTANCE.getReason(200, null),
            response.getStatusLine().getReasonPhrase());

    HttpEntity responseEntity = response.getEntity();
    assertNotNull("Response entity", responseEntity);
    assertTrue("Unexpected response entity type", responseEntity instanceof MultipartSignedEntity);
    MultipartSignedEntity responseSignedEntity = (MultipartSignedEntity) responseEntity;
    MimeEntity responseSignedDataEntity = responseSignedEntity.getSignedDataEntity();
    assertTrue("Signed entity wrong type",
            responseSignedDataEntity instanceof DispositionNotificationMultipartReportEntity);
    DispositionNotificationMultipartReportEntity reportEntity =
            (DispositionNotificationMultipartReportEntity) responseSignedDataEntity;
    assertEquals("Unexpected number of body parts in report", 2, reportEntity.getPartCount());

    // Part 0: human-readable text; part 1: the machine-readable disposition notification.
    MimeEntity firstPart = reportEntity.getPart(0);
    assertEquals("Unexpected content type in first body part of report",
            ContentType.create(AS2MimeType.TEXT_PLAIN, AS2Charset.US_ASCII).toString(),
            firstPart.getContentTypeValue());
    MimeEntity secondPart = reportEntity.getPart(1);
    assertEquals("Unexpected content type in second body part of report",
            ContentType.create(AS2MimeType.MESSAGE_DISPOSITION_NOTIFICATION, AS2Charset.US_ASCII).toString(),
            secondPart.getContentTypeValue());
    ApplicationPkcs7SignatureEntity signatureEntity = responseSignedEntity.getSignatureEntity();
    assertNotNull("Signature Entity", signatureEntity);

    // Validate Signature
    assertTrue("Signature is invalid", responseSignedEntity.isValid());
}

/**
 * Builds a plain EDI request plus a 200 response, constructs a disposition notification (MDN)
 * report from them, sends it asynchronously, and verifies every field of the MDN that comes
 * back on the wire, including the received-content MIC.
 */
@Test
public void asynchronousMdnMessageTest() throws Exception {
    AS2AsynchronousMDNManager mdnManager = new AS2AsynchronousMDNManager(AS2_VERSION, USER_AGENT,
            CLIENT_FQDN, certList.toArray(new X509Certificate[0]), signingKP.getPrivate());

    // Create plain edi request message to acknowledge
    ApplicationEDIEntity ediEntity = EntityUtils.createEDIEntity(EDI_MESSAGE,
            ContentType.create(AS2MediaType.APPLICATION_EDIFACT, AS2Charset.US_ASCII), null, false);
    HttpEntityEnclosingRequest request = new BasicHttpEntityEnclosingRequest("POST", REQUEST_URI);
    HttpMessageUtils.setHeaderValue(request, AS2Header.SUBJECT, SUBJECT);
    String httpdate = DATE_GENERATOR.getCurrentDate();
    HttpMessageUtils.setHeaderValue(request, AS2Header.DATE, httpdate);
    HttpMessageUtils.setHeaderValue(request, AS2Header.AS2_TO, AS2_NAME);
    HttpMessageUtils.setHeaderValue(request, AS2Header.AS2_FROM, AS2_NAME);
    String originalMessageId = AS2Utils.createMessageId(SERVER_FQDN);
    HttpMessageUtils.setHeaderValue(request, AS2Header.MESSAGE_ID, originalMessageId);
    HttpMessageUtils.setHeaderValue(request, AS2Header.DISPOSITION_NOTIFICATION_OPTIONS,
            DISPOSITION_NOTIFICATION_OPTIONS);
    EntityUtils.setMessageEntity(request, ediEntity);

    // Create response for MDN creation.
    HttpResponse response = new BasicHttpResponse(HttpVersion.HTTP_1_1, 200, "OK");
    httpdate = DATE_GENERATOR.getCurrentDate();
    response.setHeader(AS2Header.DATE, httpdate);
    response.setHeader(AS2Header.SERVER, REPORTING_UA);

    // Create a receipt for edi message
    Map<String, String> extensionFields = new HashMap<>();
    extensionFields.put("Original-Recipient", "rfc822;" + AS2_NAME);
    AS2DispositionModifier dispositionModifier = AS2DispositionModifier.createWarning("AS2 is cool!");
    String[] failureFields = new String[] {"failure-field-1"};
    String[] errorFields = new String[] {"error-field-1"};
    String[] warningFields = new String[] {"warning-field-1"};
    DispositionNotificationMultipartReportEntity mdn = new DispositionNotificationMultipartReportEntity(
            request, response, DispositionMode.AUTOMATIC_ACTION_MDN_SENT_AUTOMATICALLY,
            AS2DispositionType.PROCESSED, dispositionModifier, failureFields, errorFields,
            warningFields, extensionFields, null, "boundary", true);

    // Send MDN
    HttpCoreContext httpContext = mdnManager.send(mdn, RECIPIENT_DELIVERY_ADDRESS);

    // NOTE(review): "mndRequest" looks like a transposition of "mdnRequest" — left as-is here
    // (identifiers are code, not comments).
    HttpRequest mndRequest = httpContext.getRequest();
    DispositionNotificationMultipartReportEntity reportEntity =
            HttpMessageUtils.getEntity(mndRequest, DispositionNotificationMultipartReportEntity.class);
    assertNotNull("Request does not contain resport", reportEntity);
    assertEquals("Report entity contains invalid number of parts", 2, reportEntity.getPartCount());
    assertTrue("Report first part is not text entity", reportEntity.getPart(0) instanceof TextPlainEntity);
    assertTrue("Report second part is not MDN entity",
            reportEntity.getPart(1) instanceof AS2MessageDispositionNotificationEntity);

    // Field-by-field verification of the MDN entity against the values used to build it.
    AS2MessageDispositionNotificationEntity mdnEntity = (AS2MessageDispositionNotificationEntity) reportEntity
            .getPart(1);
    assertEquals("Unexpected value for Reporting UA", REPORTING_UA, mdnEntity.getReportingUA());
    assertEquals("Unexpected value for Final Recipient", AS2_NAME, mdnEntity.getFinalRecipient());
    assertEquals("Unexpected value for Original Message ID", originalMessageId,
            mdnEntity.getOriginalMessageId());
    assertEquals("Unexpected value for Disposition Mode",
            DispositionMode.AUTOMATIC_ACTION_MDN_SENT_AUTOMATICALLY, mdnEntity.getDispositionMode());
    assertEquals("Unexpected value for Disposition Type", AS2DispositionType.PROCESSED,
            mdnEntity.getDispositionType());
    assertEquals("Unexpected value for Disposition Modifier", dispositionModifier,
            mdnEntity.getDispositionModifier());
    assertArrayEquals("Unexpected value for Failure Fields", failureFields, mdnEntity.getFailureFields());
    assertArrayEquals("Unexpected value for Error Fields", errorFields, mdnEntity.getErrorFields());
    assertArrayEquals("Unexpected value for Warning Fields", warningFields, mdnEntity.getWarningFields());
    assertEquals("Unexpected value for Extension Fields", extensionFields, mdnEntity.getExtensionFields());
    // The MIC reported in the MDN must match one computed locally from the original request.
    ReceivedContentMic expectedMic = MicUtils.createReceivedContentMic(request);
    ReceivedContentMic mdnMic = mdnEntity.getReceivedContentMic();
    assertEquals("Unexpected value for Recieved Content Mic", expectedMic.getEncodedMessageDigest(),
            mdnMic.getEncodedMessageDigest());
    LOG.debug("\r\n" + AS2Utils.printMessage(mndRequest));
}
}
package com.fsck.k9.ui.messageview; import android.app.Activity; import android.app.DialogFragment; import android.app.Fragment; import android.app.FragmentManager; import android.app.LoaderManager; import android.app.LoaderManager.LoaderCallbacks; import android.app.PendingIntent; import android.content.Context; import android.content.Intent; import android.content.IntentSender; import android.content.Loader; import android.net.Uri; import android.os.Bundle; import android.os.Handler; import android.text.TextUtils; import android.util.Log; import android.view.ContextThemeWrapper; import android.view.KeyEvent; import android.view.LayoutInflater; import android.view.View; import android.view.View.OnClickListener; import android.view.ViewGroup; import android.widget.Toast; import com.fsck.k9.Account; import com.fsck.k9.K9; import com.fsck.k9.Preferences; import com.fsck.k9.R; import com.fsck.k9.activity.ChooseFolder; import com.fsck.k9.activity.MessageReference; import com.fsck.k9.controller.MessagingController; import com.fsck.k9.controller.MessagingListener; import com.fsck.k9.crypto.PgpData; import com.fsck.k9.fragment.ConfirmationDialogFragment; import com.fsck.k9.fragment.ConfirmationDialogFragment.ConfirmationDialogFragmentListener; import com.fsck.k9.fragment.ProgressDialogFragment; import com.fsck.k9.helper.FileBrowserHelper; import com.fsck.k9.helper.FileBrowserHelper.FileBrowserFailOverCallback; import com.fsck.k9.mail.Flag; import com.fsck.k9.mail.MessagingException; import com.fsck.k9.mailstore.AttachmentViewInfo; import com.fsck.k9.mailstore.LocalMessage; import com.fsck.k9.mailstore.MessageViewInfo; import com.fsck.k9.ui.crypto.MessageCryptoAnnotations; import com.fsck.k9.ui.crypto.MessageCryptoCallback; import com.fsck.k9.ui.crypto.MessageCryptoHelper; import com.fsck.k9.ui.message.DecodeMessageLoader; import com.fsck.k9.ui.message.LocalMessageLoader; import com.fsck.k9.view.MessageHeader; import java.util.Collections; import java.util.Locale; public 
class MessageViewFragment extends Fragment implements ConfirmationDialogFragmentListener,
        AttachmentViewCallback, OpenPgpHeaderViewCallback, MessageCryptoCallback {

    // Fragment-argument key for the message to display.
    private static final String ARG_REFERENCE = "reference";

    // Instance-state keys.
    private static final String STATE_MESSAGE_REFERENCE = "reference";
    private static final String STATE_PGP_DATA = "pgpData";

    // startActivityForResult() request codes.
    private static final int ACTIVITY_CHOOSE_FOLDER_MOVE = 1;
    private static final int ACTIVITY_CHOOSE_FOLDER_COPY = 2;
    private static final int ACTIVITY_CHOOSE_DIRECTORY = 3;

    // LoaderManager loader ids.
    private static final int LOCAL_MESSAGE_LOADER_ID = 1;
    private static final int DECODE_MESSAGE_LOADER_ID = 2;

    /**
     * Creates a new fragment instance that displays the message identified by {@code reference}.
     */
    public static MessageViewFragment newInstance(MessageReference reference) {
        MessageViewFragment fragment = new MessageViewFragment();

        Bundle args = new Bundle();
        args.putParcelable(ARG_REFERENCE, reference);
        fragment.setArguments(args);

        return fragment;
    }

    private MessageTopView mMessageView;
    private PgpData mPgpData;
    private Account mAccount;
    private MessageReference mMessageReference;
    // The message currently loaded from the local store; null until the loader finishes.
    private LocalMessage mMessage;
    private MessageCryptoAnnotations messageAnnotations;
    private MessagingController mController;
    // Handler bound to the thread this fragment was created on (used to hop back from listeners).
    private Handler handler = new Handler();
    private DownloadMessageListener downloadMessageListener = new DownloadMessageListener();
    private MessageCryptoHelper messageCryptoHelper;

    /**
     * Used to temporarily store the destination folder for refile operations if a confirmation
     * dialog is shown.
     */
    private String mDstFolder;

    private MessageViewFragmentListener mFragmentListener;

    /**
     * {@code true} after {@link #onCreate(Bundle)} has been executed. This is used by
     * {@code MessageList.configureMenu()} to make sure the fragment has been initialized before
     * it is used.
     */
    private boolean mInitialized = false;

    private Context mContext;

    private LoaderCallbacks<LocalMessage> localMessageLoaderCallback = new LocalMessageLoaderCallback();
    private LoaderCallbacks<MessageViewInfo> decodeMessageLoaderCallback = new DecodeMessageLoaderCallback();
    private MessageViewInfo messageViewInfo;
    // Attachment whose "save to directory" flow is in progress (see ACTIVITY_CHOOSE_DIRECTORY).
    private AttachmentViewInfo currentAttachmentViewInfo;

    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);

        mContext = activity.getApplicationContext();

        // The hosting activity must implement our callback interface.
        try {
            mFragmentListener = (MessageViewFragmentListener) activity;
        } catch (ClassCastException e) {
            throw new ClassCastException(activity.getClass() +
                    " must implement MessageViewFragmentListener");
        }
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // This fragment adds options to the action bar
        setHasOptionsMenu(true);

        mController = MessagingController.getInstance(getActivity().getApplication());
        mInitialized = true;
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        // Inflate with the message-view theme instead of the host activity's theme.
        Context context = new ContextThemeWrapper(inflater.getContext(),
                K9.getK9ThemeResourceId(K9.getK9MessageViewTheme()));
        LayoutInflater layoutInflater = LayoutInflater.from(context);
        View view = layoutInflater.inflate(R.layout.message, container, false);

        mMessageView = (MessageTopView) view.findViewById(R.id.message_view);

        mMessageView.setAttachmentCallback(this);
        mMessageView.setOpenPgpHeaderViewCallback(this);

        mMessageView.setOnToggleFlagClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                onToggleFlagged();
            }
        });

        mMessageView.setOnDownloadButtonClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                onDownloadRemainder();
            }
        });

        mFragmentListener.messageHeaderViewAvailable(mMessageView.getMessageHeaderView());

        return view;
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);

        // Restore state if available, otherwise read the message reference from the arguments.
        MessageReference messageReference;
        if (savedInstanceState != null) {
            mPgpData = (PgpData) savedInstanceState.get(STATE_PGP_DATA);
            messageReference = (MessageReference) savedInstanceState.get(STATE_MESSAGE_REFERENCE);
        } else {
            Bundle args = getArguments();
            messageReference = args.getParcelable(ARG_REFERENCE);
        }

        // Only reset PGP data when none was restored from instance state.
        displayMessage(messageReference, (mPgpData == null));
    }

    @Override
    public void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);

        outState.putParcelable(STATE_MESSAGE_REFERENCE, mMessageReference);
        outState.putSerializable(STATE_PGP_DATA, mPgpData);
    }

    /**
     * Resolves the account for {@code ref}, resets the views, and kicks off loading the message
     * from the local database.
     *
     * @param resetPgpData whether to start with a fresh, empty {@link PgpData}
     */
    private void displayMessage(MessageReference ref, boolean resetPgpData) {
        mMessageReference = ref;
        if (K9.DEBUG) {
            Log.d(K9.LOG_TAG, "MessageView displaying message " + mMessageReference);
        }

        Context appContext = getActivity().getApplicationContext();
        mAccount = Preferences.getPreferences(appContext).getAccount(mMessageReference.getAccountUuid());
        messageCryptoHelper = new MessageCryptoHelper(getActivity(), mAccount, this);

        if (resetPgpData) {
            // start with fresh, empty PGP data
            mPgpData = new PgpData();
        }

        // Clear previous message
        mMessageView.resetView();
        mMessageView.resetHeaderView();

        startLoadingMessageFromDatabase();

        mFragmentListener.updateMenu();
    }

    /** Forwards a crypto activity result to the crypto helper, if one exists. */
    public void handleCryptoResult(int requestCode, int resultCode, Intent data) {
        if (messageCryptoHelper != null) {
            messageCryptoHelper.handleCryptoResult(requestCode, resultCode, data);
        }
    }

    private void startLoadingMessageFromDatabase() {
        getLoaderManager().initLoader(LOCAL_MESSAGE_LOADER_ID, null, localMessageLoaderCallback);
    }

    // Shows the header immediately; either downloads the missing body or proceeds to crypto handling.
    private void onLoadMessageFromDatabaseFinished(LocalMessage message) {
        displayMessageHeader(message);

        if (message.isBodyMissing()) {
            startDownloadingMessageBody(message);
        } else {
            messageCryptoHelper.decryptOrVerifyMessagePartsIfNecessary(message);
        }
    }

    private void onLoadMessageFromDatabaseFailed() {
        // mMessageView.showStatusMessage(mContext.getString(R.string.status_invalid_id_error));
    }

    private void startDownloadingMessageBody(LocalMessage message) {
        throw new RuntimeException("Not implemented yet");
    }

    // Called (on the main thread) after a remote download completes; restarts the load pipeline.
    private void onMessageDownloadFinished(LocalMessage message) {
        mMessage = message;

        LoaderManager loaderManager = getLoaderManager();
        loaderManager.destroyLoader(LOCAL_MESSAGE_LOADER_ID);
        loaderManager.destroyLoader(DECODE_MESSAGE_LOADER_ID);

        onLoadMessageFromDatabaseFinished(mMessage);
    }

    private void onDownloadMessageFailed(Throwable t) {
        mMessageView.enableDownloadButton();
        // IllegalArgumentException is treated as a bad message id; everything else as a network error.
        String errorMessage;
        if (t instanceof IllegalArgumentException) {
            errorMessage = mContext.getString(R.string.status_invalid_id_error);
        } else {
            errorMessage = mContext.getString(R.string.status_network_error);
        }
        Toast.makeText(mContext, errorMessage, Toast.LENGTH_LONG).show();
    }

    @Override
    public void onCryptoOperationsFinished(MessageCryptoAnnotations annotations) {
        startExtractingTextAndAttachments(annotations);
    }

    private void startExtractingTextAndAttachments(MessageCryptoAnnotations annotations) {
        this.messageAnnotations = annotations;
        getLoaderManager().initLoader(DECODE_MESSAGE_LOADER_ID, null, decodeMessageLoaderCallback);
    }

    private void onDecodeMessageFinished(MessageViewInfo messageContainer) {
        this.messageViewInfo = messageContainer;
        showMessage(messageContainer);
    }

    private void showMessage(MessageViewInfo messageContainer) {
        try {
            mMessageView.setMessage(mAccount, messageContainer);
            mMessageView.setShowDownloadButton(mMessage);
        } catch (MessagingException e) {
            Log.e(K9.LOG_TAG, "Error while trying to display message", e);
        }
    }

    private void displayMessageHeader(LocalMessage message) {
        mMessageView.setHeaders(message, mAccount);
        displayMessageSubject(getSubjectForMessage(message));
        mFragmentListener.updateMenu();
    }

    /**
     * Called from UI thread when user select Delete
     */
    public void onDelete() {
        if (K9.confirmDelete() || (K9.confirmDeleteStarred() && mMessage.isSet(Flag.FLAGGED))) {
            showDialog(R.id.dialog_confirm_delete);
        } else {
            delete();
        }
    }

    public void onToggleAllHeadersView() {
        mMessageView.getMessageHeaderView().onShowAdditionalHeaders();
    }

    public boolean allHeadersVisible() {
        return mMessageView.getMessageHeaderView().additionalHeadersVisible();
    }

    private void delete() {
        if (mMessage != null) {
            // Disable the delete button after it's tapped (to try to prevent
            // accidental clicks)
            mFragmentListener.disableDeleteAction();
            LocalMessage messageToDelete = mMessage;
            mFragmentListener.showNextMessageOrReturn();
            mController.deleteMessages(Collections.singletonList(messageToDelete), null);
        }
    }

    /**
     * Moves the current message to {@code dstFolder}, asking for confirmation first when the
     * destination is the spam folder and the user has spam confirmation enabled.
     */
    public void onRefile(String dstFolder) {
        if (!mController.isMoveCapable(mAccount)) {
            return;
        }
        if (!mController.isMoveCapable(mMessage)) {
            Toast toast = Toast.makeText(getActivity(), R.string.move_copy_cannot_copy_unsynced_message,
                    Toast.LENGTH_LONG);
            toast.show();
            return;
        }

        if (K9.FOLDER_NONE.equalsIgnoreCase(dstFolder)) {
            return;
        }

        if (mAccount.getSpamFolderName().equals(dstFolder) && K9.confirmSpam()) {
            // Remember the destination until the confirmation dialog returns.
            mDstFolder = dstFolder;
            showDialog(R.id.dialog_confirm_spam);
        } else {
            refileMessage(dstFolder);
        }
    }

    private void refileMessage(String dstFolder) {
        String srcFolder = mMessageReference.getFolderName();
        LocalMessage messageToMove = mMessage;
        mFragmentListener.showNextMessageOrReturn();
        mController.moveMessage(mAccount, srcFolder, messageToMove, dstFolder, null);
    }

    public void onReply() {
        if (mMessage != null) {
            mFragmentListener.onReply(mMessage, mPgpData);
        }
    }

    public void onReplyAll() {
        if (mMessage != null) {
            mFragmentListener.onReplyAll(mMessage, mPgpData);
        }
    }

    public void onForward() {
        if (mMessage != null) {
            mFragmentListener.onForward(mMessage, mPgpData);
        }
    }

    public void onToggleFlagged() {
        if (mMessage != null) {
            boolean newState = !mMessage.isSet(Flag.FLAGGED);
            mController.setFlag(mAccount, mMessage.getFolder().getName(),
                    Collections.singletonList(mMessage), Flag.FLAGGED, newState);
            mMessageView.setHeaders(mMessage, mAccount);
        }
    }

    public void onMove() {
        if ((!mController.isMoveCapable(mAccount))
                || (mMessage == null)) {
            return;
        }
        if (!mController.isMoveCapable(mMessage)) {
            Toast toast = Toast.makeText(getActivity(), R.string.move_copy_cannot_copy_unsynced_message,
                    Toast.LENGTH_LONG);
            toast.show();
            return;
        }

        startRefileActivity(ACTIVITY_CHOOSE_FOLDER_MOVE);
    }

    public void onCopy() {
        if ((!mController.isCopyCapable(mAccount))
                || (mMessage == null)) {
            return;
        }
        if (!mController.isCopyCapable(mMessage)) {
            Toast toast = Toast.makeText(getActivity(), R.string.move_copy_cannot_copy_unsynced_message,
                    Toast.LENGTH_LONG);
            toast.show();
            return;
        }

        startRefileActivity(ACTIVITY_CHOOSE_FOLDER_COPY);
    }

    public void onArchive() {
        onRefile(mAccount.getArchiveFolderName());
    }

    public void onSpam() {
        onRefile(mAccount.getSpamFolderName());
    }

    public void onSelectText() {
        // FIXME
        // mMessageView.beginSelectingText();
    }

    // Launches the folder chooser; result arrives in onActivityResult() under the given code.
    private void startRefileActivity(int activity) {
        Intent intent = new Intent(getActivity(), ChooseFolder.class);
        intent.putExtra(ChooseFolder.EXTRA_ACCOUNT, mAccount.getUuid());
        intent.putExtra(ChooseFolder.EXTRA_CUR_FOLDER, mMessageReference.getFolderName());
        intent.putExtra(ChooseFolder.EXTRA_SEL_FOLDER, mAccount.getLastSelectedFolderName());
        intent.putExtra(ChooseFolder.EXTRA_MESSAGE, mMessageReference);
        startActivityForResult(intent, activity);
    }

    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        if (resultCode != Activity.RESULT_OK) {
            return;
        }

        switch (requestCode) {
            case ACTIVITY_CHOOSE_DIRECTORY: {
                if (data != null) {
                    // obtain the filename
                    Uri fileUri = data.getData();
                    if (fileUri != null) {
                        String filePath = fileUri.getPath();
                        if (filePath != null) {
                            getAttachmentController(currentAttachmentViewInfo).saveAttachmentTo(filePath);
                        }
                    }
                }
                break;
            }
            case ACTIVITY_CHOOSE_FOLDER_MOVE:
            case ACTIVITY_CHOOSE_FOLDER_COPY: {
                if (data == null) {
                    return;
                }

                String destFolderName = data.getStringExtra(ChooseFolder.EXTRA_NEW_FOLDER);
                MessageReference ref = data.getParcelableExtra(ChooseFolder.EXTRA_MESSAGE);

                // Only act if the result is for the message currently displayed.
                if (mMessageReference.equals(ref)) {
                    mAccount.setLastSelectedFolderName(destFolderName);
                    switch (requestCode) {
                        case ACTIVITY_CHOOSE_FOLDER_MOVE: {
                            mFragmentListener.showNextMessageOrReturn();
                            moveMessage(ref, destFolderName);
                            break;
                        }
                        case ACTIVITY_CHOOSE_FOLDER_COPY: {
                            copyMessage(ref, destFolderName);
                            break;
                        }
                    }
                }
                break;
            }
        }
    }

    public void onSendAlternate() {
        if (mMessage != null) {
            mController.sendAlternate(getActivity(), mAccount, mMessage);
        }
    }

    public void onToggleRead() {
        if (mMessage != null) {
            mController.setFlag(mAccount, mMessage.getFolder().getName(),
                    Collections.singletonList(mMessage), Flag.SEEN, !mMessage.isSet(Flag.SEEN));
            mMessageView.setHeaders(mMessage, mAccount);
            String subject = mMessage.getSubject();
            displayMessageSubject(subject);
            mFragmentListener.updateMenu();
        }
    }

    // Fetches the rest of a partially-downloaded message from the server.
    private void onDownloadRemainder() {
        if (mMessage.isSet(Flag.X_DOWNLOADED_FULL)) {
            return;
        }
        mMessageView.disableDownloadButton();
        mController.loadMessageForViewRemote(mAccount, mMessageReference.getFolderName(),
                mMessageReference.getUid(), downloadMessageListener);
    }

    private void setProgress(boolean enable) {
        if (mFragmentListener != null) {
            mFragmentListener.setProgress(enable);
        }
    }

    private void displayMessageSubject(String subject) {
        if (mFragmentListener != null) {
            mFragmentListener.displayMessageSubject(subject);
        }
    }

    private String getSubjectForMessage(LocalMessage message) {
        String subject = message.getSubject();
        if (TextUtils.isEmpty(subject)) {
            return mContext.getString(R.string.general_no_subject);
        }

        return subject;
    }

    public void moveMessage(MessageReference reference, String destFolderName) {
        mController.moveMessage(mAccount, mMessageReference.getFolderName(), mMessage, destFolderName,
                null);
    }

    public void copyMessage(MessageReference reference, String destFolderName) {
        mController.copyMessage(mAccount, mMessageReference.getFolderName(), mMessage, destFolderName,
                null);
    }

    // Builds and shows the confirmation/progress dialog identified by dialogId.
    private void showDialog(int dialogId) {
        DialogFragment fragment;
        if (dialogId == R.id.dialog_confirm_delete) {
            String title = getString(R.string.dialog_confirm_delete_title);
            String message = getString(R.string.dialog_confirm_delete_message);
            String confirmText = getString(R.string.dialog_confirm_delete_confirm_button);
            String cancelText = getString(R.string.dialog_confirm_delete_cancel_button);

            fragment = ConfirmationDialogFragment.newInstance(dialogId, title, message,
                    confirmText, cancelText);
        } else if (dialogId == R.id.dialog_confirm_spam) {
            String title = getString(R.string.dialog_confirm_spam_title);
            String message = getResources().getQuantityString(R.plurals.dialog_confirm_spam_message, 1);
            String confirmText = getString(R.string.dialog_confirm_spam_confirm_button);
            String cancelText = getString(R.string.dialog_confirm_spam_cancel_button);

            fragment = ConfirmationDialogFragment.newInstance(dialogId, title, message,
                    confirmText, cancelText);
        } else if (dialogId == R.id.dialog_attachment_progress) {
            String message = getString(R.string.dialog_attachment_progress_title);
            fragment = ProgressDialogFragment.newInstance(null, message);
        } else {
            throw new RuntimeException("Called showDialog(int) with unknown dialog id.");
        }

        fragment.setTargetFragment(this, dialogId);
        fragment.show(getFragmentManager(), getDialogTag(dialogId));
    }

    private void removeDialog(int dialogId) {
        FragmentManager fm = getFragmentManager();

        if (fm == null || isRemoving() || isDetached()) {
            return;
        }

        // Make sure the "show dialog" transaction has been processed when we call
        // findFragmentByTag() below. Otherwise the fragment won't be found and the dialog will
        // never be dismissed.
        fm.executePendingTransactions();

        DialogFragment fragment = (DialogFragment) fm.findFragmentByTag(getDialogTag(dialogId));

        if (fragment != null) {
            fragment.dismiss();
        }
    }

    private String getDialogTag(int dialogId) {
        return String.format(Locale.US, "dialog-%d", dialogId);
    }

    public void zoom(KeyEvent event) {
        // mMessageView.zoom(event);
    }

    @Override
    public void doPositiveClick(int dialogId) {
        if (dialogId == R.id.dialog_confirm_delete) {
            delete();
        } else if (dialogId == R.id.dialog_confirm_spam) {
            refileMessage(mDstFolder);
            mDstFolder = null;
        }
    }

    @Override
    public void doNegativeClick(int dialogId) {
        /* do nothing */
    }

    @Override
    public void dialogCancelled(int dialogId) {
        /* do nothing */
    }

    /**
     * Get the {@link MessageReference} of the currently displayed message.
     */
    public MessageReference getMessageReference() {
        return mMessageReference;
    }

    public boolean isMessageRead() {
        return (mMessage != null) ? mMessage.isSet(Flag.SEEN) : false;
    }

    public boolean isCopyCapable() {
        return mController.isCopyCapable(mAccount);
    }

    public boolean isMoveCapable() {
        return mController.isMoveCapable(mAccount);
    }

    public boolean canMessageBeArchived() {
        return (!mMessageReference.getFolderName().equals(mAccount.getArchiveFolderName())
                && mAccount.hasArchiveFolder());
    }

    public boolean canMessageBeMovedToSpam() {
        return (!mMessageReference.getFolderName().equals(mAccount.getSpamFolderName())
                && mAccount.hasSpamFolder());
    }

    public void updateTitle() {
        if (mMessage != null) {
            displayMessageSubject(mMessage.getSubject());
        }
    }

    public Context getContext() {
        return mContext;
    }

    public void disableAttachmentButtons(AttachmentViewInfo attachment) {
        // mMessageView.disableAttachmentButtons(attachment);
    }

    public void enableAttachmentButtons(AttachmentViewInfo attachment) {
        // mMessageView.enableAttachmentButtons(attachment);
    }

    public void runOnMainThread(Runnable runnable) {
        handler.post(runnable);
    }

    public void showAttachmentLoadingDialog() {
        // mMessageView.disableAttachmentButtons();
        showDialog(R.id.dialog_attachment_progress);
    }

    public void hideAttachmentLoadingDialogOnMainThread() {
        handler.post(new Runnable() {
            @Override
            public void run() {
                removeDialog(R.id.dialog_attachment_progress);
                // mMessageView.enableAttachmentButtons();
            }
        });
    }

    public void refreshAttachmentThumbnail(AttachmentViewInfo attachment) {
        // mMessageView.refreshAttachmentThumbnail(attachment);
    }

    @Override
    public void onPgpSignatureButtonClick(PendingIntent pendingIntent) {
        try {
            // NOTE(review): request code 42 is a magic constant; presumably its result is handled
            // elsewhere (it doesn't match any ACTIVITY_* code above) — confirm against the activity.
            getActivity().startIntentSenderForResult(
                    pendingIntent.getIntentSender(), 42, null, 0, 0, 0);
        } catch (IntentSender.SendIntentException e) {
            Log.e(K9.LOG_TAG, "SendIntentException", e);
        }
    }

    /** Callbacks the hosting activity must implement. */
    public interface MessageViewFragmentListener {
        public void onForward(LocalMessage mMessage, PgpData mPgpData);
        public void disableDeleteAction();
        public void onReplyAll(LocalMessage mMessage, PgpData mPgpData);
        public void onReply(LocalMessage mMessage, PgpData mPgpData);
        public void displayMessageSubject(String title);
        public void setProgress(boolean b);
        public void showNextMessageOrReturn();
        public void messageHeaderViewAvailable(MessageHeader messageHeaderView);
        public void updateMenu();
    }

    public boolean isInitialized() {
        return mInitialized;
    }

    /** Loads the message from the local database and dispatches success/failure. */
    class LocalMessageLoaderCallback implements LoaderCallbacks<LocalMessage> {
        @Override
        public Loader<LocalMessage> onCreateLoader(int id, Bundle args) {
            setProgress(true);
            return new LocalMessageLoader(mContext, mController, mAccount, mMessageReference);
        }

        @Override
        public void onLoadFinished(Loader<LocalMessage> loader, LocalMessage message) {
            setProgress(false);
            mMessage = message;
            if (message == null) {
                onLoadMessageFromDatabaseFailed();
            } else {
                onLoadMessageFromDatabaseFinished(message);
            }
        }

        @Override
        public void onLoaderReset(Loader<LocalMessage> loader) {
            // Do nothing
        }
    }

    /** Decodes the loaded message (with crypto annotations) into displayable form. */
    class DecodeMessageLoaderCallback implements LoaderCallbacks<MessageViewInfo> {
        @Override
        public Loader<MessageViewInfo> onCreateLoader(int id, Bundle args) {
            setProgress(true);
            return new DecodeMessageLoader(mContext, mMessage, messageAnnotations);
        }

        @Override
        public void onLoadFinished(Loader<MessageViewInfo> loader, MessageViewInfo messageContainer) {
            setProgress(false);
            onDecodeMessageFinished(messageContainer);
        }

        @Override
        public void onLoaderReset(Loader<MessageViewInfo> loader) {
            // Do nothing
        }
    }

    @Override
    public void onViewAttachment(AttachmentViewInfo attachment) {
        //TODO: check if we have to download the attachment first
        getAttachmentController(attachment).viewAttachment();
    }

    @Override
    public void onSaveAttachment(AttachmentViewInfo attachment) {
        //TODO: check if we have to download the attachment first
        getAttachmentController(attachment).saveAttachment();
    }

    @Override
    public void onSaveAttachmentToUserProvidedDirectory(final AttachmentViewInfo attachment) {
        //TODO: check if we have to download the attachment first
        currentAttachmentViewInfo = attachment;
        FileBrowserHelper.getInstance().showFileBrowserActivity(MessageViewFragment.this, null,
                ACTIVITY_CHOOSE_DIRECTORY, new FileBrowserFailOverCallback() {
            @Override
            public void onPathEntered(String path) {
                getAttachmentController(attachment).saveAttachmentTo(path);
            }

            @Override
            public void onCancel() {
                // Do nothing
            }
        });
    }

    private AttachmentController getAttachmentController(AttachmentViewInfo attachment) {
        return new AttachmentController(mController, this, attachment);
    }

    /** Marshals remote-download callbacks back onto the main thread via {@code handler}. */
    private class DownloadMessageListener extends MessagingListener {
        @Override
        public void loadMessageForViewFinished(Account account, String folder, String uid,
                final LocalMessage message) {
            handler.post(new Runnable() {
                @Override
                public void run() {
                    onMessageDownloadFinished(message);
                }
            });
        }

        @Override
        public void loadMessageForViewFailed(Account account, String folder, String uid,
                final Throwable t) {
            handler.post(new Runnable() {
                @Override
                public void run() {
                    onDownloadMessageFailed(t);
                }
            });
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.query.aggregation.histogram; import com.google.common.collect.ImmutableList; import org.apache.druid.data.input.Row; import org.apache.druid.java.util.common.Pair; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.io.Closer; import org.apache.druid.query.QueryRunner; import org.apache.druid.query.QueryRunnerTestHelper; import org.apache.druid.query.dimension.DefaultDimensionSpec; import org.apache.druid.query.groupby.GroupByQuery; import org.apache.druid.query.groupby.GroupByQueryConfig; import org.apache.druid.query.groupby.GroupByQueryRunnerFactory; import org.apache.druid.query.groupby.GroupByQueryRunnerTest; import org.apache.druid.query.groupby.GroupByQueryRunnerTestHelper; import org.apache.druid.query.groupby.orderby.DefaultLimitSpec; import org.apache.druid.query.groupby.orderby.OrderByColumnSpec; import org.apache.druid.query.groupby.strategy.GroupByStrategySelector; import org.apache.druid.segment.TestHelper; import org.junit.After; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; 
import java.util.List;

/**
 * Parameterized group-by query tests for the fixed-buckets histogram aggregator.
 * Each test method runs once per (GroupByQueryConfig, QueryRunner) pair produced by
 * {@link #constructorFeeder()}, covering the v1, v1-single-threaded, and v2 group-by
 * strategies.
 */
@RunWith(Parameterized.class)
public class FixedBucketsHistogramGroupByQueryTest
{
  // Accumulates the closeables of every factory built in constructorFeeder() so that
  // teardown() can release them. NOTE(review): the closer is static but closed in a
  // per-test @After; presumably closing it repeatedly is safe — confirm with Closer's
  // contract if resources appear to leak or double-close.
  private static final Closer resourceCloser = Closer.create();

  private final QueryRunner<Row> runner;
  private final GroupByQueryRunnerFactory factory;

  /**
   * Builds the parameter sets: the cross product of three group-by configs with every
   * runner the test helper can create for each corresponding factory.
   *
   * @return constructor argument arrays of {testName, factory, runner}
   */
  @Parameterized.Parameters(name = "{0}")
  public static Iterable<Object[]> constructorFeeder()
  {
    // Anonymous subclasses override toString() so the config name shows up in the
    // parameterized test name (see the StringUtils.format call below).
    final GroupByQueryConfig v1Config = new GroupByQueryConfig()
    {
      @Override
      public String getDefaultStrategy()
      {
        return GroupByStrategySelector.STRATEGY_V1;
      }

      @Override
      public String toString()
      {
        return "v1";
      }
    };
    final GroupByQueryConfig v1SingleThreadedConfig = new GroupByQueryConfig()
    {
      @Override
      public boolean isSingleThreaded()
      {
        return true;
      }

      @Override
      public String getDefaultStrategy()
      {
        return GroupByStrategySelector.STRATEGY_V1;
      }

      @Override
      public String toString()
      {
        return "v1SingleThreaded";
      }
    };
    final GroupByQueryConfig v2Config = new GroupByQueryConfig()
    {
      @Override
      public String getDefaultStrategy()
      {
        return GroupByStrategySelector.STRATEGY_V2;
      }

      @Override
      public String toString()
      {
        return "v2";
      }
    };

    // v1 strategies spill to disk past this row count; raised so the test data fits.
    v1Config.setMaxIntermediateRows(10000);
    v1SingleThreadedConfig.setMaxIntermediateRows(10000);

    final List<Object[]> constructors = new ArrayList<>();
    final List<GroupByQueryConfig> configs = ImmutableList.of(
        v1Config,
        v1SingleThreadedConfig,
        v2Config
    );

    for (GroupByQueryConfig config : configs) {
      final Pair<GroupByQueryRunnerFactory, Closer> factoryAndCloser = GroupByQueryRunnerTest.makeQueryRunnerFactory(
          config
      );
      final GroupByQueryRunnerFactory factory = factoryAndCloser.lhs;
      // Register the factory's resources for release in teardown().
      resourceCloser.register(factoryAndCloser.rhs);
      for (QueryRunner<Row> runner : QueryRunnerTestHelper.makeQueryRunners(factory)) {
        final String testName = StringUtils.format(
            "config=%s, runner=%s",
            config.toString(),
            runner.toString()
        );
        constructors.add(new Object[]{testName, factory, runner});
      }
    }

    return constructors;
  }

  /**
   * @param testName test display name (consumed only by the Parameterized runner)
   * @param factory  factory under the config being exercised
   * @param runner   segment runner paired with that factory
   */
  public FixedBucketsHistogramGroupByQueryTest(
      String testName,
      GroupByQueryRunnerFactory factory,
      QueryRunner runner
  )
  {
    this.factory = factory;
    this.runner = runner;
    // Registers the histogram complex-type serde so the "histo" column round-trips.
    ApproximateHistogramDruidModule.registerSerde();
  }

  @After
  public void teardown() throws IOException
  {
    // Releases all factories registered by constructorFeeder().
    resourceCloser.close();
  }

  /**
   * Groups the full test interval by market, aggregates "index" into a 10-bucket
   * fixed histogram over [0, 2000), and checks both the median post-aggregation and
   * the exact bucket counts for the top (descending) market alias.
   */
  @Test
  public void testGroupByWithFixedHistogramAgg()
  {
    FixedBucketsHistogramAggregatorFactory aggFactory = new FixedBucketsHistogramAggregatorFactory(
        "histo",
        "index",
        10,
        0,
        2000,
        FixedBucketsHistogram.OutlierHandlingMode.OVERFLOW
    );
    GroupByQuery query = new GroupByQuery.Builder()
        .setDataSource(QueryRunnerTestHelper.dataSource)
        .setGranularity(QueryRunnerTestHelper.allGran).setDimensions(new DefaultDimensionSpec(
            QueryRunnerTestHelper.marketDimension,
            "marketalias"
        ))
        .setInterval(QueryRunnerTestHelper.fullOnInterval)
        .setLimitSpec(
            new DefaultLimitSpec(
                Collections.singletonList(new OrderByColumnSpec("marketalias", OrderByColumnSpec.Direction.DESCENDING)),
                1
            )
        ).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, aggFactory)
        .setPostAggregatorSpecs(
            Collections.singletonList(
                new QuantilePostAggregator("quantile", "histo", 0.5f)
            )
        )
        .build();

    List<Row> expectedResults = Collections.singletonList(
        GroupByQueryRunnerTestHelper.createExpectedRow(
            "1970-01-01T00:00:00.000Z",
            "marketalias", "upfront",
            "rows", 186L,
            "quantile", 969.6969604492188f,
            "histo", new FixedBucketsHistogram(
                0,
                2000,
                10,
                FixedBucketsHistogram.OutlierHandlingMode.OVERFLOW,
                new long[]{0, 0, 4, 33, 66, 35, 25, 11, 10, 2},
                186,
                1870.061029,
                545.990623,
                0,
                0,
                0
            )
        )
    );
    Iterable<Row> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
    TestHelper.assertExpectedObjects(expectedResults, results, "fixed-histo");
  }

  /**
   * A post-aggregator whose output name collides with its own input field name
   * ("quantile" reading "quantile") must be rejected at query build/run time.
   */
  @Test(expected = IllegalArgumentException.class)
  public void testGroupByWithSameNameComplexPostAgg()
  {
    FixedBucketsHistogramAggregatorFactory aggFactory = new FixedBucketsHistogramAggregatorFactory(
        "histo",
        "index",
        10,
        0,
        2000,
        FixedBucketsHistogram.OutlierHandlingMode.OVERFLOW
    );
    GroupByQuery query = new GroupByQuery.Builder()
        .setDataSource(QueryRunnerTestHelper.dataSource)
        .setGranularity(QueryRunnerTestHelper.allGran).setDimensions(new DefaultDimensionSpec(
            QueryRunnerTestHelper.marketDimension,
            "marketalias"
        ))
        .setInterval(QueryRunnerTestHelper.fullOnInterval)
        .setLimitSpec(
            new DefaultLimitSpec(
                Collections.singletonList(new OrderByColumnSpec("marketalias", OrderByColumnSpec.Direction.DESCENDING)),
                1
            )
        ).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, aggFactory)
        .setPostAggregatorSpecs(
            Collections.singletonList(
                new QuantilePostAggregator("quantile", "quantile", 0.5f)
            )
        )
        .build();

    // Unreached if the expected IllegalArgumentException is thrown above/at run time.
    List<Row> expectedResults = Collections.singletonList(
        GroupByQueryRunnerTestHelper.createExpectedRow(
            "1970-01-01T00:00:00.000Z",
            "marketalias", "upfront",
            "rows", 186L,
            "quantile", 969.6969604492188f
        )
    );
    Iterable<Row> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
    TestHelper.assertExpectedObjects(expectedResults, results, "fixed-histo");
  }
}
/*
 * Copyright 2000-2014 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.jetbrains.python.psi.search;

import com.intellij.injected.editor.VirtualFileWindow;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleUtilCore;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.roots.FileIndexFacade;
import com.intellij.openapi.roots.OrderRootType;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiElement;
import com.intellij.psi.search.*;
import com.intellij.util.containers.ContainerUtil;
import com.jetbrains.python.PyNames;
import com.jetbrains.python.codeInsight.typing.PyTypeShed;
import com.jetbrains.python.sdk.PythonSdkType;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

/**
 * Python-specific search-scope construction: adjusts the platform's project and
 * "project and libraries" scopes for the way Python SDK roots are registered, and
 * provides helpers to exclude the stdlib test packages (and typeshed third-party
 * stubs) from completion/auto-import scopes.
 *
 * @author yole
 */
public class PyProjectScopeBuilder extends ProjectScopeBuilderImpl {
  public PyProjectScopeBuilder(Project project) {
    super(project);
  }

  /**
   * This method is necessary because of the check in IndexCacheManagerImpl.shouldBeFound()
   * In Python, files in PYTHONPATH are library classes but not library sources, so the check in that method ensures that
   * nothing is found there even when the user selects the "Project and Libraries" scope. Thus, we have to override the
   * isSearchOutsideRootModel() flag for that scope.
   *
   * @return all scope
   */
  @NotNull
  @Override
  public GlobalSearchScope buildAllScope() {
    return new ProjectAndLibrariesScope(myProject) {
      @Override
      public boolean isSearchOutsideRootModel() {
        return true;
      }
    };
  }

  /**
   * Project directories are commonly included in PYTHONPATH and as a result are listed as library classes. Core logic
   * includes them in project scope only if they are also marked as source roots. Python code is often not marked as source
   * root, so we need to override the core logic and check only whether the file is under project content.
   *
   * @return project search scope
   */
  @NotNull
  @Override
  public GlobalSearchScope buildProjectScope() {
    final FileIndexFacade fileIndex = FileIndexFacade.getInstance(myProject);
    return new ProjectScopeImpl(myProject, fileIndex) {
      @Override
      public boolean contains(@NotNull VirtualFile file) {
        // Injected fragments (language-injected editors) are always considered
        // part of the project.
        if (file instanceof VirtualFileWindow) return true;
        return fileIndex.isInContent(file);
      }
    };
  }

  /**
   * Calculates a search scope which excludes Python standard library tests. Using such scope may be quite a bit slower than using
   * the regular "project and libraries" search scope, so it should be used only for displaying the list of variants to the user
   * (for example, for class name completion or auto-import).
   *
   * @param project the project for which the scope should be calculated
   * @return the resulting scope
   */
  public static GlobalSearchScope excludeSdkTestsScope(Project project) {
    final Sdk sdk = ProjectRootManager.getInstance(project).getProjectSdk();
    // TODO cache the scope in project userdata (update when SDK paths change or different project SDK is selected)
    GlobalSearchScope scope = excludeSdkTestsScope(project, sdk);
    // Fall back to the plain "all" scope when there is nothing to exclude
    // (no SDK, or no test directories found under it).
    return scope != null ? ProjectScope.getAllScope(project).intersectWith(scope) : ProjectScope.getAllScope(project);
  }

  /**
   * Like {@link #excludeSdkTestsScope(Project)}, but prefers the SDK of the module
   * containing {@code anchor}; falls back to the project SDK when the element is not
   * in a module or the module has no Python SDK.
   *
   * @param anchor element whose module SDK should drive the exclusion
   * @return scope excluding that SDK's stdlib tests (or the project-level variant)
   */
  public static GlobalSearchScope excludeSdkTestsScope(PsiElement anchor) {
    final Project project = anchor.getProject();
    Module module = ModuleUtilCore.findModuleForPsiElement(anchor);
    if (module != null) {
      Sdk sdk = PythonSdkType.findPythonSdk(module);
      if (sdk != null) {
        GlobalSearchScope excludeScope = excludeSdkTestsScope(project, sdk);
        if (excludeScope != null) {
          return GlobalSearchScope.allScope(project).intersectWith(excludeScope);
        }
      }
    }
    return excludeSdkTestsScope(project);
  }

  /**
   * Builds a "NOT in stdlib tests / typeshed third-party stubs" scope for the given
   * SDK, or returns null when there is nothing to exclude.
   */
  @Nullable
  private static GlobalSearchScope excludeSdkTestsScope(Project project, Sdk sdk) {
    if (sdk != null && sdk.getSdkType() instanceof PythonSdkType) {
      List<VirtualFile> excludedDirs = new ArrayList<>();
      VirtualFile libDir = findLibDir(sdk);
      if (libDir != null) {
        // superset of test dirs found in Python 2.5 to 3.1
        excludedDirs.addAll(findTestDirs(libDir, "test", "bsddb/test", "ctypes/test", "distutils/tests",
                                         "email/test", "importlib/test", "json/tests", "lib2to3/tests",
                                         "sqlite3/test", "tkinter/test", "idlelib/testcode.py"));
      }
      // XXX: Disable resolving to any third-party libraries from typeshed in the same places where we don't want SDK tests
      excludedDirs.addAll(Arrays.stream(sdk.getRootProvider().getFiles(OrderRootType.CLASSES))
                            .filter(file -> PyTypeShed.INSTANCE.isInside(file) ||
                                            PyTypeShed.INSTANCE.isInThirdPartyLibraries(file))
                            .collect(Collectors.toList()));
      if (!excludedDirs.isEmpty()) {
        GlobalSearchScope scope = buildUnionScope(project, excludedDirs);
        // Invert: callers intersect with this to *remove* the excluded dirs.
        return GlobalSearchScope.notScope(scope);
      }
    }
    return null;
  }

  /**
   * Unions recursive directory scopes over all {@code testDirs}.
   * Precondition: {@code testDirs} is non-empty (callers check isEmpty() first).
   */
  private static GlobalSearchScope buildUnionScope(Project project, List<VirtualFile> testDirs) {
    GlobalSearchScope scope = GlobalSearchScopes.directoryScope(project, testDirs.get(0), true);
    for (int i = 1; i < testDirs.size(); i++) {
      scope = scope.union(GlobalSearchScopes.directoryScope(project, testDirs.get(i), true));
    }
    return scope;
  }

  /**
   * Resolves each relative path under {@code baseDir}, keeping only the ones that
   * actually exist.
   */
  private static List<VirtualFile> findTestDirs(VirtualFile baseDir, String... relativePaths) {
    List<VirtualFile> result = new ArrayList<>();
    for (String path : relativePaths) {
      VirtualFile child = baseDir.findFileByRelativePath(path);
      if (child != null) {
        result.add(child);
      }
    }
    return result;
  }

  /**
   * Finds the stdlib "lib" directory among the SDK's class roots, or null.
   */
  @Nullable
  public static VirtualFile findLibDir(Sdk sdk) {
    return findLibDir(sdk.getRootProvider().getFiles(OrderRootType.CLASSES));
  }

  /**
   * Locates the lib (or lib/pythonX.Y) directory of a virtualenv-based SDK by
   * matching the SDK class roots against the virtualenv root on disk, or null if the
   * SDK home is unset or not a virtualenv.
   */
  public static VirtualFile findVirtualEnvLibDir(Sdk sdk) {
    VirtualFile[] classVFiles = sdk.getRootProvider().getFiles(OrderRootType.CLASSES);
    String homePath = sdk.getHomePath();
    if (homePath != null) {
      File root = PythonSdkType.getVirtualEnvRoot(homePath);
      if (root != null) {
        File libRoot = new File(root, "lib");
        File[] versionRoots = libRoot.listFiles();
        // On non-Windows layouts the actual lib dir is lib/pythonX.Y.
        if (versionRoots != null && !SystemInfo.isWindows) {
          final File versionRoot = ContainerUtil.find(versionRoots,
                                                      file -> file.isDirectory() && file.getName().startsWith("python"));
          if (versionRoot != null) {
            libRoot = versionRoot;
          }
        }
        final String libRootPath = libRoot.getPath();
        for (VirtualFile file : classVFiles) {
          if (FileUtil.pathsEqual(file.getPath(), libRootPath)) {
            return file;
          }
          // venv module doesn't add virtualenv's lib/pythonX.Y directory itself in sys.path
          // NOTE(review): assumes file.getParent() is non-null here (a site-packages
          // root always has a parent directory) — confirm for exotic VFS roots.
          final VirtualFile parent = file.getParent();
          if (PyNames.SITE_PACKAGES.equals(file.getName()) && FileUtil.pathsEqual(parent.getPath(), libRootPath)) {
            return parent;
          }
        }
      }
    }
    return null;
  }

  /**
   * Heuristic stdlib-root detection: a root containing __future__, xml and email is
   * taken to be the Python standard library directory.
   */
  @Nullable
  private static VirtualFile findLibDir(VirtualFile[] files) {
    for (VirtualFile file : files) {
      if (!file.isValid()) {
        continue;
      }
      if ((file.findChild("__future__.py") != null || file.findChild("__future__.pyc") != null) &&
          file.findChild("xml") != null && file.findChild("email") != null) {
        return file;
      }
      // Mock SDK does not have aforementioned modules
      if (ApplicationManager.getApplication().isUnitTestMode() && file.getName().equals("Lib")) {
        return file;
      }
    }
    return null;
  }
}
/*
    The MIT License (MIT)

    Copyright (c) 2014 Marcus Craske <limpygnome@gmail.com>

    Permission is hereby granted, free of charge, to any person obtaining a copy
    of this software and associated documentation files (the "Software"), to
    deal in the Software without restriction, including without limitation the
    rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
    sell copies of the Software, and to permit persons to whom the Software is
    furnished to do so, subject to the following conditions:

    The above copyright notice and this permission notice shall be included in
    all copies or substantial portions of the Software.

    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
    DEALINGS IN THE SOFTWARE.
    ----------------------------------------------------------------------------
    Authors:    Marcus Craske           <limpygnome@gmail.com>
    ----------------------------------------------------------------------------
*/
package pals.base.assessment;

import java.util.ArrayList;
import pals.base.Logging;
import pals.base.NodeCore;
import pals.base.Plugin;
import pals.base.UUID;
import pals.base.database.Connector;
import pals.base.database.DatabaseException;
import pals.base.database.Result;

/**
 * A model to represent the criteria type, for a question.
 *
 * @version 1.0
 */
public class TypeCriteria
{
    // Enums *******************************************************************
    /**
     * The status from attempting to persist the model.
     *
     * @since 1.0
     */
    public enum PersistStatus
    {
        /**
         * Successfully persisted.
         *
         * @since 1.0
         */
        Success,
        /**
         * Failed to persist due to an exception or unknown state.
         *
         * @since 1.0
         */
        Failed,
        /**
         * Invalid identifier.
         *
         * @since 1.0
         */
        Invalid_UUID,
        /**
         * Invalid plugin UUID.
         *
         * @since 1.0
         */
        Invalid_PluginUUID,
        /**
         * Invalid title.
         *
         * @since 1.0
         */
        Invalid_Title,
        /**
         * Invalid description.
         *
         * @since 1.0
         */
        Invalid_Description,
    }
    // Fields ******************************************************************
    private boolean persisted;      // True once the row exists in the database.
    private UUID    uuidCType;      // Identifier of this criteria type.
    private UUID    uuidPlugin;     // Plugin responsible for handling the type.
    private String  title;          // Human-readable title.
    private String  description;    // Human-readable description.
    // Methods - Constructors **************************************************
    /**
     * Constructs a new type of criteria.
     *
     * @since 1.0
     */
    public TypeCriteria()
    {
        this(null, null, null, null);
    }
    /**
     * Constructs a new type of criteria.
     *
     * @param uuidCType The identifier of the criteria.
     * @param uuidPlugin The identifier of the plugin, which is responsible
     * for handling the criteria.
     * @param title The title of the criteria.
     * @param description A description of the criteria.
     * @since 1.0
     */
    public TypeCriteria(UUID uuidCType, UUID uuidPlugin, String title, String description)
    {
        this.persisted = false;
        this.uuidCType = uuidCType;
        this.uuidPlugin = uuidPlugin;
        this.title = title;
        this.description = description;
    }
    // Methods - Persistence ***************************************************
    /**
     * Loads all the persisted criteria-types for a question-type.
     *
     * @param conn Database connector.
     * @param qt Type of question.
     * @return Array of types of questions available; empty on failure.
     * @since 1.0
     */
    public static TypeCriteria[] loadAll(Connector conn, TypeQuestion qt)
    {
        try
        {
            Result res = conn.read("SELECT * FROM pals_criteria_types WHERE uuid_ctype IN (SELECT uuid_ctype FROM pals_qtype_ctype WHERE uuid_qtype=?);", qt.getUuidQType().getBytes());
            TypeCriteria c;
            ArrayList<TypeCriteria> buffer = new ArrayList<>();
            while(res.next())
            {
                // Skip rows that fail to parse; load(Result) logs the cause.
                if((c = load(res)) != null)
                    buffer.add(c);
            }
            return buffer.toArray(new TypeCriteria[buffer.size()]);
        }
        catch(DatabaseException ex)
        {
            NodeCore core;
            if((core = NodeCore.getInstance())!=null)
                core.getLogging().logEx("Base", ex, Logging.EntryType.Warning);
            return new TypeCriteria[0];
        }
    }
    /**
     * Loads a persisted model from the database.
     *
     * @param conn Database connector.
     * @param uuidCType The UUID of the model.
     * @return An instance of the model or null.
     * @since 1.0
     */
    public static TypeCriteria load(Connector conn, UUID uuidCType)
    {
        if(uuidCType == null)
            return null;
        try
        {
            Result res = conn.read("SELECT * FROM pals_criteria_types WHERE uuid_ctype=?;", uuidCType.getBytes());
            return res.next() ? load(res) : null;
        }
        catch(DatabaseException ex)
        {
            NodeCore core;
            if((core = NodeCore.getInstance())!=null)
                core.getLogging().logEx("Base", ex, Logging.EntryType.Warning);
            return null;
        }
    }
    /**
     * Loads a persisted model from a result; next() should be pre-invoked.
     *
     * @param result The result with the data; next() should be pre-invoked.
     * @return An instance of the model or null.
     * @since 1.0
     */
    public static TypeCriteria load(Result result)
    {
        try
        {
            TypeCriteria tc = new TypeCriteria(UUID.parse((byte[])result.get("uuid_ctype")), UUID.parse((byte[])result.get("uuid_plugin")), (String)result.get("title"), (String)result.get("description"));
            // Mark as persisted so persist() issues an UPDATE rather than INSERT.
            tc.persisted = true;
            return tc;
        }
        catch(DatabaseException ex)
        {
            NodeCore core;
            if((core = NodeCore.getInstance())!=null)
                core.getLogging().logEx("Base", ex, Logging.EntryType.Warning);
            return null;
        }
    }
    /**
     * Persists the model; inserts a new row when not yet persisted, otherwise
     * updates the existing row.
     *
     * @param conn Database connector.
     * @return Status from the operation.
     * @since 1.0
     */
    public TypeCriteria.PersistStatus persist(Connector conn)
    {
        // Validate data
        if(uuidCType == null)
            return TypeCriteria.PersistStatus.Invalid_UUID;
        else if(uuidPlugin == null)
            return TypeCriteria.PersistStatus.Invalid_PluginUUID;
        else if(title == null)
            return TypeCriteria.PersistStatus.Invalid_Title;
        // Fixed: the original condition was 'description.length() < 0', which is
        // always false (String.length() is non-negative) and made the empty-string
        // case of Invalid_Description unreachable; empty descriptions are now
        // rejected as the status evidently intended.
        else if(description == null || description.length() == 0)
            return TypeCriteria.PersistStatus.Invalid_Description;
        else
        {
            // Attempt to persist data
            try
            {
                if(persisted)
                {
                    conn.execute("UPDATE pals_criteria_types SET uuid_plugin=?, title=?, description=? WHERE uuid_ctype=?;", uuidPlugin.getBytes(), title, description, uuidCType.getBytes());
                }
                else
                {
                    conn.execute("INSERT INTO pals_criteria_types (uuid_ctype, uuid_plugin, title, description) VALUES(?,?,?,?);", uuidCType.getBytes(), uuidPlugin.getBytes(), title, description);
                    persisted = true;
                }
                return TypeCriteria.PersistStatus.Success;
            }
            catch(DatabaseException ex)
            {
                NodeCore core;
                if((core = NodeCore.getInstance())!=null)
                    core.getLogging().logEx("Base", ex, Logging.EntryType.Warning);
                return TypeCriteria.PersistStatus.Failed;
            }
        }
    }
    /**
     * Removes the model from the database.
     *
     * @param conn Database connector.
     * @return True = removed, false = failed.
     * @since 1.0
     */
    public boolean delete(Connector conn)
    {
        if(uuidCType == null || !persisted)
            return false;
        try
        {
            conn.execute("DELETE FROM pals_criteria_types WHERE uuid_ctype=?;", uuidCType.getBytes());
            persisted = false;
            return true;
        }
        catch(DatabaseException ex)
        {
            NodeCore core;
            if((core = NodeCore.getInstance())!=null)
                core.getLogging().logEx("Base", ex, Logging.EntryType.Warning);
            return false;
        }
    }
    // Methods - Mutators ******************************************************
    /**
     * Sets the identifier of the criteria.
     *
     * @param uuidCType The new identifier; cannot be null.
     * @since 1.0
     */
    public void setUuidCType(UUID uuidCType)
    {
        this.uuidCType = uuidCType;
    }
    /**
     * Sets the plugin which handles and owns this criteria.
     *
     * @param uuidPlugin Plugin identifier; cannot be null.
     * @since 1.0
     */
    public void setUuidPlugin(UUID uuidPlugin)
    {
        this.uuidPlugin = uuidPlugin;
    }
    /**
     * Sets the title.
     *
     * NOTE(review): title length is not validated against getTitleMin()/
     * getTitleMax() here or in persist(); confirm whether callers enforce it.
     *
     * @param title The title for this model; cannot be null.
     * @since 1.0
     */
    public void setTitle(String title)
    {
        this.title = title;
    }
    /**
     * Sets the description.
     *
     * @param description The description; cannot be null.
     * @since 1.0
     */
    public void setDescription(String description)
    {
        this.description = description;
    }
    // Methods - Accessors *****************************************************
    /**
     * Indicates if the model has been persisted.
     *
     * @return True = persisted, false = not persisted.
     * @since 1.0
     */
    public boolean isPersisted()
    {
        return persisted;
    }
    /**
     * The UUID identifier.
     *
     * @return The UUID of this model.
     * @since 1.0
     */
    public UUID getUuidCType()
    {
        return uuidCType;
    }
    /**
     * The UUID of the plugin.
     *
     * @return The UUID of the plugin which owns this model.
     * @since 1.0
     */
    public UUID getUuidPlugin()
    {
        return uuidPlugin;
    }
    /**
     * The title.
     *
     * @return The title of this model.
     * @since 1.0
     */
    public String getTitle()
    {
        return title;
    }
    /**
     * The description.
     *
     * @return The description of this model.
     * @since 1.0
     */
    public String getDescription()
    {
        return description;
    }
    // Methods - Accessors - Limits ********************************************
    /**
     * The minimum length of a title.
     *
     * @return The minimum length of a title.
     * @since 1.0
     */
    public int getTitleMin()
    {
        return 1;
    }
    /**
     * The maximum length of a title.
     *
     * @return The maximum length of a title.
     * @since 1.0
     */
    public int getTitleMax()
    {
        return 64;
    }
    // Methods - Accessors - Static ********************************************
    /**
     * Indicates if a type of criteria is able to serve a type of question.
     *
     * @param conn Database connector.
     * @param qt Type of question.
     * @param ct Type of criteria.
     * @return True = capable, false = not capable.
     * @since 1.0
     */
    public static boolean isCapable(Connector conn, TypeQuestion qt, TypeCriteria ct)
    {
        try
        {
            return ((long)conn.executeScalar("SELECT COUNT('') FROM pals_qtype_ctype WHERE uuid_qtype=? AND uuid_ctype=?;", qt.getUuidQType().getBytes(), ct.getUuidCType().getBytes())) > 0;
        }
        catch(DatabaseException ex)
        {
            NodeCore core;
            if((core = NodeCore.getInstance())!=null)
                core.getLogging().logEx("Base", ex, Logging.EntryType.Warning);
            return false;
        }
    }
    // Methods - Static ********************************************************
    /**
     * Registers a new criteria type. If a type already exists with the same
     * UUID, it's loaded and returned and no changes will occur on the
     * database.
     *
     * @param conn Database connector.
     * @param core The current instance of the core.
     * @param plugin The plugin which owns the type.
     * @param uuid The identifier of the type.
     * @param title The title of the type.
     * @param description A description for the type.
     * @return Instance of type. Can be null if type cannot be persisted.
     * @since 1.0
     */
    public static TypeCriteria register(Connector conn, NodeCore core, Plugin plugin, UUID uuid, String title, String description)
    {
        TypeCriteria tc = new TypeCriteria(uuid, plugin.getUUID(), title, description);
        TypeCriteria.PersistStatus psc = tc.persist(conn);
        if(psc != TypeCriteria.PersistStatus.Success)
        {
            core.getLogging().log("Base.TypeCriteria#register", "Failed to register type-criteria '"+title+"' during installation!", Logging.EntryType.Error);
            return null;
        }
        return tc;
    }
    /**
     * Unregisters a type of criteria.
     *
     * @param conn Database connector.
     * @param uuid The identifier of the type.
     * @return Indicates if the operation has succeeded.
     * @since 1.0
     */
    public static boolean unregister(Connector conn, UUID uuid)
    {
        TypeCriteria tc = TypeCriteria.load(conn, uuid);
        if(tc != null)
            return tc.delete(conn);
        return false;
    }
    // Methods - Overrides *****************************************************
    /**
     * Tests if two instances are equal based on the specified object being
     * of this type and the plugin and type UUIDs being equal.
     *
     * @param o The object to be tested.
     * @return True = equal, false = not equal.
     * @since 1.0
     */
    @Override
    public boolean equals(Object o)
    {
        if(o == null)
            return false;
        else if(uuidPlugin == null || uuidCType == null)
            return false;
        else if(!(o instanceof TypeCriteria))
            return false;
        TypeCriteria tq = (TypeCriteria)o;
        return uuidPlugin.equals(tq.getUuidPlugin()) && uuidCType.equals(tq.getUuidCType());
    }
    /**
     * The hash-code, based on the hash-code of the UUID plugin hash-code.
     *
     * @return Hash-code.
     * @since 1.0
     */
    @Override
    public int hashCode()
    {
        return uuidPlugin == null ? -1 : uuidPlugin.hashCode();
    }
}
/** * $Revision: $ * $Date: $ * * Copyright (C) 2005-2008 Jive Software. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jivesoftware.openfire.http; import java.io.IOException; import java.net.InetAddress; import java.util.Map; import java.util.TimerTask; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ThreadFactory; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import org.dom4j.DocumentException; import org.dom4j.DocumentHelper; import org.dom4j.Element; import org.jivesoftware.openfire.SessionManager; import org.jivesoftware.openfire.StreamID; import org.jivesoftware.openfire.auth.UnauthorizedException; import org.jivesoftware.util.JiveConstants; import org.jivesoftware.util.JiveGlobals; import org.jivesoftware.util.TaskEngine; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Manages sessions for all users connecting to Openfire using the HTTP binding protocol, * <a href="http://www.xmpp.org/extensions/xep-0124.html">XEP-0124</a>. 
 */
public class HttpSessionManager {

    private static final Logger Log = LoggerFactory.getLogger(HttpSessionManager.class);

    private SessionManager sessionManager;

    // Active BOSH sessions keyed by stream ID. Initial capacity is configurable so
    // deployments expecting many concurrent BOSH clients can pre-size the map.
    private Map<String, HttpSession> sessionMap = new ConcurrentHashMap<String, HttpSession>(
            JiveGlobals.getIntProperty("xmpp.httpbind.session.initial.count", 16));

    // Periodic reaper that closes sessions idle past their inactivity timeout; created in start().
    private TimerTask inactivityTask;

    // Worker pool used to asynchronously route packets received over BOSH connections.
    private ThreadPoolExecutor sendPacketPool;

    // Keeps sessionMap consistent: removes a session once it is closed. Individual
    // HTTP connection open/close events need no bookkeeping here.
    private SessionListener sessionListener = new SessionListener() {
        public void connectionOpened(HttpSession session, HttpConnection connection) {
        }

        public void connectionClosed(HttpSession session, HttpConnection connection) {
        }

        public void sessionClosed(HttpSession session) {
            sessionMap.remove(session.getStreamID().getID());
        }
    };

    /**
     * Creates a new HttpSessionManager instance.
     */
    public HttpSessionManager() {
        // Migrate legacy property names before init() reads them.
        JiveGlobals.migrateProperty("xmpp.httpbind.worker.threads");
        JiveGlobals.migrateProperty("xmpp.httpbind.worker.timeout");
        this.sessionManager = SessionManager.getInstance();
        init();
    }

    /**
     * (Re)creates the worker thread pool used to process inbound BOSH packets.
     * Called from the constructor; calling it again replaces the pool reference
     * without shutting down the previous pool, hence the warning log below.
     */
    public void init() {
        Log.warn("HttpSessionManager.init() recreate sendPacketPool");
        // Configure a pooled executor to handle async routing for incoming packets
        // with a default size of 16 threads ("xmpp.httpbind.worker.threads"); also
        // uses an unbounded task queue and configurable keep-alive (default: 60 secs)
        //
        // Note: server supports up to 254 client threads by default (@see HttpBindManager)
        // BOSH installations expecting heavy loads may want to allocate additional threads
        // to this worker pool to ensure timely delivery of inbound packets
        int maxPoolSize = JiveGlobals.getIntProperty("xmpp.httpbind.worker.threads",
                // use deprecated property as default (shared with ConnectionManagerImpl)
                JiveGlobals.getIntProperty("xmpp.client.processing.threads", 8));
        int keepAlive = JiveGlobals.getIntProperty("xmpp.httpbind.worker.timeout", 60);

        sendPacketPool = new ThreadPoolExecutor(getCorePoolSize(maxPoolSize), maxPoolSize, keepAlive,
                TimeUnit.SECONDS,
                new LinkedBlockingQueue<Runnable>(), // unbounded task queue
                new ThreadFactory() { // custom thread factory for BOSH workers
                    final AtomicInteger counter = new AtomicInteger(1);

                    public Thread newThread(Runnable runnable) {
                        // Daemon threads so this pool never blocks JVM shutdown.
                        Thread thread = new Thread(Thread.currentThread().getThreadGroup(), runnable,
                                "httpbind-worker-" + counter.getAndIncrement());
                        thread.setDaemon(true);
                        return thread;
                    }
                });
    }

    // Core pool is a quarter of the maximum, plus one so it is never zero.
    private int getCorePoolSize(int maxPoolSize) {
        return (maxPoolSize/4)+1;
    }

    /**
     * Starts the services used by the HttpSessionManager: schedules the idle-session
     * reaper (every 30 seconds) and pre-starts a worker thread.
     */
    public void start() {
        inactivityTask = new HttpSessionReaper();
        TaskEngine.getInstance().schedule(inactivityTask, 30 * JiveConstants.SECOND,
                30 * JiveConstants.SECOND);
        sendPacketPool.prestartCoreThread();
    }

    /**
     * Stops any services and cleans up any resources used by the HttpSessionManager:
     * cancels the reaper, closes every tracked session and shuts the worker pool down.
     */
    public void stop() {
        inactivityTask.cancel();
        for (HttpSession session : sessionMap.values()) {
            // close() fires sessionListener.sessionClosed(), which removes the entry;
            // safe while iterating because sessionMap is a ConcurrentHashMap.
            session.close();
        }
        sessionMap.clear();
        sendPacketPool.shutdown();
    }

    /**
     * Returns the session related to a stream id.
     *
     * @param streamID the stream id to retrieve the session.
     * @return the session related to the provided stream id, or null if none is tracked.
     */
    public HttpSession getSession(String streamID) {
        return sessionMap.get(streamID);
    }

    /**
     * Creates an HTTP binding session which will allow a user to exchange packets with Openfire.
     * Reads the XEP-0124 attributes (xml:lang, wait, hold, ver) from the request body,
     * applies server-side limits, and delivers the session creation response on the
     * supplied connection before returning.
     *
     * @param address the internet address that was used to bind to Openfire.
     * @param rootNode the body element that was sent containing the request for a new session.
     * @param connection the HTTP connection object which abstracts the individual connections to
     * Openfire over the HTTP binding protocol. The initial session creation response is returned to
     * this connection.
     * @return the created HTTP session.
     *
     * @throws UnauthorizedException if the Openfire server is currently in an uninitialized state.
     * Either shutting down or starting up.
     * @throws HttpBindException when there is an internal server error related to the creation of
     * the initial session creation response.
     */
    public HttpSession createSession(InetAddress address, Element rootNode, HttpConnection connection)
            throws UnauthorizedException, HttpBindException {
        // TODO Check if IP address is allowed to connect to the server

        // Default language is English ("en").
        String language = rootNode.attributeValue("xml:lang");
        if (language == null || "".equals(language)) {
            language = "en";
        }

        int wait = getIntAttribute(rootNode.attributeValue("wait"), 60);
        int hold = getIntAttribute(rootNode.attributeValue("hold"), 1);

        // Clients that omit 'ver' are assumed to speak BOSH 1.5.
        String version = rootNode.attributeValue("ver");
        if (version == null || "".equals(version)) {
            version = "1.5";
        }

        HttpSession session = createSession(connection.getRequestId(), address, connection);
        // Client-requested wait is capped by the server-side maximum.
        session.setWait(Math.min(wait, getMaxWait()));
        session.setHold(hold);
        session.setSecure(connection.isSecure());
        session.setMaxPollingInterval(getPollingInterval());
        session.setMaxRequests(getMaxRequests());
        session.setMaxPause(getMaxPause());
        // Polling sessions get their own (longer) default inactivity timeout.
        if(session.isPollingSession()) {
            session.setDefaultInactivityTimeout(getPollingInactivityTimeout());
        }
        else {
            session.setDefaultInactivityTimeout(getInactivityTimeout());
        }
        session.resetInactivityTimeout();

        // Store language and version information in the connection.
        session.setLanguage(language);
        // Version is "major.minor"; assumes well-formed input after the default above.
        String [] versionString = version.split("\\.");
        session.setMajorVersion(Integer.parseInt(versionString[0]));
        session.setMinorVersion(Integer.parseInt(versionString[1]));
        connection.setSession(session);
        try {
            connection.deliverBody(createSessionCreationResponse(session), true);
        }
        catch (HttpConnectionClosedException e) {
            Log.error("Error creating session.", e);
            throw new HttpBindException("Internal server error", BoshBindingError.internalServerError);
        }
        catch (DocumentException e) {
            Log.error("Error creating session.", e);
            throw new HttpBindException("Internal server error", BoshBindingError.internalServerError);
        }
        catch (IOException e) {
            Log.error("Error creating session.", e);
            throw new HttpBindException("Internal server error", BoshBindingError.internalServerError);
        }
        return session;
    }

    /**
     * Returns the maximum length of a temporary session pause (in seconds) that the client MAY
     * request.
     *
     * @return the maximum length of a temporary session pause (in seconds) that the client MAY
     * request.
     */
    public int getMaxPause() {
        return JiveGlobals.getIntProperty("xmpp.httpbind.client.maxpause", 300);
    }

    /**
     * Returns the longest time (in seconds) that Openfire is allowed to wait before responding to
     * any request during the session. This enables the client to prevent its TCP connection from
     * expiring due to inactivity, as well as to limit the delay before it discovers any network
     * failure.
     *
     * @return the longest time (in seconds) that Openfire is allowed to wait before responding to
     * any request during the session.
     */
    public int getMaxWait() {
        return JiveGlobals.getIntProperty("xmpp.httpbind.client.requests.wait",
                Integer.MAX_VALUE);
    }

    /**
     * Openfire SHOULD include two additional attributes in the session creation response element,
     * specifying the shortest allowable polling interval and the longest allowable inactivity
     * period (both in seconds). Communication of these parameters enables the client to engage in
     * appropriate behavior (e.g., not sending empty request elements more often than desired, and
     * ensuring that the periods with no requests pending are never too long).
     *
     * @return the maximum allowable period over which a client can send empty requests to the
     * server.
     */
    public int getPollingInterval() {
        return JiveGlobals.getIntProperty("xmpp.httpbind.client.requests.polling", 5);
    }

    /**
     * Openfire MAY limit the number of simultaneous requests the client makes with the 'requests'
     * attribute. The RECOMMENDED value is "2". Servers that only support polling behavior MUST
     * prevent clients from making simultaneous requests by setting the 'requests' attribute to a
     * value of "1" (however, polling is NOT RECOMMENDED). In any case, clients MUST NOT make more
     * simultaneous requests than specified by the Openfire.
     *
     * @return the number of simultaneous requests allowable.
     */
    public int getMaxRequests() {
        return JiveGlobals.getIntProperty("xmpp.httpbind.client.requests.max", 2);
    }

    /**
     * Seconds a session has to be idle to be closed. Default is 30. Sending stanzas to the
     * client is not considered as activity. We are only considering the connection active when the
     * client sends some data or heartbeats (i.e. whitespaces) to the server. The reason for this is
     * that sending data will fail if the connection is closed. And if the thread is blocked while
     * sending data (because the socket is closed) then the clean up thread will close the socket
     * anyway.
     *
     * @return Seconds a session has to be idle to be closed.
     */
    public int getInactivityTimeout() {
        return JiveGlobals.getIntProperty("xmpp.httpbind.client.idle", 30);
    }

    /**
     * Seconds a polling session has to be idle to be closed. Default is 60. Sending stanzas to the
     * client is not considered as activity. We are only considering the connection active when the
     * client sends some data or heartbeats (i.e. whitespaces) to the server. The reason for this is
     * that sending data will fail if the connection is closed. And if the thread is blocked while
     * sending data (because the socket is closed) then the clean up thread will close the socket
     * anyway.
     *
     * @return Seconds a polling session has to be idle to be closed.
     */
    public int getPollingInactivityTimeout() {
        return JiveGlobals.getIntProperty("xmpp.httpbind.client.idle.polling", 60);
    }

    /**
     * Creates and registers the underlying client session for the given request id and address.
     */
    private HttpSession createSession(long rid, InetAddress address, HttpConnection connection)
            throws UnauthorizedException {
        // Create a ClientSession for this user.
        StreamID streamID = SessionManager.getInstance().nextStreamID();
        // Send to the server that a new client session has been created
        HttpSession session = sessionManager.createClientHttpSession(rid, address, streamID, connection);
        // Register that the new session is associated with the specified stream ID
        sessionMap.put(streamID.getID(), session);
        session.addSessionCloseListener(sessionListener);
        return session;
    }

    /**
     * Parses an integer attribute value, returning defaultValue when the value is
     * null, blank, or not a valid integer.
     */
    private static int getIntAttribute(String value, int defaultValue) {
        if (value == null || "".equals(value.trim())) {
            return defaultValue;
        }
        try {
            return Integer.valueOf(value);
        }
        catch (Exception ex) {
            return defaultValue;
        }
    }

    /**
     * Builds the XEP-0124 session creation response body for the given session,
     * including hold/ack/maxpause/ver only for clients speaking BOSH 1.6 or later.
     * NOTE(review): the 'secure' attribute is always reported as true here,
     * regardless of the actual transport — confirm this is intentional.
     */
    private static String createSessionCreationResponse(HttpSession session) throws DocumentException {
        Element response = DocumentHelper.createElement("body");
        response.addNamespace("", "http://jabber.org/protocol/httpbind");
        response.addNamespace("stream", "http://etherx.jabber.org/streams");
        response.addAttribute("from", session.getServerName());
        response.addAttribute("authid", session.getStreamID().getID());
        response.addAttribute("sid", session.getStreamID().getID());
        response.addAttribute("secure", Boolean.TRUE.toString());
        response.addAttribute("requests", String.valueOf(session.getMaxRequests()));
        response.addAttribute("inactivity", String.valueOf(session.getInactivityTimeout()));
        response.addAttribute("polling", String.valueOf(session.getMaxPollingInterval()));
        response.addAttribute("wait", String.valueOf(session.getWait()));
        if ((session.getMajorVersion() == 1 && session.getMinorVersion() >= 6) ||
                session.getMajorVersion() > 1) {
            response.addAttribute("hold", String.valueOf(session.getHold()));
            response.addAttribute("ack", String.valueOf(session.getLastAcknowledged()));
            response.addAttribute("maxpause", String.valueOf(session.getMaxPause()));
            response.addAttribute("ver", String.valueOf(session.getMajorVersion())
                    + "." + String.valueOf(session.getMinorVersion()));
        }

        Element features = response.addElement("stream:features");
        for (Element feature : session.getAvailableStreamFeaturesElements()) {
            features.add(feature);
        }

        return response.asXML();
    }

    /**
     * Periodic task that closes sessions which have been idle longer than their
     * inactivity timeout. Failures for one session are logged and do not prevent
     * the remaining sessions from being checked.
     */
    private class HttpSessionReaper extends TimerTask {

        @Override
        public void run() {
            long currentTime = System.currentTimeMillis();
            for (HttpSession session : sessionMap.values()) {
                try {
                    long lastActive = currentTime - session.getLastActivity();
                    if (Log.isDebugEnabled()) {
                        Log.debug("Session was last active " + lastActive + " ms ago: "
                                + session.getAddress());
                    }
                    if (lastActive > session.getInactivityTimeout() * JiveConstants.SECOND) {
                        Log.info("Closing idle session: " + session.getAddress());
                        session.close();
                    }
                } catch (Exception e) {
                    Log.error("Failed to determine idle state for session: " + session, e);
                }
            }
        }
    }

    /**
     * Submits a task to the BOSH worker pool for asynchronous execution.
     */
    protected void execute(Runnable runnable) {
        this.sendPacketPool.execute(runnable);
    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.aggregations.pipeline.bucketselector; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.script.Script; import org.elasticsearch.script.Script.ScriptField; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Objects; import java.util.TreeMap; import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.Parser.BUCKETS_PATH; import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.Parser.GAP_POLICY; public class 
BucketSelectorPipelineAggregatorBuilder
        extends PipelineAggregatorBuilder<BucketSelectorPipelineAggregatorBuilder> {
    public static final String NAME = BucketSelectorPipelineAggregator.TYPE.name();
    public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

    // Maps script variable names (e.g. "_value0") to the buckets paths they resolve.
    private final Map<String, String> bucketsPathsMap;
    private Script script;
    private GapPolicy gapPolicy = GapPolicy.SKIP;

    /**
     * Creates a builder from an explicit script-variable-name to buckets-path map.
     * The super call passes the paths sorted by variable name (TreeMap ordering).
     */
    public BucketSelectorPipelineAggregatorBuilder(String name, Map<String, String> bucketsPathsMap,
            Script script) {
        super(name, BucketSelectorPipelineAggregator.TYPE.name(), new TreeMap<>(bucketsPathsMap).values()
                .toArray(new String[bucketsPathsMap.size()]));
        this.bucketsPathsMap = bucketsPathsMap;
        this.script = script;
    }

    /**
     * Convenience constructor: buckets paths are auto-named "_value0", "_value1", ...
     */
    public BucketSelectorPipelineAggregatorBuilder(String name, Script script, String... bucketsPaths) {
        this(name, convertToBucketsPathMap(bucketsPaths), script);
    }

    /**
     * Read from a stream. Field order must mirror {@link #doWriteTo(StreamOutput)}:
     * map size, then key/value pairs, then the script, then the gap policy.
     */
    public BucketSelectorPipelineAggregatorBuilder(StreamInput in) throws IOException {
        super(in, BucketSelectorPipelineAggregator.TYPE.name());
        int mapSize = in.readVInt();
        bucketsPathsMap = new HashMap<String, String>(mapSize);
        for (int i = 0; i < mapSize; i++) {
            bucketsPathsMap.put(in.readString(), in.readString());
        }
        script = new Script(in);
        gapPolicy = GapPolicy.readFrom(in);
    }

    // Serializes in the exact order the stream constructor reads.
    @Override
    protected void doWriteTo(StreamOutput out) throws IOException {
        out.writeVInt(bucketsPathsMap.size());
        for (Entry<String, String> e : bucketsPathsMap.entrySet()) {
            out.writeString(e.getKey());
            out.writeString(e.getValue());
        }
        script.writeTo(out);
        gapPolicy.writeTo(out);
    }

    // Auto-names each path "_value<i>" by its position in the varargs array.
    private static Map<String, String> convertToBucketsPathMap(String[] bucketsPaths) {
        Map<String, String> bucketsPathsMap = new HashMap<>();
        for (int i = 0; i < bucketsPaths.length; i++) {
            bucketsPathsMap.put("_value" + i, bucketsPaths[i]);
        }
        return bucketsPathsMap;
    }

    /**
     * Sets the gap policy to use for this aggregation.
     *
     * @throws IllegalArgumentException if gapPolicy is null.
     */
    public BucketSelectorPipelineAggregatorBuilder gapPolicy(GapPolicy gapPolicy) {
        if (gapPolicy == null) {
            throw new IllegalArgumentException("[gapPolicy] must not be null: [" + name + "]");
        }
        this.gapPolicy = gapPolicy;
        return this;
    }

    /**
     * Gets the gap policy to use for this aggregation.
     */
    public GapPolicy gapPolicy() {
        return gapPolicy;
    }

    @Override
    protected PipelineAggregator createInternal(Map<String, Object> metaData) throws IOException {
        return new BucketSelectorPipelineAggregator(name, bucketsPathsMap, script, gapPolicy, metaData);
    }

    @Override
    protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException {
        builder.field(BUCKETS_PATH.getPreferredName(), bucketsPathsMap);
        builder.field(ScriptField.SCRIPT.getPreferredName(), script);
        builder.field(GAP_POLICY.getPreferredName(), gapPolicy.getName());
        return builder;
    }

    /**
     * Parses a bucket_selector aggregation from XContent. Accepts buckets_path as a
     * single string, an array of strings (auto-named "_value<i>"), or an object map;
     * the script may appear as a string or an object. Both buckets_path and script
     * are required.
     *
     * @throws ParsingException on unknown keys, unexpected tokens, or missing
     * required fields.
     */
    public static BucketSelectorPipelineAggregatorBuilder parse(String reducerName,
            QueryParseContext context) throws IOException {
        XContentParser parser = context.parser();
        XContentParser.Token token;
        Script script = null;
        String currentFieldName = null;
        Map<String, String> bucketsPathsMap = null;
        GapPolicy gapPolicy = null;

        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (token == XContentParser.Token.VALUE_STRING) {
                if (context.getParseFieldMatcher().match(currentFieldName, BUCKETS_PATH)) {
                    // Single-path shorthand: the script sees it as "_value".
                    bucketsPathsMap = new HashMap<>();
                    bucketsPathsMap.put("_value", parser.text());
                } else if (context.getParseFieldMatcher().match(currentFieldName, GAP_POLICY)) {
                    gapPolicy = GapPolicy.parse(context, parser.text(), parser.getTokenLocation());
                } else if (context.getParseFieldMatcher().match(currentFieldName, ScriptField.SCRIPT)) {
                    script = Script.parse(parser, context.getParseFieldMatcher());
                } else {
                    throw new ParsingException(parser.getTokenLocation(),
                            "Unknown key for a " + token + " in [" + reducerName + "]: ["
                                    + currentFieldName + "].");
                }
            } else if (token == XContentParser.Token.START_ARRAY) {
                if (context.getParseFieldMatcher().match(currentFieldName, BUCKETS_PATH)) {
                    List<String> paths = new ArrayList<>();
                    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                        String path = parser.text();
                        paths.add(path);
                    }
                    // Array form: paths are auto-named by position, as in the varargs ctor.
                    bucketsPathsMap = new HashMap<>();
                    for (int i = 0; i < paths.size(); i++) {
                        bucketsPathsMap.put("_value" + i, paths.get(i));
                    }
                } else {
                    throw new ParsingException(parser.getTokenLocation(),
                            "Unknown key for a " + token + " in [" + reducerName + "]: ["
                                    + currentFieldName + "].");
                }
            } else if (token == XContentParser.Token.START_OBJECT) {
                if (context.getParseFieldMatcher().match(currentFieldName, ScriptField.SCRIPT)) {
                    script = Script.parse(parser, context.getParseFieldMatcher());
                } else if (context.getParseFieldMatcher().match(currentFieldName, BUCKETS_PATH)) {
                    // Object form: caller chooses the script variable names explicitly.
                    Map<String, Object> map = parser.map();
                    bucketsPathsMap = new HashMap<>();
                    for (Map.Entry<String, Object> entry : map.entrySet()) {
                        bucketsPathsMap.put(entry.getKey(), String.valueOf(entry.getValue()));
                    }
                } else {
                    throw new ParsingException(parser.getTokenLocation(),
                            "Unknown key for a " + token + " in [" + reducerName + "]: ["
                                    + currentFieldName + "].");
                }
            } else {
                throw new ParsingException(parser.getTokenLocation(),
                        "Unexpected token " + token + " in [" + reducerName + "].");
            }
        }

        if (bucketsPathsMap == null) {
            throw new ParsingException(parser.getTokenLocation(), "Missing required field ["
                    + BUCKETS_PATH.getPreferredName() + "] for bucket_selector aggregation ["
                    + reducerName + "]");
        }

        if (script == null) {
            throw new ParsingException(parser.getTokenLocation(), "Missing required field ["
                    + ScriptField.SCRIPT.getPreferredName() + "] for bucket_selector aggregation ["
                    + reducerName + "]");
        }

        BucketSelectorPipelineAggregatorBuilder factory =
                new BucketSelectorPipelineAggregatorBuilder(reducerName, bucketsPathsMap, script);
        if (gapPolicy != null) {
            factory.gapPolicy(gapPolicy);
        }
        return factory;
    }

    @Override
    protected boolean overrideBucketsPath() {
        return true;
    }

    @Override
    protected int doHashCode() {
        return Objects.hash(bucketsPathsMap, script, gapPolicy);
    }

    // NOTE(review): assumes the superclass has already checked null/class equality
    // before delegating here — confirm against PipelineAggregatorBuilder.equals.
    @Override
    protected boolean doEquals(Object obj) {
        BucketSelectorPipelineAggregatorBuilder other = (BucketSelectorPipelineAggregatorBuilder) obj;
        return Objects.equals(bucketsPathsMap, other.bucketsPathsMap)
                && Objects.equals(script, other.script)
                && Objects.equals(gapPolicy, other.gapPolicy);
    }

    @Override
    public String getWriteableName() {
        return NAME;
    }
}
package de.lessvoid.nifty.gdx.input; import com.badlogic.gdx.Input; import com.badlogic.gdx.utils.*; import javax.annotation.Nonnull; import javax.annotation.Nullable; /** * Enables repeating of NON-PRINTABLE keys, which is not supported in LibGDX. Only keys whose character would return * true for {@link Character#isISOControl(char)} are allowed to be used. The reason why is that LibGDX already repeats * printable characters for you automatically. * * @author Aaron Mahan &lt;aaron@forerunnergames.com&gt; */ public class GdxKeyRepeatSystem { public static final int DEFAULT_KEY_REPEAT_RATE_MILLIS = 100; // For reference by users of this class. public static final int DEFAULT_KEY_REPEAT_START_DELAY_MILLIS = 500; // For reference by users of this class. private static final float DEFAULT_KEY_REPEAT_START_DELAY_SECONDS = millisToSeconds (DEFAULT_KEY_REPEAT_START_DELAY_MILLIS); private static final float DEFAULT_KEY_REPEAT_RATE_SECONDS = millisToSeconds(DEFAULT_KEY_REPEAT_RATE_MILLIS); @Nonnull private final Input input; @Nonnull private final GdxInputSystem gdxInputSystem; @Nonnull private final Timer keyRepeatTimer; @Nonnull private final IntMap<Timer.Task> keyRepeatTasks; @Nonnull private final IntArray repeatingKeys; @Nonnull private final IntFloatMap keyRepeatRates; @Nonnull private final IntFloatMap keyRepeatStartDelays; @Nullable private Timer.Task currentRepeatingKeyTask; private int currentRepeatingGdxKeyCode; private int currentRepeatingKeyIndex; public GdxKeyRepeatSystem(@Nonnull final GdxInputSystem gdxInputSystem) { input = gdxInputSystem.getInput(); this.gdxInputSystem = gdxInputSystem; keyRepeatTimer = new Timer(); keyRepeatTasks = new IntMap<Timer.Task>(); repeatingKeys = new IntArray(); keyRepeatRates = new IntFloatMap(); keyRepeatStartDelays = new IntFloatMap(); currentRepeatingKeyTask = null; currentRepeatingGdxKeyCode = 0; currentRepeatingKeyIndex = 0; } /** * Get the key repeat rate of the specified NON-PRINTABLE key. 
Only keys whose character would return true for * {@link Character#isISOControl(char)} are allowed. The reason why is that LibGDX already repeats printable * characters * for you automatically. * * @param gdxKeyCode The {@link com.badlogic.gdx.Input.Keys} keyCode of the key to get the repeat rate of. Must be * a NON-PRINTABLE key (See above). * @return The key repeat rate of the specified key in milliseconds. If no repeat rate was set for that specific key, * then the default value will be returned, see {@link #DEFAULT_KEY_REPEAT_RATE_MILLIS}. The value will always be * greater than 0. */ public int getKeyRepeatRateMillis(final int gdxKeyCode) { checkKeyCode(gdxKeyCode); return secondsToMillis(keyRepeatRates.get(gdxKeyCode, DEFAULT_KEY_REPEAT_RATE_SECONDS)); } /** * Get the key repeat start delay of the specified NON-PRINTABLE key. Only keys whose character would return true * for {@link Character#isISOControl(char)} are allowed. The reason why is that LibGDX already repeats printable * characters for you automatically. * * @param gdxKeyCode The {@link com.badlogic.gdx.Input.Keys} keyCode of the key to get the repeat start delay of. * Must be a NON-PRINTABLE key (See above). * @return The key repeat start delay of the specified key in milliseconds. If no repeat start delay was set for that * specific key, then the default value will be returned, see {@link #DEFAULT_KEY_REPEAT_START_DELAY_MILLIS}. The * value will always be non-negative (may be 0). */ public int getKeyRepeatStartDelayMillis(final int gdxKeyCode) { checkKeyCode(gdxKeyCode); return secondsToMillis(keyRepeatStartDelays.get(gdxKeyCode, DEFAULT_KEY_REPEAT_START_DELAY_SECONDS)); } /** * Check whether the specified NON-PRINTABLE key is set to repeat. Only keys whose character would return true for * {@link Character#isISOControl(char)} are allowed. The reason why is that LibGDX already repeats printable * characters * for you automatically. 
* * @param gdxKeyCode The {@link com.badlogic.gdx.Input.Keys} keyCode of the key to check. Must be a NON-PRINTABLE * key (See above). * @return The repeat status of the specified key. */ public boolean isRepeatingKey(final int gdxKeyCode) { checkKeyCode(gdxKeyCode); return repeatingKeys.contains(gdxKeyCode); } /** * Enable or disable key repeat for the specified NON-PRINTABLE key. Only keys whose character would return true for * {@link Character#isISOControl(char)} are allowed. The reason why is that LibGDX already repeats printable * characters * for you automatically. * * @param gdxKeyCode The {@link com.badlogic.gdx.Input.Keys} keyCode of the key to repeat. Must be a NON-PRINTABLE * key (See above). * @param isEnabled Whether to enable or disable key repeat for the specified key. */ public void setKeyRepeat(final int gdxKeyCode, final boolean isEnabled) { checkKeyCode(gdxKeyCode); if (isEnabled && !repeatingKeys.contains(gdxKeyCode)) { repeatingKeys.add(gdxKeyCode); registerKeyRepeatTask(gdxKeyCode); } else if (!isEnabled && repeatingKeys.contains(gdxKeyCode)) { repeatingKeys.removeValue(gdxKeyCode); } } /** * Set the key repeat rate for the specified NON-PRINTABLE key. Only keys whose character would return true for * {@link Character#isISOControl(char)} are allowed. The reason why is that LibGDX already repeats printable * characters * for you automatically. * * @param gdxKeyCode The {@link com.badlogic.gdx.Input.Keys} keyCode of the key to set the repeat rate of. * Must be a NON-PRINTABLE key (See above). * @param repeatRateMillis The repeat rate in milliseconds, must be greater than 0. */ public void setKeyRepeatRate(final int gdxKeyCode, final int repeatRateMillis) { checkRepeatRate(repeatRateMillis); checkKeyCode(gdxKeyCode); keyRepeatRates.put(gdxKeyCode, millisToSeconds(repeatRateMillis)); } /** * Set the key repeat start delay for the specified NON-PRINTABLE key. 
Only keys whose character would return true * for {@link Character#isISOControl(char)} are allowed. The reason why is that LibGDX already repeats printable * characters for you automatically. * * @param gdxKeyCode The {@link com.badlogic.gdx.Input.Keys} keyCode of the key to set the repeat start * delay of. Must be a NON-PRINTABLE key (See above). * @param repeatStartDelayMillis The repeat start delay in milliseconds, must be non-negative. */ public void setKeyRepeatStartDelay(final int gdxKeyCode, final int repeatStartDelayMillis) { checkKeyCode(gdxKeyCode); checkRepeatRateStartDelay(gdxKeyCode); keyRepeatStartDelays.put(gdxKeyCode, millisToSeconds(repeatStartDelayMillis)); } /** * Process any key repeats. If a repeating key is being held down, this method will ensure that it gets repeated. If * a repeating key is released, this method will ensure that it stops. It should be called once per frame, i.e., in * {@link com.badlogic.gdx.ApplicationListener#render()} */ public void update() { for (currentRepeatingKeyIndex = 0; currentRepeatingKeyIndex < repeatingKeys.size; ++currentRepeatingKeyIndex) { updateCurrentRepeatingKey(); updateCurrentRepeatingKeyTask(); updateCurrentRepeatingKeySchedule(); } } // internal implementations private void updateCurrentRepeatingKey() { currentRepeatingGdxKeyCode = repeatingKeys.get(currentRepeatingKeyIndex); } private void updateCurrentRepeatingKeyTask() { currentRepeatingKeyTask = keyRepeatTasks.get(currentRepeatingGdxKeyCode); } private void updateCurrentRepeatingKeySchedule() { if (input.isKeyPressed(currentRepeatingGdxKeyCode)) { scheduleCurrentRepeatingKeyTask(); } else { cancelCurrentRepeatingKeyTask(); } } private void scheduleCurrentRepeatingKeyTask() { if (currentRepeatingKeyTask != null && !currentRepeatingKeyTask.isScheduled()) { keyRepeatTimer.scheduleTask( currentRepeatingKeyTask, keyRepeatStartDelays.get(currentRepeatingGdxKeyCode, DEFAULT_KEY_REPEAT_START_DELAY_SECONDS), keyRepeatRates.get(currentRepeatingGdxKeyCode, 
DEFAULT_KEY_REPEAT_RATE_SECONDS)); } } private void cancelCurrentRepeatingKeyTask() { if (currentRepeatingKeyTask != null && currentRepeatingKeyTask.isScheduled()) { currentRepeatingKeyTask.cancel(); } } private void registerKeyRepeatTask(final int gdxKeyCode) { if (!keyRepeatTasks.containsKey(gdxKeyCode)) { keyRepeatTasks.put(gdxKeyCode, new Timer.Task() { @Override public void run() { if (input.isKeyPressed(gdxKeyCode)) { gdxInputSystem.keyDown(gdxKeyCode); } } }); } } private void checkKeyCode(final int gdxKeyCode) throws GdxRuntimeException { char temp; switch (gdxKeyCode) { case Input.Keys.NUM_0: case Input.Keys.NUMPAD_0: { temp = '0'; break; } case Input.Keys.NUM_1: case Input.Keys.NUMPAD_1: { temp = '1'; break; } case Input.Keys.NUM_2: case Input.Keys.NUMPAD_2: { temp = '2'; break; } case Input.Keys.NUM_3: case Input.Keys.NUMPAD_3: { temp = '3'; break; } case Input.Keys.NUM_4: case Input.Keys.NUMPAD_4: { temp = '4'; break; } case Input.Keys.NUM_5: case Input.Keys.NUMPAD_5: { temp = '5'; break; } case Input.Keys.NUM_6: case Input.Keys.NUMPAD_6: { temp = '6'; break; } case Input.Keys.NUM_7: case Input.Keys.NUMPAD_7: { temp = '7'; break; } case Input.Keys.NUM_8: case Input.Keys.NUMPAD_8: { temp = '8'; break; } case Input.Keys.NUM_9: case Input.Keys.NUMPAD_9: { temp = '9'; break; } case Input.Keys.A: { temp = 'A'; break; } case Input.Keys.APOSTROPHE: { temp = '\''; break; } case Input.Keys.AT: { temp = '@'; break; } case Input.Keys.B: { temp = 'B'; break; } case Input.Keys.BACKSLASH: { temp = '\\'; break; } case Input.Keys.C: { temp = 'C'; break; } case Input.Keys.COMMA: { temp = ','; break; } case Input.Keys.D: { temp = 'D'; break; } case Input.Keys.E: { temp = 'E'; break; } case Input.Keys.EQUALS: { temp = '='; break; } case Input.Keys.F: { temp = 'F'; break; } case Input.Keys.G: { temp = 'G'; break; } case Input.Keys.GRAVE: { temp = '`'; break; } case Input.Keys.H: { temp = 'H'; break; } case Input.Keys.I: { temp = 'I'; break; } case Input.Keys.J: { temp = 
'J'; break; } case Input.Keys.K: { temp = 'K'; break; } case Input.Keys.L: { temp = 'L'; break; } case Input.Keys.LEFT_BRACKET: { temp = '['; break; } case Input.Keys.M: { temp = 'M'; break; } case Input.Keys.MINUS: { temp = '-'; break; } case Input.Keys.N: { temp = 'N'; break; } case Input.Keys.O: { temp = 'O'; break; } case Input.Keys.P: { temp = 'P'; break; } case Input.Keys.PERIOD: { temp = '.'; break; } case Input.Keys.PLUS: { temp = '+'; break; } case Input.Keys.POUND: { temp = '#'; break; } case Input.Keys.Q: { temp = 'Q'; break; } case Input.Keys.R: { temp = 'R'; break; } case Input.Keys.RIGHT_BRACKET: { temp = ']'; break; } case Input.Keys.S: { temp = 'S'; break; } case Input.Keys.SEMICOLON: { temp = ';'; break; } case Input.Keys.SLASH: { temp = '/'; break; } case Input.Keys.SPACE: { temp = ' '; break; } case Input.Keys.STAR: { temp = '*'; break; } case Input.Keys.T: { temp = 'T'; break; } case Input.Keys.U: { temp = 'U'; break; } case Input.Keys.V: { temp = 'V'; break; } case Input.Keys.W: { temp = 'W'; break; } case Input.Keys.X: { temp = 'X'; break; } case Input.Keys.Y: { temp = 'Y'; break; } case Input.Keys.Z: { temp = 'Z'; break; } case Input.Keys.ANY_KEY: { throw new GdxRuntimeException("Only key codes whose character would return true for " + "Character.isISOControl() are allowed.\nThe reason why is that LibGDX already repeats printable " + "characters for you automatically.\nOffending key code: " + gdxKeyCode + ", which represents the " + "special LibGDX constant: com.badlogic.gdx.Input.Keys.ANY_KEY."); } default: { return; } } throw new GdxRuntimeException("Only key codes whose character would return true for " + "Character.isISOControl() are allowed.\nThe reason why is that LibGDX already repeats printable " + "characters for you automatically.\nOffending key code: " + gdxKeyCode + ", which represents the " + "printable character: " + temp); } private static void checkRepeatRate(final int repeatRateMillis) throws GdxRuntimeException { if 
(repeatRateMillis <= 0) { throw new GdxRuntimeException("Key repeat rate must be greater than 0 milliseconds."); } } private static void checkRepeatRateStartDelay(final int repeatStartDelayMillis) throws GdxRuntimeException { if (repeatStartDelayMillis < 0) { throw new GdxRuntimeException("Key repeat start delay cannot be less than 0 milliseconds."); } } private static float millisToSeconds(int milliseconds) { return milliseconds / 1000.0f; } private static int secondsToMillis(float seconds) { return (int) (seconds * 1000); } }
package org.zaproxy.zap.extension.cmss; import java.io.BufferedReader; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.net.HttpURLConnection; import java.net.URL; import java.nio.channels.Channels; import java.nio.channels.ReadableByteChannel; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.jdom.Document; public class PlunginGuesser { /** * ****** this is will be modular , each app must have its own plugin */ // I will start with specific wabaaps, CMSs first // and joomla first public static void joomlaComponentLister(URL url, String componentType){ try{ InputStream flux = null; if(componentType.compareTo("plugin")==0){ flux=new FileInputStream("resources/pluginEnum/joomla_plugins.txt"); } if(componentType.compareTo("theme")==0){ flux=new FileInputStream("resources/pluginEnum/joomla_themes.txt"); } InputStreamReader lecture=new InputStreamReader(flux); BufferedReader buff=new BufferedReader(lecture); String line; while ((line=buff.readLine())!=null){ //System.out.println(line); URL completeUrl = new URL((url.toString()+line).replaceAll(" ", "")); //System.out.println(completeUrl.toString()); HttpURLConnection con = (HttpURLConnection) completeUrl.openConnection(); con.setRequestMethod("HEAD"); //System.out.println(con.getResponseCode()); if(con.getResponseCode() == HttpURLConnection.HTTP_OK){ //System.out.println(completeUrl.toString()); //System.out.println(con.getResponseCode()); System.out.println(componentType+" : "+line+" exists!!"); URL rdm = new URL(completeUrl.toString()+"readme.txt"); //System.out.println(rdm.toString()); HttpURLConnection conx = (HttpURLConnection) rdm.openConnection(); conx.setRequestMethod("HEAD"); if(conx.getResponseCode() == HttpURLConnection.HTTP_OK){ System.out.println("------------> readme exists !!"); } } } buff.close(); } catch (Exception e){ System.out.println(e.toString()); } } public 
static void prepareJoomlaPluginDB(){ //pending } // using fuzzdb from googlecode.com // j'ai remarque que la base de wp scan est plus riche avec une difference de format // celle de fuzz contient le chemin du plugin a partir de l'url : /component/nom_plugin .. // celle de wp scan contirnt que le nom // donc il faut combiner combiner (-_-) // I noted that wpscan files contain better list of plugins // so TODO: decide if we continue to use fuzzdb or use wpscan files or combine public static void getJoomlaPluginDB() throws IOException{ URL website = new URL("https://fuzzdb.googlecode.com/svn/trunk/Discovery/PredictableRes/CMS/joomla_plugins.fuzz.txt"); ReadableByteChannel rbc = Channels.newChannel(website.openStream()); @SuppressWarnings("resource") FileOutputStream fos = new FileOutputStream("resources/pluginEnum/joomla_plugins.txt"); fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE); } public static void getJoomlaThemeDB() throws IOException{ URL website = new URL("https://fuzzdb.googlecode.com/svn/trunk/Discovery/PredictableRes/CMS/joomla_themes.fuzz.txt"); ReadableByteChannel rbc = Channels.newChannel(website.openStream()); @SuppressWarnings("resource") FileOutputStream fos = new FileOutputStream("resources/pluginEnum/joomla_themes.txt"); fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE); } //wordpress part public static void wordpressComponentLister(URL url, String componentType){ try{ InputStream flux = null; if(componentType.compareTo("plugin")==0){ flux=new FileInputStream("resources/pluginEnum/wp_plugins.txt"); } if(componentType.compareTo("theme")==0){ flux=new FileInputStream("resources/pluginEnum/wp_themes.txt"); } InputStreamReader lecture=new InputStreamReader(flux); BufferedReader buff=new BufferedReader(lecture); String line; while ((line=buff.readLine())!=null){ //System.out.println(line); URL completeUrl = new URL((url.toString()+line).replaceAll(" ", "")); //System.out.println(completeUrl.toString()); HttpURLConnection con = 
(HttpURLConnection) completeUrl.openConnection(); con.setRequestMethod("HEAD"); //System.out.println(con.getResponseCode()); if(con.getResponseCode() == HttpURLConnection.HTTP_OK){ //System.out.println(completeUrl.toString()); //System.out.println(con.getResponseCode()); System.out.println(componentType+" : "+line+" exists!!"); URL rdm = new URL(completeUrl.toString()+"readme.txt"); //System.out.println(rdm.toString()); HttpURLConnection conx = (HttpURLConnection) rdm.openConnection(); conx.setRequestMethod("HEAD"); if(conx.getResponseCode() == HttpURLConnection.HTTP_OK){ System.out.println("------------> readme exists !!"); Document doc = (Document) conx.getContent(); Pattern p = Pattern.compile("Stable tag: (.+)"); Matcher m = p.matcher(doc.toString()); while(m.find()){ System.out.println(" version : "+m.group(0)); } } } } buff.close(); } catch (Exception e){ System.out.println(e.toString()); } } public static void getWordpressPluginDB() throws IOException{ URL website = new URL("https://fuzzdb.googlecode.com/svn/trunk/Discovery/PredictableRes/CMS/wp_plugins.fuzz.txt"); ReadableByteChannel rbc = Channels.newChannel(website.openStream()); @SuppressWarnings("resource") FileOutputStream fos = new FileOutputStream("resources/pluginEnum/wp_plugins.txt"); fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE); } public static void getWordpressThemeDB() throws IOException{ URL website = new URL("https://fuzzdb.googlecode.com/svn/trunk/Discovery/PredictableRes/CMS/wp_themes.fuzz.txt"); ReadableByteChannel rbc = Channels.newChannel(website.openStream()); @SuppressWarnings("resource") FileOutputStream fos = new FileOutputStream("resources/pluginEnum/wp_themes.txt"); fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE); } // drupal part public static void drupalComponentLister(URL url, String componentType){ try{ InputStream flux = null; if(componentType.compareTo("plugin")==0){ flux=new FileInputStream("resources/pluginEnum/drupal_plugins.txt"); } 
if(componentType.compareTo("theme")==0){ flux=new FileInputStream("resources/pluginEnum/drupal_themes.txt"); } InputStreamReader lecture=new InputStreamReader(flux); BufferedReader buff=new BufferedReader(lecture); String line; while ((line=buff.readLine())!=null){ //System.out.println(line); URL completeUrl = new URL((url.toString()+line).replaceAll(" ", "")); //System.out.println(completeUrl.toString()); HttpURLConnection con = (HttpURLConnection) completeUrl.openConnection(); con.setRequestMethod("HEAD"); //System.out.println(con.getResponseCode()); if(con.getResponseCode() == HttpURLConnection.HTTP_OK){ //System.out.println(completeUrl.toString()); //System.out.println(con.getResponseCode()); System.out.println(componentType+" : "+line+" exists!!"); URL rdm = new URL(completeUrl.toString()+"readme.txt"); //System.out.println(rdm.toString()); HttpURLConnection conx = (HttpURLConnection) rdm.openConnection(); conx.setRequestMethod("HEAD"); if(conx.getResponseCode() == HttpURLConnection.HTTP_OK){ System.out.println("------------> readme exists !!"); } } } buff.close(); } catch (Exception e){ System.out.println(e.toString()); } } public static void getDrupalPluginDB() throws IOException{ URL website = new URL("https://fuzzdb.googlecode.com/svn/trunk/Discovery/PredictableRes/CMS/drupal_plugins.fuzz.txt"); ReadableByteChannel rbc = Channels.newChannel(website.openStream()); @SuppressWarnings("resource") FileOutputStream fos = new FileOutputStream("resources/pluginEnum/drupal_plugins.txt"); fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE); } public static void getDrupalThemeDB() throws IOException{ URL website = new URL("https://fuzzdb.googlecode.com/svn/trunk/Discovery/PredictableRes/CMS/drupal_themes.fuzz.txt"); ReadableByteChannel rbc = Channels.newChannel(website.openStream()); @SuppressWarnings("resource") FileOutputStream fos = new FileOutputStream("resources/pluginEnum/drupal_themes.txt"); fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE); } }
/*
 * Copyright 2019 The Closure Compiler Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.javascript.jscomp;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;

import com.google.common.collect.ImmutableSet;
import com.google.javascript.jscomp.AccessorSummary.PropertyAccessKind;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.jstype.JSType;
import com.google.javascript.rhino.jstype.JSTypeNative;

/**
 * Logic for answering questions about portions of the AST.
 *
 * <p><b>What kind of methods should go here?</b>
 *
 * <p>Methods that answer questions about some portion of the AST and that may require global
 * information about the compilation, generally taking at least one {@link Node} as an argument. For
 * example:
 *
 * <ul>
 *   <li>Does a node have side effects?
 *   <li>Can we statically determine the value of a node?
 * </ul>
 *
 * <p><b>What kind of logic should not go here?</b>
 *
 * <p>Really simple logic that requires no global information, like finding the parameter list node
 * of a function, should be in {@link NodeUtil}. Logic that creates new Nodes or modifies the AST
 * should go in {@link AstFactory}.
 */
public class AstAnalyzer {
  /**
   * The set of builtin constructors that don't have side effects.
   *
   * <p>TODO(bradfordcsmith): If all of these are annotated {@code sideefectfree}, can we drop this
   * list?
   */
  private static final ImmutableSet<String> CONSTRUCTORS_WITHOUT_SIDE_EFFECTS =
      ImmutableSet.of("Array", "Date", "Error", "Object", "RegExp", "XMLHttpRequest");

  // A list of built-in object creation or primitive type cast functions that
  // can also be called as constructors but lack side-effects.
  // TODO(johnlenz): consider adding an extern annotation for this.
  private static final ImmutableSet<String> BUILTIN_FUNCTIONS_WITHOUT_SIDEEFFECTS =
      ImmutableSet.of("Object", "Array", "String", "Number", "Boolean", "RegExp", "Error");

  // Instance methods (e.g. x.toString()) that are known to be side-effect free.
  private static final ImmutableSet<String> OBJECT_METHODS_WITHOUT_SIDEEFFECTS =
      ImmutableSet.of("toString", "valueOf");

  // RegExp methods that are pure as long as nothing reads RegExp global state (RegExp.$1 etc.).
  private static final ImmutableSet<String> REGEXP_METHODS = ImmutableSet.of("test", "exec");

  // String methods that may take a RegExp argument; same global-state caveat as REGEXP_METHODS.
  private static final ImmutableSet<String> STRING_REGEXP_METHODS =
      ImmutableSet.of("match", "replace", "search", "split");

  private final AbstractCompiler compiler;
  // When true, property reads (GETPROP, destructuring, spread) are assumed never to
  // trigger user-defined getters, so they are treated as side-effect free.
  private final boolean assumeGettersArePure;

  AstAnalyzer(AbstractCompiler compiler, boolean assumeGettersArePure) {
    this.compiler = checkNotNull(compiler);
    this.assumeGettersArePure = assumeGettersArePure;
  }

  /**
   * Returns true if the node may create new mutable state, or change existing state.
   *
   * @see <a href="http://www.xkcd.org/326/">XKCD Cartoon</a>
   */
  boolean mayEffectMutableState(Node n) {
    return checkForStateChangeHelper(n, /* checkForNewObjects= */ true);
  }

  /**
   * Returns true if the node may have side effects when executed. This version defaults to the
   * "safe" assumptions when the compiler object is not provided (RegExp have side-effects, etc.).
   */
  public boolean mayHaveSideEffects(Node n) {
    return checkForStateChangeHelper(n, /* checkForNewObjects= */ false);
  }

  /**
   * Returns true if this function call may have side effects.
   *
   * <p>This method is guaranteed to return true for all calls that have side-effects, but may also
   * return true for calls that have none (i.e. it is conservative).
   *
   * @param callNode - function call node
   */
  boolean functionCallHasSideEffects(Node callNode) {
    checkState(callNode.isCall() || callNode.isTaggedTemplateLit(), callNode);

    // Explicit purity annotation on the call node wins.
    if (callNode.isNoSideEffectsCall()) {
      return false;
    }

    // A call that only mutates its arguments is pure when every argument is a
    // local value that cannot escape this statement.
    if (callNode.isOnlyModifiesArgumentsCall() && NodeUtil.allArgsUnescapedLocal(callNode)) {
      return false;
    }

    Node callee = callNode.getFirstChild();

    // Built-in functions with no side effects.
    if (callee.isName()) {
      String name = callee.getString();
      if (BUILTIN_FUNCTIONS_WITHOUT_SIDEEFFECTS.contains(name)) {
        return false;
      }
    } else if (callee.isGetProp()) {
      // Zero-argument toString()/valueOf() calls are pure.
      if (callNode.hasOneChild()
          && OBJECT_METHODS_WITHOUT_SIDEEFFECTS.contains(callee.getLastChild().getString())) {
        return false;
      }

      // A "modifies this" call is pure when the receiver is a local value.
      if (callNode.isOnlyModifiesThisCall()
          && NodeUtil.evaluatesToLocalValue(callee.getFirstChild())) {
        return false;
      }

      // Many common Math functions have no side-effects.
      // TODO(nicksantos): This is a terrible terrible hack, until
      // I create a definitionProvider that understands namespacing.
      if (callee.getFirstChild().isName()
          && callee.isQualifiedName()
          && callee.getFirstChild().getString().equals("Math")) {
        switch (callee.getLastChild().getString()) {
          case "abs":
          case "acos":
          case "acosh":
          case "asin":
          case "asinh":
          case "atan":
          case "atanh":
          case "atan2":
          case "cbrt":
          case "ceil":
          case "cos":
          case "cosh":
          case "exp":
          case "expm1":
          case "floor":
          case "hypot":
          case "log":
          case "log10":
          case "log1p":
          case "log2":
          case "max":
          case "min":
          case "pow":
          case "round":
          case "sign":
          case "sin":
          case "sinh":
          case "sqrt":
          case "tan":
          case "tanh":
          case "trunc":
            return false;
          case "random":
            return !callNode.hasOneChild(); // no parameters
          default:
            // Unknown Math.* function, so fall out of this switch statement.
        }
      }

      if (!compiler.hasRegExpGlobalReferences()) {
        if (callee.getFirstChild().isRegExp()
            && REGEXP_METHODS.contains(callee.getLastChild().getString())) {
          return false;
        } else if (isTypedAsString(callee.getFirstChild())) {
          // Unlike regexs, string methods don't need to be hosted on a string literal
          // to avoid leaking mutating global state changes, it is just necessary that
          // the regex object can't be referenced.
          String method = callee.getLastChild().getString();
          Node param = callee.getNext();
          if (param != null) {
            if (param.isString()) {
              if (STRING_REGEXP_METHODS.contains(method)) {
                return false;
              }
            } else if (param.isRegExp()) {
              if ("replace".equals(method)) {
                // Assume anything but a string constant has side-effects
                return !param.getNext().isString();
              } else if (STRING_REGEXP_METHODS.contains(method)) {
                return false;
              }
            }
          }
        }
      }
    }

    // Conservative default: unknown calls are assumed to have side effects.
    return true;
  }

  /**
   * Returns true when {@code n} is a string literal, or (when type-based local optimization is
   * enabled) when its inferred type is exactly the native string type.
   */
  private boolean isTypedAsString(Node n) {
    if (n.isString()) {
      return true;
    }

    if (compiler.getOptions().useTypesForLocalOptimization) {
      JSType type = n.getJSType();
      if (type != null) {
        JSType nativeStringType = compiler.getTypeRegistry().getNativeType(JSTypeNative.STRING_TYPE);
        if (type.isEquivalentTo(nativeStringType)) {
          return true;
        }
      }
    }

    return false;
  }

  /**
   * Returns true if some node in n's subtree changes application state. If {@code
   * checkForNewObjects} is true, we assume that newly created mutable objects (like object
   * literals) change state. Otherwise, we assume that they have no side effects.
   */
  private boolean checkForStateChangeHelper(Node n, boolean checkForNewObjects) {
    Node parent = n.getParent();
    // Rather than id which ops may have side effects, id the ones
    // that we know to be safe
    switch (n.getToken()) {
      case THROW: // Throw is a side-effect by definition.
      case YIELD:
      case AWAIT:
      case FOR_AWAIT_OF: // Context switches can conceal side-effects.
      case FOR_OF:
      case FOR_IN: // Enhanced for loops are almost always side-effectful; it's not worth checking
        // them further. Particularly, they represent a kind of assignment op.
      case VAR:
      case LET:
      case CONST:
      case EXPORT: // Variable declarations are side-effects.
        return true;

      case SUPER: // The super keyword is a noop on its own.
        return false;

      case OBJECTLIT:
      case ARRAYLIT:
      case REGEXP:
        // Creating a new mutable object only counts when checkForNewObjects is set;
        // the children still need to be inspected below.
        if (checkForNewObjects) {
          return true;
        }
        break;

      case OBJECT_REST:
      case OBJECT_SPREAD:
        // Object-rest and object-spread may trigger a getter.
        if (assumeGettersArePure) {
          break; // We still need to inspect the children.
        }
        return true;

      case ITER_REST:
      case ITER_SPREAD:
        // Iteration may invoke a user-defined (impure) iterator protocol.
        if (NodeUtil.iteratesImpureIterable(n)) {
          return true;
        }
        break;

      case NAME:
        // TODO(b/129564961): Consider EXPORT declarations.
        if (n.hasChildren()) {
          // This is the left side of a var/let/const
          return true;
        }
        break;

      case FUNCTION:
        // Function expressions don't have side-effects, but function
        // declarations change the namespace. Either way, we don't need to
        // check the children, since they aren't executed at declaration time.
        return checkForNewObjects || NodeUtil.isFunctionDeclaration(n);

      case GETTER_DEF:
      case SETTER_DEF:
      case MEMBER_FUNCTION_DEF:
        // simply defining a member function, getter, or setter has no side effects
        return false;

      case CLASS:
        return checkForNewObjects
            || NodeUtil.isClassDeclaration(n)
            // Check the extends clause for side effects.
            || checkForStateChangeHelper(n.getSecondChild(), checkForNewObjects)
            // Check for class members that are computed properties with side effects.
            || checkForStateChangeHelper(n.getLastChild(), checkForNewObjects);

      case CLASS_MEMBERS:
        // Only computed property keys are evaluated at class-definition time.
        for (Node member = n.getFirstChild(); member != null; member = member.getNext()) {
          if (member.isComputedProp()
              && checkForStateChangeHelper(member.getFirstChild(), checkForNewObjects)) {
            return true;
          }
        }
        return false;

      case NEW:
        if (checkForNewObjects) {
          return true;
        }

        if (!constructorCallHasSideEffects(n)) {
          // loop below will see if the constructor parameters have
          // side-effects
          break;
        }
        return true;

      case CALL:
        // calls to functions that have no side effects have the no
        // side effect property set.
        if (!functionCallHasSideEffects(n)) {
          // loop below will see if the function parameters have
          // side-effects
          break;
        }
        return true;

      case TAGGED_TEMPLATELIT:
        // TODO(b/128527671): Inspect the children of the expression for side-effects.
        return functionCallHasSideEffects(n);

      case CAST:
      case AND:
      case BLOCK:
      case ROOT:
      case EXPR_RESULT:
      case HOOK:
      case IF:
      case PARAM_LIST:
      case DEFAULT_VALUE:
        // Any context that supports DEFAULT_VALUE is already an assignment. The possibility of a
        // default doesn't itself create a side-effect. Therefore, we prefer to defer the decision.
      case NUMBER:
      case OR:
      case THIS:
      case TRUE:
      case FALSE:
      case NULL:
      case STRING:
      case SWITCH:
      case TEMPLATELIT_SUB:
      case TRY:
      case EMPTY:
      case TEMPLATELIT:
      case TEMPLATELIT_STRING:
      case COMPUTED_PROP:
        // Assume that COMPUTED_PROP keys in OBJECT_PATTERN never trigger getters.
        break;

      case STRING_KEY:
        if (parent.isObjectPattern()) {
          // This STRING_KEY names a property being read from.
          // Assumption: GETELEM (via a COMPUTED_PROP) never triggers a getter or setter.
          if (getPropertyKind(n.getString()).hasGetter()) {
            return true;
          }
        }
        break;

      case GETELEM:
        // Since we can't see what property is accessed we cannot tell whether obj[someProp] will
        // trigger a getter or setter, and thus could have side effects.
        // We will assume it does not. This introduces some risk of code breakage, but the code
        // size cost of assuming all GETELEM nodes have side effects is completely unacceptable.
        break;

      case GETPROP:
        if (getPropertyKind(n.getLastChild().getString()).hasGetterOrSetter()) {
          // TODO(b/135640150): Use the parent nodes to determine whether this is a get or set.
          return true;
        }
        break;

      default:
        // Simple operators (+, -, typeof, ...) are safe; only their operands matter.
        if (NodeUtil.isSimpleOperator(n)) {
          break;
        }

        if (NodeUtil.isAssignmentOp(n)) {
          Node assignTarget = n.getFirstChild();
          if (assignTarget.isName()) {
            return true;
          }

          // Assignments will have side effects if
          // a) The RHS has side effects, or
          // b) The LHS has side effects, or
          // c) A name on the LHS will exist beyond the life of this statement.
          if (checkForStateChangeHelper(n.getFirstChild(), checkForNewObjects)
              || checkForStateChangeHelper(n.getLastChild(), checkForNewObjects)) {
            return true;
          }

          if (NodeUtil.isGet(assignTarget)) {
            // If the object being assigned to is a local object, don't
            // consider this a side-effect as it can't be referenced
            // elsewhere. Don't do this recursively as the property might
            // be an alias of another object, unlike a literal below.
            Node current = assignTarget.getFirstChild();
            if (NodeUtil.evaluatesToLocalValue(current)) {
              return false;
            }

            // A literal value as defined by "isLiteralValue" is guaranteed
            // not to be an alias, or any components which are aliases of
            // other objects.
            // If the root object is a literal don't consider this a
            // side-effect.
            while (NodeUtil.isGet(current)) {
              current = current.getFirstChild();
            }

            return !NodeUtil.isLiteralValue(current, true);
          } else {
            // TODO(johnlenz): remove this code and make this an exception. This
            // is here only for legacy reasons, the AST is not valid but
            // preserve existing behavior.
            return !NodeUtil.isLiteralValue(assignTarget, true);
          }
        }

        // Unknown token: conservatively assume a state change.
        return true;
    }

    // Fell through the switch: the node itself is safe, but any child may still change state.
    for (Node c = n.getFirstChild(); c != null; c = c.getNext()) {
      if (checkForStateChangeHelper(c, checkForNewObjects)) {
        return true;
      }
    }
    return false;
  }

  /**
   * Do calls to this constructor have side effects?
   *
   * @param newNode - constructor call node
   */
  boolean constructorCallHasSideEffects(Node newNode) {
    checkArgument(newNode.isNew(), "Expected NEW node, got %s", newNode.getToken());

    if (newNode.isNoSideEffectsCall()) {
      return false;
    }

    // allArgsUnescapedLocal() is actually confirming that all of the arguments are literals or
    // values created at the point they are passed in to the call and are not saved anywhere in the
    // calling scope.
    // TODO(bradfordcsmith): It would be good to rename allArgsUnescapedLocal() to something
    // that makes this clearer.
    if (newNode.isOnlyModifiesArgumentsCall() && NodeUtil.allArgsUnescapedLocal(newNode)) {
      return false;
    }

    Node nameNode = newNode.getFirstChild();
    return !nameNode.isName() || !CONSTRUCTORS_WITHOUT_SIDE_EFFECTS.contains(nameNode.getString());
  }

  /**
   * Returns true if the current node's type implies side effects.
   *
   * <p>This is a non-recursive version of the may have side effects check; used to check wherever
   * the current node's type is one of the reasons why a subtree has side effects.
   */
  boolean nodeTypeMayHaveSideEffects(Node n) {
    checkNotNull(compiler);
    if (NodeUtil.isAssignmentOp(n)) {
      return true;
    }

    switch (n.getToken()) {
      case DELPROP:
      case DEC:
      case INC:
      case YIELD:
      case THROW:
      case AWAIT:
      case FOR_IN: // assigns to a loop LHS
      case FOR_OF: // assigns to a loop LHS, runs an iterator
      case FOR_AWAIT_OF: // assigns to a loop LHS, runs an iterator, async operations.
        return true;
      case CALL:
      case TAGGED_TEMPLATELIT:
        return functionCallHasSideEffects(n);
      case NEW:
        return constructorCallHasSideEffects(n);
      case NAME:
        // A variable definition.
        // TODO(b/129564961): Consider EXPORT declarations.
        return n.hasChildren();
      case OBJECT_REST:
      case OBJECT_SPREAD:
        // Object-rest and object-spread may trigger a getter.
        return !assumeGettersArePure;
      case ITER_REST:
      case ITER_SPREAD:
        return NodeUtil.iteratesImpureIterable(n);
      case STRING_KEY:
        if (n.getParent().isObjectPattern()) {
          return getPropertyKind(n.getString()).hasGetter();
        }
        break;
      case GETPROP:
        return getPropertyKind(n.getLastChild().getString()).hasGetterOrSetter();
      default:
        break;
    }

    return false;
  }

  /**
   * Looks up how a named property is accessed across the program: NORMAL when getters are assumed
   * pure, otherwise whatever the compiler's accessor summary recorded for {@code name}.
   */
  private PropertyAccessKind getPropertyKind(String name) {
    return assumeGettersArePure
        ? PropertyAccessKind.NORMAL
        : compiler.getAccessorSummary().getKind(name);
  }
}
/* * Copyright 2013-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.cxx; import com.facebook.buck.cxx.platform.CxxPlatform; import com.facebook.buck.cxx.platform.Linker; import com.facebook.buck.cxx.platform.NativeLinkable; import com.facebook.buck.cxx.platform.NativeLinkableInput; import com.facebook.buck.cxx.platform.SharedLibraryInterfaceFactory; import com.facebook.buck.io.ProjectFilesystem; import com.facebook.buck.log.Logger; import com.facebook.buck.model.BuildTarget; import com.facebook.buck.model.Flavor; import com.facebook.buck.model.FlavorConvertible; import com.facebook.buck.model.FlavorDomain; import com.facebook.buck.model.Flavored; import com.facebook.buck.model.InternalFlavor; import com.facebook.buck.parser.NoSuchBuildTargetException; import com.facebook.buck.rules.BuildRule; import com.facebook.buck.rules.BuildRuleParams; import com.facebook.buck.rules.BuildRuleResolver; import com.facebook.buck.rules.BuildRuleType; import com.facebook.buck.rules.CellPathResolver; import com.facebook.buck.rules.DefaultSourcePathResolver; import com.facebook.buck.rules.Description; import com.facebook.buck.rules.ImplicitDepsInferringDescription; import com.facebook.buck.rules.ImplicitFlavorsInferringDescription; import com.facebook.buck.rules.MetadataProvidingDescription; import com.facebook.buck.rules.NoopBuildRule; import com.facebook.buck.rules.SourcePath; import com.facebook.buck.rules.SourcePathResolver; import 
com.facebook.buck.rules.SourcePathRuleFinder; import com.facebook.buck.rules.SymlinkTree; import com.facebook.buck.rules.TargetGraph; import com.facebook.buck.rules.args.Arg; import com.facebook.buck.rules.args.SourcePathArg; import com.facebook.buck.rules.coercer.FrameworkPath; import com.facebook.buck.rules.coercer.PatternMatchedCollection; import com.facebook.buck.rules.coercer.SourceList; import com.facebook.buck.rules.macros.StringWithMacros; import com.facebook.buck.util.HumanReadableException; import com.facebook.buck.util.RichStream; import com.facebook.buck.util.immutables.BuckStyleImmutable; import com.facebook.buck.versions.Version; import com.facebook.buck.versions.VersionPropagator; import com.google.common.base.Predicate; import com.google.common.base.Predicates; import com.google.common.collect.ImmutableCollection; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableListMultimap; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedSet; import com.google.common.collect.Multimaps; import com.google.common.collect.Sets; import java.nio.file.Path; import java.util.Arrays; import java.util.LinkedHashMap; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.SortedSet; import java.util.regex.Pattern; import java.util.stream.Stream; import org.immutables.value.Value; public class CxxLibraryDescription implements Description<CxxLibraryDescriptionArg>, ImplicitDepsInferringDescription<CxxLibraryDescription.CommonArg>, ImplicitFlavorsInferringDescription, Flavored, MetadataProvidingDescription<CxxLibraryDescriptionArg>, VersionPropagator<CxxLibraryDescriptionArg> { private static final Logger LOG = Logger.get(CxxLibraryDescription.class); public enum Type implements FlavorConvertible { HEADERS(CxxDescriptionEnhancer.HEADER_SYMLINK_TREE_FLAVOR), 
EXPORTED_HEADERS(CxxDescriptionEnhancer.EXPORTED_HEADER_SYMLINK_TREE_FLAVOR), SANDBOX_TREE(CxxDescriptionEnhancer.SANDBOX_TREE_FLAVOR), SHARED(CxxDescriptionEnhancer.SHARED_FLAVOR), SHARED_INTERFACE(InternalFlavor.of("shared-interface")), STATIC_PIC(CxxDescriptionEnhancer.STATIC_PIC_FLAVOR), STATIC(CxxDescriptionEnhancer.STATIC_FLAVOR), MACH_O_BUNDLE(CxxDescriptionEnhancer.MACH_O_BUNDLE_FLAVOR), ; private final Flavor flavor; Type(Flavor flavor) { this.flavor = flavor; } @Override public Flavor getFlavor() { return flavor; } } private static final FlavorDomain<Type> LIBRARY_TYPE = FlavorDomain.from("C/C++ Library Type", Type.class); public enum MetadataType implements FlavorConvertible { CXX_HEADERS(InternalFlavor.of("header-symlink-tree")), CXX_PREPROCESSOR_INPUT(InternalFlavor.of("cxx-preprocessor-input")), ; private final Flavor flavor; MetadataType(Flavor flavor) { this.flavor = flavor; } @Override public Flavor getFlavor() { return flavor; } } public static final FlavorDomain<MetadataType> METADATA_TYPE = FlavorDomain.from("C/C++ Metadata Type", MetadataType.class); private static final FlavorDomain<HeaderVisibility> HEADER_VISIBILITY = FlavorDomain.from("C/C++ Header Visibility", HeaderVisibility.class); private static final FlavorDomain<CxxPreprocessables.HeaderMode> HEADER_MODE = FlavorDomain.from("C/C++ Header Mode", CxxPreprocessables.HeaderMode.class); private final CxxBuckConfig cxxBuckConfig; private final CxxPlatform defaultCxxPlatform; private final InferBuckConfig inferBuckConfig; private final FlavorDomain<CxxPlatform> cxxPlatforms; public CxxLibraryDescription( CxxBuckConfig cxxBuckConfig, CxxPlatform defaultCxxPlatform, InferBuckConfig inferBuckConfig, FlavorDomain<CxxPlatform> cxxPlatforms) { this.cxxBuckConfig = cxxBuckConfig; this.defaultCxxPlatform = defaultCxxPlatform; this.inferBuckConfig = inferBuckConfig; this.cxxPlatforms = cxxPlatforms; } @Override public Optional<ImmutableSet<FlavorDomain<?>>> flavorDomains() { return Optional.of( 
ImmutableSet.of( // Missing: CXX Compilation Database // Missing: CXX Description Enhancer // Missing: CXX Infer Enhancer cxxPlatforms, LinkerMapMode.FLAVOR_DOMAIN, StripStyle.FLAVOR_DOMAIN)); } @Override public boolean hasFlavors(ImmutableSet<Flavor> flavors) { return cxxPlatforms.containsAnyOf(flavors) || flavors.contains(CxxCompilationDatabase.COMPILATION_DATABASE) || flavors.contains(CxxCompilationDatabase.UBER_COMPILATION_DATABASE) || CxxInferEnhancer.INFER_FLAVOR_DOMAIN.containsAnyOf(flavors) || flavors.contains(CxxInferEnhancer.InferFlavors.INFER_ANALYZE.getFlavor()) || flavors.contains(CxxInferEnhancer.InferFlavors.INFER_CAPTURE_ALL.getFlavor()) || flavors.contains(CxxDescriptionEnhancer.EXPORTED_HEADER_SYMLINK_TREE_FLAVOR) || LinkerMapMode.FLAVOR_DOMAIN.containsAnyOf(flavors); } /** * This function is broken out so that CxxInferEnhancer can get a list of dependencies for * building the library. */ static ImmutableList<CxxPreprocessorInput> getPreprocessorInputsForBuildingLibrarySources( BuildRuleResolver ruleResolver, CellPathResolver cellRoots, BuildTarget target, CommonArg args, CxxPlatform cxxPlatform, ImmutableSet<BuildRule> deps, TransitiveCxxPreprocessorInputFunction transitivePreprocessorInputs, HeaderSymlinkTree headerSymlinkTree, Optional<SymlinkTree> sandboxTree) throws NoSuchBuildTargetException { return CxxDescriptionEnhancer.collectCxxPreprocessorInput( target, cxxPlatform, deps, ImmutableListMultimap.copyOf( Multimaps.transformValues( CxxFlags.getLanguageFlagsWithMacros( args.getPreprocessorFlags(), args.getPlatformPreprocessorFlags(), args.getLangPreprocessorFlags(), cxxPlatform), f -> CxxDescriptionEnhancer.toStringWithMacrosArgs( target, cellRoots, ruleResolver, cxxPlatform, f))), ImmutableList.of(headerSymlinkTree), ImmutableSet.of(), RichStream.from( transitivePreprocessorInputs.apply( target, ruleResolver, cxxPlatform, deps, // Also add private deps if we are _not_ reexporting all deps. args.isReexportAllHeaderDependencies() ? 
CxxDeps.EMPTY : args.getPrivateCxxDeps())) .toOnceIterable(), args.getIncludeDirs(), sandboxTree); } private static ImmutableMap<CxxPreprocessAndCompile, SourcePath> requireObjects( BuildTarget buildTarget, ProjectFilesystem projectFilesystem, BuildRuleResolver ruleResolver, SourcePathResolver sourcePathResolver, SourcePathRuleFinder ruleFinder, CellPathResolver cellRoots, CxxBuckConfig cxxBuckConfig, CxxPlatform cxxPlatform, CxxSourceRuleFactory.PicType pic, CxxLibraryDescriptionArg args, ImmutableSet<BuildRule> deps, TransitiveCxxPreprocessorInputFunction transitivePreprocessorInputs) throws NoSuchBuildTargetException { boolean shouldCreatePrivateHeadersSymlinks = args.getXcodePrivateHeadersSymlinks() .orElse(cxxPlatform.getPrivateHeadersSymlinksEnabled()); HeaderSymlinkTree headerSymlinkTree = CxxDescriptionEnhancer.requireHeaderSymlinkTree( buildTarget, projectFilesystem, ruleResolver, cxxPlatform, CxxDescriptionEnhancer.parseHeaders( buildTarget, ruleResolver, ruleFinder, sourcePathResolver, Optional.of(cxxPlatform), args), HeaderVisibility.PRIVATE, shouldCreatePrivateHeadersSymlinks); Optional<SymlinkTree> sandboxTree = Optional.empty(); if (cxxBuckConfig.sandboxSources()) { sandboxTree = CxxDescriptionEnhancer.createSandboxTree(buildTarget, ruleResolver, cxxPlatform); } // Create rule to build the object files. 
    // Per-language compiler flags, with macros resolved against this platform.
    ImmutableMultimap<CxxSource.Type, Arg> compilerFlags =
        ImmutableListMultimap.copyOf(
            Multimaps.transformValues(
                CxxFlags.getLanguageFlagsWithMacros(
                    args.getCompilerFlags(),
                    args.getPlatformCompilerFlags(),
                    args.getLangCompilerFlags(),
                    cxxPlatform),
                f ->
                    CxxDescriptionEnhancer.toStringWithMacrosArgs(
                        buildTarget, cellRoots, ruleResolver, cxxPlatform, f)));
    return CxxSourceRuleFactory.of(
            projectFilesystem,
            buildTarget,
            ruleResolver,
            sourcePathResolver,
            ruleFinder,
            cxxBuckConfig,
            cxxPlatform,
            getPreprocessorInputsForBuildingLibrarySources(
                ruleResolver,
                cellRoots,
                buildTarget,
                args,
                cxxPlatform,
                deps,
                transitivePreprocessorInputs,
                headerSymlinkTree,
                sandboxTree),
            compilerFlags,
            args.getPrefixHeader(),
            args.getPrecompiledHeader(),
            pic,
            sandboxTree)
        .requirePreprocessAndCompileRules(
            CxxDescriptionEnhancer.parseCxxSources(
                buildTarget, ruleResolver, ruleFinder, sourcePathResolver, cxxPlatform, args));
  }

  /**
   * Computes the {@link NativeLinkableInput} (args, frameworks, libraries) used when this library
   * is linked into a dependent as a shared-library link target. Compiles PIC objects and combines
   * the private and exported linker flags with the resulting object files.
   */
  private static NativeLinkableInput getSharedLibraryNativeLinkTargetInput(
      BuildTarget buildTarget,
      ProjectFilesystem projectFilesystem,
      BuildRuleResolver ruleResolver,
      SourcePathResolver pathResolver,
      SourcePathRuleFinder ruleFinder,
      CellPathResolver cellRoots,
      CxxBuckConfig cxxBuckConfig,
      CxxPlatform cxxPlatform,
      CxxLibraryDescriptionArg arg,
      ImmutableSet<BuildRule> deps,
      ImmutableList<StringWithMacros> linkerFlags,
      ImmutableList<StringWithMacros> exportedLinkerFlags,
      ImmutableSet<FrameworkPath> frameworks,
      ImmutableSet<FrameworkPath> libraries,
      TransitiveCxxPreprocessorInputFunction transitiveCxxPreprocessorInputFunction)
      throws NoSuchBuildTargetException {

    // Create rules for compiling the PIC object files.
    ImmutableMap<CxxPreprocessAndCompile, SourcePath> objects =
        requireObjects(
            buildTarget,
            projectFilesystem,
            ruleResolver,
            pathResolver,
            ruleFinder,
            cellRoots,
            cxxBuckConfig,
            cxxPlatform,
            CxxSourceRuleFactory.PicType.PIC,
            arg,
            deps,
            transitiveCxxPreprocessorInputFunction);

    return NativeLinkableInput.builder()
        // Private flags first, then exported flags, then the object files themselves.
        .addAllArgs(
            RichStream.<StringWithMacros>empty()
                .concat(linkerFlags.stream())
                .concat(exportedLinkerFlags.stream())
                .map(
                    f ->
                        CxxDescriptionEnhancer.toStringWithMacrosArgs(
                            buildTarget, cellRoots, ruleResolver, cxxPlatform, f))
                .toImmutableList())
        .addAllArgs(SourcePathArg.from(objects.values()))
        .setFrameworks(frameworks)
        .setLibraries(libraries)
        .build();
  }

  /**
   * Create all build rules needed to generate the shared library.
   *
   * @return the {@link CxxLink} rule representing the actual shared library.
   */
  private static CxxLink createSharedLibrary(
      BuildTarget buildTargetMaybeWithLinkerMapMode,
      ProjectFilesystem projectFilesystem,
      BuildRuleResolver ruleResolver,
      SourcePathResolver pathResolver,
      SourcePathRuleFinder ruleFinder,
      CellPathResolver cellRoots,
      CxxBuckConfig cxxBuckConfig,
      CxxPlatform cxxPlatform,
      CxxLibraryDescriptionArg args,
      ImmutableSet<BuildRule> deps,
      ImmutableList<StringWithMacros> linkerFlags,
      ImmutableSet<FrameworkPath> frameworks,
      ImmutableSet<FrameworkPath> libraries,
      Optional<String> soname,
      Optional<Linker.CxxRuntimeType> cxxRuntimeType,
      Linker.LinkType linkType,
      Linker.LinkableDepType linkableDepType,
      Optional<SourcePath> bundleLoader,
      ImmutableSet<BuildTarget> blacklist,
      TransitiveCxxPreprocessorInputFunction transitiveCxxPreprocessorInputFunction)
      throws NoSuchBuildTargetException {
    // Objects are keyed on the linker-map-mode-free target so they are shared across map modes.
    BuildTarget buildTargetWithoutLinkerMapMode =
        LinkerMapMode.removeLinkerMapModeFlavorInTarget(
            buildTargetMaybeWithLinkerMapMode,
            LinkerMapMode.FLAVOR_DOMAIN.getValue(buildTargetMaybeWithLinkerMapMode));

    // Create rules for compiling the PIC object files.
    ImmutableMap<CxxPreprocessAndCompile, SourcePath> objects =
        requireObjects(
            buildTargetWithoutLinkerMapMode,
            projectFilesystem,
            ruleResolver,
            pathResolver,
            ruleFinder,
            cellRoots,
            cxxBuckConfig,
            cxxPlatform,
            CxxSourceRuleFactory.PicType.PIC,
            args,
            deps,
            transitiveCxxPreprocessorInputFunction);

    // Setup the rules to link the shared library.
    BuildTarget sharedTarget =
        CxxDescriptionEnhancer.createSharedLibraryBuildTarget(
            buildTargetMaybeWithLinkerMapMode, cxxPlatform.getFlavor(), linkType);

    String sharedLibrarySoname =
        CxxDescriptionEnhancer.getSharedLibrarySoname(
            soname, buildTargetMaybeWithLinkerMapMode, cxxPlatform);
    Path sharedLibraryPath =
        CxxDescriptionEnhancer.getSharedLibraryPath(
            projectFilesystem, sharedTarget, sharedLibrarySoname);
    ImmutableList.Builder<StringWithMacros> extraLdFlagsBuilder = ImmutableList.builder();
    extraLdFlagsBuilder.addAll(linkerFlags);
    ImmutableList<StringWithMacros> extraLdFlags = extraLdFlagsBuilder.build();

    return CxxLinkableEnhancer.createCxxLinkableBuildRule(
        cxxBuckConfig,
        cxxPlatform,
        projectFilesystem,
        ruleResolver,
        pathResolver,
        ruleFinder,
        sharedTarget,
        linkType,
        Optional.of(sharedLibrarySoname),
        sharedLibraryPath,
        linkableDepType,
        args.getThinLto(),
        // Only NativeLinkable deps participate in the link.
        RichStream.from(deps).filter(NativeLinkable.class).toImmutableList(),
        cxxRuntimeType,
        bundleLoader,
        blacklist,
        ImmutableSet.of(),
        NativeLinkableInput.builder()
            .addAllArgs(
                RichStream.from(extraLdFlags)
                    .map(
                        f ->
                            CxxDescriptionEnhancer.toStringWithMacrosArgs(
                                buildTargetMaybeWithLinkerMapMode,
                                cellRoots,
                                ruleResolver,
                                cxxPlatform,
                                f))
                    .toImmutableList())
            .addAllArgs(SourcePathArg.from(objects.values()))
            .setFrameworks(frameworks)
            .setLibraries(libraries)
            .build(),
        Optional.empty());
  }

  @Override
  public Class<CxxLibraryDescriptionArg> getConstructorArgType() {
    return CxxLibraryDescriptionArg.class;
  }

  /** @return a {@link HeaderSymlinkTree} for the headers of this C/C++ library.
   */
  private HeaderSymlinkTree createHeaderSymlinkTreeBuildRule(
      BuildTarget buildTarget,
      ProjectFilesystem projectFilesystem,
      BuildRuleResolver resolver,
      CxxPlatform cxxPlatform,
      CxxLibraryDescriptionArg args)
      throws NoSuchBuildTargetException {
    // Target overrides the platform default for Xcode symlinking, when set.
    boolean shouldCreatePrivateHeaderSymlinks =
        args.getXcodePrivateHeadersSymlinks()
            .orElse(cxxPlatform.getPrivateHeadersSymlinksEnabled());
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(resolver);
    SourcePathResolver pathResolver = DefaultSourcePathResolver.from(ruleFinder);
    return CxxDescriptionEnhancer.createHeaderSymlinkTree(
        buildTarget,
        projectFilesystem,
        resolver,
        cxxPlatform,
        CxxDescriptionEnhancer.parseHeaders(
            buildTarget, resolver, ruleFinder, pathResolver, Optional.of(cxxPlatform), args),
        HeaderVisibility.PRIVATE,
        shouldCreatePrivateHeaderSymlinks);
  }

  /** @return a {@link HeaderSymlinkTree} for the exported headers of this C/C++ library. */
  private HeaderSymlinkTree createExportedHeaderSymlinkTreeBuildRule(
      BuildTarget buildTarget,
      ProjectFilesystem projectFilesystem,
      BuildRuleResolver resolver,
      CxxPreprocessables.HeaderMode mode,
      CxxLibraryDescriptionArg args)
      throws NoSuchBuildTargetException {
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(resolver);
    SourcePathResolver pathResolver = DefaultSourcePathResolver.from(ruleFinder);
    return CxxDescriptionEnhancer.createHeaderSymlinkTree(
        buildTarget,
        projectFilesystem,
        mode,
        // Platform-agnostic here (Optional.empty()); the platform-specific variant lives in
        // createExportedPlatformHeaderSymlinkTreeBuildRule below.
        CxxDescriptionEnhancer.parseExportedHeaders(
            buildTarget, resolver, ruleFinder, pathResolver, Optional.empty(), args),
        HeaderVisibility.PUBLIC);
  }

  /** @return a {@link HeaderSymlinkTree} for the exported platform headers of this C/C++ library.
   */
  private HeaderSymlinkTree createExportedPlatformHeaderSymlinkTreeBuildRule(
      BuildTarget buildTarget,
      ProjectFilesystem projectFilesystem,
      BuildRuleResolver resolver,
      CxxPlatform cxxPlatform,
      CxxLibraryDescriptionArg args)
      throws NoSuchBuildTargetException {
    boolean shouldCreatePublicHeaderSymlinks =
        args.getXcodePublicHeadersSymlinks().orElse(cxxPlatform.getPublicHeadersSymlinksEnabled());
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(resolver);
    SourcePathResolver pathResolver = DefaultSourcePathResolver.from(ruleFinder);
    return CxxDescriptionEnhancer.createHeaderSymlinkTree(
        buildTarget,
        projectFilesystem,
        resolver,
        cxxPlatform,
        CxxDescriptionEnhancer.parseExportedPlatformHeaders(
            buildTarget, resolver, ruleFinder, pathResolver, cxxPlatform, args),
        HeaderVisibility.PUBLIC,
        shouldCreatePublicHeaderSymlinks);
  }

  /**
   * Create all build rules needed to generate the static library.
   *
   * @return build rule that builds the static library version of this C/C++ library.
   */
  private static BuildRule createStaticLibraryBuildRule(
      BuildTarget buildTarget,
      ProjectFilesystem projectFilesystem,
      BuildRuleResolver resolver,
      CellPathResolver cellRoots,
      CxxBuckConfig cxxBuckConfig,
      CxxPlatform cxxPlatform,
      CxxLibraryDescriptionArg args,
      ImmutableSet<BuildRule> deps,
      CxxSourceRuleFactory.PicType pic,
      TransitiveCxxPreprocessorInputFunction transitiveCxxPreprocessorInputFunction)
      throws NoSuchBuildTargetException {
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(resolver);
    SourcePathResolver sourcePathResolver = DefaultSourcePathResolver.from(ruleFinder);

    // Create rules for compiling the object files.
    ImmutableMap<CxxPreprocessAndCompile, SourcePath> objects =
        requireObjects(
            buildTarget,
            projectFilesystem,
            resolver,
            sourcePathResolver,
            ruleFinder,
            cellRoots,
            cxxBuckConfig,
            cxxPlatform,
            pic,
            args,
            deps,
            transitiveCxxPreprocessorInputFunction);

    // Write a build rule to create the archive for this C/C++ library.
    BuildTarget staticTarget =
        CxxDescriptionEnhancer.createStaticLibraryBuildTarget(
            buildTarget, cxxPlatform.getFlavor(), pic);

    // A library with no sources for this platform yields a placeholder rule rather than an
    // (invalid) empty archive.
    if (objects.isEmpty()) {
      return new NoopBuildRule(staticTarget, projectFilesystem) {
        @Override
        public SortedSet<BuildRule> getBuildDeps() {
          return ImmutableSortedSet.of();
        }
      };
    }

    Path staticLibraryPath =
        CxxDescriptionEnhancer.getStaticLibraryPath(
            projectFilesystem,
            buildTarget,
            cxxPlatform.getFlavor(),
            pic,
            cxxPlatform.getStaticLibraryExtension());
    return Archive.from(
        staticTarget,
        projectFilesystem,
        ruleFinder,
        cxxPlatform,
        cxxBuckConfig.getArchiveContents(),
        staticLibraryPath,
        ImmutableList.copyOf(objects.values()),
        /* cacheable */ true);
  }

  /** @return a {@link CxxLink} rule which builds a shared library version of this C/C++ library. */
  private static CxxLink createSharedLibraryBuildRule(
      BuildTarget buildTarget,
      ProjectFilesystem projectFilesystem,
      BuildRuleResolver resolver,
      CellPathResolver cellRoots,
      CxxBuckConfig cxxBuckConfig,
      CxxPlatform cxxPlatform,
      CxxLibraryDescriptionArg args,
      ImmutableSet<BuildRule> deps,
      Linker.LinkType linkType,
      Linker.LinkableDepType linkableDepType,
      Optional<SourcePath> bundleLoader,
      ImmutableSet<BuildTarget> blacklist,
      TransitiveCxxPreprocessorInputFunction transitiveCxxPreprocessorInputFunction)
      throws NoSuchBuildTargetException {
    // Both private and exported linker flags are applied when linking our own shared library.
    ImmutableList.Builder<StringWithMacros> linkerFlags = ImmutableList.builder();

    linkerFlags.addAll(
        CxxFlags.getFlagsWithMacrosWithPlatformMacroExpansion(
            args.getLinkerFlags(), args.getPlatformLinkerFlags(), cxxPlatform));

    linkerFlags.addAll(
        CxxFlags.getFlagsWithMacrosWithPlatformMacroExpansion(
            args.getExportedLinkerFlags(), args.getExportedPlatformLinkerFlags(), cxxPlatform));

    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(resolver);
    SourcePathResolver sourcePathResolver = DefaultSourcePathResolver.from(ruleFinder);
    return createSharedLibrary(
        buildTarget,
        projectFilesystem,
        resolver,
        sourcePathResolver,
        ruleFinder,
        cellRoots,
        cxxBuckConfig,
        cxxPlatform,
        args,
        deps,
        linkerFlags.build(),
        args.getFrameworks(),
        args.getLibraries(),
        args.getSoname(),
        args.getCxxRuntimeType(),
        linkType,
        linkableDepType,
        bundleLoader,
        blacklist,
        transitiveCxxPreprocessorInputFunction);
  }

  /**
   * Requires the shared-library rule for this target and wraps it in a shared-library interface
   * rule, when the platform supports interfaces.
   *
   * @throws HumanReadableException if the platform has no shared-library interface factory.
   */
  private static BuildRule createSharedLibraryInterface(
      BuildTarget baseTarget,
      ProjectFilesystem projectFilesystem,
      BuildRuleResolver resolver,
      CxxPlatform cxxPlatform)
      throws NoSuchBuildTargetException {

    Optional<SharedLibraryInterfaceFactory> factory =
        cxxPlatform.getSharedLibraryInterfaceFactory();
    if (!factory.isPresent()) {
      throw new HumanReadableException(
          "%s: C/C++ platform %s does not support shared library interfaces",
          baseTarget, cxxPlatform.getFlavor());
    }

    CxxLink sharedLibrary =
        (CxxLink)
            resolver.requireRule(
                baseTarget.withAppendedFlavors(cxxPlatform.getFlavor(), Type.SHARED.getFlavor()));

    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(resolver);
    SourcePathResolver pathResolver = DefaultSourcePathResolver.from(ruleFinder);
    return factory
        .get()
        .createSharedInterfaceLibrary(
            baseTarget.withAppendedFlavors(
                Type.SHARED_INTERFACE.getFlavor(), cxxPlatform.getFlavor()),
            projectFilesystem,
            resolver,
            pathResolver,
            ruleFinder,
            sharedLibrary.getSourcePathToOutput());
  }

  @Override
  public BuildRule createBuildRule(
      TargetGraph targetGraph,
      BuildTarget buildTarget,
      ProjectFilesystem projectFilesystem,
      BuildRuleParams params,
      BuildRuleResolver resolver,
      CellPathResolver cellRoots,
      CxxLibraryDescriptionArg args)
      throws NoSuchBuildTargetException {
    // Delegate to the flavored overload with library-rule defaults.
    return createBuildRule(
        buildTarget,
        projectFilesystem,
        params,
        resolver,
        cellRoots,
        args,
        args.getLinkStyle(),
        Optional.empty(),
        ImmutableSet.of(),
        ImmutableSortedSet.of(),
        TransitiveCxxPreprocessorInputFunction.fromLibraryRule());
  }

  /**
   * Main entry point: dispatches on the "type" and "platform" flavors attached to the build target
   * and builds the corresponding concrete rule, or the generic {@link CxxLibrary} placeholder when
   * no type flavor is present.
   */
  public BuildRule createBuildRule(
      BuildTarget buildTarget,
      ProjectFilesystem projectFilesystem,
      BuildRuleParams metadataRuleParams,
      final BuildRuleResolver resolver,
      CellPathResolver cellRoots,
      final CxxLibraryDescriptionArg args,
      Optional<Linker.LinkableDepType>
          linkableDepType,
      final Optional<SourcePath> bundleLoader,
      ImmutableSet<BuildTarget> blacklist,
      ImmutableSortedSet<BuildTarget> extraDeps,
      TransitiveCxxPreprocessorInputFunction transitiveCxxPreprocessorInputFunction)
      throws NoSuchBuildTargetException {

    // See if we're building a particular "type" and "platform" of this library, and if so, extract
    // them from the flavors attached to the build target.
    Optional<Map.Entry<Flavor, Type>> type = getLibType(buildTarget);
    Optional<CxxPlatform> platform = cxxPlatforms.getValue(buildTarget);
    CxxDeps cxxDeps = CxxDeps.builder().addDeps(args.getCxxDeps()).addDeps(extraDeps).build();

    if (buildTarget.getFlavors().contains(CxxCompilationDatabase.COMPILATION_DATABASE)) {
      // XXX: This needs bundleLoader for tests..
      // No platform flavor means we fall back to the default platform.
      CxxPlatform cxxPlatform = platform.orElse(defaultCxxPlatform);

      SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(resolver);
      SourcePathResolver sourcePathResolver = DefaultSourcePathResolver.from(ruleFinder);
      ImmutableMap<CxxPreprocessAndCompile, SourcePath> objects =
          requireObjects(
              buildTarget.withoutFlavors(CxxCompilationDatabase.COMPILATION_DATABASE),
              projectFilesystem,
              resolver,
              sourcePathResolver,
              ruleFinder,
              cellRoots,
              cxxBuckConfig,
              cxxPlatform,
              CxxSourceRuleFactory.PicType.PIC,
              args,
              cxxDeps.get(resolver, cxxPlatform),
              transitiveCxxPreprocessorInputFunction);
      return CxxCompilationDatabase.createCompilationDatabase(
          buildTarget, projectFilesystem, objects.keySet());
    } else if (buildTarget
        .getFlavors()
        .contains(CxxCompilationDatabase.UBER_COMPILATION_DATABASE)) {
      return CxxDescriptionEnhancer.createUberCompilationDatabase(
          platform.isPresent()
              ? buildTarget
              : buildTarget.withAppendedFlavors(defaultCxxPlatform.getFlavor()),
          projectFilesystem,
          resolver);
    } else if (CxxInferEnhancer.INFER_FLAVOR_DOMAIN.containsAnyOf(buildTarget.getFlavors())) {
      return CxxInferEnhancer.requireInferRule(
          buildTarget,
          projectFilesystem,
          resolver,
          cellRoots,
          cxxBuckConfig,
          platform.orElse(defaultCxxPlatform),
          args,
          inferBuckConfig);
    } else if (type.isPresent() && !platform.isPresent()) {
      // Type flavor but no platform flavor: only the platform-agnostic exported-headers tree
      // can be built.
      BuildTarget untypedBuildTarget = getUntypedBuildTarget(buildTarget);
      switch (type.get().getValue()) {
        case EXPORTED_HEADERS:
          Optional<CxxPreprocessables.HeaderMode> mode = HEADER_MODE.getValue(buildTarget);
          if (mode.isPresent()) {
            return createExportedHeaderSymlinkTreeBuildRule(
                untypedBuildTarget, projectFilesystem, resolver, mode.get(), args);
          }
          break;
          // $CASES-OMITTED$
        default:
      }
    } else if (type.isPresent() && platform.isPresent()) {
      // If we *are* building a specific type of this lib, call into the type specific
      // rule builder methods.

      BuildTarget untypedBuildTarget = getUntypedBuildTarget(buildTarget);
      switch (type.get().getValue()) {
        case HEADERS:
          return createHeaderSymlinkTreeBuildRule(
              untypedBuildTarget, projectFilesystem, resolver, platform.get(), args);
        case EXPORTED_HEADERS:
          return createExportedPlatformHeaderSymlinkTreeBuildRule(
              untypedBuildTarget, projectFilesystem, resolver, platform.get(), args);
        case SHARED:
          return createSharedLibraryBuildRule(
              untypedBuildTarget,
              projectFilesystem,
              resolver,
              cellRoots,
              cxxBuckConfig,
              platform.get(),
              args,
              cxxDeps.get(resolver, platform.get()),
              Linker.LinkType.SHARED,
              linkableDepType.orElse(Linker.LinkableDepType.SHARED),
              Optional.empty(),
              blacklist,
              transitiveCxxPreprocessorInputFunction);
        case SHARED_INTERFACE:
          return createSharedLibraryInterface(
              untypedBuildTarget, projectFilesystem, resolver, platform.get());
        case MACH_O_BUNDLE:
          return createSharedLibraryBuildRule(
              untypedBuildTarget,
              projectFilesystem,
              resolver,
              cellRoots,
              cxxBuckConfig,
              platform.get(),
              args,
              cxxDeps.get(resolver, platform.get()),
              Linker.LinkType.MACH_O_BUNDLE,
              linkableDepType.orElse(Linker.LinkableDepType.SHARED),
              bundleLoader,
              blacklist,
              transitiveCxxPreprocessorInputFunction);
        case STATIC:
          return createStaticLibraryBuildRule(
              untypedBuildTarget,
              projectFilesystem,
              resolver,
              cellRoots,
              cxxBuckConfig,
              platform.get(),
              args,
              cxxDeps.get(resolver, platform.get()),
              CxxSourceRuleFactory.PicType.PDC,
              transitiveCxxPreprocessorInputFunction);
        case STATIC_PIC:
          return createStaticLibraryBuildRule(
              untypedBuildTarget,
              projectFilesystem,
              resolver,
              cellRoots,
              cxxBuckConfig,
              platform.get(),
              args,
              cxxDeps.get(resolver, platform.get()),
              CxxSourceRuleFactory.PicType.PIC,
              transitiveCxxPreprocessorInputFunction);
        case SANDBOX_TREE:
          return CxxDescriptionEnhancer.createSandboxTreeBuildRule(
              resolver, args, platform.get(), untypedBuildTarget, projectFilesystem);
      }
      throw new RuntimeException("unhandled library build type");
    }

    // Decide whether this library has any sources at all: unconditional srcs mean "yes" for every
    // platform; otherwise check the platform-matched srcs per platform.
    boolean hasObjectsForAnyPlatform = !args.getSrcs().isEmpty();
    Predicate<CxxPlatform> hasObjects;
    if (hasObjectsForAnyPlatform) {
      hasObjects = x -> true;
    } else {
      hasObjects =
          input ->
              !args.getPlatformSrcs().getMatchingValues(input.getFlavor().toString()).isEmpty();
    }

    // Otherwise, we return the generic placeholder of this library, that dependents can use
    // get the real build rules via querying the action graph.
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(resolver);
    final SourcePathResolver pathResolver = DefaultSourcePathResolver.from(ruleFinder);
    return new CxxLibrary(
        buildTarget,
        projectFilesystem,
        metadataRuleParams,
        resolver,
        args.getPrivateCxxDeps(),
        args.getExportedCxxDeps(),
        Predicates.not(hasObjects),
        // Lazily expand the exported linker flags for a requesting platform.
        input -> {
          ImmutableList<StringWithMacros> flags =
              CxxFlags.getFlagsWithMacrosWithPlatformMacroExpansion(
                  args.getExportedLinkerFlags(), args.getExportedPlatformLinkerFlags(), input);
          return RichStream.from(flags)
              .map(
                  f ->
                      CxxDescriptionEnhancer.toStringWithMacrosArgs(
                          buildTarget, cellRoots, resolver, input, f))
              .toImmutableList();
        },
        // Lazily compute the shared-library link-target input for a requesting platform.
        cxxPlatform -> {
          try {
            return getSharedLibraryNativeLinkTargetInput(
                buildTarget,
                projectFilesystem,
                resolver,
                pathResolver,
                ruleFinder,
                cellRoots,
                cxxBuckConfig,
                cxxPlatform,
                args,
                cxxDeps.get(resolver, cxxPlatform),
                CxxFlags.getFlagsWithMacrosWithPlatformMacroExpansion(
                    args.getLinkerFlags(), args.getPlatformLinkerFlags(), cxxPlatform),
                CxxFlags.getFlagsWithMacrosWithPlatformMacroExpansion(
                    args.getExportedLinkerFlags(),
                    args.getExportedPlatformLinkerFlags(),
                    cxxPlatform),
                args.getFrameworks(),
                args.getLibraries(),
                transitiveCxxPreprocessorInputFunction);
          } catch (NoSuchBuildTargetException e) {
            throw new RuntimeException(e);
          }
        },
        args.getSupportedPlatformsRegex(),
        args.getFrameworks(),
        args.getLibraries(),
        // force_static wins over preferred_linkage; otherwise linkage defaults to ANY.
        args.getForceStatic().orElse(false)
            ? NativeLinkable.Linkage.STATIC
            : args.getPreferredLinkage().orElse(NativeLinkable.Linkage.ANY),
        args.getLinkWhole().orElse(false),
        args.getSoname(),
        args.getTests(),
        args.getCanBeAsset().orElse(false),
        !buildTarget
            .getFlavors()
            .contains(CxxDescriptionEnhancer.EXPORTED_HEADER_SYMLINK_TREE_FLAVOR),
        args.isReexportAllHeaderDependencies());
  }

  /** @return the library "type" flavor (and its value) attached to the target, if any. */
  public static Optional<Map.Entry<Flavor, Type>> getLibType(BuildTarget buildTarget) {
    return LIBRARY_TYPE.getFlavorAndValue(buildTarget);
  }

  /** @return the target with its library "type" flavor (if present) removed. */
  static BuildTarget getUntypedBuildTarget(BuildTarget buildTarget) {
    Optional<Map.Entry<Flavor, Type>> type = getLibType(buildTarget);
    if (!type.isPresent()) {
      return buildTarget;
    }
    Set<Flavor> flavors = Sets.newHashSet(buildTarget.getFlavors());
    flavors.remove(type.get().getKey());
    BuildTarget target = buildTarget.withFlavors(flavors);
    return target;
  }

  @Override
  public void findDepsForTargetFromConstructorArgs(
      BuildTarget buildTarget,
      CellPathResolver cellRoots,
      CommonArg constructorArg,
      ImmutableCollection.Builder<BuildTarget> extraDepsBuilder,
      ImmutableCollection.Builder<BuildTarget> targetGraphOnlyDepsBuilder) {
    // Get any parse time deps from the C/C++ platforms.
    extraDepsBuilder.addAll(CxxPlatforms.getParseTimeDeps(cxxPlatforms.getValues()));
  }

  public FlavorDomain<CxxPlatform> getCxxPlatforms() {
    return cxxPlatforms;
  }

  public CxxPlatform getDefaultCxxPlatform() {
    return defaultCxxPlatform;
  }

  /**
   * Convenience function to query the {@link CxxHeaders} metadata of a target.
   *
   * <p>Use this function instead of constructing the BuildTarget manually.
   */
  public static Optional<CxxHeaders> queryMetadataCxxHeaders(
      BuildRuleResolver resolver, BuildTarget baseTarget, CxxPreprocessables.HeaderMode mode)
      throws NoSuchBuildTargetException {
    return resolver.requireMetadata(
        baseTarget.withAppendedFlavors(MetadataType.CXX_HEADERS.getFlavor(), mode.getFlavor()),
        CxxHeaders.class);
  }

  /**
   * Convenience function to query the {@link CxxPreprocessorInput} metadata of a target.
   *
   * <p>Use this function instead of constructing the BuildTarget manually.
   */
  public static Optional<CxxPreprocessorInput> queryMetadataCxxPreprocessorInput(
      BuildRuleResolver resolver,
      BuildTarget baseTarget,
      CxxPlatform platform,
      HeaderVisibility visibility)
      throws NoSuchBuildTargetException {
    return resolver.requireMetadata(
        baseTarget.withAppendedFlavors(
            MetadataType.CXX_PREPROCESSOR_INPUT.getFlavor(),
            platform.getFlavor(),
            visibility.getFlavor()),
        CxxPreprocessorInput.class);
  }

  /**
   * Produces the metadata flavored into the target: either the exported-headers {@link CxxHeaders}
   * or a {@link CxxPreprocessorInput} for a given platform/visibility.
   *
   * @throws IllegalArgumentException if a required metadata/platform/visibility flavor is missing.
   */
  @Override
  public <U> Optional<U> createMetadata(
      BuildTarget buildTarget,
      BuildRuleResolver resolver,
      CellPathResolver cellRoots,
      CxxLibraryDescriptionArg args,
      Optional<ImmutableMap<BuildTarget, Version>> selectedVersions,
      final Class<U> metadataClass)
      throws NoSuchBuildTargetException {
    Map.Entry<Flavor, MetadataType> type =
        METADATA_TYPE.getFlavorAndValue(buildTarget).orElseThrow(IllegalArgumentException::new);
    BuildTarget baseTarget = buildTarget.withoutFlavors(type.getKey());
    switch (type.getValue()) {
      case CXX_HEADERS:
        {
          Optional<CxxHeaders> symlinkTree = Optional.empty();
          if (!args.getExportedHeaders().isEmpty()) {
            CxxPreprocessables.HeaderMode mode = HEADER_MODE.getRequiredValue(buildTarget);
            baseTarget = baseTarget.withoutFlavors(mode.getFlavor());
            symlinkTree =
                Optional.of(
                    CxxSymlinkTreeHeaders.from(
                        (HeaderSymlinkTree)
                            resolver.requireRule(
                                baseTarget
                                    .withoutFlavors(LIBRARY_TYPE.getFlavors())
                                    .withAppendedFlavors(
                                        Type.EXPORTED_HEADERS.getFlavor(), mode.getFlavor())),
                        CxxPreprocessables.IncludeType.LOCAL));
          }
          return symlinkTree.map(metadataClass::cast);
        }

      case CXX_PREPROCESSOR_INPUT:
        {
          Map.Entry<Flavor, CxxPlatform> platform =
              cxxPlatforms
                  .getFlavorAndValue(buildTarget)
                  .orElseThrow(IllegalArgumentException::new);
          Map.Entry<Flavor, HeaderVisibility> visibility =
              HEADER_VISIBILITY
                  .getFlavorAndValue(buildTarget)
                  .orElseThrow(IllegalArgumentException::new);
          baseTarget = baseTarget.withoutFlavors(platform.getKey(), visibility.getKey());

          CxxPreprocessorInput.Builder cxxPreprocessorInputBuilder =
              CxxPreprocessorInput.builder();

          // TODO(agallagher): We currently always add exported flags and frameworks to the
          // preprocessor input to mimic existing behavior, but this should likely be fixed.
          cxxPreprocessorInputBuilder.putAllPreprocessorFlags(
              Multimaps.transformValues(
                  CxxFlags.getLanguageFlagsWithMacros(
                      args.getExportedPreprocessorFlags(),
                      args.getExportedPlatformPreprocessorFlags(),
                      args.getExportedLangPreprocessorFlags(),
                      platform.getValue()),
                  f ->
                      CxxDescriptionEnhancer.toStringWithMacrosArgs(
                          buildTarget, cellRoots, resolver, platform.getValue(), f)));
          cxxPreprocessorInputBuilder.addAllFrameworks(args.getFrameworks());

          if (visibility.getValue() == HeaderVisibility.PRIVATE && !args.getHeaders().isEmpty()) {
            HeaderSymlinkTree symlinkTree =
                (HeaderSymlinkTree)
                    resolver.requireRule(
                        baseTarget.withAppendedFlavors(
                            platform.getKey(), Type.HEADERS.getFlavor()));
            cxxPreprocessorInputBuilder.addIncludes(
                CxxSymlinkTreeHeaders.from(symlinkTree, CxxPreprocessables.IncludeType.LOCAL));
          }

          if (visibility.getValue() == HeaderVisibility.PUBLIC) {

            // Add platform-agnostic headers.
            queryMetadataCxxHeaders(
                    resolver,
                    baseTarget,
                    CxxDescriptionEnhancer.getHeaderModeForPlatform(
                        resolver,
                        platform.getValue(),
                        args.getXcodePublicHeadersSymlinks()
                            .orElse(platform.getValue().getPublicHeadersSymlinksEnabled())))
                .ifPresent(cxxPreprocessorInputBuilder::addIncludes);

            // Add platform-specific headers.
            if (!args.getExportedPlatformHeaders()
                .getMatchingValues(platform.getKey().toString())
                .isEmpty()) {
              HeaderSymlinkTree symlinkTree =
                  (HeaderSymlinkTree)
                      resolver.requireRule(
                          baseTarget
                              .withoutFlavors(LIBRARY_TYPE.getFlavors())
                              .withAppendedFlavors(
                                  Type.EXPORTED_HEADERS.getFlavor(), platform.getKey()));
              cxxPreprocessorInputBuilder.addIncludes(
                  CxxSymlinkTreeHeaders.from(symlinkTree, CxxPreprocessables.IncludeType.LOCAL));
            }
          }

          CxxPreprocessorInput cxxPreprocessorInput = cxxPreprocessorInputBuilder.build();
          return Optional.of(cxxPreprocessorInput).map(metadataClass::cast);
        }
    }

    throw new IllegalStateException(String.format("unhandled metadata type: %s", type.getValue()));
  }

  @Override
  public ImmutableSortedSet<Flavor> addImplicitFlavors(
      ImmutableSortedSet<Flavor> argDefaultFlavors) {
    return addImplicitFlavorsForRuleTypes(argDefaultFlavors, Description.getBuildRuleType(this));
  }

  /**
   * Fills in missing "type" and "platform" flavors from the per-rule-type defaults configured in
   * .buckconfig, falling back to static / the default platform.
   */
  public ImmutableSortedSet<Flavor> addImplicitFlavorsForRuleTypes(
      ImmutableSortedSet<Flavor> argDefaultFlavors, BuildRuleType... types) {
    Optional<Flavor> typeFlavor = LIBRARY_TYPE.getFlavor(argDefaultFlavors);
    Optional<Flavor> platformFlavor = getCxxPlatforms().getFlavor(argDefaultFlavors);

    LOG.debug("Got arg default type %s platform %s", typeFlavor, platformFlavor);

    for (BuildRuleType type : types) {
      ImmutableMap<String, Flavor> libraryDefaults =
          cxxBuckConfig.getDefaultFlavorsForRuleType(type);

      if (!typeFlavor.isPresent()) {
        typeFlavor =
            Optional.ofNullable(libraryDefaults.get(CxxBuckConfig.DEFAULT_FLAVOR_LIBRARY_TYPE));
      }

      if (!platformFlavor.isPresent()) {
        platformFlavor =
            Optional.ofNullable(libraryDefaults.get(CxxBuckConfig.DEFAULT_FLAVOR_PLATFORM));
      }
    }

    ImmutableSortedSet<Flavor> result =
        ImmutableSortedSet.of(
            // Default to static if not otherwise specified.
            typeFlavor.orElse(CxxDescriptionEnhancer.STATIC_FLAVOR),
            platformFlavor.orElse(defaultCxxPlatform.getFlavor()));

    LOG.debug("Got default flavors %s for rule types %s", result, Arrays.toString(types));
    return result;
  }

  /**
   * This is a hack to allow fine grained control over how the transitive {@code
   * CxxPreprocessorInput}s are found. Since not all {@code Description}s which use {@code
   * CxxLibraryDescription} generate a {@code CxxLibrary}, blindly attempting to require it will
   * not work.
   *
   * <p>Therefore for those other rules, we create the list from scratch.
   */
  @FunctionalInterface
  public interface TransitiveCxxPreprocessorInputFunction {
    Stream<CxxPreprocessorInput> apply(
        BuildTarget target,
        BuildRuleResolver ruleResolver,
        CxxPlatform cxxPlatform,
        ImmutableSet<BuildRule> deps,
        CxxDeps privateDeps)
        throws NoSuchBuildTargetException;

    /**
     * Retrieve the transitive CxxPreprocessorInput from the CxxLibrary rule.
     *
     * <p>This is used by CxxLibrary and AppleLibrary. Rules that do not generate a CxxLibrary rule
     * (namely AppleTest) cannot use this.
     */
    static TransitiveCxxPreprocessorInputFunction fromLibraryRule() {
      return (target, ruleResolver, cxxPlatform, ignored, privateDeps) -> {
        // Require the unflavored library rule and ask it directly.
        BuildTarget rawTarget =
            target.withoutFlavors(
                ImmutableSet.<Flavor>builder()
                    .addAll(LIBRARY_TYPE.getFlavors())
                    .add(cxxPlatform.getFlavor())
                    .build());
        BuildRule rawRule = ruleResolver.requireRule(rawTarget);
        CxxLibrary rule = (CxxLibrary) rawRule;
        ImmutableMap<BuildTarget, CxxPreprocessorInput> inputs =
            rule.getTransitiveCxxPreprocessorInput(cxxPlatform);

        ImmutableList<CxxPreprocessorDep> privateDepsForPlatform =
            RichStream.from(privateDeps.get(ruleResolver, cxxPlatform))
                .filter(CxxPreprocessorDep.class)
                .toImmutableList();
        if (privateDepsForPlatform.isEmpty()) {
          // Nothing to add.
          return inputs.values().stream();
        } else {
          // Merge the library's own transitive inputs with those of its private deps;
          // LinkedHashMap keyed on target preserves order and de-duplicates.
          Map<BuildTarget, CxxPreprocessorInput> result = new LinkedHashMap<>();
          result.putAll(inputs);
          for (CxxPreprocessorDep dep : privateDepsForPlatform) {
            result.putAll(dep.getTransitiveCxxPreprocessorInput(cxxPlatform));
          }
          return result.values().stream();
        }
      };
    }

    /**
     * Retrieve the transitive {@link CxxPreprocessorInput} from an explicitly specified deps list.
     *
     * <p>This is used by AppleTest, which doesn't generate a CxxLibrary rule that computes this.
     */
    static TransitiveCxxPreprocessorInputFunction fromDeps() {
      return (target, ruleResolver, cxxPlatform, deps, privateDeps) -> {
        Map<BuildTarget, CxxPreprocessorInput> input = new LinkedHashMap<>();
        input.put(
            target,
            queryMetadataCxxPreprocessorInput(
                    ruleResolver, target, cxxPlatform, HeaderVisibility.PUBLIC)
                .orElseThrow(IllegalStateException::new));
        for (BuildRule rule : deps) {
          if (rule instanceof CxxPreprocessorDep) {
            input.putAll(
                ((CxxPreprocessorDep) rule).getTransitiveCxxPreprocessorInput(cxxPlatform));
          }
        }
        return input.values().stream();
      };
    }
  }

  /** Constructor-arg fields shared by cxx_library-style rules. */
  public interface CommonArg extends LinkableCxxConstructorArg {
    @Value.Default
    default SourceList getExportedHeaders() {
      return SourceList.EMPTY;
    }

    @Value.Default
    default PatternMatchedCollection<SourceList> getExportedPlatformHeaders() {
      return PatternMatchedCollection.of();
    }

    ImmutableList<StringWithMacros> getExportedPreprocessorFlags();

    @Value.Default
    default PatternMatchedCollection<ImmutableList<StringWithMacros>>
        getExportedPlatformPreprocessorFlags() {
      return PatternMatchedCollection.of();
    }

    ImmutableMap<CxxSource.Type, ImmutableList<StringWithMacros>>
        getExportedLangPreprocessorFlags();

    ImmutableList<StringWithMacros> getExportedLinkerFlags();

    @Value.Default
    default PatternMatchedCollection<ImmutableList<StringWithMacros>>
        getExportedPlatformLinkerFlags() {
      return PatternMatchedCollection.of();
    }

    @Value.NaturalOrder
    ImmutableSortedSet<BuildTarget> getExportedDeps();

    @Value.Default
    default
        PatternMatchedCollection<ImmutableSortedSet<BuildTarget>> getExportedPlatformDeps() {
      return PatternMatchedCollection.of();
    }

    Optional<Pattern> getSupportedPlatformsRegex();

    Optional<String> getSoname();

    Optional<Boolean> getForceStatic();

    Optional<Boolean> getLinkWhole();

    Optional<Boolean> getCanBeAsset();

    Optional<NativeLinkable.Linkage> getPreferredLinkage();

    Optional<Boolean> getXcodePublicHeadersSymlinks();

    Optional<Boolean> getXcodePrivateHeadersSymlinks();

    /**
     * extra_xcode_sources will add the files to the list of files to be compiled in the Xcode
     * target.
     */
    ImmutableList<SourcePath> getExtraXcodeSources();

    /**
     * extra_xcode_files will add the files to the list of files in the project and won't add them
     * to an Xcode target.
     */
    ImmutableList<SourcePath> getExtraXcodeFiles();

    /**
     * Controls whether the headers of dependencies in "deps" are re-exported for compiling targets
     * that depend on this one.
     */
    @Value.Default
    default boolean isReexportAllHeaderDependencies() {
      return true;
    }

    /**
     * These fields are passed through to SwiftLibrary for mixed C/Swift targets; they are not used
     * otherwise.
     */
    Optional<SourcePath> getBridgingHeader();

    Optional<String> getModuleName();

    /** @return C/C++ deps which are propagated to dependents. */
    @Value.Derived
    default CxxDeps getExportedCxxDeps() {
      return CxxDeps.builder()
          .addDeps(getExportedDeps())
          .addPlatformDeps(getExportedPlatformDeps())
          .build();
    }

    /**
     * Override parent class's deps to include exported deps.
     *
     * @return the C/C++ deps this rule builds against.
     */
    @Override
    @Value.Derived
    default CxxDeps getCxxDeps() {
      return CxxDeps.concat(getPrivateCxxDeps(), getExportedCxxDeps());
    }
  }

  @BuckStyleImmutable
  @Value.Immutable
  interface AbstractCxxLibraryDescriptionArg extends CommonArg {}
}
/*******************************************************************************
 * Copyright (c) 2006, 2012 Oracle Corporation and others.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 *     Oracle Corporation - initial API and implementation
 *******************************************************************************/
package org.eclipse.bpel.ui.editors.xpath;

import org.eclipse.jface.text.rules.IWordDetector;

/**
 * Word detector for XPath expressions.
 *
 * <p>The base class accepts XML "name" characters as word parts: letters,
 * digits, '.', '-', '_', ':', combining characters and extenders, following
 * the character classes of XML 1.0 Appendix B. The nested subclasses narrow
 * those rules for NCNames, $-variables, message parts and QNames.
 *
 * @author Michal Chmielewski (michal.chmielewski@oracle.com)
 * @date Oct 25, 2006
 */
public class XPathWordDetector implements IWordDetector {

    /**
     * Detects NCNames (XML names that contain no colon).
     *
     * @author Michal Chmielewski (michal.chmielewski@oracle.com)
     * @date Oct 26, 2006
     */
    static public class NCNameWordDetector extends XPathWordDetector {
        /**
         * An NCName word part is any name character except ':'.
         * @see org.eclipse.bpel.ui.editors.xpath.XPathWordDetector#isWordPart(char)
         */
        @Override
        public boolean isWordPart (char c) {
            return c != ':' && super.isWordPart(c);
        }
    }

    /**
     * Detects variables in XPath expressions.
     *
     * @author Michal Chmielewski (michal.chmielewski@oracle.com)
     * @date Nov 17, 2006
     */
    static public class VariableDetector extends NCNameWordDetector {
        /**
         * Variables always start with a '$'.
         * @see org.eclipse.bpel.ui.editors.xpath.XPathWordDetector#isWordStart(char)
         */
        @Override
        public boolean isWordStart (char c) {
            return c == '$' ;
        }

        /**
         * The rest of the variable is an NCName but does not contain a '.'
         */
        @Override
        public boolean isWordPart (char c) {
            return c != '.' && super.isWordPart(c);
        }
    }

    /**
     * Detects message parts in XPath expressions.
     *
     * @author Michal Chmielewski (michal.chmielewski@oracle.com)
     * @date Nov 27, 2006
     */
    static public class MessagePartDetector extends NCNameWordDetector {
        /**
         * Message parts always start with a '.' (they follow the variable name).
         * @see org.eclipse.bpel.ui.editors.xpath.XPathWordDetector#isWordStart(char)
         */
        @Override
        public boolean isWordStart (char c) {
            return c == '.' ;
        }

        /**
         * The rest of the part is an NCName but contains neither '.' nor '/'.
         */
        @Override
        public boolean isWordPart (char c) {
            return c != '.' && c != '/' && super.isWordPart(c);
        }
    }

    /**
     * Detects QNames (an optional NCName prefix, a single ':', and an NCName
     * local part) in XPath expressions.
     *
     * @author Michal Chmielewski (michal.chmielewski@oracle.com)
     * @date Nov 17, 2006
     */
    static public class QNameDetector extends NCNameWordDetector {

        // Number of ':' seen in the current word; a QName allows at most one.
        // NOTE(review): this makes the detector stateful, so a single instance
        // must not be shared by concurrent scanners.
        int colCount = 0;

        /**
         * A QName starts like an NCName; starting a new word resets the
         * colon counter.
         * @see org.eclipse.bpel.ui.editors.xpath.XPathWordDetector#isWordStart(char)
         */
        @Override
        public boolean isWordStart (char c) {
            colCount = 0;
            return super.isWordStart(c);
        }

        /**
         * Word parts are NCName characters; one ':' is accepted to separate
         * prefix from local name, a second one ends the word.
         */
        @Override
        public boolean isWordPart (char c) {
            if (c == ':') {
                if (colCount == 0) {
                    colCount += 1;
                    return true;
                }
                return false;
            }
            return super.isWordPart(c);
        }
    }

    /**
     * @see org.eclipse.jface.text.rules.IWordDetector#isWordPart(char)
     */
    public boolean isWordPart (char c) {
        return isNameChar(c);
    }

    /** (non-Javadoc)
     * @see org.eclipse.jface.text.rules.IWordDetector#isWordStart(char)
     */
    public boolean isWordStart (char c) {
        return (c == '_' || isLetter(c)) ;
    }

    /**
     * Answer if we are a name character (XML 1.0 NameChar).
     *
     * @param c
     * @return true if name character, false otherwise
     */
    static public boolean isNameChar (char c) {
        return isLetter(c) || isDigit(c) ||
            c == '.' || c == '-' || c == '_' || c == ':' ||
            isCombiningCharacter(c) || isExtender ( c );
    }

    /**
     * Return if we are a letter according to the XML spec.
     *
     * @param c
     * @return true if letter, false otherwise.
     */
    static public boolean isLetter ( char c ) {
        return isBaseChar ( c ) || isIdeographic ( c );
    }

    /**
     * Check if we are a base character according to the XML spec
     * (production [85] BaseChar). The switch dispatches on the high byte of
     * the code point to keep each case's range list short.
     *
     * @param c the character
     * @return true if base, false otherwise.
     */
    static public boolean isBaseChar ( char c ) {
        switch ( getPlane ( c ) ) {
        case 0x00 :
            return range(c,0x0041,0x005A) || range(c,0x0061,0x007A) ||
                   range(c,0x00C0,0x00D6) || range(c,0x00D8,0x00F6) ||
                   range(c,0x00F8,0x00FF) ;
        case 0x01 :
            return range(c,0x0100,0x0131) || range(c,0x0134,0x013E) ||
                   range(c,0x0141,0x0148) || range(c,0x014A,0x017E) ||
                   range(c,0x0180,0x01C3) || range(c,0x01CD,0x01F0) ||
                   range(c,0x01F4,0x01F5) || range(c,0x01FA,0x01FF);
        case 0x02 :
            return range(c,0x0200,0x0217) || range(c,0x0250,0x02A8) ||
                   range(c,0x02BB,0x02C1);
        case 0x03 :
            return c == 0x0386 || range(c,0x0388,0x038A) || c == 0x038C ||
                   range(c,0x038E,0x03A1) || range(c,0x03A3,0x03CE) ||
                   range(c,0x03D0,0x03D6) || c == 0x03DA || c == 0x03DC ||
                   c == 0x03DE || c == 0x03E0 || range(c,0x03E2,0x03F3);
        case 0x04 :
            return range(c,0x0401,0x040C) || range(c,0x040E,0x044F) ||
                   range(c,0x0451,0x045C) || range(c,0x045E,0x0481) ||
                   range(c,0x0490,0x04C4) || range(c,0x04C7,0x04C8) ||
                   range(c,0x04CB,0x04CC) || range(c,0x04D0,0x04EB) ||
                   range(c,0x04EE,0x04F5) || range(c,0x04F8,0x04F9);
        case 0x05 :
            return range(c,0x0531,0x0556) || c == 0x0559 ||
                   range(c,0x0561,0x0586) || range(c,0x05D0,0x05EA) ||
                   range(c,0x05F0,0x05F2);
        case 0x06 :
            return range(c,0x0621,0x063A) || range(c,0x0641,0x064A) ||
                   range(c,0x0671,0x06B7) || range(c,0x06BA,0x06BE) ||
                   range(c,0x06C0,0x06CE) || range(c,0x06D0,0x06D3) ||
                   c == 0x06D5 || range(c,0x06E5,0x06E6);
        case 0x07 :
        case 0x08 :
            return false;
        case 0x09 :
            return range(c,0x0905,0x0939) || c == 0x093D ||
                   range(c,0x0958,0x0961) || range(c,0x0985,0x098C) ||
                   range(c,0x098F,0x0990) || range(c,0x0993,0x09A8) ||
                   range(c,0x09AA,0x09B0) || c == 0x09B2 ||
                   range(c,0x09B6,0x09B9) || range(c,0x09DC,0x09DD) ||
                   range(c,0x09DF,0x09E1) || range(c,0x09F0,0x09F1) ;
        case 0x0A :
            return range(c,0x0A05,0x0A0A) || range(c,0x0A0F,0x0A10) ||
                   range(c,0x0A13,0x0A28) || range(c,0x0A2A,0x0A30) ||
                   range(c,0x0A32,0x0A33) || range(c,0x0A35,0x0A36) ||
                   range(c,0x0A38,0x0A39) || range(c,0x0A59,0x0A5C) ||
                   c == 0x0A5E || range(c,0x0A72,0x0A74) ||
                   range(c,0x0A85,0x0A8B) || c == 0x0A8D ||
                   range(c,0x0A8F,0x0A91) || range(c,0x0A93,0x0AA8) ||
                   range(c,0x0AAA,0x0AB0) || range(c,0x0AB2,0x0AB3) ||
                   range(c,0x0AB5,0x0AB9) || c == 0x0ABD || c == 0x0AE0;
        case 0x0B :
            return range(c,0x0B05,0x0B0C) || range(c,0x0B0F,0x0B10) ||
                   range(c,0x0B13,0x0B28) || range(c,0x0B2A,0x0B30) ||
                   range(c,0x0B32,0x0B33) || range(c,0x0B36,0x0B39) ||
                   c == 0x0B3D || range(c,0x0B5C,0x0B5D) ||
                   range(c,0x0B5F,0x0B61) || range(c,0x0B85,0x0B8A) ||
                   range(c,0x0B8E,0x0B90) || range(c,0x0B92,0x0B95) ||
                   range(c,0x0B99,0x0B9A) || c == 0x0B9C ||
                   range(c,0x0B9E,0x0B9F) || range(c,0x0BA3,0x0BA4) ||
                   range(c,0x0BA8,0x0BAA) || range(c,0x0BAE,0x0BB5) ||
                   range(c,0x0BB7,0x0BB9);
        case 0x0C :
            return range(c,0x0C05,0x0C0C) || range(c,0x0C0E,0x0C10) ||
                   range(c,0x0C12,0x0C28) || range(c,0x0C2A,0x0C33) ||
                   range(c,0x0C35,0x0C39) || range(c,0x0C60,0x0C61) ||
                   range(c,0x0C85,0x0C8C) || range(c,0x0C8E,0x0C90) ||
                   range(c,0x0C92,0x0CA8) || range(c,0x0CAA,0x0CB3) ||
                   range(c,0x0CB5,0x0CB9) || c == 0x0CDE ||
                   range(c,0x0CE0,0x0CE1);
        case 0x0D :
            return range(c,0x0D05,0x0D0C) || range(c,0x0D0E,0x0D10) ||
                   range(c,0x0D12,0x0D28) || range(c,0x0D2A,0x0D39) ||
                   range(c,0x0D60,0x0D61) ;
        case 0x0E :
            return range(c,0x0E01,0x0E2E) || c == 0x0E30 ||
                   range(c,0x0E32,0x0E33) || range(c,0x0E40,0x0E45) ||
                   range(c,0x0E81,0x0E82) || c == 0x0E84 ||
                   range(c,0x0E87,0x0E88) || c == 0x0E8A || c == 0x0E8D ||
                   range(c,0x0E94,0x0E97) || range(c,0x0E99,0x0E9F) ||
                   range(c,0x0EA1,0x0EA3) || c == 0x0EA5 || c == 0x0EA7 ||
                   range(c,0x0EAA,0x0EAB) || range(c,0x0EAD,0x0EAE) ||
                   c == 0x0EB0 || range(c,0x0EB2,0x0EB3) || c == 0x0EBD ||
                   range(c,0x0EC0,0x0EC4);
        case 0x0F :
            return range(c,0x0F40,0x0F47) ||
                   range(c,0x0F49,0x0F69);
        case 0x10 :
            // Bug fix: this case used to repeat the Tibetan ranges of
            // case 0x0F (0x0F40..0x0F69), which can never match a 0x10xx
            // code point, so it always returned false. XML 1.0 Appendix B
            // lists the Georgian letters for this byte:
            // [#x10A0-#x10C5] | [#x10D0-#x10F6].
            return range(c,0x10A0,0x10C5) ||
                   range(c,0x10D0,0x10F6);
        case 0x11 :
            return c == 0x1100 || range(c,0x1102,0x1103) ||
                   range(c,0x1105,0x1107) || c == 0x1109 ||
                   range(c,0x110B,0x110C) || range(c,0x110E,0x1112) ||
                   c == 0x113C || c == 0x113E || c == 0x1140 ||
                   c == 0x114C || c == 0x114E || c == 0x1150 ||
                   range(c,0x1154,0x1155) || c == 0x1159 ||
                   range(c,0x115F,0x1161) || c == 0x1163 || c == 0x1165 ||
                   c == 0x1167 || c == 0x1169 || range(c,0x116D,0x116E) ||
                   range(c,0x1172,0x1173) || c == 0x1175 || c == 0x119E ||
                   c == 0x11A8 || c == 0x11AB || range(c,0x11AE,0x11AF) ||
                   range(c,0x11B7,0x11B8) || c == 0x11BA ||
                   range(c,0x11BC,0x11C2) || c == 0x11EB || c == 0x11F0 ||
                   c == 0x11F9;
        case 0x1E :
            return range(c,0x1E00,0x1E9B) || range(c,0x1EA0,0x1EF9);
        case 0x1F :
            return range(c,0x1F00,0x1F15) || range(c,0x1F18,0x1F1D) ||
                   range(c,0x1F20,0x1F45) || range(c,0x1F48,0x1F4D) ||
                   range(c,0x1F50,0x1F57) || c == 0x1F59 || c == 0x1F5B ||
                   c == 0x1F5D || range(c,0x1F5F,0x1F7D) ||
                   range(c,0x1F80,0x1FB4) || range(c,0x1FB6,0x1FBC) ||
                   c == 0x1FBE || range(c,0x1FC2,0x1FC4) ||
                   range(c,0x1FC6,0x1FCC) || range(c,0x1FD0,0x1FD3) ||
                   range(c,0x1FD6,0x1FDB) || range(c,0x1FE0,0x1FEC) ||
                   range(c,0x1FF2,0x1FF4) || range(c,0x1FF6,0x1FFC);
        case 0x21 :
            return c == 0x2126 || range(c,0x212A,0x212B) || c == 0x212E ||
                   range(c,0x2180,0x2182);
        case 0x30 :
            return range(c,0x3041,0x3094) || range(c,0x30A1,0x30FA);
        case 0x31 :
            return range(c,0x3105,0x312C);
        default :
            // Hangul syllables occupy bytes 0xAC..0xD7; anything else is not
            // a base character.
            return range(c,0xAC00,0xD7A3);
        }
    }

    /**
     * Answer if the character is a combining character (XML 1.0
     * production [87] CombiningChar).
     *
     * @param c the character
     * @return true if combining, false otherwise.
     */
    static public boolean isCombiningCharacter ( char c ) {
        switch ( getPlane ( c ) ) {
        case 0x03 :
            return range(c,0x0300,0x0345) || range(c,0x0360,0x0361) ;
        case 0x04 :
            return range(c,0x0483,0x0486);
        case 0x05 :
            return range(c,0x0591,0x05A1) || range(c,0x05A3,0x05B9) ||
                   range(c,0x05BB,0x05BD) || c == 0x05BF ||
                   range(c,0x05C1,0x05C2) || c == 0x05C4 ;
        case 0x06 :
            return range(c,0x064B,0x0652) || c == 0x0670 ||
                   range(c,0x06D6,0x06DC) || range(c,0x06DD,0x06DF) ||
                   range(c,0x06E0,0x06E4) || range(c,0x06E7,0x06E8) ||
                   range(c,0x06EA,0x06ED) ;
        case 0x09 :
            return range(c,0x0901,0x0903) || c == 0x093C ||
                   range(c,0x093E,0x094C) || c == 0x094D ||
                   range(c,0x0951,0x0954) || range(c,0x0962,0x0963) ||
                   range(c,0x0981,0x0983) || c == 0x09BC || c == 0x09BE ||
                   c == 0x09BF || range(c,0x09C0,0x09C4) ||
                   range(c,0x09C7,0x09C8) || range(c,0x09CB,0x09CD) ||
                   c == 0x09D7 || range(c,0x09E2,0x09E3) ;
        case 0x0A :
            return c == 0x0A02 || c == 0x0A3C || c == 0x0A3E || c == 0x0A3F ||
                   range(c,0x0A40,0x0A42) || range(c,0x0A47,0x0A48) ||
                   range(c,0x0A4B,0x0A4D) || range(c,0x0A70,0x0A71) ||
                   range(c,0x0A81,0x0A83) || c == 0x0ABC ||
                   range(c,0x0ABE,0x0AC5) || range(c,0x0AC7,0x0AC9) ||
                   range(c,0x0ACB,0x0ACD) ;
        case 0x0B :
            return range(c,0x0B01,0x0B03) || c == 0x0B3C ||
                   range(c,0x0B3E,0x0B43) || range(c,0x0B47,0x0B48) ||
                   range(c,0x0B4B,0x0B4D) || range(c,0x0B56,0x0B57) ||
                   range(c,0x0B82,0x0B83) || range(c,0x0BBE,0x0BC2) ||
                   range(c,0x0BC6,0x0BC8) || range(c,0x0BCA,0x0BCD) ||
                   c == 0x0BD7 ;
        case 0x0C :
            return range(c,0x0C01,0x0C03) || range(c,0x0C3E,0x0C44) ||
                   range(c,0x0C46,0x0C48) || range(c,0x0C4A,0x0C4D) ||
                   range(c,0x0C55,0x0C56) || range(c,0x0C82,0x0C83) ||
                   range(c,0x0CBE,0x0CC4) || range(c,0x0CC6,0x0CC8) ||
                   range(c,0x0CCA,0x0CCD) || range(c,0x0CD5,0x0CD6) ;
        case 0x0D :
            return range(c,0x0D02,0x0D03) || range(c,0x0D3E,0x0D43) ||
                   range(c,0x0D46,0x0D48) || range(c,0x0D4A,0x0D4D) ||
                   c == 0x0D57 ;
        case 0x0E :
            return c == 0x0E31 || range(c,0x0E34,0x0E3A) ||
                   range(c,0x0E47,0x0E4E) || c == 0x0EB1 ||
                   range(c,0x0EB4,0x0EB9) || range(c,0x0EBB,0x0EBC) ||
                   range(c,0x0EC8,0x0ECD);
        case 0x0F :
            return range(c,0x0F18,0x0F19) || c == 0x0F35 || c == 0x0F37 ||
                   c == 0x0F39 || c == 0x0F3E || c == 0x0F3F ||
                   range(c,0x0F71,0x0F84) || range(c,0x0F86,0x0F8B) ||
                   range(c,0x0F90,0x0F95) || c == 0x0F97 ||
                   range(c,0x0F99,0x0FAD) || range(c,0x0FB1,0x0FB7) ||
                   c == 0x0FB9 ;
        case 0x20 :
            return range(c,0x20D0,0x20DC) || c == 0x20E1;
        case 0x30 :
            return range(c,0x302A,0x302F) || c == 0x3099 || c == 0x309A ;
        default :
            return false;
        }
    }

    /**
     * Answer is the character is an Ideographic, per XML spec.
     * <pre>
     * [#x4E00-#x9FA5] | #x3007 | [#x3021-#x3029]
     * </pre>
     * @param c the character to test
     * @return return true if yes, false if no.
     */
    static public boolean isIdeographic (char c) {
        return range(c,0x4e00,0x9fa5) || c == 0x3007 || range(c,0x3021,0x3029) ;
    }

    /**
     * Answer if we are a digit, based on the XML specification.
     * <pre>
     * [#x0030-#x0039] | [#x0660-#x0669] | [#x06F0-#x06F9] | [#x0966-#x096F] |
     * [#x09E6-#x09EF] | [#x0A66-#x0A6F] | [#x0AE6-#x0AEF] | [#x0B66-#x0B6F] |
     * [#x0BE7-#x0BEF] | [#x0C66-#x0C6F] | [#x0CE6-#x0CEF] | [#x0D66-#x0D6F] |
     * [#x0E50-#x0E59] | [#x0ED0-#x0ED9] | [#x0F20-#x0F29]
     * </pre>
     * @param c
     * @return true if digit, false otherwise.
     */
    static public boolean isDigit ( char c ) {
        switch ( getPlane ( c ) ) {
        case 0x00 :
            return range(c,0x0030,0x0039);
        case 0x06 :
            return range(c,0x0660,0x0669) || range(c,0x06F0,0x06F9);
        case 0x09 :
            return range(c,0x0966,0x096F) || range(c,0x09E6,0x09EF);
        case 0x0A:
            return range(c,0x0A66,0x0A6F) || range(c,0x0AE6,0x0AEF);
        case 0x0B:
            return range(c,0x0B66,0x0B6F) || range(c,0x0BE7,0x0BEF);
        case 0x0C:
            return range(c,0x0C66,0x0C6F) || range(c,0x0CE6,0x0CEF);
        case 0x0D:
            return range(c,0x0D66,0x0D6F);
        case 0x0E:
            return range(c,0x0E50,0x0E59) || range(c,0x0ED0,0x0ED9);
        case 0x0F:
            return range(c,0x0F20,0x0F29);
        default :
            return false;
        }
    }

    /**
     * Answer if we are an extender character.
     * <pre>
     * #x00B7 | #x02D0 | #x02D1 | #x0387 | #x0640 | #x0E46 | #x0EC6 | #x3005 |
     * [#x3031-#x3035] | [#x309D-#x309E] | [#x30FC-#x30FE]
     * </pre>
     * @param c
     * @return true if extender false otherwise.
     */
    static public boolean isExtender (char c) {
        boolean res = ( c == 0x00B7 || c == 0x02D0 || c == 0x02D1 ||
                        c == 0x0387 || c == 0x0640 || c == 0x0E46 ||
                        c == 0x0EC6 || c == 0x3005 );
        if (res) {
            return res;
        }
        return range(c,0x3031,0x3035) ||
               range(c,0x309D,0x309E) ||
               range(c,0x30FC,0x30FE);
    }

    /**
     * Return the high byte of the (BMP) character code; this is the value the
     * switch statements above dispatch on.
     *
     * <p>Bug fix: this previously shifted right by 16 bits, which is 0 for
     * every possible {@code char}, so every {@code switch (getPlane(c))} in
     * this class collapsed into its 0x00/default case and non-Latin letters,
     * non-ASCII digits and all combining characters were never recognized.
     * The case labels (0x0A for 0x0A05.., 0x30 for 0x3041..) index the high
     * byte, so the shift must be 8.
     *
     * @param c
     * @return the high byte of the character code
     */
    static public int getPlane (int c) {
        return (c >>> 8);
    }

    /**
     * Ask if the character is in the range specified.
     *
     * @param c the character
     * @param l low value (inclusive)
     * @param h high value (inclusive)
     * @return true if in range, false otherwise.
     */
    static public boolean range ( char c, int l, int h) {
        return ( c >= l && c <= h );
    }
}
// Copyright 2017 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.query2.cquery;

import static com.google.common.collect.ImmutableMap.toImmutableMap;

import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.base.Verify;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.util.concurrent.AsyncFunction;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.devtools.build.lib.analysis.ConfiguredTarget;
import com.google.devtools.build.lib.analysis.ConfiguredTargetValue;
import com.google.devtools.build.lib.analysis.config.BuildConfigurationValue;
import com.google.devtools.build.lib.analysis.config.transitions.TransitionFactory;
import com.google.devtools.build.lib.analysis.configuredtargets.RuleConfiguredTarget;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.cmdline.TargetParsingException;
import com.google.devtools.build.lib.cmdline.TargetPattern;
import com.google.devtools.build.lib.events.ExtendedEventHandler;
import com.google.devtools.build.lib.packages.RuleTransitionData;
import com.google.devtools.build.lib.packages.Target;
import com.google.devtools.build.lib.pkgcache.PackageManager;
import com.google.devtools.build.lib.pkgcache.PathPackageLocator;
import com.google.devtools.build.lib.query2.NamedThreadSafeOutputFormatterCallback;
import com.google.devtools.build.lib.query2.PostAnalysisQueryEnvironment;
import com.google.devtools.build.lib.query2.SkyQueryEnvironment;
import com.google.devtools.build.lib.query2.cquery.ProtoOutputFormatterCallback.OutputType;
import com.google.devtools.build.lib.query2.engine.Callback;
import com.google.devtools.build.lib.query2.engine.KeyExtractor;
import com.google.devtools.build.lib.query2.engine.QueryEnvironment;
import com.google.devtools.build.lib.query2.engine.QueryException;
import com.google.devtools.build.lib.query2.engine.QueryExpression;
import com.google.devtools.build.lib.query2.engine.QueryUtil.ThreadSafeMutableKeyExtractorBackedSetImpl;
import com.google.devtools.build.lib.query2.query.aspectresolvers.AspectResolver;
import com.google.devtools.build.lib.rules.AliasConfiguredTarget;
import com.google.devtools.build.lib.server.FailureDetails.ConfigurableQuery;
import com.google.devtools.build.lib.skyframe.BuildConfigurationKey;
import com.google.devtools.build.lib.skyframe.ConfiguredTargetKey;
import com.google.devtools.build.lib.skyframe.SkyframeExecutor;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.build.skyframe.SkyKey;
import com.google.devtools.build.skyframe.WalkableGraph;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.List;
import java.util.Set;
import java.util.function.Function;
import java.util.function.Supplier;
import javax.annotation.Nullable;

/**
 * {@link QueryEnvironment} that runs queries over the configured target (analysis) graph.
 *
 * <p>Aspects are partially supported. Their dependencies appear as implicit dependencies on the
 * targets they're connected to, but the aspects themselves aren't visible as query nodes. See
 * comments on {@link PostAnalysisQueryEnvironment#targetifyValues} and b/163052263 for details.
 */
public class ConfiguredTargetQueryEnvironment
    extends PostAnalysisQueryEnvironment<KeyedConfiguredTarget> {
  /** Common query functions and cquery specific functions. */
  public static final ImmutableList<QueryFunction> FUNCTIONS = populateFunctions();
  /** Cquery specific functions. */
  public static final ImmutableList<QueryFunction> CQUERY_FUNCTIONS = getCqueryFunctions();

  // NOTE(review): only assigned by the CqueryOptions-taking constructor below; remains null when
  // the Set<Setting>-taking constructor is used directly.
  private CqueryOptions cqueryOptions;

  // Maps a KeyedConfiguredTarget to the ConfiguredTargetKey used to look it up in Skyframe.
  private final KeyExtractor<KeyedConfiguredTarget, ConfiguredTargetKey>
      configuredTargetKeyExtractor;

  private final ConfiguredTargetAccessor accessor;

  /**
   * Stores every configuration in the transitive closure of the build graph as a map from its
   * user-friendly hash to the configuration itself.
   *
   * <p>This is used to find configured targets in, e.g. {@code somepath} queries. Given {@code
   * somepath(//foo, //bar)}, cquery finds the configured targets for {@code //foo} and {@code
   * //bar} by creating a {@link ConfiguredTargetKey} from their labels and <i>some</i>
   * configuration, then querying the {@link WalkableGraph} to find the matching configured target.
   *
   * <p>Having this map lets cquery choose from all available configurations in the graph,
   * particularly including configurations that aren't the host or top-level.
   *
   * <p>This can also be used in cquery's {@code config} function to match against explicitly
   * specified configs. This, in particular, is where having user-friendly hashes is invaluable.
   */
  private final ImmutableMap<String, BuildConfigurationValue> transitiveConfigurations;

  @Override
  protected KeyExtractor<KeyedConfiguredTarget, ConfiguredTargetKey>
      getConfiguredTargetKeyExtractor() {
    return configuredTargetKeyExtractor;
  }

  public ConfiguredTargetQueryEnvironment(
      boolean keepGoing,
      ExtendedEventHandler eventHandler,
      Iterable<QueryFunction> extraFunctions,
      TopLevelConfigurations topLevelConfigurations,
      BuildConfigurationValue hostConfiguration,
      Collection<SkyKey> transitiveConfigurationKeys,
      PathFragment parserPrefix,
      PathPackageLocator pkgPath,
      Supplier<WalkableGraph> walkableGraphSupplier,
      Set<Setting> settings)
      throws InterruptedException {
    super(
        keepGoing,
        eventHandler,
        extraFunctions,
        topLevelConfigurations,
        hostConfiguration,
        parserPrefix,
        pkgPath,
        walkableGraphSupplier,
        settings);
    this.accessor = new ConfiguredTargetAccessor(walkableGraphSupplier.get(), this);
    this.configuredTargetKeyExtractor = KeyedConfiguredTarget::getConfiguredTargetKey;
    this.transitiveConfigurations =
        getTransitiveConfigurations(transitiveConfigurationKeys, walkableGraphSupplier.get());
  }

  /** Convenience constructor that derives the query settings from {@code cqueryOptions}. */
  public ConfiguredTargetQueryEnvironment(
      boolean keepGoing,
      ExtendedEventHandler eventHandler,
      Iterable<QueryFunction> extraFunctions,
      TopLevelConfigurations topLevelConfigurations,
      BuildConfigurationValue hostConfiguration,
      Collection<SkyKey> transitiveConfigurationKeys,
      PathFragment parserPrefix,
      PathPackageLocator pkgPath,
      Supplier<WalkableGraph> walkableGraphSupplier,
      CqueryOptions cqueryOptions)
      throws InterruptedException {
    this(
        keepGoing,
        eventHandler,
        extraFunctions,
        topLevelConfigurations,
        hostConfiguration,
        transitiveConfigurationKeys,
        parserPrefix,
        pkgPath,
        walkableGraphSupplier,
        cqueryOptions.toSettings());
    this.cqueryOptions = cqueryOptions;
  }

  // Builds FUNCTIONS: the standard query functions plus the cquery-only ones.
  private static ImmutableList<QueryFunction> populateFunctions() {
    return new ImmutableList.Builder<QueryFunction>()
        .addAll(QueryEnvironment.DEFAULT_QUERY_FUNCTIONS)
        .addAll(getCqueryFunctions())
        .build();
  }

  // Functions available only in cquery (currently just config()).
  private static ImmutableList<QueryFunction> getCqueryFunctions() {
    return ImmutableList.of(new ConfigFunction());
  }

  /**
   * Returns a supplied {@link BuildConfigurationValue} if both have the same build options,
   * otherwise throws an exception.
   *
   * <p>Noting the background of {@link BuildConfigurationKey#toComparableString}, multiple {@link
   * BuildConfigurationKey} instances can correspond to the same {@link BuildConfigurationValue},
   * especially when trimming is involved. We are only interested in configurations whose options
   * differ - intricacies around differing fragments can be disregarded.
   */
  private static BuildConfigurationValue mergeEqualBuildConfiguration(
      BuildConfigurationValue left, BuildConfigurationValue right) {
    Preconditions.checkArgument(
        left.getOptions().equals(right.getOptions()),
        "Non-matching configurations: (%s, %s)",
        left,
        right);
    return left;
  }

  // Builds the checksum -> configuration map, sorted by checksum for deterministic iteration.
  private static ImmutableMap<String, BuildConfigurationValue> getTransitiveConfigurations(
      Collection<SkyKey> transitiveConfigurationKeys, WalkableGraph graph)
      throws InterruptedException {
    // mergeEqualBuildConfiguration can only fail if two BuildConfigurationValues have the same
    // checksum but are not equal. This would be a black swan event.
    return graph.getSuccessfulValues(transitiveConfigurationKeys).values().stream()
        .map(BuildConfigurationValue.class::cast)
        .sorted(Comparator.comparing(BuildConfigurationValue::checksum))
        .collect(
            toImmutableMap(
                BuildConfigurationValue::checksum,
                Function.identity(),
                ConfiguredTargetQueryEnvironment::mergeEqualBuildConfiguration));
  }

  @Override
  public ImmutableList<NamedThreadSafeOutputFormatterCallback<KeyedConfiguredTarget>>
      getDefaultOutputFormatters(
          TargetAccessor<KeyedConfiguredTarget> accessor,
          ExtendedEventHandler eventHandler,
          OutputStream out,
          SkyframeExecutor skyframeExecutor,
          BuildConfigurationValue hostConfiguration,
          @Nullable TransitionFactory<RuleTransitionData> trimmingTransitionFactory,
          PackageManager packageManager)
          throws QueryException, InterruptedException {
    AspectResolver aspectResolver =
        cqueryOptions.aspectDeps.createResolver(packageManager, eventHandler);
    // One callback per supported --output value; the booleans/OutputTypes distinguish variants
    // that share an implementation class.
    return ImmutableList.of(
        new LabelAndConfigurationOutputFormatterCallback(
            eventHandler, cqueryOptions, out, skyframeExecutor, accessor, true),
        new LabelAndConfigurationOutputFormatterCallback(
            eventHandler, cqueryOptions, out, skyframeExecutor, accessor, false),
        new TransitionsOutputFormatterCallback(
            eventHandler,
            cqueryOptions,
            out,
            skyframeExecutor,
            accessor,
            hostConfiguration,
            trimmingTransitionFactory),
        new ProtoOutputFormatterCallback(
            eventHandler,
            cqueryOptions,
            out,
            skyframeExecutor,
            accessor,
            aspectResolver,
            OutputType.BINARY),
        new ProtoOutputFormatterCallback(
            eventHandler,
            cqueryOptions,
            out,
            skyframeExecutor,
            accessor,
            aspectResolver,
            OutputType.TEXT),
        new ProtoOutputFormatterCallback(
            eventHandler,
            cqueryOptions,
            out,
            skyframeExecutor,
            accessor,
            aspectResolver,
            OutputType.JSON),
        new BuildOutputFormatterCallback(
            eventHandler, cqueryOptions, out, skyframeExecutor, accessor),
        new GraphOutputFormatterCallback(
            eventHandler,
            cqueryOptions,
            out,
            skyframeExecutor,
            accessor,
            kct -> getFwdDeps(ImmutableList.of(kct))),
        new StarlarkOutputFormatterCallback(
            eventHandler, cqueryOptions, out, skyframeExecutor, accessor));
  }

  @Override
  public String getOutputFormat() {
    return cqueryOptions.outputFormat;
  }

  @Override
  public ConfiguredTargetAccessor getAccessor() {
    return accessor;
  }

  @Override
  public QueryTaskFuture<Void> getTargetsMatchingPattern(
      QueryExpression owner, String pattern, Callback<KeyedConfiguredTarget> callback) {
    TargetPattern patternToEval;
    try {
      patternToEval = getPattern(pattern);
    } catch (TargetParsingException tpe) {
      // Parse errors are reported through handleError, which itself may throw when keep_going is
      // off; surface that as a failed future rather than propagating.
      try {
        handleError(owner, tpe.getMessage(), tpe.getDetailedExitCode());
      } catch (QueryException qe) {
        return immediateFailedFuture(qe);
      }
      return immediateSuccessfulFuture(null);
    }
    AsyncFunction<TargetParsingException, Void> reportBuildFileErrorAsyncFunction =
        exn -> {
          handleError(owner, exn.getMessage(), exn.getDetailedExitCode());
          return Futures.immediateFuture(null);
        };
    return QueryTaskFutureImpl.ofDelegate(
        Futures.catchingAsync(
            patternToEval.evalAdaptedForAsync(
                resolver,
                getIgnoredPackagePrefixesPathFragments(),
                /* excludedSubdirectories= */ ImmutableSet.of(),
                (Callback<Target>)
                    partialResult -> {
                      List<KeyedConfiguredTarget> transformedResult = new ArrayList<>();
                      // Each bare target label expands to every configured target in the graph
                      // with that label (one per configuration).
                      for (Target target : partialResult) {
                        transformedResult.addAll(
                            getConfiguredTargetsForConfigFunction(target.getLabel()));
                      }
                      callback.process(transformedResult);
                    },
                QueryException.class),
            TargetParsingException.class,
            reportBuildFileErrorAsyncFunction,
            MoreExecutors.directExecutor()));
  }

  /**
   * Returns the {@link ConfiguredTarget} for the given label and configuration if it exists, else
   * null.
   */
  @Nullable
  private KeyedConfiguredTarget getConfiguredTarget(
      Label label, BuildConfigurationValue configuration) throws InterruptedException {
    return getValueFromKey(
        ConfiguredTargetKey.builder().setLabel(label).setConfiguration(configuration).build());
  }

  @Override
  @Nullable
  protected KeyedConfiguredTarget getValueFromKey(SkyKey key) throws InterruptedException {
    ConfiguredTargetValue value = getConfiguredTargetValue(key);
    return value == null
        ? null
        : KeyedConfiguredTarget.create((ConfiguredTargetKey) key, value.getConfiguredTarget());
  }

  /**
   * Returns all configured targets in Skyframe with the given label.
   *
   * <p>If there are no matches, returns an empty list.
   */
  private List<KeyedConfiguredTarget> getConfiguredTargetsForConfigFunction(Label label)
      throws InterruptedException {
    ImmutableList.Builder<KeyedConfiguredTarget> ans = ImmutableList.builder();
    for (BuildConfigurationValue config : transitiveConfigurations.values()) {
      KeyedConfiguredTarget kct = getConfiguredTarget(label, config);
      if (kct != null) {
        ans.add(kct);
      }
    }
    // Source files and the like live in the "null" configuration and are checked separately.
    KeyedConfiguredTarget nullConfiguredTarget = getNullConfiguredTarget(label);
    if (nullConfiguredTarget != null) {
      ans.add(nullConfiguredTarget);
    }
    return ans.build();
  }

  /**
   * Processes the targets in {@code targets} with the requested {@code configuration}
   *
   * @param pattern the original pattern that {@code targets} were parsed from. Used for error
   *     message.
   * @param targets the set of {@link ConfiguredTarget}s whose labels represent the targets being
   *     requested.
   * @param configPrefix the configuration to request {@code targets} in. This can be the
   *     configuration's checksum, any prefix of its checksum, or the special identifiers "host",
   *     "target", or "null".
   * @param callback the callback to receive the results of this method.
   * @return {@link QueryTaskCallable} that returns the correctly configured targets.
   */
  QueryTaskCallable<Void> getConfiguredTargetsForConfigFunction(
      String pattern,
      ThreadSafeMutableSet<KeyedConfiguredTarget> targets,
      String configPrefix,
      Callback<KeyedConfiguredTarget> callback) {
    // There's no technical reason other callers beside ConfigFunction can't call this. But they'd
    // need to adjust the error messaging below to not make it config()-specific. Please don't just
    // remove that line: the counter-priority is making error messages as clear, precise, and
    // actionable as possible.
    return () -> {
      List<KeyedConfiguredTarget> transformedResult = new ArrayList<>();
      boolean userFriendlyConfigName = true;
      for (KeyedConfiguredTarget target : targets) {
        Label label = getCorrectLabel(target);
        KeyedConfiguredTarget keyedConfiguredTarget;
        switch (configPrefix) {
          case "host":
            keyedConfiguredTarget = getHostConfiguredTarget(label);
            break;
          case "target":
            keyedConfiguredTarget = getTargetConfiguredTarget(label);
            break;
          case "null":
            keyedConfiguredTarget = getNullConfiguredTarget(label);
            break;
          default:
            // Interpret configPrefix as a (possibly partial) checksum; it must match exactly one
            // known configuration.
            ImmutableList<String> matchingConfigs =
                transitiveConfigurations.keySet().stream()
                    .filter(fullConfig -> fullConfig.startsWith(configPrefix))
                    .collect(ImmutableList.toImmutableList());
            if (matchingConfigs.size() == 1) {
              keyedConfiguredTarget =
                  getConfiguredTarget(
                      label,
                      Verify.verifyNotNull(transitiveConfigurations.get(matchingConfigs.get(0))));
              userFriendlyConfigName = false;
            } else if (matchingConfigs.size() >= 2) {
              throw new QueryException(
                  String.format(
                      "Configuration ID '%s' is ambiguous.\n"
                          + "'%s' is a prefix of multiple configurations:\n "
                          + Joiner.on("\n ").join(matchingConfigs)
                          + "\n\n"
                          + "Use a longer prefix to uniquely identify one configuration.",
                      configPrefix,
                      configPrefix),
                  ConfigurableQuery.Code.INCORRECT_CONFIG_ARGUMENT_ERROR);
            } else {
              throw new QueryException(
                  String.format("Unknown configuration ID '%s'.\n", configPrefix)
                      + "config()'s second argument must identify a unique configuration.\n"
                      + "\n"
                      + "Valid values:\n"
                      + " 'target' for the default configuration\n"
                      + " 'host' for the host configuration\n"
                      + " 'null' for source files (which have no configuration)\n"
                      + " an arbitrary configuration's full or short ID\n"
                      + "\n"
                      + "A short ID is any prefix of a full ID. cquery shows short IDs. 'bazel "
                      + "config' shows full IDs.\n"
                      + "\n"
                      + "For more help, see https://docs.bazel.build/cquery.html.",
                  ConfigurableQuery.Code.INCORRECT_CONFIG_ARGUMENT_ERROR);
            }
        }
        if (keyedConfiguredTarget != null) {
          transformedResult.add(keyedConfiguredTarget);
        }
      }
      if (transformedResult.isEmpty()) {
        throw new QueryException(
            String.format(
                "No target (in) %s could be found in the %s",
                pattern,
                userFriendlyConfigName
                    ? "'" + configPrefix + "' configuration"
                    : "configuration with checksum '" + configPrefix + "'"),
            ConfigurableQuery.Code.TARGET_MISSING);
      }
      callback.process(transformedResult);
      return null;
    };
  }

  /**
   * This method has to exist because {@link AliasConfiguredTarget#getLabel()} returns the label of
   * the "actual" target instead of the alias target. Grr.
   */
  @Override
  public Label getCorrectLabel(KeyedConfiguredTarget target) {
    // Dereference any aliases that might be present.
    return target.getConfiguredTarget().getOriginalLabel();
  }

  @Nullable
  @Override
  protected KeyedConfiguredTarget getHostConfiguredTarget(Label label)
      throws InterruptedException {
    return getConfiguredTarget(label, hostConfiguration);
  }

  @Nullable
  @Override
  protected KeyedConfiguredTarget getTargetConfiguredTarget(Label label)
      throws InterruptedException {
    if (topLevelConfigurations.isTopLevelTarget(label)) {
      return getConfiguredTarget(
          label, topLevelConfigurations.getConfigurationForTopLevelTarget(label));
    } else {
      // Not a top-level target: fall back to the first top-level configuration that has a
      // configured target for this label.
      KeyedConfiguredTarget toReturn;
      for (BuildConfigurationValue configuration : topLevelConfigurations.getConfigurations()) {
        toReturn = getConfiguredTarget(label, configuration);
        if (toReturn != null) {
          return toReturn;
        }
      }
      return null;
    }
  }

  @Nullable
  @Override
  protected KeyedConfiguredTarget getNullConfiguredTarget(Label label)
      throws InterruptedException {
    return getConfiguredTarget(label, null);
  }

  @Nullable
  @Override
  protected RuleConfiguredTarget getRuleConfiguredTarget(KeyedConfiguredTarget configuredTarget) {
    if (configuredTarget.getConfiguredTarget() instanceof RuleConfiguredTarget) {
      return (RuleConfiguredTarget) configuredTarget.getConfiguredTarget();
    }
    return null;
  }

  @Nullable
  @Override
  protected BuildConfigurationValue getConfiguration(KeyedConfiguredTarget target) {
    try {
      return target.getConfigurationKey() == null
          ? null
          : (BuildConfigurationValue) graph.getValue(target.getConfigurationKey());
    } catch (InterruptedException e) {
      throw new IllegalStateException("Unexpected interruption during configured target query", e);
    }
  }

  @Override
  protected ConfiguredTargetKey getSkyKey(KeyedConfiguredTarget target) {
    return target.getConfiguredTargetKey();
  }

  @Override
  public ThreadSafeMutableSet<KeyedConfiguredTarget> createThreadSafeMutableSet() {
    return new ThreadSafeMutableKeyExtractorBackedSetImpl<>(
        configuredTargetKeyExtractor,
        KeyedConfiguredTarget.class,
        SkyQueryEnvironment.DEFAULT_THREAD_COUNT);
  }
}
/**
 *
 * Copyright (c) Microsoft and contributors.  All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.

package com.microsoft.windowsazure.management.sql;

import com.microsoft.windowsazure.core.ServiceOperations;
import com.microsoft.windowsazure.core.utils.BOMInputStream;
import com.microsoft.windowsazure.core.utils.XmlUtility;
import com.microsoft.windowsazure.exception.ServiceException;
import com.microsoft.windowsazure.management.sql.models.Quota;
import com.microsoft.windowsazure.management.sql.models.QuotaGetResponse;
import com.microsoft.windowsazure.management.sql.models.QuotaListResponse;
import com.microsoft.windowsazure.tracing.CloudTracing;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.concurrent.Callable;
import java.util.concurrent.Future;
import javax.xml.XMLConstants;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.client.methods.HttpGet;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.xml.sax.SAXException;

/**
 * The Azure SQL Database Management API includes operations for getting Azure
 * SQL Database Server quotas. Specifically, using the APIs you can get a
 * specific quota or list all of the quotas for the Azure SQL Database Server.
 */
public class QuotaOperationsImpl implements ServiceOperations<SqlManagementClientImpl>, QuotaOperations {
    /** XML namespace used by every element of a quota response payload. */
    private static final String AZURE_NAMESPACE = "http://schemas.microsoft.com/windowsazure";

    /** Management API version sent with every request in the x-ms-version header. */
    private static final String API_VERSION = "2012-03-01";

    private SqlManagementClientImpl client;

    /**
     * Initializes a new instance of the QuotaOperationsImpl class.
     *
     * @param client Reference to the service client.
     */
    QuotaOperationsImpl(SqlManagementClientImpl client) {
        this.client = client;
    }

    /**
     * Gets a reference to the
     * microsoft.windowsazure.management.sql.SqlManagementClientImpl.
     *
     * @return The Client value.
     */
    public SqlManagementClientImpl getClient() {
        return this.client;
    }

    /**
     * Retrieves the specified quota from the server (asynchronous wrapper
     * around {@link #get(String, String)} on the client's executor).
     *
     * @param serverName Required. The name of the Azure SQL Database Server
     * from which to retrieve the quota.
     * @param quotaName Required. The name of the quota to retrieve.
     * @return Represents the response structure for the Quota Get operation.
     */
    @Override
    public Future<QuotaGetResponse> getAsync(final String serverName, final String quotaName) {
        return this.getClient().getExecutorService().submit(new Callable<QuotaGetResponse>() {
            @Override
            public QuotaGetResponse call() throws Exception {
                return get(serverName, quotaName);
            }
        });
    }

    /**
     * Retrieves the specified quota from the server.
     *
     * @param serverName Required. The name of the Azure SQL Database Server
     * from which to retrieve the quota.
     * @param quotaName Required. The name of the quota to retrieve.
     * @throws IOException Signals that an I/O exception of some sort has
     * occurred.
     * @throws ServiceException Thrown if an unexpected response is found.
     * @throws ParserConfigurationException Thrown if there was a serious
     * configuration error with the document parser.
     * @throws SAXException Thrown if there was an error parsing the XML
     * response.
     * @return Represents the response structure for the Quota Get operation.
     */
    @Override
    public QuotaGetResponse get(String serverName, String quotaName) throws IOException, ServiceException, ParserConfigurationException, SAXException {
        // Validate (NullPointerException preserved for compatibility with the
        // generated-code contract callers may rely on).
        if (serverName == null) {
            throw new NullPointerException("serverName");
        }
        if (quotaName == null) {
            throw new NullPointerException("quotaName");
        }

        // Tracing
        boolean shouldTrace = CloudTracing.getIsEnabled();
        String invocationId = null;
        if (shouldTrace) {
            invocationId = Long.toString(CloudTracing.getNextInvocationId());
            HashMap<String, Object> tracingParameters = new HashMap<String, Object>();
            tracingParameters.put("serverName", serverName);
            tracingParameters.put("quotaName", quotaName);
            // "getAsync" is the operation name the original generated code emitted.
            CloudTracing.enter(invocationId, this, "getAsync", tracingParameters);
        }

        // Construct URL
        String url = buildRequestUrl("/services/sqlservers/servers/" + serverName.trim() + "/serverquotas/" + quotaName.trim());

        // Create HTTP transport objects
        HttpGet httpRequest = new HttpGet(url);

        // Set Headers
        httpRequest.setHeader("x-ms-version", API_VERSION);

        // Send Request
        HttpResponse httpResponse = null;
        try {
            httpResponse = send(httpRequest, shouldTrace, invocationId);
            // Throws ServiceException on non-200; httpResponse is already
            // assigned so the finally block still closes the entity.
            ensureOk(httpRequest, httpResponse, shouldTrace, invocationId);
            int statusCode = httpResponse.getStatusLine().getStatusCode();

            // Create Result and deserialize the response body.
            QuotaGetResponse result = new QuotaGetResponse();
            InputStream responseContent = httpResponse.getEntity().getContent();
            Document responseDoc = parseXml(responseContent);

            // The Get payload nests the quota ServiceResource inside an outer
            // ServiceResource wrapper element.
            Element serviceResourceElement = XmlUtility.getElementByTagNameNS(responseDoc, AZURE_NAMESPACE, "ServiceResource");
            if (serviceResourceElement != null) {
                Element serviceResourceElement2 = XmlUtility.getElementByTagNameNS(serviceResourceElement, AZURE_NAMESPACE, "ServiceResource");
                if (serviceResourceElement2 != null) {
                    Quota serviceResourceInstance = new Quota();
                    result.setQuota(serviceResourceInstance);
                    readQuotaFields(serviceResourceElement2, serviceResourceInstance);
                }
            }

            result.setStatusCode(statusCode);
            if (httpResponse.getHeaders("x-ms-request-id").length > 0) {
                result.setRequestId(httpResponse.getFirstHeader("x-ms-request-id").getValue());
            }

            if (shouldTrace) {
                CloudTracing.exit(invocationId, result);
            }
            return result;
        } finally {
            if (httpResponse != null && httpResponse.getEntity() != null) {
                httpResponse.getEntity().getContent().close();
            }
        }
    }

    /**
     * Returns a list of quotas from the server (asynchronous wrapper around
     * {@link #list(String)} on the client's executor).
     *
     * @param serverName Required. The name of the Azure SQL Database Server
     * from which to get the quotas.
     * @return Represents the response structure for the Quota List operation.
     */
    @Override
    public Future<QuotaListResponse> listAsync(final String serverName) {
        return this.getClient().getExecutorService().submit(new Callable<QuotaListResponse>() {
            @Override
            public QuotaListResponse call() throws Exception {
                return list(serverName);
            }
        });
    }

    /**
     * Returns a list of quotas from the server.
     *
     * @param serverName Required. The name of the Azure SQL Database Server
     * from which to get the quotas.
     * @throws IOException Signals that an I/O exception of some sort has
     * occurred.
     * @throws ServiceException Thrown if an unexpected response is found.
     * @throws ParserConfigurationException Thrown if there was a serious
     * configuration error with the document parser.
     * @throws SAXException Thrown if there was an error parsing the XML
     * response.
     * @return Represents the response structure for the Quota List operation.
     */
    @Override
    public QuotaListResponse list(String serverName) throws IOException, ServiceException, ParserConfigurationException, SAXException {
        // Validate (NullPointerException preserved for compatibility).
        if (serverName == null) {
            throw new NullPointerException("serverName");
        }

        // Tracing
        boolean shouldTrace = CloudTracing.getIsEnabled();
        String invocationId = null;
        if (shouldTrace) {
            invocationId = Long.toString(CloudTracing.getNextInvocationId());
            HashMap<String, Object> tracingParameters = new HashMap<String, Object>();
            tracingParameters.put("serverName", serverName);
            // "listAsync" is the operation name the original generated code emitted.
            CloudTracing.enter(invocationId, this, "listAsync", tracingParameters);
        }

        // Construct URL
        String url = buildRequestUrl("/services/sqlservers/servers/" + serverName.trim() + "/serverquotas");

        // Create HTTP transport objects
        HttpGet httpRequest = new HttpGet(url);

        // Set Headers
        httpRequest.setHeader("x-ms-version", API_VERSION);

        // Send Request
        HttpResponse httpResponse = null;
        try {
            httpResponse = send(httpRequest, shouldTrace, invocationId);
            ensureOk(httpRequest, httpResponse, shouldTrace, invocationId);
            int statusCode = httpResponse.getStatusLine().getStatusCode();

            // Create Result and deserialize the response body.
            QuotaListResponse result = new QuotaListResponse();
            InputStream responseContent = httpResponse.getEntity().getContent();
            Document responseDoc = parseXml(responseContent);

            Element serviceResourcesSequenceElement = XmlUtility.getElementByTagNameNS(responseDoc, AZURE_NAMESPACE, "ServiceResources");
            if (serviceResourcesSequenceElement != null) {
                // Hoisted out of the loop: the original re-evaluated this call
                // on every iteration.
                java.util.List<?> resourceElements = XmlUtility.getElementsByTagNameNS(serviceResourcesSequenceElement, AZURE_NAMESPACE, "ServiceResource");
                for (int i = 0; i < resourceElements.size(); i++) {
                    Element serviceResourcesElement = (Element) resourceElements.get(i);
                    Quota serviceResourceInstance = new Quota();
                    result.getQuotas().add(serviceResourceInstance);
                    readQuotaFields(serviceResourcesElement, serviceResourceInstance);
                }
            }

            result.setStatusCode(statusCode);
            if (httpResponse.getHeaders("x-ms-request-id").length > 0) {
                result.setRequestId(httpResponse.getFirstHeader("x-ms-request-id").getValue());
            }

            if (shouldTrace) {
                CloudTracing.exit(invocationId, result);
            }
            return result;
        } finally {
            if (httpResponse != null && httpResponse.getEntity() != null) {
                httpResponse.getEntity().getContent().close();
            }
        }
    }

    // ---------------------------------------------------------------------
    // Private helpers (extracted from the duplicated generated bodies above).
    // ---------------------------------------------------------------------

    /**
     * Builds an absolute request URL: "/{subscriptionId}{relativePath}"
     * appended to the client's base URI with exactly one '/' between them,
     * with spaces percent-encoded (the only character the generated code
     * escaped).
     *
     * @param relativePath service-relative path beginning with '/'.
     * @return the absolute request URL.
     */
    private String buildRequestUrl(String relativePath) {
        String url = "/";
        if (this.getClient().getCredentials().getSubscriptionId() != null) {
            url = url + this.getClient().getCredentials().getSubscriptionId().trim();
        }
        url = url + relativePath;
        String baseUrl = this.getClient().getBaseUri().toString();
        // Trim '/' character from the end of baseUrl and beginning of url.
        if (baseUrl.charAt(baseUrl.length() - 1) == '/') {
            baseUrl = baseUrl.substring(0, baseUrl.length() - 1);
        }
        if (url.charAt(0) == '/') {
            url = url.substring(1);
        }
        return (baseUrl + "/" + url).replace(" ", "%20");
    }

    /**
     * Executes the request, mirroring it to CloudTracing when enabled.
     *
     * @return the raw HTTP response (status not yet validated).
     */
    private HttpResponse send(HttpGet httpRequest, boolean shouldTrace, String invocationId) throws IOException {
        if (shouldTrace) {
            CloudTracing.sendRequest(invocationId, httpRequest);
        }
        HttpResponse httpResponse = this.getClient().getHttpClient().execute(httpRequest);
        if (shouldTrace) {
            CloudTracing.receiveResponse(invocationId, httpResponse);
        }
        return httpResponse;
    }

    /**
     * Converts any non-200 status into a ServiceException built from the
     * response body, tracing the error when enabled.
     *
     * @throws ServiceException if the response status is not 200 OK.
     */
    private void ensureOk(HttpGet httpRequest, HttpResponse httpResponse, boolean shouldTrace, String invocationId) throws IOException, ServiceException {
        int statusCode = httpResponse.getStatusLine().getStatusCode();
        if (statusCode != HttpStatus.SC_OK) {
            ServiceException ex = ServiceException.createFromXml(httpRequest, null, httpResponse, httpResponse.getEntity());
            if (shouldTrace) {
                CloudTracing.error(invocationId, ex);
            }
            throw ex;
        }
    }

    /**
     * Parses a response body into a namespace-aware DOM document. Secure
     * processing is enabled so the client is hardened against entity-expansion
     * attacks in the XML it receives over the network.
     */
    private static Document parseXml(InputStream responseContent) throws ParserConfigurationException, SAXException, IOException {
        DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
        documentBuilderFactory.setNamespaceAware(true);
        documentBuilderFactory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true);
        DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder();
        return documentBuilder.parse(new BOMInputStream(responseContent));
    }

    /**
     * Copies the optional Value/Name/Type/State child elements of a
     * ServiceResource element into a Quota; absent elements leave the
     * corresponding field unset.
     */
    private static void readQuotaFields(Element serviceResourceElement, Quota quota) {
        Element valueElement = XmlUtility.getElementByTagNameNS(serviceResourceElement, AZURE_NAMESPACE, "Value");
        if (valueElement != null) {
            quota.setValue(valueElement.getTextContent());
        }
        Element nameElement = XmlUtility.getElementByTagNameNS(serviceResourceElement, AZURE_NAMESPACE, "Name");
        if (nameElement != null) {
            quota.setName(nameElement.getTextContent());
        }
        Element typeElement = XmlUtility.getElementByTagNameNS(serviceResourceElement, AZURE_NAMESPACE, "Type");
        if (typeElement != null) {
            quota.setType(typeElement.getTextContent());
        }
        Element stateElement = XmlUtility.getElementByTagNameNS(serviceResourceElement, AZURE_NAMESPACE, "State");
        if (stateElement != null) {
            quota.setState(stateElement.getTextContent());
        }
    }
}
package com.oath.cyclops.anym;

import java.io.PrintStream;
import java.io.PrintWriter;
import java.util.concurrent.ScheduledExecutorService;
import java.util.function.*;

import com.oath.cyclops.types.foldable.ConvertableSequence;
import com.oath.cyclops.types.stream.Connectable;
import com.oath.cyclops.types.traversable.IterableX;
import cyclops.control.Maybe;
import cyclops.control.Option;
import cyclops.data.Seq;
import cyclops.data.Vector;
import cyclops.function.Monoid;
import cyclops.function.Reducer;
import cyclops.monads.AnyM;
import cyclops.monads.WitnessType;
import com.oath.cyclops.types.stream.ToStream;
import org.reactivestreams.Subscription;

/**
 * Folding and aggregation operations for a foldable structure nested inside an
 * AnyM monadic wrapper. Each default method maps the corresponding IterableX
 * operation over the nested structure and returns the result still wrapped in
 * AnyM; the schedule* methods instead delegate to the flattened stream().
 *
 * @param <W> witness type identifying the wrapping monad
 * @param <T> element type of the nested foldable
 */
@Deprecated //use cyclops-pure Do instead
public interface NestedFoldable<W extends WitnessType<W>,T> extends ToStream<T> {

    /**
     * @return the nested foldable structure(s), wrapped in AnyM
     */
    public AnyM<W,? extends IterableX<T>> nestedFoldables();

    /**
     * Consume up to numberOfElements elements of each nested foldable, sending
     * any error to consumerError.
     *
     * @return the reactive-streams Subscription for each nested foldable, wrapped in AnyM
     */
    default <X extends Throwable> AnyM<W,? extends Subscription> forEach(long numberOfElements, Consumer<? super T> consumer,
                                                                         Consumer<? super Throwable> consumerError) {
        return nestedFoldables().map(inner -> inner.forEach(numberOfElements, consumer, consumerError));
    }

    /**
     * Consume up to numberOfElements elements of each nested foldable, sending
     * any error to consumerError and running onComplete when finished.
     *
     * @return the reactive-streams Subscription for each nested foldable, wrapped in AnyM
     */
    default <X extends Throwable> AnyM<W,? extends Subscription> forEach(long numberOfElements, Consumer<? super T> consumer,
                                                                         Consumer<? super Throwable> consumerError, Runnable onComplete) {
        return nestedFoldables().map(inner -> inner.forEach(numberOfElements, consumer, consumerError, onComplete));
    }

    /**
     * Reduce each nested foldable with the supplied Reducer (which also maps
     * elements into the Reducer's type).
     *
     * @param reducer Monoid to reduce values
     * @return reduction result, wrapped in AnyM
     */
    default <R> AnyM<W,R> foldMap(final Reducer<R,T> reducer) {
        return nestedFoldables().map(inner -> inner.foldMap(reducer));
    }

    /**
     * Transform each element with mapper, then reduce with the supplied Monoid.
     *
     * @param mapper Function to transform each element
     * @param reducer Monoid to reduce the transformed values
     * @return reduction result, wrapped in AnyM
     */
    default <R> AnyM<W,R> foldMap(final Function<? super T, ? extends R> mapper, final Monoid<R> reducer) {
        return nestedFoldables().map(inner -> inner.foldMap(mapper, reducer));
    }

    /** Fold each nested foldable once per supplied Monoid, collecting the results. */
    default AnyM<W,Seq<T>> foldLeft(final Iterable<? extends Monoid<T>> reducer) {
        return nestedFoldables().map(inner -> inner.foldLeft(reducer));
    }

    /** Fold each nested foldable left-to-right with the supplied Monoid. */
    default AnyM<W,T> foldLeft(final Monoid<T> reducer) {
        return nestedFoldables().map(inner -> inner.foldLeft(reducer));
    }

    /** Fold left-to-right from identity using accumulator. */
    default AnyM<W,T> foldLeft(final T identity, final BinaryOperator<T> accumulator) {
        return nestedFoldables().map(inner -> inner.foldLeft(identity, accumulator));
    }

    /** Fold left-to-right from identity into a different result type U. */
    default <U> AnyM<W,U> foldLeft(final U identity, final BiFunction<U, ? super T, U> accumulator) {
        return nestedFoldables().map(inner -> inner.foldLeft(identity, accumulator));
    }

    /** Fold left-to-right into U, with a combiner for merging partial results. */
    default <U> AnyM<W,U> foldLeft(final U identity, final BiFunction<U, ? super T, U> accumulator, final BinaryOperator<U> combiner) {
        return nestedFoldables().map(inner -> inner.foldLeft(identity, accumulator, combiner));
    }

    /** Fold left-to-right with accumulator alone; empty foldables yield Option.none. */
    default AnyM<W,Option<T>> foldLeft(final BinaryOperator<T> accumulator) {
        return nestedFoldables().map(inner -> inner.foldLeft(accumulator));
    }

    /**
     * Fold each nested foldable right-to-left with the supplied Monoid.
     *
     * @param reducer Monoid applied starting from the right
     * @return reduction result, wrapped in AnyM
     */
    default AnyM<W,T> foldRight(final Monoid<T> reducer) {
        return nestedFoldables().map(inner -> inner.foldRight(reducer));
    }

    /**
     * Immutable right-to-left reduction.
     *
     * @param identity value returned for an empty foldable (e.g. 1 for multiplication, 0 for addition)
     * @param accumulator combines the accumulated value with the next element
     * @return reduction result, wrapped in AnyM
     */
    default AnyM<W,T> foldRight(final T identity, final BinaryOperator<T> accumulator) {
        return nestedFoldables().map(inner -> inner.foldRight(identity, accumulator));
    }

    /**
     * Immutable right-to-left reduction into a different result type U.
     *
     * @param identity value returned for an empty foldable
     * @param accumulator combines the next element with the accumulated value
     * @return reduction result, wrapped in AnyM
     */
    default <U> AnyM<W,U> foldRight(final U identity, final BiFunction<? super T, U, U> accumulator) {
        return nestedFoldables().map(inner -> inner.foldRight(identity, accumulator));
    }

    /**
     * Map elements into the Reducer's type and reduce right-to-left.
     *
     * @param reducer Monoid to reduce values
     * @return reduction result, wrapped in AnyM
     */
    default <R> AnyM<W,R> foldMapRight(final Reducer<R,T> reducer) {
        return nestedFoldables().map(inner -> inner.foldMapRight(reducer));
    }

    /** Concatenate the elements of each nested foldable into a String. */
    default AnyM<W,String> join() {
        return nestedFoldables().map(inner -> inner.join());
    }

    /** Concatenate the elements of each nested foldable, separated by sep. */
    default AnyM<W,String> join(final String sep) {
        return nestedFoldables().map(inner -> inner.join(sep));
    }

    /** Concatenate with separator, prefix and suffix (e.g. "^1|2|3$"). */
    default AnyM<W,String> join(final String sep, final String start, final String end) {
        return nestedFoldables().map(inner -> inner.join(sep, start, end));
    }

    /** Print each nested foldable to the supplied PrintStream (consumes the structures). */
    default void print(final PrintStream str) {
        nestedFoldables().peek(inner -> inner.print(str))
                         .forEach(done -> {
                         });
    }

    /** Print each nested foldable to the supplied PrintWriter (consumes the structures). */
    default void print(final PrintWriter writer) {
        nestedFoldables().peek(inner -> inner.print(writer))
                         .forEach(done -> {
                         });
    }

    /** Print each nested foldable to standard out (consumes the structures). */
    default void printOut() {
        nestedFoldables().peek(inner -> inner.printOut())
                         .forEach(done -> {
                         });
    }

    /** Print each nested foldable to standard error (consumes the structures). */
    default void printErr() {
        nestedFoldables().peek(inner -> inner.printErr())
                         .forEach(done -> {
                         });
    }

    /**
     * Group the elements of each nested foldable into a map keyed by the
     * classifier function.
     *
     * @param classifier derives the grouping key from each element
     * @return map of key to grouped elements, per nested foldable, wrapped in AnyM
     */
    default <K> AnyM<W,cyclops.data.HashMap<K, Vector<T>>> groupBy(final Function<? super T, ? extends K> classifier) {
        return nestedFoldables().map(inner -> inner.groupBy(classifier));
    }

    /** First element of each nested foldable, or Option.none when empty. */
    default AnyM<W,Option<T>> headOption() {
        return nestedFoldables().map(inner -> inner.headOption());
    }

    /**
     * @param iterable prefix values to check
     * @return true (per nested foldable) when it starts with the supplied sequence
     */
    default AnyM<W,Boolean> startsWith(final Iterable<T> iterable) {
        return nestedFoldables().map(inner -> inner.startsWith(iterable));
    }

    /**
     * @param iterable suffix values to check
     * @return true (per nested foldable) when it ends with the supplied sequence
     */
    default AnyM<W,Boolean> endsWith(final Iterable<T> iterable) {
        return nestedFoldables().map(inner -> inner.endsWith(iterable));
    }

    /** Convert each nested foldable via its ConvertableSequence view. */
    default <R> AnyM<W,R> toNested(Function<? super ConvertableSequence<T>, ? extends R> fn) {
        return nestedFoldables().map(inner -> fn.apply(inner.to()));
    }

    /**
     * First value of each nested foldable, or alt when empty.
     *
     * @param alt fallback returned for an empty foldable
     */
    default AnyM<W,T> firstValue(T alt) {
        return nestedFoldables().map(inner -> inner.firstValue(alt));
    }

    /**
     * The sole element of each nested foldable, or alt when it does not
     * contain exactly one element.
     *
     * @param alt fallback returned when there is not exactly one element
     */
    default AnyM<W,T> singleOrElse(T alt) {
        return nestedFoldables().map(inner -> inner.singleOrElse(alt));
    }

    /**
     * The sole element matching the predicate, as a Maybe (empty unless
     * exactly one element matches).
     */
    default AnyM<W,Maybe<T>> single(final Predicate<? super T> predicate) {
        return nestedFoldables().map(inner -> inner.single(predicate));
    }

    /**
     * The sole element of each nested foldable as a Maybe (empty unless the
     * foldable has exactly one element).
     */
    default AnyM<W,Maybe<T>> single() {
        return nestedFoldables().map(inner -> inner.single());
    }

    /**
     * Element at the given index of each nested foldable, or Maybe.nothing
     * when out of range.
     *
     * @param index position to extract the element from
     */
    default AnyM<W,Maybe<T>> get(final long index) {
        return nestedFoldables().map(inner -> inner.elementAt(index));
    }

    /**
     * Execute the flattened stream on a cron schedule.
     *
     * <pre>
     * {@code
     *  //run at 8PM every night
     *  ReactiveSeq.generate(()->"next job:"+formatDate(new Date()))
     *             .map(this::processJob)
     *             .schedule("0 20 * * *",Executors.newScheduledThreadPool(1));
     * }
     * </pre>
     *
     * @param cron expression that determines when each job will run
     * @param ex ScheduledExecutorService
     * @return Connectable for the emissions of the scheduled Stream
     */
    default Connectable<T> schedule(final String cron, final ScheduledExecutorService ex) {
        return stream().schedule(cron, ex);
    }

    /**
     * Execute the flattened stream on a fixed-delay schedule (delay measured
     * from the completion of one run to the start of the next).
     *
     * @param delay millis between one run completing and the next starting
     * @param ex ScheduledExecutorService
     * @return Connectable for the emissions of the scheduled Stream
     */
    default Connectable<T> scheduleFixedDelay(final long delay, final ScheduledExecutorService ex) {
        return stream().scheduleFixedDelay(delay, ex);
    }

    /**
     * Execute the flattened stream on a fixed-rate schedule.
     *
     * @param rate millis between job runs
     * @param ex ScheduledExecutorService
     * @return Connectable for the emissions of the scheduled Stream
     */
    default Connectable<T> scheduleFixedRate(final long rate, final ScheduledExecutorService ex) {
        return stream().scheduleFixedRate(rate, ex);
    }
}
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.simplesystemsmanagement.model;

import java.io.Serializable;
import javax.annotation.Generated;

/**
 * Result object for the DescribeMaintenanceWindowTasks operation: the tasks
 * registered with a maintenance window plus a pagination token.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ssm-2014-11-06/DescribeMaintenanceWindowTasks" target="_top">AWS
 *      API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeMaintenanceWindowTasksResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable,
        Cloneable {

    /**
     * <p>
     * Information about the tasks in the maintenance window.
     * </p>
     */
    private com.amazonaws.internal.SdkInternalList<MaintenanceWindowTask> tasks;
    /**
     * <p>
     * The token to use when requesting the next set of items. If there are no additional items to return, the string is
     * empty.
     * </p>
     */
    private String nextToken;

    /**
     * <p>
     * Information about the tasks in the maintenance window.
     * </p>
     *
     * @return Information about the tasks in the maintenance window.
     */
    public java.util.List<MaintenanceWindowTask> getTasks() {
        // Lazily initialize so this accessor never returns null.
        if (tasks == null) {
            tasks = new com.amazonaws.internal.SdkInternalList<MaintenanceWindowTask>();
        }
        return tasks;
    }

    /**
     * <p>
     * Information about the tasks in the maintenance window.
     * </p>
     *
     * @param tasks
     *        Information about the tasks in the maintenance window.
     */
    public void setTasks(java.util.Collection<MaintenanceWindowTask> tasks) {
        // Copy defensively; a null argument clears the list.
        this.tasks = (tasks == null) ? null : new com.amazonaws.internal.SdkInternalList<MaintenanceWindowTask>(tasks);
    }

    /**
     * <p>
     * Information about the tasks in the maintenance window.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setTasks(java.util.Collection)} or {@link #withTasks(java.util.Collection)} if you want to override the
     * existing values.
     * </p>
     *
     * @param tasks
     *        Information about the tasks in the maintenance window.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeMaintenanceWindowTasksResult withTasks(MaintenanceWindowTask... tasks) {
        if (this.tasks == null) {
            setTasks(new com.amazonaws.internal.SdkInternalList<MaintenanceWindowTask>(tasks.length));
        }
        java.util.Collections.addAll(this.tasks, tasks);
        return this;
    }

    /**
     * <p>
     * Information about the tasks in the maintenance window.
     * </p>
     *
     * @param tasks
     *        Information about the tasks in the maintenance window.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeMaintenanceWindowTasksResult withTasks(java.util.Collection<MaintenanceWindowTask> tasks) {
        setTasks(tasks);
        return this;
    }

    /**
     * <p>
     * The token to use when requesting the next set of items. If there are no additional items to return, the string is
     * empty.
     * </p>
     *
     * @param nextToken
     *        The token to use when requesting the next set of items. If there are no additional items to return, the
     *        string is empty.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * <p>
     * The token to use when requesting the next set of items. If there are no additional items to return, the string is
     * empty.
     * </p>
     *
     * @return The token to use when requesting the next set of items. If there are no additional items to return, the
     *         string is empty.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * <p>
     * The token to use when requesting the next set of items. If there are no additional items to return, the string is
     * empty.
     * </p>
     *
     * @param nextToken
     *        The token to use when requesting the next set of items. If there are no additional items to return, the
     *        string is empty.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeMaintenanceWindowTasksResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getTasks() != null) {
            sb.append("Tasks: ").append(getTasks()).append(",");
        }
        if (getNextToken() != null) {
            sb.append("NextToken: ").append(getNextToken());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof DescribeMaintenanceWindowTasksResult)) {
            return false;
        }
        DescribeMaintenanceWindowTasksResult other = (DescribeMaintenanceWindowTasksResult) obj;
        return java.util.Objects.equals(other.getTasks(), this.getTasks())
                && java.util.Objects.equals(other.getNextToken(), this.getNextToken());
    }

    @Override
    public int hashCode() {
        // Objects.hash computes the same 31-based accumulation (with 0 for
        // null fields) as the unrolled prime loop it replaces.
        return java.util.Objects.hash(getTasks(), getNextToken());
    }

    @Override
    public DescribeMaintenanceWindowTasksResult clone() {
        try {
            return (DescribeMaintenanceWindowTasksResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
/* Generated By:JavaCC: Do not edit this line. JSON_Parser.java */
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.jena.atlas.json.io.parserjavacc.javacc ;

// NOTE(review): this parser is GENERATED by JavaCC from a .jj grammar.
// Do not hand-edit; regenerate from the grammar instead. The jj_* fields and
// tables are the generator's LL(1) lookahead machinery. The semantic actions
// (startParse, startObject, valueString, ...) are presumably defined in
// JSON_ParserBase — confirm against that class.
@SuppressWarnings("all")
public class JSON_Parser extends JSON_ParserBase implements JSON_ParserConstants {
  // ---- Parser entry points

  // Parse a complete JSON object followed by end-of-input.
  final public void unit() throws ParseException {
    Token t ;
    startParse(-1, -1) ;
    Object();
    t = jj_consume_token(0);
    finishParse(t.beginLine, t.beginColumn) ;
  }

  // Parse any single JSON value (or nothing) followed by end-of-input.
  final public void any() throws ParseException {
    Token t ;
    startParse(-1, -1) ;
    switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
    case STRING_LITERAL2:
    case INTEGER:
    case DECIMAL:
    case DOUBLE:
    case POSITIVE_INTEGER:
    case POSITIVE_DECIMAL:
    case POSITIVE_DOUBLE:
    case NEGATIVE_INTEGER:
    case NEGATIVE_DECIMAL:
    case NEGATIVE_DOUBLE:
    case TRUE:
    case FALSE:
    case NULL:
    case LBRACE:
    case LBRACKET:
      Value();
      break;
    default:
      jj_la1[0] = jj_gen;
      ;
    }
    t = jj_consume_token(0);
    finishParse(t.beginLine, t.beginColumn) ;
  }

  // ---- Structures

  // Value := SimpleValue | Object | Array
  final public void Value() throws ParseException {
    switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
    case STRING_LITERAL2:
    case INTEGER:
    case DECIMAL:
    case DOUBLE:
    case POSITIVE_INTEGER:
    case POSITIVE_DECIMAL:
    case POSITIVE_DOUBLE:
    case NEGATIVE_INTEGER:
    case NEGATIVE_DECIMAL:
    case NEGATIVE_DOUBLE:
    case TRUE:
    case FALSE:
    case NULL:
      SimpleValue();
      break;
    case LBRACE:
      Object();
      break;
    case LBRACKET:
      Array();
      break;
    default:
      jj_la1[1] = jj_gen;
      jj_consume_token(-1);
      throw new ParseException();
    }
  }

  // Object := '{' Members? '}'
  final public void Object() throws ParseException {
    Token t ;
    t = jj_consume_token(LBRACE);
    startObject(t.beginLine, t.beginColumn) ;
    switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
    case STRING_LITERAL2:
      Members(t);
      break;
    default:
      jj_la1[2] = jj_gen;
      ;
    }
    t = jj_consume_token(RBRACE);
    finishObject(t.beginLine, t.beginColumn);
  }

  // Members := Pair (',' Pair)*
  final public void Members(Token t) throws ParseException {
    Pair(t);
    label_1:
    while (true) {
      switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
      case COMMA:
        ;
        break;
      default:
        jj_la1[3] = jj_gen;
        break label_1;
      }
      t = jj_consume_token(COMMA);
      Pair(t);
    }
  }

  // Pair := String ':' Value
  final public void Pair(Token t) throws ParseException {
    startPair(t.beginLine, t.beginColumn) ;
    String();
    t = jj_consume_token(COLON);
    keyPair(t.beginLine, t.beginColumn) ;
    Value();
    finishPair(t.beginLine, t.beginColumn) ;
  }

  // Array := '[' Elements? ']'
  final public void Array() throws ParseException {
    Token t ;
    t = jj_consume_token(LBRACKET);
    startArray(t.beginLine, t.beginColumn) ;
    switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
    case STRING_LITERAL2:
    case INTEGER:
    case DECIMAL:
    case DOUBLE:
    case POSITIVE_INTEGER:
    case POSITIVE_DECIMAL:
    case POSITIVE_DOUBLE:
    case NEGATIVE_INTEGER:
    case NEGATIVE_DECIMAL:
    case NEGATIVE_DOUBLE:
    case TRUE:
    case FALSE:
    case NULL:
    case LBRACE:
    case LBRACKET:
      Elements(t);
      break;
    default:
      jj_la1[4] = jj_gen;
      ;
    }
    t = jj_consume_token(RBRACKET);
    finishArray(t.beginLine, t.beginColumn) ;
  }

  // Elements := ArrayValue (',' ArrayValue)*
  final public void Elements(Token t) throws ParseException {
    ArrayValue(t.beginLine, t.beginColumn);
    label_2:
    while (true) {
      switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
      case COMMA:
        ;
        break;
      default:
        jj_la1[5] = jj_gen;
        break label_2;
      }
      t = jj_consume_token(COMMA);
      ArrayValue(t.beginLine, t.beginColumn);
    }
  }

  final public void ArrayValue(long line, long col) throws ParseException {
    Value();
    element(line, col) ;
  }

  // ----
  // SimpleValue := String | Number | 'true' | 'false' | 'null'
  final public void SimpleValue() throws ParseException {
    switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
    case STRING_LITERAL2:
      String();
      break;
    case INTEGER:
    case DECIMAL:
    case DOUBLE:
    case POSITIVE_INTEGER:
    case POSITIVE_DECIMAL:
    case POSITIVE_DOUBLE:
    case NEGATIVE_INTEGER:
    case NEGATIVE_DECIMAL:
    case NEGATIVE_DOUBLE:
      Number();
      break;
    case TRUE:
      True();
      break;
    case FALSE:
      False();
      break;
    case NULL:
      Null();
      break;
    default:
      jj_la1[6] = jj_gen;
      jj_consume_token(-1);
      throw new ParseException();
    }
  }

  // Dispatch each numeric token kind to the matching value* callback.
  final public void Number() throws ParseException {
    Token t ;
    switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
    case INTEGER:
      t = jj_consume_token(INTEGER);
      valueInteger(t.image, t.beginLine, t.beginColumn) ;
      break;
    case DECIMAL:
      t = jj_consume_token(DECIMAL);
      valueDecimal(t.image, t.beginLine, t.beginColumn) ;
      break;
    case DOUBLE:
      t = jj_consume_token(DOUBLE);
      valueDouble(t.image, t.beginLine, t.beginColumn) ;
      break;
    case POSITIVE_INTEGER:
      t = jj_consume_token(POSITIVE_INTEGER);
      valueInteger(t.image, t.beginLine, t.beginColumn) ;
      break;
    case POSITIVE_DECIMAL:
      t = jj_consume_token(POSITIVE_DECIMAL);
      valueDecimal(t.image, t.beginLine, t.beginColumn) ;
      break;
    case POSITIVE_DOUBLE:
      t = jj_consume_token(POSITIVE_DOUBLE);
      valueDouble(t.image, t.beginLine, t.beginColumn) ;
      break;
    case NEGATIVE_INTEGER:
      t = jj_consume_token(NEGATIVE_INTEGER);
      valueInteger(t.image, t.beginLine, t.beginColumn) ;
      break;
    case NEGATIVE_DECIMAL:
      t = jj_consume_token(NEGATIVE_DECIMAL);
      valueDecimal(t.image, t.beginLine, t.beginColumn) ;
      break;
    case NEGATIVE_DOUBLE:
      t = jj_consume_token(NEGATIVE_DOUBLE);
      valueDouble(t.image, t.beginLine, t.beginColumn) ;
      break;
    default:
      jj_la1[7] = jj_gen;
      jj_consume_token(-1);
      throw new ParseException();
    }
  }

  // Token to Java Object : These rules exist to inject the
  // necessary Java objects and code for the tokens.
  final public void String() throws ParseException {
    Token t ;
    t = jj_consume_token(STRING_LITERAL2);
    valueString(t.image, t.beginLine, t.beginColumn);
  }

  final public void True() throws ParseException {
    Token t ;
    t = jj_consume_token(TRUE);
    valueBoolean(true, t.beginLine, t.beginColumn) ;
  }

  final public void False() throws ParseException {
    Token t ;
    t = jj_consume_token(FALSE);
    valueBoolean(false, t.beginLine, t.beginColumn) ;
  }

  final public void Null() throws ParseException {
    Token t ;
    t = jj_consume_token(NULL);
    valueNull(t.beginLine, t.beginColumn) ;
  }

  /** Generated Token Manager. */
  public JSON_ParserTokenManager token_source;
  JavaCharStream jj_input_stream;
  /** Current token. */
  public Token token;
  /** Next token. */
  public Token jj_nt;
  private int jj_ntk;              // cached kind of the lookahead token (-1 = not yet fetched)
  private int jj_gen;              // generation counter used for error reporting
  final private int[] jj_la1 = new int[8];
  static private int[] jj_la1_0;   // lookahead bitmasks, tokens 0..31
  static private int[] jj_la1_1;   // lookahead bitmasks, tokens 32..63
  static {
    jj_la1_init_0();
    jj_la1_init_1();
  }
  private static void jj_la1_init_0() {
    jj_la1_0 = new int[] {0x5eff880,0x5eff880,0x80,0x40000000,0x5eff880,0x40000000,0xeff880,0xff800,};
  }
  private static void jj_la1_init_1() {
    jj_la1_1 = new int[] {0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,};
  }

  /** Constructor with InputStream. */
  public JSON_Parser(java.io.InputStream stream) {
    this(stream, null);
  }

  /** Constructor with InputStream and supplied encoding */
  public JSON_Parser(java.io.InputStream stream, String encoding) {
    try { jj_input_stream = new JavaCharStream(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); }
    token_source = new JSON_ParserTokenManager(jj_input_stream);
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    for (int i = 0; i < 8; i++) jj_la1[i] = -1;
  }

  /** Reinitialise. */
  public void ReInit(java.io.InputStream stream) {
    ReInit(stream, null);
  }

  /** Reinitialise. */
  public void ReInit(java.io.InputStream stream, String encoding) {
    try { jj_input_stream.ReInit(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); }
    token_source.ReInit(jj_input_stream);
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    for (int i = 0; i < 8; i++) jj_la1[i] = -1;
  }

  /** Constructor. */
  public JSON_Parser(java.io.Reader stream) {
    jj_input_stream = new JavaCharStream(stream, 1, 1);
    token_source = new JSON_ParserTokenManager(jj_input_stream);
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    for (int i = 0; i < 8; i++) jj_la1[i] = -1;
  }

  /** Reinitialise. */
  public void ReInit(java.io.Reader stream) {
    jj_input_stream.ReInit(stream, 1, 1);
    token_source.ReInit(jj_input_stream);
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    for (int i = 0; i < 8; i++) jj_la1[i] = -1;
  }

  /** Constructor with generated Token Manager. */
  public JSON_Parser(JSON_ParserTokenManager tm) {
    token_source = tm;
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    for (int i = 0; i < 8; i++) jj_la1[i] = -1;
  }

  /** Reinitialise. */
  public void ReInit(JSON_ParserTokenManager tm) {
    token_source = tm;
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    for (int i = 0; i < 8; i++) jj_la1[i] = -1;
  }

  // Consume the next token if it has the expected kind, else restore state
  // and raise a ParseException describing the expected token set.
  private Token jj_consume_token(int kind) throws ParseException {
    Token oldToken;
    if ((oldToken = token).next != null) token = token.next;
    else token = token.next = token_source.getNextToken();
    jj_ntk = -1;
    if (token.kind == kind) {
      jj_gen++;
      return token;
    }
    token = oldToken;
    jj_kind = kind;
    throw generateParseException();
  }

  /** Get the next Token. */
  final public Token getNextToken() {
    if (token.next != null) token = token.next;
    else token = token.next = token_source.getNextToken();
    jj_ntk = -1;
    jj_gen++;
    return token;
  }

  /** Get the specific Token. */
  final public Token getToken(int index) {
    Token t = token;
    for (int i = 0; i < index; i++) {
      if (t.next != null) t = t.next;
      else t = t.next = token_source.getNextToken();
    }
    return t;
  }

  // Fetch (and cache) the kind of the lookahead token.
  private int jj_ntk() {
    if ((jj_nt=token.next) == null)
      return (jj_ntk = (token.next=token_source.getNextToken()).kind);
    else
      return (jj_ntk = jj_nt.kind);
  }

  private java.util.List<int[]> jj_expentries = new java.util.ArrayList<int[]>();
  private int[] jj_expentry;
  private int jj_kind = -1;

  /** Generate ParseException. */
  public ParseException generateParseException() {
    jj_expentries.clear();
    boolean[] la1tokens = new boolean[34];
    if (jj_kind >= 0) {
      la1tokens[jj_kind] = true;
      jj_kind = -1;
    }
    for (int i = 0; i < 8; i++) {
      if (jj_la1[i] == jj_gen) {
        for (int j = 0; j < 32; j++) {
          if ((jj_la1_0[i] & (1<<j)) != 0) {
            la1tokens[j] = true;
          }
          if ((jj_la1_1[i] & (1<<j)) != 0) {
            la1tokens[32+j] = true;
          }
        }
      }
    }
    for (int i = 0; i < 34; i++) {
      if (la1tokens[i]) {
        jj_expentry = new int[1];
        jj_expentry[0] = i;
        jj_expentries.add(jj_expentry);
      }
    }
    int[][] exptokseq = new int[jj_expentries.size()][];
    for (int i = 0; i < jj_expentries.size(); i++) {
      exptokseq[i] = jj_expentries.get(i);
    }
    return new ParseException(token, exptokseq, tokenImage);
  }

  /** Enable tracing. */
  final public void enable_tracing() {
  }

  /** Disable tracing. */
  final public void disable_tracing() {
  }

}
/* * Copyright 2017 Exorath * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.exorath.exoHUD.locations.simple; import com.exorath.exoHUD.*; import com.exorath.exoHUD.locations.simple.SimpleLocation; import io.reactivex.Completable; import io.reactivex.schedulers.TestScheduler; import org.junit.Before; import org.junit.Test; import java.util.ArrayList; import java.util.List; import java.util.concurrent.TimeUnit; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; /** * Created by toonsev on 9/25/2016. 
*/ public class SimpleLocationTest { private List<DisplayPackage> onDisplayCalls, onDisplayRemoveCalls; private List<Boolean> onHideCalls; private SimpleLocation simpleLocation; @Before public void setup() { onDisplayCalls = new ArrayList<>(); onDisplayRemoveCalls = new ArrayList<>(); onHideCalls = new ArrayList<>(); simpleLocation = new SimpleLocation() { @Override public void onDisplay(DisplayPackage displayPackage) { onDisplayCalls.add(displayPackage); } @Override public void onDisplayRemove(DisplayPackage displayPackage) { onDisplayRemoveCalls.add(displayPackage); } @Override public void onHide(boolean hidden) { onHideCalls.add(hidden); } }; } //hidden @Test public void isHiddenReturnsFalseByDefaultTest() { assertFalse(simpleLocation.isHidden()); } @Test public void setHiddenTrueCallsOnHideMethodOnceTest() { simpleLocation.setHidden(true); assertEquals(1, onHideCalls.size()); } @Test public void setHiddenTrueCallsOnHideTrueMethodTest() { simpleLocation.setHidden(true); assertTrue(onHideCalls.get(0)); } @Test public void setHiddenFalseCallsOnHideMethodOnceTest() { simpleLocation.setHidden(true); assertEquals(1, onHideCalls.size()); } @Test public void setHiddenFalseCallsOnHideFalseMethodTest() { simpleLocation.setHidden(true); assertTrue(onHideCalls.get(0)); } @Test public void HiddenReturnsTrueWhenSetHiddenTrueCalledTest() { simpleLocation.setHidden(true); assertTrue(simpleLocation.isHidden()); } @Test public void HiddenReturnsFalseWhenSetHiddenFalseCalledTest() { simpleLocation.setHidden(false); assertFalse(simpleLocation.isHidden()); } @Test public void addDisplayDoesNotCallOnDisplayWhenSetHiddenTrueCalledTest() { simpleLocation.setHidden(true); DisplayPackage displayPackage = new DisplayPackage(HUDPackage.create(new ArrayList<>()), DisplayProperties.create(0.9d, () -> Completable.never())); simpleLocation.addDisplayPackage(displayPackage); assertEquals(0, onDisplayCalls.size()); } @Test public void addDisplayDoesNotCallOnDisplayWhenSetHiddenFalseCalledTest() 
{ simpleLocation.setHidden(false); DisplayPackage displayPackage = new DisplayPackage(HUDPackage.create(new ArrayList<>()), DisplayProperties.create(0.9d, () -> Completable.never())); simpleLocation.addDisplayPackage(displayPackage); assertEquals(1, onDisplayCalls.size()); } //hide threshold @Test public void addDisplayDoesNotCallOnDisplayWhenThresholdIsHigherThenDisplayedPackage() { simpleLocation.setHideThreshold(1d); DisplayPackage displayPackage = new DisplayPackage(HUDPackage.create(new ArrayList<>()), DisplayProperties.create(0.9d, () -> Completable.never())); simpleLocation.addDisplayPackage(displayPackage); assertEquals(0, onDisplayCalls.size()); } @Test public void addDisplayCallsOnDisplayWhenThresholdIsLowerThenDisplayedPackage() { simpleLocation.setHideThreshold(0.9d); DisplayPackage displayPackage = new DisplayPackage(HUDPackage.create(new ArrayList<>()), DisplayProperties.create(1d, () -> Completable.never())); simpleLocation.addDisplayPackage(displayPackage); assertEquals(1, onDisplayCalls.size()); } @Test public void setHideTresholdCallsOnDisplayRemoveWhenThresholdIsHigher() { DisplayPackage displayPackage = new DisplayPackage(HUDPackage.create(new ArrayList<>()), DisplayProperties.create(1d, () -> Completable.never())); simpleLocation.addDisplayPackage(displayPackage); simpleLocation.setHideThreshold(2d); assertEquals(1, onDisplayRemoveCalls.size()); } @Test public void setHideThresholdDoesNotCallsOnDisplayRemoveWhenThresholdIsLowerTest() { DisplayPackage displayPackage = new DisplayPackage(HUDPackage.create(new ArrayList<>()), DisplayProperties.create(1d, () -> Completable.never())); simpleLocation.addDisplayPackage(displayPackage); simpleLocation.setHideThreshold(0.5d); assertEquals(0, onDisplayRemoveCalls.size()); } @Test public void setHideThresholdDoesNotCallsOnDisplayRemoveWhenThresholdIsSameTest() { DisplayPackage displayPackage = new DisplayPackage(HUDPackage.create(new ArrayList<>()), DisplayProperties.create(10.543d, () -> 
Completable.never())); simpleLocation.addDisplayPackage(displayPackage); simpleLocation.setHideThreshold(10.543d); assertEquals(0, onDisplayRemoveCalls.size()); } //DisplayPackage @Test public void addDisplayPackageCallsOnDisplayOnceTest() { DisplayPackage displayPackage = new DisplayPackage(HUDPackage.create(new ArrayList<>()), DisplayProperties.create(0, () -> Completable.never())); simpleLocation.addDisplayPackage(displayPackage); assertEquals(1, onDisplayCalls.size()); } @Test public void addDisplayPackageCallsOnDisplayWithSameDisplayPackageParamTest() { DisplayPackage displayPackage = new DisplayPackage(HUDPackage.create(new ArrayList<>()), DisplayProperties.create(0, () -> Completable.never())); simpleLocation.addDisplayPackage(displayPackage); assertEquals(displayPackage, onDisplayCalls.get(0)); } @Test public void addDisplayPackageRemoveDisplayPackageOnDisplayRemoveCalledOnceTest() { DisplayPackage displayPackage = new DisplayPackage(HUDPackage.create(new ArrayList<>()), DisplayProperties.create(0, () -> Completable.never())); simpleLocation.addDisplayPackage(displayPackage); simpleLocation.removeDisplayPackage(displayPackage); assertEquals(1, onDisplayRemoveCalls.size()); } @Test public void addDisplayPackageRemoveDisplayPackageOnDisplayRemoveSameDisplayPackageTest() { DisplayPackage displayPackage = new DisplayPackage(HUDPackage.create(new ArrayList<>()), DisplayProperties.create(0, () -> Completable.never())); simpleLocation.addDisplayPackage(displayPackage); simpleLocation.removeDisplayPackage(displayPackage); assertEquals(displayPackage, onDisplayRemoveCalls.get(0)); } @Test public void removeDisplayPackageWithNotAddedPackageReturnsFalseTest() { DisplayPackage displayPackage = new DisplayPackage(HUDPackage.create(new ArrayList<>()), DisplayProperties.create(0, () -> Completable.never())); boolean returned = simpleLocation.removeDisplayPackage(displayPackage); assertFalse(returned); } @Test public void 
removeDisplayPackageWithNotAddedPackageDoesNotCallOnDisplayRemoveTest() { DisplayPackage displayPackage = new DisplayPackage(HUDPackage.create(new ArrayList<>()), DisplayProperties.create(0, () -> Completable.never())); simpleLocation.removeDisplayPackage(displayPackage); assertEquals(0, onDisplayRemoveCalls.size()); } //Text @Test public void addTextCallsOnDisplayOnceTest() { HUDText hudText = new HUDTextMock("test1", "test2").getMock(); DisplayProperties properties = DisplayProperties.create(0, () -> Completable.never()); simpleLocation.addText(hudText, properties); assertEquals(1, onDisplayCalls.size()); } @Test public void addTextReturnsDisplayPackageWithOneHUDTextTest() { HUDText hudText = new HUDTextMock("test1", "test2").getMock(); DisplayProperties properties = DisplayProperties.create(0, () -> Completable.never()); DisplayPackage displayPackage = simpleLocation.addText(hudText, properties); assertEquals(1, displayPackage.getHudPackage().getTexts().size()); } @Test public void addTextReturnsDisplayPackageWithSameHUDTextTest() { HUDText hudText = new HUDTextMock("test1", "test2").getMock(); DisplayProperties properties = DisplayProperties.create(0, () -> Completable.never()); DisplayPackage displayPackage = simpleLocation.addText(hudText, properties); assertEquals(hudText, displayPackage.getHudPackage().getTexts().get(0)); } @Test public void addTextReturnsDisplayPackageWithSameHUDPropertiesTest() { HUDText hudText = new HUDTextMock("test1", "test2").getMock(); DisplayProperties properties = DisplayProperties.create(0, () -> Completable.never()); DisplayPackage displayPackage = simpleLocation.addText(hudText, properties); assertEquals(properties, displayPackage.getProperties()); } @Test public void removeTextCallsOnDisplayRemoveOnceTest() { HUDText hudText = new HUDTextMock("test1", "test2").getMock(); DisplayProperties properties = DisplayProperties.create(0, () -> Completable.never()); DisplayPackage displayPackage = simpleLocation.addText(hudText, 
properties); simpleLocation.removeDisplayPackage(displayPackage); assertEquals(1, onDisplayRemoveCalls.size()); } @Test public void removeTextCallsOnDisplayRemoveWithReturnedDisplayPackageTest() { HUDText hudText = new HUDTextMock("test1", "test2").getMock(); DisplayProperties properties = DisplayProperties.create(0, () -> Completable.never()); DisplayPackage displayPackage = simpleLocation.addText(hudText, properties); simpleLocation.removeDisplayPackage(displayPackage); assertEquals(displayPackage, onDisplayRemoveCalls.get(0)); } //Removeable test @Test public void addDisplayWithInstantRemoveableCallsOnDisplayRemoveOnceTest() { DisplayPackage displayPackage = new DisplayPackage(HUDPackage.create(new ArrayList<>()), DisplayProperties.create(0, () -> Completable.complete())); simpleLocation.addDisplayPackage(displayPackage); assertEquals(1, onDisplayRemoveCalls.size()); } @Test public void addDisplayWithInstantRemoveableCallsOnDisplayRemoveWithDisplayPackageParameterTest() { DisplayPackage displayPackage = new DisplayPackage(HUDPackage.create(new ArrayList<>()), DisplayProperties.create(0, () -> Completable.complete())); simpleLocation.addDisplayPackage(displayPackage); assertEquals(displayPackage, onDisplayRemoveCalls.get(0)); } @Test public void addDisplayWithNeverRemoveableDoesNotCallOnDisplayRemoveTest() { DisplayPackage displayPackage = new DisplayPackage(HUDPackage.create(new ArrayList<>()), DisplayProperties.create(0, () -> Completable.never())); simpleLocation.addDisplayPackage(displayPackage); assertEquals(0, onDisplayRemoveCalls.size()); } @Test public void addDisplayWithTimedRemoveableDoesNotCallOnDisplayRemoveWhileTimeNotExpiredTest() { TestScheduler scheduler = new TestScheduler(); DisplayPackage displayPackage = new DisplayPackage(HUDPackage.create(new ArrayList<>()), DisplayProperties.create(0, () -> Completable.timer(2, TimeUnit.HOURS, scheduler))); simpleLocation.addDisplayPackage(displayPackage); scheduler.advanceTimeBy(1, TimeUnit.HOURS); 
assertEquals(0, onDisplayRemoveCalls.size()); } @Test public void addDisplayWithTimedRemoveableCallsOnDisplayRemoveWhenTimeExpiredOnceTest() { TestScheduler scheduler = new TestScheduler(); DisplayPackage displayPackage = new DisplayPackage(HUDPackage.create(new ArrayList<>()), DisplayProperties.create(0, () -> Completable.timer(2, TimeUnit.HOURS, scheduler))); simpleLocation.addDisplayPackage(displayPackage); scheduler.advanceTimeBy(2, TimeUnit.HOURS); assertEquals(1, onDisplayRemoveCalls.size()); } @Test public void addDisplayWithTimedRemoveableCallsOnDisplayRemoveWhenTimeExpiredWithDisplayPackageParameterTest() { TestScheduler scheduler = new TestScheduler(); DisplayPackage displayPackage = new DisplayPackage(HUDPackage.create(new ArrayList<>()), DisplayProperties.create(0, () -> Completable.timer(2, TimeUnit.HOURS, scheduler))); simpleLocation.addDisplayPackage(displayPackage); scheduler.advanceTimeBy(2, TimeUnit.HOURS); assertEquals(displayPackage, onDisplayRemoveCalls.get(0)); } //Display Overriding tests (tests when two displays are added which methods get called) @Test public void onDisplayRemoveCalledOnceWhenDisplayPackageAddedAndDisplayPackageAddedWithHigherPriorityTest() { DisplayPackage displayPackage = new DisplayPackage(HUDPackage.create(new ArrayList<>()), DisplayProperties.create(0, () -> Completable.never())); DisplayPackage displayPackage2 = new DisplayPackage(HUDPackage.create(new ArrayList<>()), DisplayProperties.create(1, () -> Completable.never())); simpleLocation.addDisplayPackage(displayPackage); simpleLocation.addDisplayPackage(displayPackage2); assertEquals(1, onDisplayRemoveCalls.size()); } @Test public void onDisplayRemoveCalledWithFirstDisplayPackageWhenDisplayPackageAddedAndDisplayPackageAddedWithHigherPriorityTest() { DisplayPackage displayPackage = new DisplayPackage(HUDPackage.create(new ArrayList<>()), DisplayProperties.create(0, () -> Completable.never())); DisplayPackage displayPackage2 = new DisplayPackage(HUDPackage.create(new 
ArrayList<>()), DisplayProperties.create(1, () -> Completable.never())); simpleLocation.addDisplayPackage(displayPackage); simpleLocation.addDisplayPackage(displayPackage2); assertEquals(displayPackage, onDisplayRemoveCalls.get(0)); } @Test public void onDisplayRemoveNotCalledWhenDisplayPackageAddedAndDisplayPackageAddedWithLowerPriorityTest() { DisplayPackage displayPackage = new DisplayPackage(HUDPackage.create(new ArrayList<>()), DisplayProperties.create(0, () -> Completable.never())); DisplayPackage displayPackage2 = new DisplayPackage(HUDPackage.create(new ArrayList<>()), DisplayProperties.create(-0.1, () -> Completable.never())); simpleLocation.addDisplayPackage(displayPackage); simpleLocation.addDisplayPackage(displayPackage2); assertEquals(0, onDisplayRemoveCalls.size()); } @Test public void onDisplayRemoveNotCalledWhenDisplayPackageAddedAndDisplayPackageAddedWithSamePriorityTest() { DisplayPackage displayPackage = new DisplayPackage(HUDPackage.create(new ArrayList<>()), DisplayProperties.create(0, () -> Completable.never())); DisplayPackage displayPackage2 = new DisplayPackage(HUDPackage.create(new ArrayList<>()), DisplayProperties.create(0, () -> Completable.never())); simpleLocation.addDisplayPackage(displayPackage); simpleLocation.addDisplayPackage(displayPackage2); assertEquals(0, onDisplayRemoveCalls.size()); } @Test public void onDisplayCalledTwiceWhenDisplayPackageAddedAndDisplayPackageAddedWithHigherPriorityTest() { DisplayPackage displayPackage = new DisplayPackage(HUDPackage.create(new ArrayList<>()), DisplayProperties.create(0, () -> Completable.never())); DisplayPackage displayPackage2 = new DisplayPackage(HUDPackage.create(new ArrayList<>()), DisplayProperties.create(1, () -> Completable.never())); simpleLocation.addDisplayPackage(displayPackage); simpleLocation.addDisplayPackage(displayPackage2); assertEquals(2, onDisplayCalls.size()); } @Test public void 
onDisplaySecondCallArgumentEqualsSecondDisplayPackageWhenDisplayPackageAddedAndDisplayPackageAddedWithHigherPriorityTest() { DisplayPackage displayPackage = new DisplayPackage(HUDPackage.create(new ArrayList<>()), DisplayProperties.create(0, () -> Completable.never())); DisplayPackage displayPackage2 = new DisplayPackage(HUDPackage.create(new ArrayList<>()), DisplayProperties.create(1, () -> Completable.never())); simpleLocation.addDisplayPackage(displayPackage); simpleLocation.addDisplayPackage(displayPackage2); assertEquals(displayPackage2, onDisplayCalls.get(1)); } @Test public void onDisplayCalledOnceWhenDisplayPackageAddedAndDisplayPackageAddedWithLowerPriorityTest() { DisplayPackage displayPackage = new DisplayPackage(HUDPackage.create(new ArrayList<>()), DisplayProperties.create(0, () -> Completable.never())); DisplayPackage displayPackage2 = new DisplayPackage(HUDPackage.create(new ArrayList<>()), DisplayProperties.create(-0.1, () -> Completable.never())); simpleLocation.addDisplayPackage(displayPackage); simpleLocation.addDisplayPackage(displayPackage2); assertEquals(1, onDisplayCalls.size()); } @Test public void onDisplayCalledOnceWhenDisplayPackageAddedAndDisplayPackageAddedWithSamePriorityTest() { DisplayPackage displayPackage = new DisplayPackage(HUDPackage.create(new ArrayList<>()), DisplayProperties.create(0, () -> Completable.never())); DisplayPackage displayPackage2 = new DisplayPackage(HUDPackage.create(new ArrayList<>()), DisplayProperties.create(0, () -> Completable.never())); simpleLocation.addDisplayPackage(displayPackage); simpleLocation.addDisplayPackage(displayPackage2); assertEquals(1, onDisplayCalls.size()); } }
/* The MIT License (MIT) Copyright (c) 2016 The Orange Dots Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/
package com.tod.utils.logging;

import java.awt.Color;
import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;
import javax.script.ScriptException;
import jdk.nashorn.api.scripting.ScriptObjectMirror;
import org.apache.log4j.AppenderSkeleton;
import org.apache.log4j.Level;
import org.apache.log4j.helpers.LogLog;
import org.apache.log4j.spi.LoggingEvent;

/**
 * Log4j 1.x appender that POSTs each logging event to a Slack incoming-webhook URL as a JSON
 * payload with a single colored attachment (color chosen from the event level).
 * <p>
 * The JSON payload is assembled and serialized with the JVM's Nashorn script engine, so this
 * appender has no JSON-library dependency. Internal diagnostics go through log4j's
 * {@link LogLog} (never through a Logger, which would recurse into this appender).
 */
public class SlackAppender extends AppenderSkeleton {

    /** Slack incoming-webhook URL the payload is POSTed to. */
    private String url;

    /** Shared Nashorn engine used to build and stringify the JSON payload. */
    private static ScriptEngine engine = new ScriptEngineManager().getEngineByName("nashorn");

    public void setUrl(String url) {
        this.url = url;
    }

    public String getUrl() {
        return this.url;
    }

    /** Optional display name for the webhook bot ("username" field in the Slack payload). */
    private String userName = null;

    public void setUserName(String userName) {
        this.userName = userName;
    }

    public String getUserName() {
        return this.userName;
    }

    /** Optional channel override (e.g. "#alerts"). */
    private String channel;

    public String getChannel() {
        return channel;
    }

    public void setChannel(String channel) {
        this.channel = channel;
    }

    /** Optional emoji icon override (e.g. ":warning:"). */
    private String icon_emoji;

    public String getEmoji() {
        return icon_emoji;
    }

    public void setEmoji(String emoji) {
        this.icon_emoji = emoji;
    }

    public SlackAppender() {
    }

    public SlackAppender(boolean isActive) {
        super(isActive);
    }

    @Override
    public void close() {
        // Nothing to release: a connection is opened and closed per event.
    }

    @Override
    public boolean requiresLayout() {
        return false;
    }

    /**
     * Builds the Slack JSON payload for {@code event} and POSTs it to the configured webhook URL.
     * Failures are reported via {@link LogLog#error} and never propagate to the caller.
     */
    @Override
    protected void append(LoggingEvent event) {
        try {
            LogLog.debug("Sending 'POST' request to URL: " + url);
            URL obj = new URL(url);
            HttpURLConnection con = (HttpURLConnection) obj.openConnection();
            con.setRequestMethod("POST");
            con.setRequestProperty("Content-type", "application/json");
            ScriptObjectMirror payload = getJSObject();
            if (this.userName != null) {
                // Fix: Slack's incoming-webhook API expects the "username" key;
                // the previous "userName" key was silently ignored by Slack.
                payload.put("username", escapeString(this.userName));
            }
            if (this.channel != null) {
                payload.put("channel", escapeString(this.channel));
            }
            if (this.icon_emoji != null) {
                payload.put("icon_emoji", escapeString(this.icon_emoji));
            }
            ScriptObjectMirror attachments = getJSArray();
            payload.put("attachments", attachments);
            ScriptObjectMirror attachment = getJSObject();
            attachments.put("0", attachment);
            //TODO make colors configurable
            Color color;
            switch (event.getLevel().toInt()) {
                case Level.FATAL_INT:
                case Level.ERROR_INT:
                    color = Color.RED;
                    break;
                case Level.WARN_INT:
                    color = Color.ORANGE;
                    break;
                case Level.INFO_INT:
                    color = Color.BLACK;
                    break;
                case Level.DEBUG_INT:
                    color = Color.BLUE;
                    break;
                case Level.TRACE_INT:
                    color = Color.GREEN;
                    break;
                default:
                    color = Color.GRAY;
                    break;
            }
            // getRGB() is 0xAARRGGBB; drop the two alpha hex digits to get "rrggbb".
            attachment.put("color", "#" + Integer.toHexString(color.getRGB()).substring(2));
            attachment.put("pretext", escapeString(event.getMessage().toString()));
            attachment.put("title", escapeString(event.getLevel().toString()));
            String[] stack = event.getThrowableStrRep();
            if (stack != null && stack.length > 0) {
                attachment.put("text", escapeString(String.join("\n", stack)));
            }
            // Plain-text fallback: "<title> <pretext>[\n<text>]".
            // Fix: the original appended attachment.get(" ") (a lookup of the nonexistent
            // key " ") instead of a literal space, injecting "null" into the fallback.
            StringBuilder fallbackText = new StringBuilder();
            fallbackText.append(attachment.get("title"));
            fallbackText.append(" ");
            fallbackText.append(attachment.get("pretext"));
            if (attachment.containsKey("text")) {
                fallbackText.append("\n");
                fallbackText.append(attachment.get("text"));
            }
            attachment.put("fallback", fallbackText.toString());
            String payloadString = toJSONString(payload);
            LogLog.debug("Request payload: " + payloadString);
            // Send post request. Write UTF-8 bytes explicitly: DataOutputStream.writeBytes
            // discards the high byte of every char and would mangle non-ASCII messages.
            con.setDoOutput(true);
            try (DataOutputStream wr = new DataOutputStream(con.getOutputStream())) {
                wr.write(payloadString.getBytes(StandardCharsets.UTF_8));
                wr.flush();
            }
            LogLog.debug("Response Code: " + con.getResponseCode());
            StringBuilder response = new StringBuilder();
            try (BufferedReader in = new BufferedReader(
                            new InputStreamReader(con.getInputStream(), StandardCharsets.UTF_8))) {
                String inputLine;
                while ((inputLine = in.readLine()) != null) {
                    response.append(inputLine);
                }
            }
            LogLog.debug("Response: " + response.toString());
        } catch (Exception e) {
            // Report via log4j's internal logger only; the previous printStackTrace()
            // duplicated the report on stderr.
            LogLog.error("Error posting to Slack", e);
        }
    }

    /** @return a fresh JS object ({}) created by the shared Nashorn engine. */
    private static ScriptObjectMirror getJSObject() throws ScriptException {
        return (ScriptObjectMirror) engine.eval("new Object()");
    }

    /** @return a fresh JS array ([]) created by the shared Nashorn engine. */
    private static ScriptObjectMirror getJSArray() throws ScriptException {
        return (ScriptObjectMirror) engine.eval("new Array()");
    }

    /** Serializes a Nashorn object graph to a JSON string via the engine's JSON.stringify. */
    private static String toJSONString(ScriptObjectMirror object) throws ScriptException {
        ScriptObjectMirror json = (ScriptObjectMirror) engine.eval("JSON");
        return json.callMember("stringify", object).toString();
    }

    /**
     * Escapes the three characters Slack requires to be HTML-entity encoded.
     * Returns null for null input.
     */
    private static String escapeString(String text) {
        if (text == null) {
            return null;
        }
        /*
         * https://api.slack.com/docs/formatting
         *
         * Replace the ampersand, &, with &amp;
         * Replace the less-than sign, < with &lt;
         * Replace the greater-than sign, > with &gt;
         */
        // Plain replace() suffices (no regex semantics needed). '&' must be replaced
        // first so the generated entities are not themselves re-escaped.
        return text.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;");
    }
}
/*- * * * Copyright 2015 Skymind,Inc. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * */ package org.deeplearning4j.nn.conf; import com.google.common.collect.Sets; import lombok.Data; import lombok.NoArgsConstructor; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.ClassUtils; import org.deeplearning4j.nn.api.OptimizationAlgorithm; import org.deeplearning4j.nn.api.layers.LayerConstraint; import org.deeplearning4j.nn.conf.distribution.Distribution; import org.deeplearning4j.nn.conf.dropout.Dropout; import org.deeplearning4j.nn.conf.dropout.IDropout; import org.deeplearning4j.nn.conf.graph.GraphVertex; import org.deeplearning4j.nn.conf.inputs.InputType; import org.deeplearning4j.nn.conf.layers.*; import org.deeplearning4j.nn.conf.layers.misc.FrozenLayer; import org.deeplearning4j.nn.conf.layers.variational.ReconstructionDistribution; import org.deeplearning4j.nn.conf.serde.ComputationGraphConfigurationDeserializer; import org.deeplearning4j.nn.conf.serde.MultiLayerConfigurationDeserializer; import org.deeplearning4j.nn.conf.stepfunctions.StepFunction; import org.deeplearning4j.nn.conf.weightnoise.IWeightNoise; import org.deeplearning4j.nn.weights.WeightInit; import org.deeplearning4j.util.reflections.DL4JSubTypesScanner; import org.nd4j.linalg.activations.Activation; import org.nd4j.linalg.activations.IActivation; import org.nd4j.linalg.activations.impl.ActivationSigmoid; import org.nd4j.linalg.factory.Nd4j; import 
org.nd4j.linalg.learning.config.IUpdater;
import org.nd4j.linalg.learning.config.Sgd;
import org.nd4j.linalg.lossfunctions.ILossFunction;
import org.nd4j.shade.jackson.databind.*;
import org.nd4j.shade.jackson.databind.deser.BeanDeserializerModifier;
import org.nd4j.shade.jackson.databind.introspect.AnnotatedClass;
import org.nd4j.shade.jackson.databind.jsontype.NamedType;
import org.nd4j.shade.jackson.databind.module.SimpleModule;
import org.nd4j.shade.jackson.dataformat.yaml.YAMLFactory;
import org.reflections.ReflectionUtils;
import org.reflections.Reflections;
import org.reflections.util.ClasspathHelper;
import org.reflections.util.ConfigurationBuilder;
import org.reflections.util.FilterBuilder;
import java.io.IOException;
import java.io.Serializable;
import java.lang.reflect.Modifier;
import java.net.URL;
import java.util.*;

/**
 * A Serializable configuration
 * for neural nets that covers per layer parameters
 *
 * @author Adam Gibson
 */
@Data
@NoArgsConstructor
@Slf4j
public class NeuralNetConfiguration implements Serializable, Cloneable {

    /**
     * System property for custom layers, preprocessors, graph vertices etc. Enabled by default.
     * Run JVM with "-Dorg.deeplearning4j.config.custom.enabled=false" to disable classpath scanning for
     * custom functionality.
     * Overriding the default (i.e., disabling scanning) is only useful if (a) no custom layers/preprocessors etc will be
     * used, and (b) minimizing startup/initialization time for new JVMs is very important.
     * Results are cached, so there is no cost to custom layers after the first network has been constructed.
     */
    public static final String CUSTOM_FUNCTIONALITY = "org.deeplearning4j.config.custom.enabled";

    // Configuration of the single layer this (per-layer) configuration wraps
    protected Layer layer;
    //batch size: primarily used for conv nets. Will be reinforced if set.
    protected boolean miniBatch = true;
    protected int numIterations;
    //number of line search iterations
    protected int maxNumLineSearchIterations;
    protected long seed;
    protected OptimizationAlgorithm optimizationAlgo;
    //gradient keys used for ensuring order when getting and setting the gradient
    protected List<String> variables = new ArrayList<>();
    //whether to constrain the gradient to unit norm or not
    protected StepFunction stepFunction;
    //minimize or maximize objective
    protected boolean minimize = true;
    // Per-parameter L1/L2 regularization coefficients, cached from the layer (see setLayerParamLR)
    protected Map<String, Double> l1ByParam = new HashMap<>();
    protected Map<String, Double> l2ByParam = new HashMap<>();
    protected boolean pretrain;

    // this field defines preOutput cache
    protected CacheMode cacheMode;

    //Counter for the number of parameter updates so far for this layer.
    //Note that this is only used for pretrain layers (RBM, VAE) - MultiLayerConfiguration and ComputationGraphConfiguration
    //contain counters for standard backprop training.
    // This is important for learning rate schedules, for example, and is stored here to ensure it is persisted
    // for Spark and model serialization
    protected int iterationCount = 0;

    //Counter for the number of epochs completed so far. Used for per-epoch schedules
    protected int epochCount = 0;

    // Shared Jackson mappers (JSON + YAML); "mapper" is non-final because
    // reinitMapperWithSubtypes replaces it with a copy at runtime
    private static ObjectMapper mapper = initMapper();
    private static final ObjectMapper mapperYaml = initMapperYaml();
    // Lazily-populated cache of classpath-scanned subtypes (see registerSubtypes)
    private static Set<Class<?>> subtypesClassCache = null;

    /**
     * Creates and returns a deep copy of the configuration.
*/
    @Override
    public NeuralNetConfiguration clone() {
        try {
            NeuralNetConfiguration clone = (NeuralNetConfiguration) super.clone();
            if (clone.layer != null)
                clone.layer = clone.layer.clone();
            if (clone.stepFunction != null)
                clone.stepFunction = clone.stepFunction.clone();
            if (clone.variables != null)
                clone.variables = new ArrayList<>(clone.variables);
            if (clone.l1ByParam != null)
                clone.l1ByParam = new HashMap<>(clone.l1ByParam);
            if (clone.l2ByParam != null)
                clone.l2ByParam = new HashMap<>(clone.l2ByParam);
            return clone;
        } catch (CloneNotSupportedException e) {
            throw new RuntimeException(e);
        }
    }

    /** Returns a defensive copy of the registered parameter-variable keys. */
    public List<String> variables() {
        return new ArrayList<>(variables);
    }

    /** Returns the variable keys; a copy when {@code copy} is true, otherwise the live list. */
    public List<String> variables(boolean copy) {
        if (copy)
            return variables();
        return variables;
    }

    /** Registers a parameter variable (if absent) and caches its per-parameter L1/L2 values. */
    public void addVariable(String variable) {
        if (!variables.contains(variable)) {
            variables.add(variable);
            setLayerParamLR(variable);
        }
    }

    /** Clears all registered variables and their cached L1/L2 values. */
    public void clearVariables() {
        variables.clear();
        l1ByParam.clear();
        l2ByParam.clear();
    }

    /** Re-caches the per-parameter L1/L2 values for every registered variable. */
    public void resetVariables() {
        for (String s : variables) {
            setLayerParamLR(s);
        }
    }

    /** Caches the layer's L1/L2 coefficients for {@code variable}, treating NaN as "not set" (0.0). */
    public void setLayerParamLR(String variable) {
        double l1 = layer.getL1ByParam(variable);
        if (Double.isNaN(l1))
            l1 = 0.0; //Not set
        double l2 = layer.getL2ByParam(variable);
        if (Double.isNaN(l2))
            l2 = 0.0; //Not set
        l1ByParam.put(variable, l1);
        l2ByParam.put(variable, l2);
    }

    public double getL1ByParam(String variable) {
        return l1ByParam.get(variable);
    }

    public double getL2ByParam(String variable) {
        return l2ByParam.get(variable);
    }

    /**
     * Fluent interface for building a list of configurations
     */
    public static class ListBuilder extends MultiLayerConfiguration.Builder {
        private int layerCounter = -1; //Used only for .layer(Layer) method
        private Map<Integer, Builder> layerwise;
        private Builder globalConfig;

        // Constructor
        public ListBuilder(Builder globalConfig, Map<Integer, Builder> layerMap) {
            this.globalConfig = globalConfig;
            this.layerwise = layerMap;
        }

        public ListBuilder(Builder globalConfig) {
            this(globalConfig, new HashMap<Integer, Builder>());
        }

        public ListBuilder backprop(boolean backprop) {
            this.backprop = backprop;
            return this;
        }

        public ListBuilder pretrain(boolean pretrain) {
            this.pretrain = pretrain;
            return this;
        }

        /** Sets (or replaces) the layer at index {@code ind}, applying the global config as defaults. */
        public ListBuilder layer(int ind, @NonNull Layer layer) {
            if (layerwise.containsKey(ind)) {
                log.info("Layer index {} already exists, layer of type {} will be replace by layer type {}", ind,
                                layerwise.get(ind).getClass().getSimpleName(), layer.getClass().getSimpleName());
                layerwise.get(ind).layer(layer);
            } else {
                layerwise.put(ind, globalConfig.clone().layer(layer));
            }
            if (layerCounter < ind) {
                //Edge case: user is mixing .layer(Layer) and .layer(int, Layer) calls
                //This should allow a .layer(A, X) and .layer(Y) to work such that layer Y is index (A+1)
                layerCounter = ind;
            }
            return this;
        }

        /** Appends a layer at the next index after the highest one used so far. */
        public ListBuilder layer(Layer layer) {
            return layer(++layerCounter, layer);
        }

        public Map<Integer, Builder> getLayerwise() {
            return layerwise;
        }

        @Override
        public ListBuilder setInputType(InputType inputType) {
            return (ListBuilder) super.setInputType(inputType);
        }

        /**
         * A convenience method for setting input types: note that for example .inputType().convolutional(h,w,d)
         * is equivalent to .setInputType(InputType.convolutional(h,w,d))
         */
        public ListBuilder.InputTypeBuilder inputType() {
            return new InputTypeBuilder();
        }

        /**
         * Build the multi layer network
         * based on this neural network and
         * overridden parameters
         *
         * @return the configuration to build
         */
        public MultiLayerConfiguration build() {
            List<NeuralNetConfiguration> list = new ArrayList<>();
            if (layerwise.isEmpty())
                throw new IllegalStateException("Invalid configuration: no layers defined");
            // Layers must be a contiguous 0..n-1 index range; fail fast on gaps or missing configs
            for (int i = 0; i < layerwise.size(); i++) {
                if (layerwise.get(i) == null) {
                    throw new IllegalStateException("Invalid configuration: layer number " + i
                                    + " not specified. Expect layer " + "numbers to be 0 to " + (layerwise.size() - 1)
                                    + " inclusive (number of layers defined: " + layerwise.size() + ")");
                }
                if (layerwise.get(i).getLayer() == null)
                    throw new IllegalStateException("Cannot construct network: Layer config for" + "layer with index "
                                    + i + " is not defined)");

                //Layer names: set to default, if not set
                if (layerwise.get(i).getLayer().getLayerName() == null) {
                    layerwise.get(i).getLayer().setLayerName("layer" + i);
                }
                list.add(layerwise.get(i).build());
            }
            return new MultiLayerConfiguration.Builder().backprop(backprop).inputPreProcessors(inputPreProcessors)
                            .pretrain(pretrain).backpropType(backpropType).tBPTTForwardLength(tbpttFwdLength)
                            .tBPTTBackwardLength(tbpttBackLength).setInputType(this.inputType)
                            .trainingWorkspaceMode(globalConfig.trainingWorkspaceMode).cacheMode(globalConfig.cacheMode)
                            .inferenceWorkspaceMode(globalConfig.inferenceWorkspaceMode).confs(list).build();
        }

        /** Helper class for setting input types */
        public class InputTypeBuilder {
            /**
             * See {@link InputType#convolutional(int, int, int)}
             */
            public ListBuilder convolutional(int height, int width, int depth) {
                return ListBuilder.this.setInputType(InputType.convolutional(height, width, depth));
            }

            /**
             * See {@link InputType#convolutionalFlat(int, int, int)}
             */
            public ListBuilder convolutionalFlat(int height, int width, int depth) {
                return ListBuilder.this.setInputType(InputType.convolutionalFlat(height, width, depth));
            }

            /**
             * See {@link InputType#feedForward(int)}
             */
            public ListBuilder feedForward(int size) {
                return ListBuilder.this.setInputType(InputType.feedForward(size));
            }

            /**
             * See {@link InputType#recurrent(int)}}
             */
            public ListBuilder recurrent(int size) {
                return ListBuilder.this.setInputType(InputType.recurrent(size));
            }
        }
    }

    /**
     * Return this configuration as yaml
     *
     * @return this configuration represented as yaml
     */
    public String toYaml() {
        ObjectMapper mapper = mapperYaml();
        try {
            String ret = mapper.writeValueAsString(this);
            return ret;
        } catch
(org.nd4j.shade.jackson.core.JsonProcessingException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Create a neural net configuration from yaml
     *
     * @param json the neural net configuration as yaml
     * @return the deserialized configuration
     */
    public static NeuralNetConfiguration fromYaml(String json) {
        ObjectMapper mapper = mapperYaml();
        try {
            NeuralNetConfiguration ret = mapper.readValue(json, NeuralNetConfiguration.class);
            return ret;
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Return this configuration as json
     *
     * @return this configuration represented as json
     */
    public String toJson() {
        ObjectMapper mapper = mapper();
        try {
            String ret = mapper.writeValueAsString(this);
            return ret;
        } catch (org.nd4j.shade.jackson.core.JsonProcessingException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Create a neural net configuration from json
     *
     * @param json the neural net configuration as json
     * @return the deserialized configuration
     */
    public static NeuralNetConfiguration fromJson(String json) {
        ObjectMapper mapper = mapper();
        try {
            NeuralNetConfiguration ret = mapper.readValue(json, NeuralNetConfiguration.class);
            return ret;
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Object mapper for serialization of configurations
     *
     * @return the shared YAML ObjectMapper
     */
    public static ObjectMapper mapperYaml() {
        return mapperYaml;
    }

    private static ObjectMapper initMapperYaml() {
        ObjectMapper ret = new ObjectMapper(new YAMLFactory());
        configureMapper(ret);
        return ret;
    }

    /**
     * Object mapper for serialization of configurations
     *
     * @return the shared JSON ObjectMapper
     */
    public static ObjectMapper mapper() {
        return mapper;
    }

    /**
     * Reinitialize and return the Jackson/json ObjectMapper with additional named types.
     * This can be used to add additional subtypes at runtime (i.e., for JSON mapping with
     * types defined outside of the main DL4J codebase)
     */
    public static ObjectMapper reinitMapperWithSubtypes(Collection<NamedType> additionalTypes) {
        mapper.registerSubtypes(additionalTypes.toArray(new NamedType[additionalTypes.size()]));
        //Recreate the mapper (via copy), as mapper won't use registered subtypes after first use
        mapper = mapper.copy();
        return mapper;
    }

    private static ObjectMapper initMapper() {
        ObjectMapper ret = new ObjectMapper();
        configureMapper(ret);
        return ret;
    }

    // Applies the shared mapper settings: lenient deserialization, sorted/indented output,
    // backward-compatibility deserializers, and subtype registration
    private static void configureMapper(ObjectMapper ret) {
        ret.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
        ret.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
        ret.configure(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY, true);
        ret.enable(SerializationFeature.INDENT_OUTPUT);
        SimpleModule customDeserializerModule = new SimpleModule();
        customDeserializerModule.setDeserializerModifier(new BeanDeserializerModifier() {
            @Override
            public JsonDeserializer<?> modifyDeserializer(DeserializationConfig config, BeanDescription beanDesc,
                            JsonDeserializer<?> deserializer) {
                //Use our custom deserializers to handle backward compatibility for updaters -> IUpdater
                if (beanDesc.getBeanClass() == MultiLayerConfiguration.class) {
                    return new MultiLayerConfigurationDeserializer(deserializer);
                } else if (beanDesc.getBeanClass() == ComputationGraphConfiguration.class) {
                    return new ComputationGraphConfigurationDeserializer(deserializer);
                }
                return deserializer;
            }
        });
        ret.registerModule(customDeserializerModule);
        registerSubtypes(ret);
    }

    // Scans the classpath (once, cached) for custom subtypes of the core config interfaces and
    // registers any not-yet-registered concrete classes with the given mapper for JSON typing
    private static synchronized void registerSubtypes(ObjectMapper mapper) {
        //Register concrete subtypes for JSON serialization
        List<Class<?>> classes = Arrays.<Class<?>>asList(InputPreProcessor.class, ILossFunction.class,
                        IActivation.class, Layer.class, GraphVertex.class, ReconstructionDistribution.class);
        List<String> classNames = new ArrayList<>(6);
        for (Class<?> c :
classes)
            classNames.add(c.getName());

        // First: scan the classpath and find all instances of the 'baseClasses' classes
        if (subtypesClassCache == null) {
            //Check system property:
            String prop = System.getProperty(CUSTOM_FUNCTIONALITY);
            if (prop != null && !Boolean.parseBoolean(prop)) {
                // Scanning explicitly disabled via -Dorg.deeplearning4j.config.custom.enabled=false
                subtypesClassCache = Collections.emptySet();
            } else {
                List<Class<?>> interfaces = Arrays.<Class<?>>asList(InputPreProcessor.class, ILossFunction.class,
                                IActivation.class, ReconstructionDistribution.class);
                List<Class<?>> classesList = Arrays.<Class<?>>asList(Layer.class, GraphVertex.class);
                Collection<URL> urls = ClasspathHelper.forClassLoader();
                List<URL> scanUrls = new ArrayList<>();
                for (URL u : urls) {
                    String path = u.getPath();
                    if (!path.matches(".*/jre/lib/.*jar")) { //Skip JRE/JDK JARs
                        scanUrls.add(u);
                    }
                }
                Reflections reflections = new Reflections(new ConfigurationBuilder().filterInputsBy(new FilterBuilder()
                                .exclude("^(?!.*\\.class$).*$") //Consider only .class files (to avoid debug messages etc. on .dlls, etc)
                                //Exclude the following: the assumption here is that no custom functionality will ever be present
                                // under these package name prefixes. These are all common dependencies for DL4J
                                .exclude("^org.nd4j.*").exclude("^org.datavec.*").exclude("^org.bytedeco.*") //JavaCPP
                                .exclude("^com.fasterxml.*") //Jackson
                                .exclude("^org.apache.*") //Apache commons, Spark, log4j etc
                                .exclude("^org.projectlombok.*").exclude("^com.twelvemonkeys.*").exclude("^org.joda.*")
                                .exclude("^org.slf4j.*").exclude("^com.google.*").exclude("^org.reflections.*")
                                .exclude("^ch.qos.*") //Logback
                ).addUrls(scanUrls).setScanners(new DL4JSubTypesScanner(interfaces, classesList)));
                org.reflections.Store store = reflections.getStore();
                Iterable<String> subtypesByName = store.getAll(DL4JSubTypesScanner.class.getSimpleName(), classNames);

                Set<? extends Class<?>> subtypeClasses = Sets.newHashSet(ReflectionUtils.forNames(subtypesByName));
                subtypesClassCache = new HashSet<>();
                for (Class<?> c : subtypeClasses) {
                    if (Modifier.isAbstract(c.getModifiers()) || Modifier.isInterface(c.getModifiers())) {
                        //log.info("Skipping abstract/interface: {}",c);
                        continue;
                    }
                    subtypesClassCache.add(c);
                }
            }
        }

        //Second: get all currently registered subtypes for this mapper
        Set<Class<?>> registeredSubtypes = new HashSet<>();
        for (Class<?> c : classes) {
            AnnotatedClass ac = AnnotatedClass.construct(c,
                            mapper.getSerializationConfig().getAnnotationIntrospector(), null);
            Collection<NamedType> types = mapper.getSubtypeResolver().collectAndResolveSubtypes(ac,
                            mapper.getSerializationConfig(), mapper.getSerializationConfig().getAnnotationIntrospector());
            for (NamedType nt : types) {
                registeredSubtypes.add(nt.getType());
            }
        }

        //Third: register all _concrete_ subtypes that are not already registered
        List<NamedType> toRegister = new ArrayList<>();
        for (Class<?> c : subtypesClassCache) {
            //Check if it's concrete or abstract...
            if (Modifier.isAbstract(c.getModifiers()) || Modifier.isInterface(c.getModifiers())) {
                //log.info("Skipping abstract/interface: {}",c);
                continue;
            }
            if (!registeredSubtypes.contains(c)) {
                String name;
                if (ClassUtils.isInnerClass(c)) {
                    // Inner classes get "Outer$Inner" as their JSON type name
                    Class<?> c2 = c.getDeclaringClass();
                    name = c2.getSimpleName() + "$" + c.getSimpleName();
                } else {
                    name = c.getSimpleName();
                }
                toRegister.add(new NamedType(c, name));
                if (log.isDebugEnabled()) {
                    for (Class<?> baseClass : classes) {
                        if (baseClass.isAssignableFrom(c)) {
                            log.debug("Registering class for JSON serialization: {} as subtype of {}", c.getName(),
                                            baseClass.getName());
                            break;
                        }
                    }
                }
            }
        }
        mapper.registerSubtypes(toRegister.toArray(new NamedType[toRegister.size()]));
    }

    @Data
    public static class Builder implements Cloneable {
        // Global defaults that are copied into each layer unless the layer overrides them
        // (NaN / null values mean "not set here"; see copyConfigToLayer)
        protected IActivation activationFn = new ActivationSigmoid();
        protected WeightInit weightInit = WeightInit.XAVIER;
        protected double biasInit = 0.0;
        protected Distribution dist = null;
        protected double l1 = Double.NaN;
        protected double l2 = Double.NaN;
        protected double l1Bias = Double.NaN;
        protected double l2Bias = Double.NaN;
        protected IDropout idropOut;
        protected IWeightNoise weightNoise;
        protected IUpdater iUpdater = new Sgd();
        protected IUpdater biasUpdater = null;
        protected Layer layer;
        protected boolean miniBatch = true;
        protected int numIterations = 1;
        protected int maxNumLineSearchIterations = 5;
        protected long seed = System.currentTimeMillis();
        protected OptimizationAlgorithm optimizationAlgo = OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT;
        protected StepFunction stepFunction = null;
        protected boolean minimize = true;
        protected GradientNormalization gradientNormalization = GradientNormalization.None;
        protected double gradientNormalizationThreshold = 1.0;
        protected boolean pretrain = false;
        protected List<LayerConstraint> allParamConstraints;
        protected List<LayerConstraint> weightConstraints;
        protected List<LayerConstraint> biasConstraints;
        protected WorkspaceMode trainingWorkspaceMode =
WorkspaceMode.NONE;
        protected WorkspaceMode inferenceWorkspaceMode = WorkspaceMode.SEPARATE;
        protected CacheMode cacheMode = CacheMode.NONE;
        protected ConvolutionMode convolutionMode = ConvolutionMode.Truncate;

        public Builder() {
            //
        }

        /** Copies the optimization-related settings from an existing configuration (may be null). */
        public Builder(NeuralNetConfiguration newConf) {
            if (newConf != null) {
                minimize = newConf.minimize;
                maxNumLineSearchIterations = newConf.maxNumLineSearchIterations;
                layer = newConf.layer;
                numIterations = newConf.numIterations;
                optimizationAlgo = newConf.optimizationAlgo;
                seed = newConf.seed;
                stepFunction = newConf.stepFunction;
                miniBatch = newConf.miniBatch;
                pretrain = newConf.pretrain;
            }
        }

        /**
         * Process input as minibatch vs full dataset.
         * Default set to true.
         */
        public Builder miniBatch(boolean miniBatch) {
            this.miniBatch = miniBatch;
            return this;
        }

        /**
         * This method defines Workspace mode being used during training:
         * NONE: workspace won't be used
         * SINGLE: one workspace will be used during whole iteration loop
         * SEPARATE: separate workspaces will be used for feedforward and backprop iteration loops
         *
         * @param workspaceMode the workspace mode to use during training
         * @return this builder
         */
        public Builder trainingWorkspaceMode(@NonNull WorkspaceMode workspaceMode) {
            this.trainingWorkspaceMode = workspaceMode;
            return this;
        }

        /**
         * This method defines Workspace mode being used during inference:
         * NONE: workspace won't be used
         * SINGLE: one workspace will be used during whole iteration loop
         * SEPARATE: separate workspaces will be used for feedforward and backprop iteration loops
         *
         * @param workspaceMode the workspace mode to use during inference
         * @return this builder
         */
        public Builder inferenceWorkspaceMode(@NonNull WorkspaceMode workspaceMode) {
            this.inferenceWorkspaceMode = workspaceMode;
            return this;
        }

        /**
         * This method defines how/if preOutput cache is handled:
         * NONE: cache disabled (default value)
         * HOST: Host memory will be used
         * DEVICE: GPU memory will be used (on CPU backends effect will be the same as for HOST)
         *
         * @param cacheMode the cache mode to use
         * @return this builder
         */
        public Builder cacheMode(@NonNull CacheMode cacheMode) {
            this.cacheMode = cacheMode;
            return this;
        }

        /**
         * Objective function to minimize or maximize cost function
         * Default set to minimize true.
         */
        public Builder minimize(boolean minimize) {
            this.minimize = minimize;
            return this;
        }

        /**
         * Maximum number of line search iterations.
         * Only applies for line search optimizers: Line Search SGD, Conjugate Gradient, LBFGS
         * is NOT applicable for standard SGD
         *
         * @param maxNumLineSearchIterations > 0
         * @return this builder
         */
        public Builder maxNumLineSearchIterations(int maxNumLineSearchIterations) {
            this.maxNumLineSearchIterations = maxNumLineSearchIterations;
            return this;
        }

        /**
         * Layer class.
         */
        public Builder layer(Layer layer) {
            this.layer = layer;
            return this;
        }

        /**
         * Step function to apply for back track line search.
         * Only applies for line search optimizers: Line Search SGD, Conjugate Gradient, LBFGS
         * Options: DefaultStepFunction (default), NegativeDefaultStepFunction
         * GradientStepFunction (for SGD), NegativeGradientStepFunction
         */
        @Deprecated
        public Builder stepFunction(StepFunction stepFunction) {
            this.stepFunction = stepFunction;
            return this;
        }

        /**
         * Create a ListBuilder (for creating a MultiLayerConfiguration)<br>
         * Usage:<br>
         * <pre>
         * {@code .list()
         * .layer(new DenseLayer.Builder()...build())
         * ...
         * .layer(new OutputLayer.Builder()...build())
         * }
         * </pre>
         */
        public ListBuilder list() {
            return new ListBuilder(this);
        }

        /**
         * Create a ListBuilder (for creating a MultiLayerConfiguration) with the specified layers<br>
         * Usage:<br>
         * <pre>
         * {@code .list(
         * new DenseLayer.Builder()...build(),
         * ...,
         * new OutputLayer.Builder()...build())
         * }
         * </pre>
         *
         * @param layers The layer configurations for the network
         */
        public ListBuilder list(Layer... layers) {
            if (layers == null || layers.length == 0)
                throw new IllegalArgumentException("Cannot create network with no layers");
            Map<Integer, Builder> layerMap = new HashMap<>();
            // Each layer gets its own clone of this builder so global defaults are captured per-layer
            for (int i = 0; i < layers.length; i++) {
                Builder b = this.clone();
                b.layer(layers[i]);
                layerMap.put(i, b);
            }
            return new ListBuilder(this, layerMap);
        }

        /**
         * Create a GraphBuilder (for creating a ComputationGraphConfiguration).
         */
        public ComputationGraphConfiguration.GraphBuilder graphBuilder() {
            return new ComputationGraphConfiguration.GraphBuilder(this);
        }

        /**
         * Number of optimization iterations. Should be set to 1 for >99% of use cases (possible exception:
         * very tiny full batch dataset training)
         */
        public Builder iterations(int numIterations) {
            this.numIterations = numIterations;
            return this;
        }

        /**
         * Random number generator seed. Used for reproducability between runs
         */
        public Builder seed(long seed) {
            this.seed = seed;
            // Also seeds the global ND4J RNG, not just this configuration
            Nd4j.getRandom().setSeed(seed);
            return this;
        }

        /**
         * Optimization algorithm to use. Most common: OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT
         *
         * @param optimizationAlgo Optimization algorithm to use when training
         */
        public Builder optimizationAlgo(OptimizationAlgorithm optimizationAlgo) {
            this.optimizationAlgo = optimizationAlgo;
            return this;
        }

        @Override
        public Builder clone() {
            try {
                Builder clone = (Builder) super.clone();
                if (clone.layer != null)
                    clone.layer = clone.layer.clone();
                if (clone.stepFunction != null)
                    clone.stepFunction = clone.stepFunction.clone();
                return clone;
            } catch (CloneNotSupportedException e) {
                throw new RuntimeException(e);
            }
        }

        /**
         * Activation function / neuron non-linearity
         *
         * @see #activation(Activation)
         */
        public Builder activation(IActivation activationFunction) {
            this.activationFn = activationFunction;
            return this;
        }

        /**
         * Activation function / neuron non-linearity
         */
        public Builder activation(Activation activation) {
            return activation(activation.getActivationFunction());
        }

        /**
         * Weight initialization scheme.
*
         * @see org.deeplearning4j.nn.weights.WeightInit
         */
        public Builder weightInit(WeightInit weightInit) {
            this.weightInit = weightInit;
            return this;
        }

        /**
         * Constant for bias initialization. Default: 0.0
         *
         * @param biasInit Constant for bias initialization
         */
        public Builder biasInit(double biasInit) {
            this.biasInit = biasInit;
            return this;
        }

        /**
         * Distribution to sample initial weights from. Used in conjunction with
         * .weightInit(WeightInit.DISTRIBUTION).
         */
        public Builder dist(Distribution dist) {
            this.dist = dist;
            return this;
        }

        /**
         * L1 regularization coefficient for the weights.
         */
        public Builder l1(double l1) {
            this.l1 = l1;
            return this;
        }

        /**
         * L2 regularization coefficient for the weights.
         */
        public Builder l2(double l2) {
            this.l2 = l2;
            return this;
        }

        /**
         * L1 regularization coefficient for the bias.
         */
        public Builder l1Bias(double l1Bias) {
            this.l1Bias = l1Bias;
            return this;
        }

        /**
         * L2 regularization coefficient for the bias.
         */
        public Builder l2Bias(double l2Bias) {
            this.l2Bias = l2Bias;
            return this;
        }

        /**
         * Dropout probability. This is the probability of <it>retaining</it> each input activation value for a layer.
         * dropOut(x) will keep an input activation with probability x, and set to 0 with probability 1-x.<br>
         * dropOut(0.0) is a special value / special case - when set to 0.0., dropout is disabled (not applied). Note
         * that a dropout value of 1.0 is functionally equivalent to no dropout: i.e., 100% probability of retaining
         * each input activation.<br>
         * <p>
         * Note 1: Dropout is applied at training time only - and is automatically not applied at test time
         * (for evaluation, etc)<br>
         * Note 2: This sets the probability per-layer. Care should be taken when setting lower values for
         * complex networks (too much information may be lost with aggressive (very low) dropout values).<br>
         * Note 3: Frequently, dropout is not applied to (or, has higher retain probability for) input (first layer)
         * layers. Dropout is also often not applied to output layers. This needs to be handled MANUALLY by the user
         * - set .dropout(0) on those layers when using global dropout setting.<br>
         * Note 4: Implementation detail (most users can ignore): DL4J uses inverted dropout, as described here:
         * <a href="http://cs231n.github.io/neural-networks-2/">http://cs231n.github.io/neural-networks-2/</a>
         * </p>
         *
         * @param inputRetainProbability Dropout probability (probability of retaining each input activation value for a layer)
         * @see #dropOut(IDropout)
         */
        public Builder dropOut(double inputRetainProbability) {
            return dropOut(new Dropout(inputRetainProbability));
        }

        /**
         * Set the dropout for all layers in this network
         *
         * @param dropout Dropout, such as {@link Dropout}, {@link org.deeplearning4j.nn.conf.dropout.GaussianDropout},
         * {@link org.deeplearning4j.nn.conf.dropout.GaussianNoise} etc
         * @return this builder
         */
        public Builder dropOut(IDropout dropout) {
            this.idropOut = dropout;
            return this;
        }

        /**
         * Set the weight noise (such as {@link org.deeplearning4j.nn.conf.weightnoise.DropConnect} and
         * {@link org.deeplearning4j.nn.conf.weightnoise.WeightNoise}) for the layers in this network.
         *
         * @param weightNoise Weight noise instance to use
         */
        public Builder weightNoise(IWeightNoise weightNoise) {
            this.weightNoise = weightNoise;
            return this;
        }

        /**
         * @deprecated Use {@link #updater(IUpdater)}
         */
        @Deprecated
        public Builder updater(Updater updater) {
            return updater(updater.getIUpdaterWithDefaultConfig());
        }

        /**
         * Gradient updater configuration. For example, {@link org.nd4j.linalg.learning.config.Adam}
         * or {@link org.nd4j.linalg.learning.config.Nesterovs}
         *
         * @param updater Updater to use
         */
        public Builder updater(IUpdater updater) {
            this.iUpdater = updater;
            return this;
        }

        /**
         * Gradient updater configuration, for the biases only. If not set, biases will use the updater as
         * set by {@link #updater(IUpdater)}
         *
         * @param updater Updater to use for bias parameters
         */
        public Builder biasUpdater(IUpdater updater) {
            this.biasUpdater = updater;
            return this;
        }

        /**
         * Gradient normalization strategy. Used to specify gradient renormalization, gradient clipping etc.
         * See {@link GradientNormalization} for details
         *
         * @param gradientNormalization Type of normalization to use. Defaults to None.
         * @see GradientNormalization
         */
        public Builder gradientNormalization(GradientNormalization gradientNormalization) {
            this.gradientNormalization = gradientNormalization;
            return this;
        }

        /**
         * Threshold for gradient normalization, only used for GradientNormalization.ClipL2PerLayer,
         * GradientNormalization.ClipL2PerParamType, and GradientNormalization.ClipElementWiseAbsoluteValue<br>
         * Not used otherwise.<br>
         * L2 threshold for first two types of clipping, or absolute value threshold for last type of clipping.
         */
        public Builder gradientNormalizationThreshold(double threshold) {
            this.gradientNormalizationThreshold = threshold;
            return this;
        }

        /**
         * Sets the convolution mode for convolutional layers, which impacts padding and output sizes.
         * See {@link ConvolutionMode} for details. Defaults to ConvolutionMode.TRUNCATE
         *
         * @param convolutionMode Convolution mode to use
         */
        public Builder convolutionMode(ConvolutionMode convolutionMode) {
            this.convolutionMode = convolutionMode;
            return this;
        }

        /**
         * Set constraints to be applied to all layers. Default: no constraints.<br>
         * Constraints can be used to enforce certain conditions (non-negativity of parameters, max-norm regularization,
         * etc). These constraints are applied at each iteration, after the parameters have been updated.
         *
         * @param constraints Constraints to apply to all parameters of all layers
         */
        public Builder constrainAllParameters(LayerConstraint...
constraints) {
            this.allParamConstraints = Arrays.asList(constraints);
            return this;
        }

        /**
         * Set constraints to be applied to all layers. Default: no constraints.<br>
         * Constraints can be used to enforce certain conditions (non-negativity of parameters, max-norm regularization,
         * etc). These constraints are applied at each iteration, after the parameters have been updated.
         *
         * @param constraints Constraints to apply to all bias parameters of all layers
         */
        public Builder constrainBias(LayerConstraint... constraints) {
            this.biasConstraints = Arrays.asList(constraints);
            return this;
        }

        /**
         * Set constraints to be applied to all layers. Default: no constraints.<br>
         * Constraints can be used to enforce certain conditions (non-negativity of parameters, max-norm regularization,
         * etc). These constraints are applied at each iteration, after the parameters have been updated.
         *
         * @param constraints Constraints to apply to all weight parameters of all layers
         */
        public Builder constrainWeights(LayerConstraint... constraints) {
            this.weightConstraints = Arrays.asList(constraints);
            return this;
        }

        /**
         * Return a configuration based on this builder
         *
         * @return the built configuration
         */
        public NeuralNetConfiguration build() {
            NeuralNetConfiguration conf = new NeuralNetConfiguration();
            conf.minimize = minimize;
            conf.maxNumLineSearchIterations = maxNumLineSearchIterations;
            conf.layer = layer;
            conf.numIterations = numIterations;
            conf.optimizationAlgo = optimizationAlgo;
            conf.seed = seed;
            conf.stepFunction = stepFunction;
            conf.miniBatch = miniBatch;
            conf.pretrain = pretrain;
            conf.cacheMode = this.cacheMode;

            // Push builder-level defaults into the layer (and its wrapped layer, for FrozenLayer)
            configureLayer(layer);
            if (layer instanceof FrozenLayer) {
                configureLayer(((FrozenLayer) layer).getLayer());
            }
            return conf;
        }

        // Applies builder-level defaults (convolution mode, dropout, regularization etc.) to the
        // given layer, then runs general layer validation. Null layers are tolerated.
        private void configureLayer(Layer layer) {
            String layerName;
            if (layer == null || layer.getLayerName() == null)
                layerName = "Layer not named";
            else
                layerName = layer.getLayerName();

            if (layer != null) {
                copyConfigToLayer(layerName, layer);
            }

            if (layer instanceof FrozenLayer) {
                copyConfigToLayer(layerName, ((FrozenLayer) layer).getLayer());
            }

            if (layer instanceof ConvolutionLayer) {
                ConvolutionLayer cl = (ConvolutionLayer) layer;
                if (cl.getConvolutionMode() == null) {
                    cl.setConvolutionMode(convolutionMode);
                }
            }
            if (layer instanceof SubsamplingLayer) {
                SubsamplingLayer sl = (SubsamplingLayer) layer;
                if (sl.getConvolutionMode() == null) {
                    sl.setConvolutionMode(convolutionMode);
                }
            }
            LayerValidation.generalValidation(layerName, layer, idropOut, l2, l2Bias, l1, l1Bias, dist,
                            allParamConstraints, weightConstraints, biasConstraints);
        }

        // Copies each builder-level setting into the layer only where the layer has not
        // set its own value (null / NaN means "unset" on the layer)
        private void copyConfigToLayer(String layerName, Layer layer) {
            if (layer.getIDropout() == null)
                layer.setIDropout(idropOut);

            if (layer instanceof BaseLayer) {
                BaseLayer bLayer = (BaseLayer) layer;
                if (Double.isNaN(bLayer.getL1()))
                    bLayer.setL1(l1);
                if (Double.isNaN(bLayer.getL2()))
                    bLayer.setL2(l2);
                if (bLayer.getActivationFn() == null)
                    bLayer.setActivationFn(activationFn);
                if (bLayer.getWeightInit() == null)
                    bLayer.setWeightInit(weightInit);
                if (Double.isNaN(bLayer.getBiasInit()))
                    bLayer.setBiasInit(biasInit);

                //Configure weight noise:
                if (weightNoise != null && ((BaseLayer) layer).getWeightNoise() == null) {
                    ((BaseLayer) layer).setWeightNoise(weightNoise.clone());
                }

                //Configure updaters:
                if (iUpdater != null && bLayer.getIUpdater() == null) {
                    bLayer.setIUpdater(iUpdater);
                }
                if (biasUpdater != null && bLayer.getBiasUpdater() == null) {
                    bLayer.setBiasUpdater(biasUpdater);
                }

                if (bLayer.getIUpdater() == null && iUpdater == null && bLayer.initializer().numParams(bLayer) > 0) {
                    //No updater set anywhere
                    IUpdater u = new Sgd();
                    bLayer.setIUpdater(u);
                    log.warn("*** No updater configuration is set for layer {} - defaulting to {} ***", layerName, u);
                }

                if (bLayer.getGradientNormalization() == null)
                    bLayer.setGradientNormalization(gradientNormalization);
                if (Double.isNaN(bLayer.getGradientNormalizationThreshold()))
                    bLayer.setGradientNormalizationThreshold(gradientNormalizationThreshold);
            }
        }
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
/*
 * This code was generated by https://github.com/googleapis/google-api-java-client-services/
 * Modify at your own risk.
 */

package com.google.api.services.compute.model;

/**
 * Model definition for DiskAggregatedList.
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the Compute Engine API. For a detailed explanation see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * <p>All fields are {@code @Key}-annotated so that the JSON (de)serializer maps them by name;
 * getters/setters are plain accessors with no extra behavior.</p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class DiskAggregatedList extends com.google.api.client.json.GenericJson {

  /**
   * [Output Only] Unique identifier for the resource; defined by the server.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String id;

  /**
   * A list of DisksScopedList resources, keyed by scope (e.g. zone).
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.Map<String, DisksScopedList> items;

  /**
   * [Output Only] Type of resource. Always compute#diskAggregatedList for aggregated lists of
   * persistent disks.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String kind;

  /**
   * [Output Only] This token allows you to get the next page of results for list requests. If the
   * number of results is larger than maxResults, use the nextPageToken as a value for the query
   * parameter pageToken in the next list request. Subsequent list requests will have their own
   * nextPageToken to continue paging through the results.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String nextPageToken;

  /**
   * [Output Only] Server-defined URL for this resource.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String selfLink;

  /**
   * [Output Only] Informational warning message.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Warning warning;

  /**
   * [Output Only] Unique identifier for the resource; defined by the server.
   * @return value or {@code null} for none
   */
  public java.lang.String getId() {
    return id;
  }

  /**
   * [Output Only] Unique identifier for the resource; defined by the server.
   * @param id id or {@code null} for none
   */
  public DiskAggregatedList setId(java.lang.String id) {
    this.id = id;
    return this;
  }

  /**
   * A list of DisksScopedList resources.
   * @return value or {@code null} for none
   */
  public java.util.Map<String, DisksScopedList> getItems() {
    return items;
  }

  /**
   * A list of DisksScopedList resources.
   * @param items items or {@code null} for none
   */
  public DiskAggregatedList setItems(java.util.Map<String, DisksScopedList> items) {
    this.items = items;
    return this;
  }

  /**
   * [Output Only] Type of resource. Always compute#diskAggregatedList for aggregated lists of
   * persistent disks.
   * @return value or {@code null} for none
   */
  public java.lang.String getKind() {
    return kind;
  }

  /**
   * [Output Only] Type of resource. Always compute#diskAggregatedList for aggregated lists of
   * persistent disks.
   * @param kind kind or {@code null} for none
   */
  public DiskAggregatedList setKind(java.lang.String kind) {
    this.kind = kind;
    return this;
  }

  /**
   * [Output Only] This token allows you to get the next page of results for list requests. If the
   * number of results is larger than maxResults, use the nextPageToken as a value for the query
   * parameter pageToken in the next list request. Subsequent list requests will have their own
   * nextPageToken to continue paging through the results.
   * @return value or {@code null} for none
   */
  public java.lang.String getNextPageToken() {
    return nextPageToken;
  }

  /**
   * [Output Only] This token allows you to get the next page of results for list requests. If the
   * number of results is larger than maxResults, use the nextPageToken as a value for the query
   * parameter pageToken in the next list request. Subsequent list requests will have their own
   * nextPageToken to continue paging through the results.
   * @param nextPageToken nextPageToken or {@code null} for none
   */
  public DiskAggregatedList setNextPageToken(java.lang.String nextPageToken) {
    this.nextPageToken = nextPageToken;
    return this;
  }

  /**
   * [Output Only] Server-defined URL for this resource.
   * @return value or {@code null} for none
   */
  public java.lang.String getSelfLink() {
    return selfLink;
  }

  /**
   * [Output Only] Server-defined URL for this resource.
   * @param selfLink selfLink or {@code null} for none
   */
  public DiskAggregatedList setSelfLink(java.lang.String selfLink) {
    this.selfLink = selfLink;
    return this;
  }

  /**
   * [Output Only] Informational warning message.
   * @return value or {@code null} for none
   */
  public Warning getWarning() {
    return warning;
  }

  /**
   * [Output Only] Informational warning message.
   * @param warning warning or {@code null} for none
   */
  public DiskAggregatedList setWarning(Warning warning) {
    this.warning = warning;
    return this;
  }

  @Override
  public DiskAggregatedList set(String fieldName, Object value) {
    return (DiskAggregatedList) super.set(fieldName, value);
  }

  @Override
  public DiskAggregatedList clone() {
    return (DiskAggregatedList) super.clone();
  }

  /**
   * [Output Only] Informational warning message.
   */
  public static final class Warning extends com.google.api.client.json.GenericJson {

    /**
     * [Output Only] A warning code, if applicable. For example, Compute Engine returns
     * NO_RESULTS_ON_PAGE if there are no results in the response.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.String code;

    /**
     * [Output Only] Metadata about this warning in key: value format. For example: "data": [ { "key":
     * "scope", "value": "zones/us-east1-d" }
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.util.List<Data> data;

    static {
      // hack to force ProGuard to consider Data used, since otherwise it would be stripped out
      // see https://github.com/google/google-api-java-client/issues/543
      com.google.api.client.util.Data.nullOf(Data.class);
    }

    /**
     * [Output Only] A human-readable description of the warning code.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.String message;

    /**
     * [Output Only] A warning code, if applicable. For example, Compute Engine returns
     * NO_RESULTS_ON_PAGE if there are no results in the response.
     * @return value or {@code null} for none
     */
    public java.lang.String getCode() {
      return code;
    }

    /**
     * [Output Only] A warning code, if applicable. For example, Compute Engine returns
     * NO_RESULTS_ON_PAGE if there are no results in the response.
     * @param code code or {@code null} for none
     */
    public Warning setCode(java.lang.String code) {
      this.code = code;
      return this;
    }

    /**
     * [Output Only] Metadata about this warning in key: value format. For example: "data": [ { "key":
     * "scope", "value": "zones/us-east1-d" }
     * @return value or {@code null} for none
     */
    public java.util.List<Data> getData() {
      return data;
    }

    /**
     * [Output Only] Metadata about this warning in key: value format. For example: "data": [ { "key":
     * "scope", "value": "zones/us-east1-d" }
     * @param data data or {@code null} for none
     */
    public Warning setData(java.util.List<Data> data) {
      this.data = data;
      return this;
    }

    /**
     * [Output Only] A human-readable description of the warning code.
     * @return value or {@code null} for none
     */
    public java.lang.String getMessage() {
      return message;
    }

    /**
     * [Output Only] A human-readable description of the warning code.
     * @param message message or {@code null} for none
     */
    public Warning setMessage(java.lang.String message) {
      this.message = message;
      return this;
    }

    @Override
    public Warning set(String fieldName, Object value) {
      return (Warning) super.set(fieldName, value);
    }

    @Override
    public Warning clone() {
      return (Warning) super.clone();
    }

    /**
     * Model definition for DiskAggregatedListWarningData.
     */
    public static final class Data extends com.google.api.client.json.GenericJson {

      /**
       * [Output Only] A key that provides more detail on the warning being returned. For example, for
       * warnings where there are no results in a list request for a particular zone, this key might be
       * scope and the key value might be the zone name. Other examples might be a key indicating a
       * deprecated resource and a suggested replacement, or a warning about invalid network settings
       * (for example, if an instance attempts to perform IP forwarding but is not enabled for IP
       * forwarding).
       * The value may be {@code null}.
       */
      @com.google.api.client.util.Key
      private java.lang.String key;

      /**
       * [Output Only] A warning data value corresponding to the key.
       * The value may be {@code null}.
       */
      @com.google.api.client.util.Key
      private java.lang.String value;

      /**
       * [Output Only] A key that provides more detail on the warning being returned. For example, for
       * warnings where there are no results in a list request for a particular zone, this key might be
       * scope and the key value might be the zone name. Other examples might be a key indicating a
       * deprecated resource and a suggested replacement, or a warning about invalid network settings
       * (for example, if an instance attempts to perform IP forwarding but is not enabled for IP
       * forwarding).
       * @return value or {@code null} for none
       */
      public java.lang.String getKey() {
        return key;
      }

      /**
       * [Output Only] A key that provides more detail on the warning being returned. For example, for
       * warnings where there are no results in a list request for a particular zone, this key might be
       * scope and the key value might be the zone name. Other examples might be a key indicating a
       * deprecated resource and a suggested replacement, or a warning about invalid network settings
       * (for example, if an instance attempts to perform IP forwarding but is not enabled for IP
       * forwarding).
       * @param key key or {@code null} for none
       */
      public Data setKey(java.lang.String key) {
        this.key = key;
        return this;
      }

      /**
       * [Output Only] A warning data value corresponding to the key.
       * @return value or {@code null} for none
       */
      public java.lang.String getValue() {
        return value;
      }

      /**
       * [Output Only] A warning data value corresponding to the key.
       * @param value value or {@code null} for none
       */
      public Data setValue(java.lang.String value) {
        this.value = value;
        return this;
      }

      @Override
      public Data set(String fieldName, Object value) {
        return (Data) super.set(fieldName, value);
      }

      @Override
      public Data clone() {
        return (Data) super.clone();
      }

    }
  }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.gateway;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.nodes.BaseNodeRequest;
import org.elasticsearch.action.support.nodes.BaseNodeResponse;
import org.elasticsearch.action.support.nodes.BaseNodesRequest;
import org.elasticsearch.action.support.nodes.BaseNodesResponse;
import org.elasticsearch.action.support.nodes.TransportNodesAction;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardPath;
import org.elasticsearch.index.shard.ShardStateMetaData;
import org.elasticsearch.index.store.Store;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicReferenceArray;

/**
 * This transport action is used to fetch the shard version from each node during primary allocation in {@link GatewayAllocator}.
 * We use this to find out which node holds the latest shard version and which of them used to be a primary in order to allocate
 * shards after node or cluster restarts.
 */
public class TransportNodesListGatewayStartedShards extends
        TransportNodesAction<TransportNodesListGatewayStartedShards.Request,
                TransportNodesListGatewayStartedShards.NodesGatewayStartedShards,
                TransportNodesListGatewayStartedShards.NodeRequest,
                TransportNodesListGatewayStartedShards.NodeGatewayStartedShards>
        implements AsyncShardFetch.List<TransportNodesListGatewayStartedShards.NodesGatewayStartedShards,
                TransportNodesListGatewayStartedShards.NodeGatewayStartedShards> {

    public static final String ACTION_NAME = "internal:gateway/local/started_shards";

    // Node-local environment; used to locate the on-disk shard state to load.
    private final NodeEnvironment nodeEnv;

    @Inject
    public TransportNodesListGatewayStartedShards(Settings settings, ClusterName clusterName, ThreadPool threadPool,
                                                  ClusterService clusterService, TransportService transportService,
                                                  ActionFilters actionFilters,
                                                  IndexNameExpressionResolver indexNameExpressionResolver,
                                                  NodeEnvironment env) {
        super(settings, ACTION_NAME, clusterName, threadPool, clusterService, transportService, actionFilters,
                indexNameExpressionResolver, Request.class, NodeRequest.class, ThreadPool.Names.FETCH_SHARD_STARTED);
        this.nodeEnv = env;
    }

    /**
     * Entry point used by the allocator: fans the request out to the given node ids and collects
     * each node's started-shard state for the given shard.
     */
    @Override
    public void list(ShardId shardId, IndexMetaData indexMetaData, String[] nodesIds,
                     ActionListener<NodesGatewayStartedShards> listener) {
        execute(new Request(shardId, indexMetaData.getIndexUUID(), nodesIds), listener);
    }

    @Override
    protected String[] resolveNodes(Request request, ClusterState clusterState) {
        // default implementation may filter out non existent nodes. it's important to keep exactly the ids
        // we were given for accounting on the caller
        return request.nodesIds();
    }

    @Override
    protected boolean transportCompress() {
        return true; // this can become big...
    }

    @Override
    protected NodeRequest newNodeRequest(String nodeId, Request request) {
        return new NodeRequest(nodeId, request);
    }

    @Override
    protected NodeGatewayStartedShards newNodeResponse() {
        return new NodeGatewayStartedShards();
    }

    /**
     * Aggregates the per-node responses; per-node failures are collected separately rather than
     * failing the whole request.
     */
    @Override
    protected NodesGatewayStartedShards newResponse(Request request, AtomicReferenceArray responses) {
        final List<NodeGatewayStartedShards> nodesList = new ArrayList<>();
        final List<FailedNodeException> failures = new ArrayList<>();
        for (int i = 0; i < responses.length(); i++) {
            Object resp = responses.get(i);
            if (resp instanceof NodeGatewayStartedShards) { // will also filter out null response for unallocated ones
                nodesList.add((NodeGatewayStartedShards) resp);
            } else if (resp instanceof FailedNodeException) {
                failures.add((FailedNodeException) resp);
            } else {
                logger.warn("unknown response type [{}], expected NodeLocalGatewayStartedShards or FailedNodeException",
                        resp);
            }
        }
        return new NodesGatewayStartedShards(clusterName, nodesList.toArray(new NodeGatewayStartedShards[nodesList.size()]),
                failures.toArray(new FailedNodeException[failures.size()]));
    }

    /**
     * Runs on each target node: loads the latest on-disk shard state metadata and reports the shard
     * version (-1 when no local state exists). If the index cannot be opened, the version is still
     * reported along with the store exception.
     */
    @Override
    protected NodeGatewayStartedShards nodeOperation(NodeRequest request) {
        try {
            final ShardId shardId = request.getShardId();
            final String indexUUID = request.getIndexUUID();
            logger.trace("{} loading local shard state info", shardId);
            ShardStateMetaData shardStateMetaData = ShardStateMetaData.FORMAT.loadLatestState(logger,
                    nodeEnv.availableShardPaths(request.shardId));
            if (shardStateMetaData != null) {
                final IndexMetaData metaData = clusterService.state().metaData().index(shardId.index().name()); // it's a mystery why this is sometimes null
                if (metaData != null) {
                    ShardPath shardPath = null;
                    try {
                        shardPath = ShardPath.loadShardPath(logger, nodeEnv, shardId, metaData.settings());
                        if (shardPath == null) {
                            throw new IllegalStateException(shardId + " no shard path found");
                        }
                        // Verify the index data is actually readable before reporting a usable copy.
                        Store.tryOpenIndex(shardPath.resolveIndex());
                    } catch (Exception exception) {
                        logger.trace("{} can't open index for shard [{}] in path [{}]", exception, shardId,
                                shardStateMetaData, (shardPath != null) ? shardPath.resolveIndex() : "");
                        return new NodeGatewayStartedShards(clusterService.localNode(), shardStateMetaData.version,
                                exception);
                    }
                }
                // old shard metadata doesn't have the actual index UUID so we need to check if the actual uuid in the metadata
                // is equal to IndexMetaData.INDEX_UUID_NA_VALUE otherwise this shard doesn't belong to the requested index.
                if (indexUUID.equals(shardStateMetaData.indexUUID) == false
                        && IndexMetaData.INDEX_UUID_NA_VALUE.equals(shardStateMetaData.indexUUID) == false) {
                    logger.warn("{} shard state info found but indexUUID didn't match expected [{}] actual [{}]",
                            shardId, indexUUID, shardStateMetaData.indexUUID);
                } else {
                    logger.debug("{} shard state info found: [{}]", shardId, shardStateMetaData);
                    return new NodeGatewayStartedShards(clusterService.localNode(), shardStateMetaData.version);
                }
            }
            logger.trace("{} no local shard info found", shardId);
            return new NodeGatewayStartedShards(clusterService.localNode(), -1);
        } catch (Exception e) {
            throw new ElasticsearchException("failed to load started shards", e);
        }
    }

    @Override
    protected boolean accumulateExceptions() {
        return true;
    }

    /** Top-level request: which shard/index-UUID to look up, on which nodes. */
    public static class Request extends BaseNodesRequest<Request> {

        private ShardId shardId;
        private String indexUUID;

        public Request() {
        }

        public Request(ShardId shardId, String indexUUID, String[] nodesIds) {
            super(nodesIds);
            this.shardId = shardId;
            this.indexUUID = indexUUID;
        }

        public ShardId shardId() {
            return this.shardId;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            shardId = ShardId.readShardId(in);
            indexUUID = in.readString();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            shardId.writeTo(out);
            out.writeString(indexUUID);
        }

        public String getIndexUUID() {
            return indexUUID;
        }
    }

    /** Aggregated response holding one entry per responding node plus per-node failures. */
    public static class NodesGatewayStartedShards extends BaseNodesResponse<NodeGatewayStartedShards> {

        private FailedNodeException[] failures;

        public NodesGatewayStartedShards(ClusterName clusterName, NodeGatewayStartedShards[] nodes,
                                         FailedNodeException[] failures) {
            super(clusterName, nodes);
            this.failures = failures;
        }

        @Override
        public FailedNodeException[] failures() {
            return failures;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            nodes = new NodeGatewayStartedShards[in.readVInt()];
            for (int i = 0; i < nodes.length; i++) {
                nodes[i] = new NodeGatewayStartedShards();
                nodes[i].readFrom(in);
            }
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeVInt(nodes.length);
            for (NodeGatewayStartedShards response : nodes) {
                response.writeTo(out);
            }
        }
    }

    /** Per-node request: carries the shard id and index UUID to the target node. */
    public static class NodeRequest extends BaseNodeRequest {

        private ShardId shardId;
        private String indexUUID;

        public NodeRequest() {
        }

        NodeRequest(String nodeId, TransportNodesListGatewayStartedShards.Request request) {
            super(request, nodeId);
            this.shardId = request.shardId();
            this.indexUUID = request.getIndexUUID();
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            shardId = ShardId.readShardId(in);
            indexUUID = in.readString();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            shardId.writeTo(out);
            out.writeString(indexUUID);
        }

        public ShardId getShardId() {
            return shardId;
        }

        public String getIndexUUID() {
            return indexUUID;
        }
    }

    /**
     * Per-node response: the local shard state version (-1 when no local state was found) and an
     * optional store exception if the index could not be opened.
     */
    public static class NodeGatewayStartedShards extends BaseNodeResponse {

        private long version = -1;
        private Throwable storeException = null;

        public NodeGatewayStartedShards() {
        }

        public NodeGatewayStartedShards(DiscoveryNode node, long version) {
            this(node, version, null);
        }

        public NodeGatewayStartedShards(DiscoveryNode node, long version, Throwable storeException) {
            super(node);
            this.version = version;
            this.storeException = storeException;
        }

        public long version() {
            return this.version;
        }

        public Throwable storeException() {
            return this.storeException;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            version = in.readLong();
            // A boolean flag on the wire marks whether a serialized throwable follows.
            if (in.readBoolean()) {
                storeException = in.readThrowable();
            }
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeLong(version);
            if (storeException != null) {
                out.writeBoolean(true);
                out.writeThrowable(storeException);
            } else {
                out.writeBoolean(false);
            }
        }
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.mapred;

import java.io.*;
import junit.framework.TestCase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;

/**
 * A JUnit test to test Mini Map-Reduce Cluster with multiple directories and
 * check for correct classpath
 */
public class TestMiniMRClasspath extends TestCase {

  /**
   * Runs a word-count job whose mapper/combiner/reducer classes are loaded
   * from the job jar (not the test classpath), and returns the concatenated
   * job output (one "word\tcount" line per word, newline-terminated).
   *
   * @param fileSys    the DFS name to use as the default file system
   * @param jobTracker host:port of the job tracker
   * @param conf       job configuration to populate and submit
   * @param input      text written to the single input file
   * @param numMaps    number of map tasks
   * @param numReduces number of reduce tasks
   * @return the full job output as a single string
   * @throws IOException on any DFS or job-submission failure
   */
  static String launchWordCount(String fileSys, String jobTracker, JobConf conf,
                                String input, int numMaps, int numReduces)
      throws IOException {
    final Path inDir = new Path("/testing/wc/input");
    final Path outDir = new Path("/testing/wc/output");
    FileSystem fs = FileSystem.getNamed(fileSys, conf);
    fs.delete(outDir, true);
    if (!fs.mkdirs(inDir)) {
      throw new IOException("Mkdirs failed to create " + inDir.toString());
    }
    {
      // write the single input file
      DataOutputStream file = fs.create(new Path(inDir, "part-0"));
      file.writeBytes(input);
      file.close();
    }
    FileSystem.setDefaultUri(conf, fileSys);
    conf.set("mapred.job.tracker", jobTracker);
    conf.setJobName("wordcount");
    conf.setInputFormat(TextInputFormat.class);

    // the keys are words (strings)
    conf.setOutputKeyClass(Text.class);
    // the values are counts (ints)
    conf.setOutputValueClass(IntWritable.class);

    // Mapper/combiner/reducer are set by class NAME so they resolve from the
    // job jar at task time; they are deliberately not on the test classpath.
    conf.set("mapred.mapper.class", "testjar.ClassWordCount$MapClass");
    conf.set("mapred.combine.class", "testjar.ClassWordCount$Reduce");
    conf.set("mapred.reducer.class", "testjar.ClassWordCount$Reduce");
    FileInputFormat.setInputPaths(conf, inDir);
    FileOutputFormat.setOutputPath(conf, outDir);
    conf.setNumMapTasks(numMaps);
    conf.setNumReduceTasks(numReduces);
    //pass a job.jar already included in the hadoop build
    conf.setJar("build/test/testjar/testjob.jar");
    JobClient.runJob(conf);
    StringBuffer result = new StringBuffer();
    {
      Path[] parents = FileUtil.stat2Paths(fs.listStatus(outDir.getParent()));
      Path[] fileList = FileUtil.stat2Paths(fs.listStatus(outDir,
                                                   new OutputLogFilter()));
      for (int i = 0; i < fileList.length; ++i) {
        BufferedReader file =
          new BufferedReader(new InputStreamReader(fs.open(fileList[i])));
        String line = file.readLine();
        while (line != null) {
          result.append(line);
          result.append("\n");
          line = file.readLine();
        }
        file.close();
      }
    }
    return result.toString();
  }

  /**
   * Runs a job using an externally-provided Writable key class
   * (testjar.ExternalWritable, loaded from the job jar) and returns the
   * concatenated job output.
   *
   * @param fileSys    the DFS name to use as the default file system
   * @param jobTracker host:port of the job tracker
   * @param conf       job configuration to populate and submit
   * @param input      text written to the single input file
   * @param numMaps    number of map tasks
   * @param numReduces number of reduce tasks
   * @return the full job output as a single string
   * @throws IOException on any DFS or job-submission failure
   */
  static String launchExternal(String fileSys, String jobTracker, JobConf conf,
                               String input, int numMaps, int numReduces)
      throws IOException {

    final Path inDir = new Path("/testing/ext/input");
    final Path outDir = new Path("/testing/ext/output");
    FileSystem fs = FileSystem.getNamed(fileSys, conf);
    fs.delete(outDir, true);
    if (!fs.mkdirs(inDir)) {
      throw new IOException("Mkdirs failed to create " + inDir.toString());
    }
    {
      // write the single input file
      DataOutputStream file = fs.create(new Path(inDir, "part-0"));
      file.writeBytes(input);
      file.close();
    }

    FileSystem.setDefaultUri(conf, fileSys);
    conf.set("mapred.job.tracker", jobTracker);
    conf.setJobName("wordcount");
    conf.setInputFormat(TextInputFormat.class);

    // the values are counts (ints)
    conf.setOutputValueClass(IntWritable.class);
    // the keys are the messages, using the external Writable type from the
    // job jar; set by name since the class is not on the test classpath
    conf.set("mapred.output.key.class", "testjar.ExternalWritable");

    FileInputFormat.setInputPaths(conf, inDir);
    FileOutputFormat.setOutputPath(conf, outDir);
    conf.setNumMapTasks(numMaps);
    conf.setNumReduceTasks(numReduces);
    conf.set("mapred.mapper.class", "testjar.ExternalMapperReducer");
    conf.set("mapred.reducer.class", "testjar.ExternalMapperReducer");

    //pass a job.jar already included in the hadoop build
    conf.setJar("build/test/testjar/testjob.jar");
    JobClient.runJob(conf);
    StringBuffer result = new StringBuffer();
    Path[] fileList = FileUtil.stat2Paths(fs.listStatus(outDir,
                                           new OutputLogFilter()));
    for (int i = 0; i < fileList.length; ++i) {
      BufferedReader file =
        new BufferedReader(new InputStreamReader(fs.open(fileList[i])));
      String line = file.readLine();
      // NOTE(review): the append order differs from launchWordCount but still
      // emits line + "\n" per output line
      while (line != null) {
        result.append(line);
        line = file.readLine();
        result.append("\n");
      }
      file.close();
    }
    return result.toString();
  }

  /**
   * End-to-end check that a word-count job whose classes live only in the job
   * jar runs correctly on a mini DFS + mini MR cluster.
   */
  public void testClassPath() throws IOException {
    String namenode = null;
    MiniDFSCluster dfs = null;
    MiniMRCluster mr = null;
    FileSystem fileSys = null;
    try {
      final int taskTrackers = 4;
      final int jobTrackerPort = 60050; // NOTE(review): unused; cluster picks its own port

      Configuration conf = new Configuration();
      dfs = new MiniDFSCluster(conf, 1, true, null);
      fileSys = dfs.getFileSystem();
      namenode = fileSys.getName();
      mr = new MiniMRCluster(taskTrackers, namenode, 3);
      JobConf jobConf = new JobConf();
      String result;
      final String jobTrackerName = "localhost:" + mr.getJobTrackerPort();
      result = launchWordCount(namenode, jobTrackerName, jobConf,
                               "The quick brown fox\nhas many silly\n" +
                               "red fox sox\n",
                               3, 1);
      assertEquals("The\t1\nbrown\t1\nfox\t2\nhas\t1\nmany\t1\n" +
                   "quick\t1\nred\t1\nsilly\t1\nsox\t1\n", result);

    } finally {
      if (dfs != null) { dfs.shutdown(); }
      if (mr != null) { mr.shutdown(); }
    }
  }

  /**
   * End-to-end check that a job using an external Writable key class from the
   * job jar runs correctly on a mini DFS + mini MR cluster.
   */
  public void testExternalWritable() throws IOException {

    String namenode = null;
    MiniDFSCluster dfs = null;
    MiniMRCluster mr = null;
    FileSystem fileSys = null;

    try {
      final int taskTrackers = 4;

      Configuration conf = new Configuration();
      dfs = new MiniDFSCluster(conf, 1, true, null);
      fileSys = dfs.getFileSystem();
      namenode = fileSys.getName();
      mr = new MiniMRCluster(taskTrackers, namenode, 3);
      JobConf jobConf = new JobConf();
      String result;
      final String jobTrackerName = "localhost:" + mr.getJobTrackerPort();

      result = launchExternal(namenode, jobTrackerName, jobConf,
                              "Dennis was here!\nDennis again!",
                              3, 1);
      assertEquals("Dennis again!\t1\nDennis was here!\t1\n", result);

    } finally {
      if (dfs != null) { dfs.shutdown(); }
      if (mr != null) { mr.shutdown(); }
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.support.builder; import java.io.Closeable; import java.io.IOException; import java.io.InputStream; import java.util.Iterator; import org.apache.camel.Exchange; import org.apache.camel.InvalidPayloadException; import org.apache.camel.support.ExchangeHelper; import org.apache.camel.support.ExpressionAdapter; import org.apache.camel.support.LanguageSupport; import org.apache.camel.util.IOHelper; import org.apache.camel.util.ObjectHelper; import org.apache.camel.util.Scanner; import org.apache.camel.util.StringHelper; /** * {@link org.apache.camel.Expression} to walk a {@link org.apache.camel.Message} body * using an {@link Iterator}, which grabs the content between a start and end token. * <p/> * The message body must be able to convert to {@link InputStream} type which is used as stream * to access the message body. * <p/> * For splitting XML files use {@link TokenXMLExpressionIterator} instead. 
*/ public class TokenPairExpressionIterator extends ExpressionAdapter { protected final String startToken; protected final String endToken; protected final boolean includeTokens; public TokenPairExpressionIterator(String startToken, String endToken, boolean includeTokens) { StringHelper.notEmpty(startToken, "startToken"); StringHelper.notEmpty(endToken, "endToken"); this.startToken = startToken; this.endToken = endToken; this.includeTokens = includeTokens; } @Override public boolean matches(Exchange exchange) { // as a predicate we must close the stream, as we do not return an iterator that can be used // afterwards to iterate the input stream Object value = doEvaluate(exchange, true); return ObjectHelper.evaluateValuePredicate(value); } @Override public Object evaluate(Exchange exchange) { // as we return an iterator to access the input stream, we should not close it return doEvaluate(exchange, false); } /** * Strategy to evaluate the exchange * * @param exchange the exchange * @param closeStream whether to close the stream before returning from this method. 
* @return the evaluated value */ protected Object doEvaluate(Exchange exchange, boolean closeStream) { InputStream in = null; try { in = exchange.getIn().getMandatoryBody(InputStream.class); // we may read from a file, and want to support custom charset defined on the exchange String charset = ExchangeHelper.getCharsetName(exchange); return createIterator(exchange, in, charset); } catch (InvalidPayloadException e) { exchange.setException(e); // must close input stream IOHelper.close(in); return null; } finally { if (closeStream) { IOHelper.close(in); } } } /** * Strategy to create the iterator * * @param exchange the exchange * @param in input stream to iterate * @param charset charset * @return the iterator */ protected Iterator<?> createIterator(Exchange exchange, InputStream in, String charset) { String start = startToken; if (LanguageSupport.hasSimpleFunction(start)) { start = exchange.getContext().resolveLanguage("simple").createExpression(start).evaluate(exchange, String.class); } String end = endToken; if (LanguageSupport.hasSimpleFunction(end)) { end = exchange.getContext().resolveLanguage("simple").createExpression(end).evaluate(exchange, String.class); } TokenPairIterator iterator = new TokenPairIterator(start, end, includeTokens, in, charset); iterator.init(); return iterator; } @Override public String toString() { return "tokenize[body() using tokens: " + startToken + "..." 
+ endToken + "]"; } /** * Iterator to walk the input stream */ static class TokenPairIterator implements Iterator<Object>, Closeable { final String startToken; String scanStartToken; final String endToken; String scanEndToken; final boolean includeTokens; final InputStream in; final String charset; Scanner scanner; Object image; TokenPairIterator(String startToken, String endToken, boolean includeTokens, InputStream in, String charset) { this.startToken = startToken; this.endToken = endToken; this.includeTokens = includeTokens; this.in = in; this.charset = charset; // make sure [ and ] is escaped as we use scanner which is reg exp based // where [ and ] have special meaning scanStartToken = startToken; if (scanStartToken.startsWith("[")) { scanStartToken = "\\" + scanStartToken; } if (scanStartToken.endsWith("]")) { scanStartToken = scanStartToken.substring(0, startToken.length() - 1) + "\\]"; } scanEndToken = endToken; if (scanEndToken.startsWith("[")) { scanEndToken = "\\" + scanEndToken; } if (scanEndToken.endsWith("]")) { scanEndToken = scanEndToken.substring(0, scanEndToken.length() - 1) + "\\]"; } } void init() { // use end token as delimiter this.scanner = new Scanner(in, charset, scanEndToken); // this iterator will do look ahead as we may have data // after the last end token, which the scanner would find // so we need to be one step ahead of the scanner this.image = scanner.hasNext() ? 
next(true) : null; } @Override public boolean hasNext() { return image != null; } @Override public Object next() { return next(false); } Object next(boolean first) { Object answer = image; // calculate next if (scanner.hasNext()) { image = getNext(first); } else { image = null; } if (answer == null) { // first time the image may be null answer = image; } return answer; } Object getNext(boolean first) { String next = scanner.next(); // only grab text after the start token if (next != null && next.contains(startToken)) { next = StringHelper.after(next, startToken); // include tokens in answer if (next != null && includeTokens) { StringBuilder sb = new StringBuilder(); next = sb.append(startToken).append(next).append(endToken).toString(); } } else { // must have start token, otherwise we have reached beyond last tokens // and should not return more data return null; } return next; } @Override public void remove() { // noop } @Override public void close() throws IOException { scanner.close(); } } }
/* * Copyright (c) 2010 Matthew J. Francis and Contributors of the Bobbin Project * This file is distributed under the MIT licence. See the LICENCE file for further information. */ package test.peer; import static org.junit.Assert.*; import java.io.IOException; import java.nio.ByteBuffer; import java.util.HashMap; import java.util.Map; import java.util.TreeMap; import org.itadaki.bobbin.peer.PeerOutboundQueue; import org.itadaki.bobbin.peer.protocol.PeerProtocolBuilder; import org.itadaki.bobbin.peer.protocol.PeerProtocolConstants; import org.itadaki.bobbin.torrentdb.BlockDescriptor; import org.itadaki.bobbin.torrentdb.Piece; import org.itadaki.bobbin.torrentdb.PieceDatabase; import org.itadaki.bobbin.torrentdb.ViewSignature; import org.itadaki.bobbin.util.BitField; import org.itadaki.bobbin.util.counter.StatisticCounter; import org.itadaki.bobbin.util.elastictree.ElasticTree; import org.junit.Test; import test.Util; import test.torrentdb.MockPieceDatabase; /** * Tests PeerOutboundQueue */ public class TestPeerOutboundQueue { /** * Tests that a bitfield message is written through the PeerOutboundQueue's Connection * @throws IOException */ @Test public void testBitfield() throws IOException { BitField bitField = new BitField (10); bitField.set (9); MockConnection connection = new MockConnection(); PieceDatabase pieceDatabase = null; StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); assertFalse (connection.mockIsWriteEnabled()); peerOutboundQueue.sendBitfieldMessage (bitField); connection.mockExpectNoMoreOutput(); assertTrue (connection.mockIsWriteEnabled()); peerOutboundQueue.sendData(); connection.mockExpectOutput (PeerProtocolBuilder.bitfieldMessage (bitField)); connection.mockExpectNoMoreOutput(); } /** * Tests that a bitfield message is written through the PeerOutboundQueue's Connection in 1 byte * chunks * @throws IOException */ @Test public void 
testBitfieldSplit() throws IOException { BitField bitField = new BitField (10); bitField.set (9); MockConnection connection = new MockConnection(); PieceDatabase pieceDatabase = null; StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); assertFalse (connection.mockIsWriteEnabled()); peerOutboundQueue.sendBitfieldMessage (bitField); for (int i = 0; i < 7; i++) { connection.mockSetPermittedWriteBytes (1); assertEquals (1, peerOutboundQueue.sendData()); } connection.mockExpectOutput (PeerProtocolBuilder.bitfieldMessage (bitField)); connection.mockExpectNoMoreOutput(); } /** * Tests that a choke message is written through the PeerOutboundQueue's Connection * @throws IOException */ @Test public void testChoke() throws IOException { MockConnection connection = new MockConnection(); PieceDatabase pieceDatabase = null; StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); assertFalse (connection.mockIsWriteEnabled()); peerOutboundQueue.sendChokeMessage (true); connection.mockExpectNoMoreOutput(); assertTrue (connection.mockIsWriteEnabled()); peerOutboundQueue.sendData(); connection.mockExpectOutput (PeerProtocolBuilder.chokeMessage()); connection.mockExpectNoMoreOutput(); } /** * Tests that a choke message is written through the PeerOutboundQueue's Connection in 1 byte * chunks * @throws IOException */ @Test public void testChokeSplit() throws IOException { MockConnection connection = new MockConnection(); PieceDatabase pieceDatabase = null; StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); assertFalse (connection.mockIsWriteEnabled()); peerOutboundQueue.sendChokeMessage (true); for (int i = 0; i < 5; i++) { 
connection.mockSetPermittedWriteBytes (1); assertEquals (1, peerOutboundQueue.sendData()); } connection.mockExpectOutput (PeerProtocolBuilder.chokeMessage()); connection.mockExpectNoMoreOutput(); } /** * Tests that an interested message is written through the PeerOutboundQueue's Connection * @throws IOException */ @Test public void testInterested() throws IOException { MockConnection connection = new MockConnection(); PieceDatabase pieceDatabase = null; StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); assertFalse (connection.mockIsWriteEnabled()); peerOutboundQueue.sendInterestedMessage (true); connection.mockExpectNoMoreOutput(); assertTrue (connection.mockIsWriteEnabled()); peerOutboundQueue.sendData(); connection.mockExpectOutput (PeerProtocolBuilder.interestedMessage()); connection.mockExpectNoMoreOutput(); } /** * Tests that an interested message is written through the PeerOutboundQueue's Connection in 1 * byte chunks * @throws IOException */ @Test public void testInterestedSplit() throws IOException { MockConnection connection = new MockConnection(); PieceDatabase pieceDatabase = null; StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); assertFalse (connection.mockIsWriteEnabled()); peerOutboundQueue.sendInterestedMessage (true); connection.mockExpectNoMoreOutput(); assertTrue (connection.mockIsWriteEnabled()); for (int i = 0; i < 5; i++) { connection.mockSetPermittedWriteBytes (1); assertEquals (1, peerOutboundQueue.sendData()); } connection.mockExpectOutput (PeerProtocolBuilder.interestedMessage()); connection.mockExpectNoMoreOutput(); } /** * Tests that a piece message is written through the PeerOutboundQueue's Connection * @throws Exception */ @Test public void testPiece() throws Exception { PieceDatabase pieceDatabase = 
MockPieceDatabase.create ("11", 65536); pieceDatabase.start (true); BlockDescriptor descriptor = new BlockDescriptor (1, 32768, 16384); byte[] expectedBlockData = new byte[16384]; System.arraycopy (Util.pseudoRandomBlock (1, 65536, 65536), 32768, expectedBlockData, 0, 16384); MockConnection connection = new MockConnection(); StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); assertFalse (connection.mockIsWriteEnabled()); peerOutboundQueue.sendPieceMessage (descriptor); connection.mockExpectNoMoreOutput(); assertTrue (connection.mockIsWriteEnabled()); peerOutboundQueue.sendData(); connection.mockExpectOutput (PeerProtocolBuilder.pieceMessage (descriptor, ByteBuffer.wrap (expectedBlockData))); connection.mockExpectNoMoreOutput(); } /** * Tests that a piece message is written through the PeerOutboundQueue's Connection in 1 byte * chunks * @throws Exception */ @Test public void testPieceSplit() throws Exception { PieceDatabase pieceDatabase = MockPieceDatabase.create ("11", 65536); pieceDatabase.start (true); BlockDescriptor descriptor = new BlockDescriptor (1, 32768, 16384); byte[] expectedBlockData = new byte[16384]; System.arraycopy (Util.pseudoRandomBlock (1, 65536, 65536), 32768, expectedBlockData, 0, 16384); MockConnection connection = new MockConnection(); StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); peerOutboundQueue.sendPieceMessage (descriptor); for (int i = 0; i < 16397; i++) { connection.mockSetPermittedWriteBytes (1); assertEquals (1, peerOutboundQueue.sendData()); } connection.mockExpectOutput (PeerProtocolBuilder.pieceMessage (descriptor, ByteBuffer.wrap (expectedBlockData))); connection.mockExpectNoMoreOutput(); } /** * Tests that a request message is written through the PeerOutboundQueue's Connection * @throws 
IOException */ @Test public void testRequest() throws IOException { BlockDescriptor descriptor = new BlockDescriptor (1, 32768, 16384); MockConnection connection = new MockConnection(); PieceDatabase pieceDatabase = null; StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); peerOutboundQueue.setRequestsPlugged (false); assertFalse (connection.mockIsWriteEnabled()); peerOutboundQueue.sendRequestMessage (descriptor); connection.mockExpectNoMoreOutput(); assertTrue (connection.mockIsWriteEnabled()); peerOutboundQueue.sendData(); connection.mockExpectOutput (PeerProtocolBuilder.requestMessage (descriptor)); connection.mockExpectNoMoreOutput(); } /** * Tests that a request message is written through the PeerOutboundQueue's Connection in 1 byte * chunks * @throws IOException */ @Test public void testRequestSplit() throws IOException { BlockDescriptor descriptor = new BlockDescriptor (1, 32768, 16384); MockConnection connection = new MockConnection(); PieceDatabase pieceDatabase = null; StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); peerOutboundQueue.setRequestsPlugged (false); peerOutboundQueue.sendRequestMessage (descriptor); for (int i = 0; i < 17; i++) { connection.mockSetPermittedWriteBytes (1); assertEquals (1, peerOutboundQueue.sendData()); } connection.mockExpectOutput (PeerProtocolBuilder.requestMessage (descriptor)); connection.mockExpectNoMoreOutput(); } /** * Tests that a cancel message is written through the PeerOutboundQueue's Connection * @throws IOException */ @Test public void testCancel() throws IOException { BlockDescriptor descriptor = new BlockDescriptor (1, 32768, 16384); MockConnection connection = new MockConnection(); PieceDatabase pieceDatabase = null; StatisticCounter sentBlockCounter = new StatisticCounter(); 
PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); peerOutboundQueue.setRequestsPlugged (false); assertFalse (connection.mockIsWriteEnabled()); peerOutboundQueue.sendRequestMessage (descriptor); connection.mockExpectNoMoreOutput(); assertTrue (connection.mockIsWriteEnabled()); peerOutboundQueue.sendData(); peerOutboundQueue.sendCancelMessage (descriptor, false); peerOutboundQueue.sendData(); connection.mockExpectOutput (PeerProtocolBuilder.requestMessage (descriptor)); connection.mockExpectOutput (PeerProtocolBuilder.cancelMessage (descriptor)); connection.mockExpectNoMoreOutput(); } /** * Tests that a cancel message is written through the PeerOutboundQueue's Connection in 1 byte * chunks * @throws IOException */ @Test public void testCancelSplit() throws IOException { BlockDescriptor descriptor = new BlockDescriptor (1, 32768, 16384); MockConnection connection = new MockConnection(); PieceDatabase pieceDatabase = null; StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); peerOutboundQueue.setRequestsPlugged (false); peerOutboundQueue.sendRequestMessage (descriptor); connection.mockExpectNoMoreOutput(); assertTrue (connection.mockIsWriteEnabled()); peerOutboundQueue.sendData(); peerOutboundQueue.sendCancelMessage (descriptor, false); for (int i = 0; i < 17; i++) { connection.mockSetPermittedWriteBytes (1); assertEquals (1, peerOutboundQueue.sendData()); } connection.mockExpectOutput (PeerProtocolBuilder.requestMessage (descriptor)); connection.mockExpectOutput (PeerProtocolBuilder.cancelMessage (descriptor)); connection.mockExpectNoMoreOutput(); } /** * Tests that a have message is written through the PeerOutboundQueue's Connection * @throws IOException */ @Test public void testHave() throws IOException { MockConnection connection = new MockConnection(); PieceDatabase pieceDatabase = null; 
StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); assertFalse (connection.mockIsWriteEnabled()); peerOutboundQueue.sendHaveMessage (1234); connection.mockExpectNoMoreOutput(); assertTrue (connection.mockIsWriteEnabled()); peerOutboundQueue.sendData(); connection.mockExpectOutput (PeerProtocolBuilder.haveMessage (1234)); connection.mockExpectNoMoreOutput(); } /** * Tests that a have message is written through the PeerOutboundQueue's Connection in 1 byte chunks * @throws IOException */ @Test public void testHaveSplit() throws IOException { MockConnection connection = new MockConnection(); PieceDatabase pieceDatabase = null; StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); peerOutboundQueue.sendHaveMessage (1234); for (int i = 0; i < 9; i++) { connection.mockSetPermittedWriteBytes (1); assertEquals (1, peerOutboundQueue.sendData()); } connection.mockExpectOutput (PeerProtocolBuilder.haveMessage (1234)); connection.mockExpectNoMoreOutput(); } /** * Tests that a Have All message is written through the PeerOutboundQueue's Connection * @throws IOException */ @Test public void testHaveAll() throws IOException { MockConnection connection = new MockConnection(); PieceDatabase pieceDatabase = null; StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); assertFalse (connection.mockIsWriteEnabled()); peerOutboundQueue.sendHaveAllMessage(); connection.mockExpectNoMoreOutput(); assertTrue (connection.mockIsWriteEnabled()); peerOutboundQueue.sendData(); connection.mockExpectOutput (PeerProtocolBuilder.haveAllMessage()); connection.mockExpectNoMoreOutput(); } /** * Tests that a Have All message is written through the PeerOutboundQueue's 
Connection * @throws IOException */ @Test public void testHaveNone() throws IOException { MockConnection connection = new MockConnection(); PieceDatabase pieceDatabase = null; StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); assertFalse (connection.mockIsWriteEnabled()); peerOutboundQueue.sendHaveNoneMessage(); connection.mockExpectNoMoreOutput(); assertTrue (connection.mockIsWriteEnabled()); peerOutboundQueue.sendData(); connection.mockExpectOutput (PeerProtocolBuilder.haveNoneMessage()); connection.mockExpectNoMoreOutput(); } /** * Tests that a Reject Request message is written through the PeerOutboundQueue's Connection * @throws IOException */ @Test public void testRejectRequest() throws IOException { MockConnection connection = new MockConnection(); PieceDatabase pieceDatabase = null; StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); BlockDescriptor descriptor = new BlockDescriptor (1, 32768, 16384); assertFalse (connection.mockIsWriteEnabled()); peerOutboundQueue.sendHaveNoneMessage(); peerOutboundQueue.sendRejectRequestMessage (descriptor); connection.mockExpectNoMoreOutput(); assertTrue (connection.mockIsWriteEnabled()); peerOutboundQueue.sendData(); connection.mockExpectOutput (PeerProtocolBuilder.haveNoneMessage()); connection.mockExpectOutput (PeerProtocolBuilder.rejectRequestMessage (descriptor)); connection.mockExpectNoMoreOutput(); } /** * Tests that a piece message is cancelled unsent by a choke message * @throws Exception */ @Test public void testChokeDiscardsPiece() throws Exception { PieceDatabase pieceDatabase = MockPieceDatabase.create ("11", 65536); BlockDescriptor descriptor = new BlockDescriptor (1, 32768, 16384); byte[] expectedBlockData = new byte[16384]; System.arraycopy (Util.pseudoRandomBlock (1, 65536, 
65536), 32768, expectedBlockData, 0, 16384); MockConnection connection = new MockConnection(); StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); assertFalse (connection.mockIsWriteEnabled()); peerOutboundQueue.sendPieceMessage (descriptor); peerOutboundQueue.sendChokeMessage (true); assertTrue (connection.mockIsWriteEnabled()); // Will explode if pieceMessage() is called peerOutboundQueue.sendData(); connection.mockExpectOutput (PeerProtocolBuilder.chokeMessage()); connection.mockExpectNoMoreOutput(); } /** * Tests that a not interested message is cancelled unsent by an interested message * @throws IOException */ @Test public void testNotInterestedCancelsInterested() throws IOException { MockConnection connection = new MockConnection(); PieceDatabase pieceDatabase = null; StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); assertFalse (connection.mockIsWriteEnabled()); peerOutboundQueue.sendInterestedMessage (true); peerOutboundQueue.sendInterestedMessage (false); connection.mockExpectNoMoreOutput(); assertTrue (connection.mockIsWriteEnabled()); peerOutboundQueue.sendData(); connection.mockExpectNoMoreOutput(); } /** * Tests that a not interested message is cancelled unsent by an interested message * @throws IOException */ @Test public void testInterestedCancelsNotInterested() throws IOException { MockConnection connection = new MockConnection(); PieceDatabase pieceDatabase = null; StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); assertFalse (connection.mockIsWriteEnabled()); peerOutboundQueue.sendInterestedMessage (false); peerOutboundQueue.sendInterestedMessage (true); connection.mockExpectNoMoreOutput(); assertTrue 
(connection.mockIsWriteEnabled()); peerOutboundQueue.sendData(); connection.mockExpectNoMoreOutput(); } /** * Tests that a queue limit is applied to inbound requests * @throws Exception */ @Test public void testPieceQueueLimit() throws Exception { PieceDatabase pieceDatabase = MockPieceDatabase.create ("11", 65536); pieceDatabase.start (true); BlockDescriptor descriptor = new BlockDescriptor (1, 32768, 16384); MockConnection connection = new MockConnection(); StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); assertFalse (connection.mockIsWriteEnabled()); for (int i = 0; i < PeerProtocolConstants.MAXIMUM_INBOUND_REQUESTS + 1; i++) { peerOutboundQueue.sendPieceMessage (descriptor); } connection.mockExpectNoMoreOutput(); assertTrue (connection.mockIsWriteEnabled()); peerOutboundQueue.sendData(); for (int i = 0; i < PeerProtocolConstants.MAXIMUM_INBOUND_REQUESTS; i++) { connection.mockExpectOutput (PeerProtocolBuilder.pieceMessage ( descriptor, pieceDatabase.readPiece (descriptor.getPieceNumber()).getBlock (descriptor) )); } connection.mockExpectNoMoreOutput(); } /** * Tests that a request is tracked once only * @throws IOException */ @Test public void testRequestTracked() throws IOException { BlockDescriptor descriptor = new BlockDescriptor (1, 32768, 16384); MockConnection connection = new MockConnection(); PieceDatabase pieceDatabase = null; StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); peerOutboundQueue.setRequestsPlugged (false); connection.mockExpectNoMoreOutput(); assertFalse (connection.mockIsWriteEnabled()); peerOutboundQueue.sendHaveNoneMessage(); peerOutboundQueue.sendRequestMessage (descriptor); peerOutboundQueue.sendData(); connection.mockExpectOutput (PeerProtocolBuilder.haveNoneMessage()); connection.mockExpectOutput 
(PeerProtocolBuilder.requestMessage (descriptor)); connection.mockExpectNoMoreOutput(); assertTrue (peerOutboundQueue.rejectReceived (descriptor)); assertFalse (peerOutboundQueue.rejectReceived (descriptor)); } /** * Tests that a request message is cancelled unsent by a cancel message * @throws IOException */ @Test public void testCancelCancelsRequest() throws IOException { BlockDescriptor descriptor = new BlockDescriptor (1, 32768, 16384); MockConnection connection = new MockConnection(); PieceDatabase pieceDatabase = null; StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); peerOutboundQueue.setRequestsPlugged (false); assertFalse (connection.mockIsWriteEnabled()); peerOutboundQueue.sendRequestMessage (descriptor); peerOutboundQueue.sendCancelMessage (descriptor, false); connection.mockExpectNoMoreOutput(); assertTrue (connection.mockIsWriteEnabled()); peerOutboundQueue.sendData(); connection.mockExpectNoMoreOutput(); } /** * Tests that a cancelled request is tracked once only * @throws IOException */ @Test public void testCancelTracked() throws IOException { BlockDescriptor descriptor = new BlockDescriptor (1, 32768, 16384); MockConnection connection = new MockConnection(); PieceDatabase pieceDatabase = null; StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); peerOutboundQueue.setRequestsPlugged (false); assertFalse (connection.mockIsWriteEnabled()); peerOutboundQueue.sendHaveNoneMessage(); peerOutboundQueue.sendRequestMessage (descriptor); peerOutboundQueue.sendData(); peerOutboundQueue.sendCancelMessage (descriptor, true); peerOutboundQueue.sendData(); connection.mockExpectOutput (PeerProtocolBuilder.haveNoneMessage()); connection.mockExpectOutput (PeerProtocolBuilder.requestMessage (descriptor)); connection.mockExpectOutput 
(PeerProtocolBuilder.cancelMessage (descriptor)); connection.mockExpectNoMoreOutput(); assertTrue (peerOutboundQueue.rejectReceived (descriptor)); assertFalse (peerOutboundQueue.rejectReceived (descriptor)); } /** * Tests that a cancelled request completed by the receipt of a block is tracked once only * @throws IOException */ @Test public void testCancelBlockReceived() throws IOException { BlockDescriptor descriptor = new BlockDescriptor (1, 32768, 16384); MockConnection connection = new MockConnection(); PieceDatabase pieceDatabase = null; StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); peerOutboundQueue.setRequestsPlugged (false); assertFalse (connection.mockIsWriteEnabled()); peerOutboundQueue.sendHaveNoneMessage(); peerOutboundQueue.sendRequestMessage (descriptor); peerOutboundQueue.sendData(); peerOutboundQueue.sendCancelMessage (descriptor, true); peerOutboundQueue.sendData(); connection.mockExpectOutput (PeerProtocolBuilder.haveNoneMessage()); connection.mockExpectOutput (PeerProtocolBuilder.requestMessage (descriptor)); connection.mockExpectOutput (PeerProtocolBuilder.cancelMessage (descriptor)); connection.mockExpectNoMoreOutput(); assertTrue (peerOutboundQueue.requestReceived (descriptor)); assertFalse (peerOutboundQueue.requestReceived (descriptor)); } /** * Tests that a request is requeued by requeueAllRequestMessages() * * @throws IOException */ @Test public void testRequeueAllRequestMessages() throws IOException { BlockDescriptor descriptor = new BlockDescriptor (1, 32768, 16384); MockConnection connection = new MockConnection(); PieceDatabase pieceDatabase = null; StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); peerOutboundQueue.setRequestsPlugged (false); peerOutboundQueue.sendRequestMessage (descriptor); 
peerOutboundQueue.sendData(); connection.mockExpectOutput (PeerProtocolBuilder.requestMessage (descriptor)); connection.mockExpectNoMoreOutput(); peerOutboundQueue.requeueAllRequestMessages(); peerOutboundQueue.sendData(); connection.mockExpectOutput (PeerProtocolBuilder.requestMessage (descriptor)); connection.mockExpectNoMoreOutput(); } /** * Tests that a piece message is cancelled by a call to discardPieceMessage() * @throws Exception */ @Test public void testDiscardPieceMessage() throws Exception { PieceDatabase pieceDatabase = MockPieceDatabase.create ("11", 65536); BlockDescriptor descriptor = new BlockDescriptor (1, 32768, 16384); MockConnection connection = new MockConnection(); StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); assertFalse (connection.mockIsWriteEnabled()); peerOutboundQueue.sendPieceMessage (descriptor); peerOutboundQueue.discardPieceMessage (descriptor); connection.mockExpectNoMoreOutput(); assertTrue (connection.mockIsWriteEnabled()); peerOutboundQueue.sendData(); connection.mockExpectNoMoreOutput(); } /** * Tests hasOutstandingRequests() with no queued or sent requests * * @throws IOException */ @Test public void testhasOutstandingRequestsNone() throws IOException { MockConnection connection = new MockConnection(); PieceDatabase pieceDatabase = null; StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); peerOutboundQueue.setRequestsPlugged (false); peerOutboundQueue.sendData(); connection.mockExpectNoMoreOutput(); assertFalse (peerOutboundQueue.hasOutstandingRequests()); } /** * Tests hasOutstandingRequests() with a queued request * * @throws IOException */ @Test public void testhasOutstandingRequestsQueued() throws IOException { BlockDescriptor descriptor = new BlockDescriptor (1, 32768, 16384); MockConnection 
connection = new MockConnection(); PieceDatabase pieceDatabase = null; StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); peerOutboundQueue.setRequestsPlugged (false); peerOutboundQueue.sendData(); peerOutboundQueue.sendRequestMessage (descriptor); connection.mockExpectNoMoreOutput(); assertTrue (peerOutboundQueue.hasOutstandingRequests()); } /** * Tests hasOutstandingRequests() with a sent request * * @throws IOException */ @Test public void testhasOutstandingRequestsSent() throws IOException { BlockDescriptor descriptor = new BlockDescriptor (1, 32768, 16384); MockConnection connection = new MockConnection(); PieceDatabase pieceDatabase = null; StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); peerOutboundQueue.setRequestsPlugged (false); peerOutboundQueue.sendRequestMessage (descriptor); peerOutboundQueue.sendData(); connection.mockExpectOutput (PeerProtocolBuilder.requestMessage (descriptor)); connection.mockExpectNoMoreOutput(); assertTrue (peerOutboundQueue.hasOutstandingRequests()); } /** * Tests getUnsentPieceCount() * @throws Exception */ @Test public void testUnsentPieceCount() throws Exception { PieceDatabase pieceDatabase = MockPieceDatabase.create ("11", 65536); pieceDatabase.start (true); BlockDescriptor descriptor = new BlockDescriptor (1, 32768, 16384); MockConnection connection = new MockConnection(); StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); assertEquals (0, peerOutboundQueue.getUnsentPieceCount()); peerOutboundQueue.sendPieceMessage (descriptor); connection.mockExpectNoMoreOutput(); assertEquals (1, peerOutboundQueue.getUnsentPieceCount()); peerOutboundQueue.sendData(); 
connection.mockExpectOutput (PeerProtocolBuilder.pieceMessage ( descriptor, pieceDatabase.readPiece (descriptor.getPieceNumber()).getBlock (descriptor) )); connection.mockExpectNoMoreOutput(); assertEquals (0, peerOutboundQueue.getUnsentPieceCount()); } /** * Tests getRequestsNeeded() * @throws IOException */ @Test public void testRequestsNeeded() throws IOException { BlockDescriptor descriptor = new BlockDescriptor (1, 32768, 16384); MockConnection connection = new MockConnection(); PieceDatabase pieceDatabase = null; StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); peerOutboundQueue.setRequestsPlugged (false); assertFalse (connection.mockIsWriteEnabled()); assertEquals (PeerProtocolConstants.MAXIMUM_OUTBOUND_REQUESTS, peerOutboundQueue.getRequestsNeeded()); peerOutboundQueue.sendRequestMessage (descriptor); connection.mockExpectNoMoreOutput(); assertEquals (PeerProtocolConstants.MAXIMUM_OUTBOUND_REQUESTS - 1, peerOutboundQueue.getRequestsNeeded()); peerOutboundQueue.sendData(); connection.mockExpectOutput (PeerProtocolBuilder.requestMessage (descriptor)); connection.mockExpectNoMoreOutput(); assertEquals (PeerProtocolConstants.MAXIMUM_OUTBOUND_REQUESTS - 1, peerOutboundQueue.getRequestsNeeded()); } /** * Tests requestReceived() * @throws IOException */ @Test public void testRequestReceived() throws IOException { BlockDescriptor descriptor = new BlockDescriptor (1, 32768, 16384); MockConnection connection = new MockConnection(); PieceDatabase pieceDatabase = null; StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); peerOutboundQueue.setRequestsPlugged (false); assertFalse (connection.mockIsWriteEnabled()); assertEquals (PeerProtocolConstants.MAXIMUM_OUTBOUND_REQUESTS, peerOutboundQueue.getRequestsNeeded()); 
peerOutboundQueue.sendRequestMessage (descriptor); connection.mockExpectNoMoreOutput(); assertEquals (PeerProtocolConstants.MAXIMUM_OUTBOUND_REQUESTS - 1, peerOutboundQueue.getRequestsNeeded()); peerOutboundQueue.sendData(); connection.mockExpectOutput (PeerProtocolBuilder.requestMessage (descriptor)); connection.mockExpectNoMoreOutput(); assertEquals (PeerProtocolConstants.MAXIMUM_OUTBOUND_REQUESTS - 1, peerOutboundQueue.getRequestsNeeded()); peerOutboundQueue.requestReceived (descriptor); assertEquals (PeerProtocolConstants.MAXIMUM_OUTBOUND_REQUESTS, peerOutboundQueue.getRequestsNeeded()); } /** * Tests sendExtensionHandshake() adding an extension * @throws IOException */ @Test public void testExtensionHandshakeAdd() throws IOException { Map<String,Integer> extensionsEnabled = new HashMap<String,Integer>(); extensionsEnabled.put ("bl_ah", 42); MockConnection connection = new MockConnection(); PieceDatabase pieceDatabase = null; StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); assertFalse (connection.mockIsWriteEnabled()); peerOutboundQueue.sendExtensionHandshake (extensionsEnabled, null, null); connection.mockExpectNoMoreOutput(); assertTrue (connection.mockIsWriteEnabled()); peerOutboundQueue.sendData(); Map<String,Integer> expectedExtensions = new TreeMap<String,Integer>(); expectedExtensions.put ("bl_ah", 42); connection.mockExpectOutput (PeerProtocolBuilder.extensionHandshakeMessage (expectedExtensions, null)); connection.mockExpectNoMoreOutput(); } /** * Tests sendExtensionHandshake() removing an extension * @throws IOException */ @Test public void testExtensionHandshakeRemove() throws IOException { Map<String,Integer> extensionsEnabled = new HashMap<String,Integer>(); extensionsEnabled.put ("bl_ah", 42); MockConnection connection = new MockConnection(); PieceDatabase pieceDatabase = null; StatisticCounter sentBlockCounter = new 
StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); assertFalse (connection.mockIsWriteEnabled()); peerOutboundQueue.updateExtensionMapping (extensionsEnabled, null, null); peerOutboundQueue.sendExtensionHandshake (extensionsEnabled, null, null); peerOutboundQueue.sendExtensionHandshake (null, extensionsEnabled.keySet(), null); connection.mockExpectNoMoreOutput(); assertTrue (connection.mockIsWriteEnabled()); peerOutboundQueue.sendData(); Map<String,Integer> expectedExtensions = new TreeMap<String,Integer>(); expectedExtensions.put ("bl_ah", 42); connection.mockExpectOutput (PeerProtocolBuilder.extensionHandshakeMessage (expectedExtensions, null)); Map<String,Integer> expectedExtensions2 = new TreeMap<String,Integer>(); expectedExtensions2.put ("bl_ah", 0); connection.mockExpectOutput (PeerProtocolBuilder.extensionHandshakeMessage (expectedExtensions2, null)); connection.mockExpectNoMoreOutput(); } /** * Tests sendExtensionMessage() * @throws IOException */ @Test public void testExtensionMessage() throws IOException { Map<String,Integer> extensionsEnabled = new HashMap<String,Integer>(); extensionsEnabled.put ("bl_ah", 42); MockConnection connection = new MockConnection(); PieceDatabase pieceDatabase = null; StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); assertFalse (connection.mockIsWriteEnabled()); peerOutboundQueue.updateExtensionMapping (extensionsEnabled, null, null); peerOutboundQueue.sendExtensionMessage ("bl_ah", ByteBuffer.wrap (new byte[] { 1, 2, 3, 4 })); connection.mockExpectNoMoreOutput(); assertTrue (connection.mockIsWriteEnabled()); peerOutboundQueue.sendData(); Map<String,Integer> expectedExtensions = new TreeMap<String,Integer>(); expectedExtensions.put ("bl_ah", 42); connection.mockExpectOutput (PeerProtocolBuilder.extensionMessage (42, ByteBuffer.wrap 
(new byte[] { 1, 2, 3, 4}))); connection.mockExpectNoMoreOutput(); } /** * Tests sendPieceMessage() with the Merkle extension enabled * @throws Exception */ @Test public void testMerklePieceMessage() throws Exception { int pieceSize = 1024; int totalLength = 1024; BlockDescriptor blockDescriptor = new BlockDescriptor (0, 0, 1024); Map<String,Integer> extensionsEnabled = new HashMap<String,Integer>(); extensionsEnabled.put (PeerProtocolConstants.EXTENSION_MERKLE, (int)PeerProtocolConstants.EXTENDED_MESSAGE_TYPE_MERKLE); MockConnection connection = new MockConnection(); ElasticTree tree = ElasticTree.buildFromLeaves (pieceSize, totalLength, Util.pseudoRandomBlockHashes (pieceSize, totalLength)); PieceDatabase pieceDatabase = MockPieceDatabase.createEmptyMerkle (pieceSize, totalLength, tree.getView(totalLength).getRootHash()); pieceDatabase.start (true); pieceDatabase.writePiece (new Piece (0, ByteBuffer.wrap (Util.pseudoRandomBlock (0, pieceSize, pieceSize)), tree.getHashChain (0, pieceSize))); StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); assertFalse (connection.mockIsWriteEnabled()); peerOutboundQueue.updateExtensionMapping (extensionsEnabled, null, null); peerOutboundQueue.sendHaveNoneMessage(); peerOutboundQueue.sendExtensionHandshake (extensionsEnabled, null, null); peerOutboundQueue.sendPieceMessage (blockDescriptor); connection.mockExpectNoMoreOutput(); assertTrue (connection.mockIsWriteEnabled()); peerOutboundQueue.sendData(); connection.mockExpectOutput (PeerProtocolBuilder.haveNoneMessage()); Map<String,Integer> expectedExtensions = new TreeMap<String,Integer>(); expectedExtensions.put (PeerProtocolConstants.EXTENSION_MERKLE, (int)PeerProtocolConstants.EXTENDED_MESSAGE_TYPE_MERKLE); connection.mockExpectOutput (PeerProtocolBuilder.extensionHandshakeMessage (expectedExtensions, null)); connection.mockExpectOutput 
(PeerProtocolBuilder.merklePieceMessage (PeerProtocolConstants.EXTENDED_MESSAGE_TYPE_MERKLE, blockDescriptor, tree.getHashChain(0, pieceSize).getHashes(), ByteBuffer.wrap (Util.pseudoRandomBlock (0, pieceSize, pieceSize)))); connection.mockExpectNoMoreOutput(); pieceDatabase.terminate (true); } /** * Tests sendELasticSignatureMessage() with the Elastic extension enabled * @throws Exception */ @Test public void testElasticSignatureMessage() throws Exception { int pieceSize = 1024; int totalLength = 1024; long viewLength = 1024; Map<String,Integer> extensionsEnabled = new HashMap<String,Integer>(); extensionsEnabled.put (PeerProtocolConstants.EXTENSION_ELASTIC, (int)PeerProtocolConstants.EXTENDED_MESSAGE_TYPE_ELASTIC); MockConnection connection = new MockConnection(); ElasticTree tree = ElasticTree.buildFromLeaves (pieceSize, totalLength, Util.pseudoRandomBlockHashes (pieceSize, totalLength)); PieceDatabase pieceDatabase = MockPieceDatabase.createEmptyMerkle (pieceSize, totalLength, tree.getView(totalLength).getRootHash()); pieceDatabase.start (true); pieceDatabase.writePiece (new Piece (0, ByteBuffer.wrap (Util.pseudoRandomBlock (0, pieceSize, pieceSize)), tree.getHashChain (0, pieceSize))); StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); assertFalse (connection.mockIsWriteEnabled()); ViewSignature viewSignature = new ViewSignature (viewLength, ByteBuffer.wrap (tree.getView (viewLength).getRootHash()), ByteBuffer.allocate (40)); peerOutboundQueue.updateExtensionMapping (extensionsEnabled, null, null); peerOutboundQueue.sendHaveNoneMessage(); peerOutboundQueue.sendExtensionHandshake (extensionsEnabled, null, null); peerOutboundQueue.sendElasticSignatureMessage (viewSignature); connection.mockExpectNoMoreOutput(); assertTrue (connection.mockIsWriteEnabled()); peerOutboundQueue.sendData(); connection.mockExpectOutput 
(PeerProtocolBuilder.haveNoneMessage()); Map<String,Integer> expectedExtensions = new TreeMap<String,Integer>(); expectedExtensions.put (PeerProtocolConstants.EXTENSION_ELASTIC, (int)PeerProtocolConstants.EXTENDED_MESSAGE_TYPE_ELASTIC); connection.mockExpectOutput (PeerProtocolBuilder.extensionHandshakeMessage (expectedExtensions, null)); connection.mockExpectOutput (PeerProtocolBuilder.elasticSignatureMessage (PeerProtocolConstants.EXTENDED_MESSAGE_TYPE_ELASTIC, viewSignature)); connection.mockExpectNoMoreOutput(); pieceDatabase.terminate (true); } /** * Tests sendPieceMessage() with the Elastic extension enabled * @throws Exception */ @Test public void testElasticPieceMessage() throws Exception { int pieceSize = 1024; int totalLength = 1024; BlockDescriptor blockDescriptor = new BlockDescriptor (0, 0, 1024); long viewLength = 1024; Map<String,Integer> extensionsEnabled = new HashMap<String,Integer>(); extensionsEnabled.put (PeerProtocolConstants.EXTENSION_ELASTIC, (int)PeerProtocolConstants.EXTENDED_MESSAGE_TYPE_ELASTIC); MockConnection connection = new MockConnection(); ElasticTree tree = ElasticTree.buildFromLeaves (pieceSize, totalLength, Util.pseudoRandomBlockHashes (pieceSize, totalLength)); PieceDatabase pieceDatabase = MockPieceDatabase.createElastic ("0", totalLength); pieceDatabase.start (true); pieceDatabase.writePiece (new Piece (0, ByteBuffer.wrap (Util.pseudoRandomBlock (0, pieceSize, pieceSize)), tree.getHashChain (0, pieceSize))); StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); assertFalse (connection.mockIsWriteEnabled()); peerOutboundQueue.updateExtensionMapping (extensionsEnabled, null, null); peerOutboundQueue.sendHaveNoneMessage(); peerOutboundQueue.sendExtensionHandshake (extensionsEnabled, null, null); peerOutboundQueue.sendPieceMessage (blockDescriptor); connection.mockExpectNoMoreOutput(); assertTrue 
(connection.mockIsWriteEnabled()); peerOutboundQueue.sendData(); connection.mockExpectOutput (PeerProtocolBuilder.haveNoneMessage()); Map<String,Integer> expectedExtensions = new TreeMap<String,Integer>(); expectedExtensions.put (PeerProtocolConstants.EXTENSION_ELASTIC, (int)PeerProtocolConstants.EXTENDED_MESSAGE_TYPE_ELASTIC); connection.mockExpectOutput (PeerProtocolBuilder.extensionHandshakeMessage (expectedExtensions, null)); connection.mockExpectOutput (PeerProtocolBuilder.elasticPieceMessage (PeerProtocolConstants.EXTENDED_MESSAGE_TYPE_ELASTIC, blockDescriptor, viewLength, tree.getHashChain(0, pieceSize).getHashes(), ByteBuffer.wrap (Util.pseudoRandomBlock (0, pieceSize, pieceSize)))); connection.mockExpectNoMoreOutput(); pieceDatabase.terminate (true); } /** * Tests sendELasticBitfieldMessage() with the Elastic extension enabled * @throws Exception */ @Test public void testElasticBitfieldMessage() throws Exception { int pieceSize = 1024; int totalLength = 1024; BitField bitfield = new BitField (new byte[] { (byte)0xff, 0x00, (byte)0xee, (byte)0xf0 }, 28); Map<String,Integer> extensionsEnabled = new HashMap<String,Integer>(); extensionsEnabled.put (PeerProtocolConstants.EXTENSION_ELASTIC, (int)PeerProtocolConstants.EXTENDED_MESSAGE_TYPE_ELASTIC); MockConnection connection = new MockConnection(); ElasticTree tree = ElasticTree.buildFromLeaves (pieceSize, totalLength, Util.pseudoRandomBlockHashes (pieceSize, totalLength)); PieceDatabase pieceDatabase = MockPieceDatabase.createEmptyMerkle (pieceSize, totalLength, tree.getView(totalLength).getRootHash()); pieceDatabase.start (true); pieceDatabase.writePiece (new Piece (0, ByteBuffer.wrap (Util.pseudoRandomBlock (0, pieceSize, pieceSize)), tree.getHashChain (0, pieceSize))); StatisticCounter sentBlockCounter = new StatisticCounter(); PeerOutboundQueue peerOutboundQueue = new PeerOutboundQueue (connection, pieceDatabase, sentBlockCounter); assertFalse (connection.mockIsWriteEnabled()); 
peerOutboundQueue.updateExtensionMapping (extensionsEnabled, null, null); peerOutboundQueue.sendHaveNoneMessage(); peerOutboundQueue.sendExtensionHandshake (extensionsEnabled, null, null); peerOutboundQueue.sendElasticBitfieldMessage (bitfield); connection.mockExpectNoMoreOutput(); assertTrue (connection.mockIsWriteEnabled()); peerOutboundQueue.sendData(); connection.mockExpectOutput (PeerProtocolBuilder.haveNoneMessage()); Map<String,Integer> expectedExtensions = new TreeMap<String,Integer>(); expectedExtensions.put (PeerProtocolConstants.EXTENSION_ELASTIC, (int)PeerProtocolConstants.EXTENDED_MESSAGE_TYPE_ELASTIC); connection.mockExpectOutput (PeerProtocolBuilder.extensionHandshakeMessage (expectedExtensions, null)); connection.mockExpectOutput (PeerProtocolBuilder.elasticBitfieldMessage (PeerProtocolConstants.EXTENDED_MESSAGE_TYPE_ELASTIC, bitfield)); connection.mockExpectNoMoreOutput(); pieceDatabase.terminate (true); } }
/* * Copyright 2007 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.google.gwt.dev.jjs.impl; import com.google.gwt.dev.jjs.ast.Context; import com.google.gwt.dev.jjs.ast.JArrayLength; import com.google.gwt.dev.jjs.ast.JArrayRef; import com.google.gwt.dev.jjs.ast.JBinaryOperation; import com.google.gwt.dev.jjs.ast.JBinaryOperator; import com.google.gwt.dev.jjs.ast.JCastOperation; import com.google.gwt.dev.jjs.ast.JConditional; import com.google.gwt.dev.jjs.ast.JExpression; import com.google.gwt.dev.jjs.ast.JFieldRef; import com.google.gwt.dev.jjs.ast.JIntLiteral; import com.google.gwt.dev.jjs.ast.JLocalRef; import com.google.gwt.dev.jjs.ast.JMethodCall; import com.google.gwt.dev.jjs.ast.JNewArray; import com.google.gwt.dev.jjs.ast.JNewInstance; import com.google.gwt.dev.jjs.ast.JParameterRef; import com.google.gwt.dev.jjs.ast.JPostfixOperation; import com.google.gwt.dev.jjs.ast.JPrefixOperation; import com.google.gwt.dev.jjs.ast.JReferenceType; import com.google.gwt.dev.jjs.ast.JThisRef; import com.google.gwt.dev.jjs.ast.JVisitor; /** * Analyzes an expression and make a number of static analysis flags available * based on the information available solely through the expression. * * TODO: make this even smarter when we have real null analysis. 
*/ public class ExpressionAnalyzer extends JVisitor { private boolean accessesField; private boolean accessesFieldNonFinal; private boolean accessesLocal; private boolean accessesParameter; private boolean assignmentToField; private boolean assignmentToLocal; private boolean assignmentToParameter; private boolean canThrowException; private boolean createsObject; private int inConditional; /** * Does this expression read or write fields within the scope of the * expression? */ public boolean accessesField() { return accessesField; } /** * Does this expression read or write non-final fields within the scope of the * expression? */ public boolean accessesFieldNonFinal() { return accessesFieldNonFinal; } /** * Does this expression read or write locals within the scope of the * expression? */ public boolean accessesLocal() { return accessesLocal; } /** * Does this expression read or write parameters within the scope of the * expression? */ public boolean accessesParameter() { return accessesParameter; } public boolean canThrowException() { return canThrowException; } public boolean createsObject() { return createsObject; } @Override public void endVisit(JArrayLength x, Context ctx) { // TODO: Is setting accessesField necessary for array.length access? accessesField = true; // Can throw an NPE when the array instance is null at runtime. JReferenceType refType = (JReferenceType) x.getInstance().getType(); canThrowException = refType.canBeNull(); } @Override public void endVisit(JArrayRef x, Context ctx) { /* * In Java, array references can throw IndexOutOfBoundsExceptions, but this * isn't the case for current GWT generated code. If we add a strict array * bounds check later, this flag would need to reflect it. */ // If JArrayRef is null, this can throw a NullPointerException. 
canThrowException = true; } @Override public void endVisit(JBinaryOperation x, Context ctx) { if (x.isAssignment()) { JExpression lhs = x.getLhs(); if (lhs instanceof JArrayRef) { // Array store operations can throw ArrayStoreExceptions canThrowException = true; } else { analyzeStore(lhs); } } } @Override public void endVisit(JCastOperation x, Context ctx) { // Can throw ClassCastException canThrowException = true; } @Override public void endVisit(JFieldRef x, Context ctx) { accessesField = true; if (!x.getTarget().isFinal()) { accessesFieldNonFinal = true; } if (x.hasClinit()) { recordMethodCall(); } JExpression instance = x.getInstance(); if (instance == null) { return; } // Field references using this are always safe if (instance instanceof JThisRef) { return; } if (x.getField().isStatic()) { // Can throw exceptions IFF a clinit is triggered. canThrowException = x.hasClinit(); } else { // Can throw exceptions IFF the instance is null. JReferenceType refType = (JReferenceType) instance.getType(); canThrowException = refType.canBeNull(); } } @Override public void endVisit(JLocalRef x, Context ctx) { accessesLocal = true; } @Override public void endVisit(JMethodCall x, Context ctx) { recordMethodCall(); } @Override public void endVisit(JNewArray x, Context ctx) { /* * If no array bounds, the new array is being automatically initialized. If * there are side-effects, they'll show up when we visit the initializers. */ if (x.dims == null) { return; } /* * Can throw NegativeArraySizeException if we initialize an array with * negative dimensions. 
*/ for (JExpression expression : x.dims) { if (expression instanceof JIntLiteral) { int value = ((JIntLiteral) expression).getValue(); if (value >= 0) { continue; } } canThrowException = true; } } @Override public void endVisit(JNewInstance x, Context ctx) { createsObject = true; endVisit((JMethodCall) x, ctx); } @Override public void endVisit(JParameterRef x, Context ctx) { accessesParameter = true; } @Override public void endVisit(JPostfixOperation x, Context ctx) { // Unary operations that are modifying cause assignment side-effects. if (x.getOp().isModifying()) { analyzeStore(x.getArg()); } } @Override public void endVisit(JPrefixOperation x, Context ctx) { // Unary operations that are modifying cause assignment side-effects. if (x.getOp().isModifying()) { analyzeStore(x.getArg()); } } /** * Does this expression make assignments to variables within the scope of the * expression? */ public boolean hasAssignment() { return assignmentToField || assignmentToLocal || assignmentToParameter; } /** * Does this expression make assignments to fields within the scope of the * expression? */ public boolean hasAssignmentToField() { return assignmentToField; } /** * Does this expression make assignments to locals within the scope of the * expression? */ public boolean hasAssignmentToLocal() { return assignmentToLocal; } /** * Does this expression make assignments to parameters within the scope of the * expression? 
*/ public boolean hasAssignmentToParameter() { return assignmentToParameter; } @Override public boolean visit(JBinaryOperation x, Context ctx) { if (x.getOp() == JBinaryOperator.AND || x.getOp() == JBinaryOperator.OR) { accept(x.getLhs()); inConditional++; accept(x.getRhs()); inConditional--; return false; } return true; } @Override public boolean visit(JConditional x, Context ctx) { accept(x.getIfTest()); inConditional++; accept(x.getThenExpr()); accept(x.getElseExpr()); inConditional--; return false; } /** * Determined if the current expression conditionally executes, based on its * parent expressions. */ protected boolean isInConditional() { return inConditional > 0; } private void analyzeStore(JExpression expr) { if (expr instanceof JFieldRef) { assignmentToField = true; } else if (expr instanceof JParameterRef) { assignmentToParameter = true; } else if (expr instanceof JLocalRef) { assignmentToLocal = true; } } /** * We can't assume anything about method calls right now, except that it can't * access any of our locals or parameters. * * TODO: what about accessing arrays? Should be treated like field refs I * guess. */ private void recordMethodCall() { assignmentToField = true; accessesField = true; accessesFieldNonFinal = true; canThrowException = true; createsObject = true; } }
/* * Copyright (c) 1998, 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package sbql4j8.com.sun.tools.doclets.formats.html; import java.io.*; import sbql4j8.com.sun.javadoc.*; import sbql4j8.com.sun.tools.doclets.formats.html.markup.*; import sbql4j8.com.sun.tools.doclets.internal.toolkit.*; import sbql4j8.com.sun.tools.doclets.internal.toolkit.util.*; /** * Class to generate Tree page for a package. The name of the file generated is * "package-tree.html" and it is generated in the respective package directory. * * <p><b>This is NOT part of any supported API. * If you write code that depends on this, you do so at your own risk. 
* This code and its internal interfaces are subject to change or * deletion without notice.</b> * * @author Atul M Dambalkar * @author Bhavesh Patel (Modified) */ public class PackageTreeWriter extends AbstractTreeWriter { /** * Package for which tree is to be generated. */ protected PackageDoc packagedoc; /** * The previous package name in the alpha-order list. */ protected PackageDoc prev; /** * The next package name in the alpha-order list. */ protected PackageDoc next; /** * Constructor. * @throws IOException * @throws DocletAbortException */ public PackageTreeWriter(ConfigurationImpl configuration, DocPath path, PackageDoc packagedoc, PackageDoc prev, PackageDoc next) throws IOException { super(configuration, path, new ClassTree( configuration.classDocCatalog.allClasses(packagedoc), configuration)); this.packagedoc = packagedoc; this.prev = prev; this.next = next; } /** * Construct a PackageTreeWriter object and then use it to generate the * package tree page. * * @param pkg Package for which tree file is to be generated. * @param prev Previous package in the alpha-ordered list. * @param next Next package in the alpha-ordered list. * @param noDeprecated If true, do not generate any information for * deprecated classe or interfaces. * @throws DocletAbortException */ public static void generate(ConfigurationImpl configuration, PackageDoc pkg, PackageDoc prev, PackageDoc next, boolean noDeprecated) { PackageTreeWriter packgen; DocPath path = DocPath.forPackage(pkg).resolve(DocPaths.PACKAGE_TREE); try { packgen = new PackageTreeWriter(configuration, path, pkg, prev, next); packgen.generatePackageTreeFile(); packgen.close(); } catch (IOException exc) { configuration.standardmessage.error( "doclet.exception_encountered", exc.toString(), path.getPath()); throw new DocletAbortException(exc); } } /** * Generate a separate tree file for each package. 
*/ protected void generatePackageTreeFile() throws IOException { Content body = getPackageTreeHeader(); Content headContent = getResource("doclet.Hierarchy_For_Package", Util.getPackageName(packagedoc)); Content heading = HtmlTree.HEADING(HtmlConstants.TITLE_HEADING, false, HtmlStyle.title, headContent); Content div = HtmlTree.DIV(HtmlStyle.header, heading); if (configuration.packages.length > 1) { addLinkToMainTree(div); } body.addContent(div); HtmlTree divTree = new HtmlTree(HtmlTag.DIV); divTree.addStyle(HtmlStyle.contentContainer); addTree(classtree.baseclasses(), "doclet.Class_Hierarchy", divTree); addTree(classtree.baseinterfaces(), "doclet.Interface_Hierarchy", divTree); addTree(classtree.baseAnnotationTypes(), "doclet.Annotation_Type_Hierarchy", divTree); addTree(classtree.baseEnums(), "doclet.Enum_Hierarchy", divTree); body.addContent(divTree); addNavLinks(false, body); addBottom(body); printHtmlDocument(null, true, body); } /** * Get the package tree header. * * @return a content tree for the header */ protected Content getPackageTreeHeader() { String title = packagedoc.name() + " " + configuration.getText("doclet.Window_Class_Hierarchy"); Content bodyTree = getBody(true, getWindowTitle(title)); addTop(bodyTree); addNavLinks(true, bodyTree); return bodyTree; } /** * Add a link to the tree for all the packages. * * @param div the content tree to which the link will be added */ protected void addLinkToMainTree(Content div) { Content span = HtmlTree.SPAN(HtmlStyle.packageHierarchyLabel, getResource("doclet.Package_Hierarchies")); div.addContent(span); HtmlTree ul = new HtmlTree (HtmlTag.UL); ul.addStyle(HtmlStyle.horizontal); ul.addContent(getNavLinkMainTree(configuration.getText("doclet.All_Packages"))); div.addContent(ul); } /** * Get link for the previous package tree file. 
* * @return a content tree for the link */ protected Content getNavLinkPrevious() { if (prev == null) { return getNavLinkPrevious(null); } else { DocPath path = DocPath.relativePath(packagedoc, prev); return getNavLinkPrevious(path.resolve(DocPaths.PACKAGE_TREE)); } } /** * Get link for the next package tree file. * * @return a content tree for the link */ protected Content getNavLinkNext() { if (next == null) { return getNavLinkNext(null); } else { DocPath path = DocPath.relativePath(packagedoc, next); return getNavLinkNext(path.resolve(DocPaths.PACKAGE_TREE)); } } /** * Get link to the package summary page for the package of this tree. * * @return a content tree for the package link */ protected Content getNavLinkPackage() { Content linkContent = getHyperLink(DocPaths.PACKAGE_SUMMARY, packageLabel); Content li = HtmlTree.LI(linkContent); return li; } }
package eu.toolchain.concurrent;

import static eu.toolchain.concurrent.CoreAsync.buildCollectedException;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
import lombok.RequiredArgsConstructor;

/**
 * Helper class for {@link CoreAsync#collect(Collection, Function)}
 *
 * <p>The helper implements {@code Handle}, and is intended to be used by binding it as a
 * listener to the futures being collected.
 *
 * <p>This is a lock-free implementation capable of writing the results out of order.
 *
 * @param <T> the source type being collected
 * @param <U> the collected value
 */
class CollectHelper<T, U> implements Handle<T> {
  // Per-slot outcome markers stored in {@link #states}.
  static final byte COMPLETED = 0x1;
  static final byte FAILED = 0x2;
  static final byte CANCELLED = 0x3;

  // Number of source stages being collected; fixes the length of values/states.
  final int size;
  // Reducer applied to the completed results once every source has checked in.
  final Function<? super Collection<T>, ? extends U> collector;

  /**
   * A collection of all source stages.
   */
  Collection<? extends Stage<?>> sources;
  // Completable that receives the final collected outcome.
  final Completable<? super U> target;

  /**
   * The collected results, non-final to allow for setting to null. Allows for random writes
   * since final size is known at initialization.
   **/
  Object[] values;

  /**
   * Flags for all collected states.
   */
  byte[] states;

  /**
   * maintain position separate since there is a potential race condition between getting the
   * current position and setting the entry. This is avoided by only relying on countdown to trigger
   * when we are done.
   **/
  final AtomicInteger write;

  /**
   * Maintain a separate countdown since the write position might be out of order, this causes all
   * threads to synchronize after the write
   **/
  final AtomicInteger countdown;

  /**
   * State of the collector.
   **/
  final AtomicBoolean failed;
  final AtomicBoolean done;

  /**
   * Create a new collection helper.
   *
   * @param size number of source stages; must be positive
   * @param collector function reducing the collected results into the final value
   * @param sources the stages being collected; cancelled as a group on first failure
   * @param target completable that receives the final outcome
   * @throws IllegalArgumentException if {@code size} is not positive
   */
  CollectHelper(
      int size, Function<? super Collection<T>, ? extends U> collector,
      Collection<? extends Stage<?>> sources, Completable<? super U> target
  ) {
    if (size <= 0) {
      throw new IllegalArgumentException("size");
    }

    this.size = size;
    this.collector = collector;
    this.sources = sources;
    this.target = target;
    this.values = new Object[size];
    this.states = new byte[size];
    this.write = new AtomicInteger();
    this.countdown = new AtomicInteger(size);
    this.failed = new AtomicBoolean();
    this.done = new AtomicBoolean();
  }

  @Override
  public void completed(T result) {
    add(COMPLETED, result);
  }

  @Override
  public void failed(Throwable e) {
    add(FAILED, e);
    checkFailed();
  }

  @Override
  public void cancelled() {
    add(CANCELLED, null);
    checkFailed();
  }

  // On the first failure or cancellation, eagerly cancel all source stages exactly once
  // (the CAS on `failed` guarantees a single winner).
  void checkFailed() {
    if (!failed.compareAndSet(false, true)) {
      return;
    }

    for (final Stage<?> source : sources) {
      source.cancel();
    }

    // help garbage collection.
    sources = null;
  }

  /**
   * Checks in a doCall back. It also wraps up the group if all the callbacks have checked in.
   */
  void add(final byte type, final Object value) {
    // getAndIncrement hands each caller a unique slot, so writeAt never races on an index.
    final int w = write.getAndIncrement();

    if (w < size) {
      writeAt(w, type, value);
    }

    // countdown could wrap around, however we check the state of finished in here.
    // MUST be called after write to make sure that results and states are synchronized.
    final int c = countdown.decrementAndGet();

    if (c < 0) {
      throw new IllegalStateException("already finished (countdown)");
    }

    // if this thread is not the last thread to check-in, do nothing..
    if (c != 0) {
      return;
    }

    // make sure this can only happen once.
    // This protects against countdown, and write wrapping around which should very rarely
    // happen.
    if (!done.compareAndSet(false, true)) {
      throw new IllegalStateException("already finished");
    }

    done(collect());
  }

  // Record a single check-in; each caller owns a distinct slot (see add), so plain array
  // writes are safe here.
  void writeAt(final int w, final byte state, final Object value) {
    states[w] = state;
    values[w] = value;
  }

  // Complete the target from the collected outcome. Failures take precedence over
  // cancellation, which takes precedence over applying the collector.
  void done(Results r) {
    final Collection<T> results = r.results;
    final Collection<Throwable> errors = r.errors;
    final int cancelled = r.cancelled;

    if (!errors.isEmpty()) {
      target.fail(buildCollectedException(errors));
      return;
    }

    if (cancelled > 0) {
      target.cancel();
      return;
    }

    U result;

    try {
      result = collector.apply(results);
    } catch (final Exception error) {
      // A throwing collector fails the target instead of propagating to the caller thread.
      target.fail(error);
      return;
    }

    target.complete(result);
  }

  // Partition the per-slot states/values into results, errors and a cancellation count.
  // Only called once, by the last thread to check in (guarded by `done` in add()).
  @SuppressWarnings("unchecked")
  Results collect() {
    final List<T> results = new ArrayList<>();
    final List<Throwable> errors = new ArrayList<>();
    int cancelled = 0;

    for (int i = 0; i < size; i++) {
      final byte type = states[i];

      switch (type) {
        case COMPLETED:
          results.add((T) values[i]);
          break;
        case FAILED:
          errors.add((Throwable) values[i]);
          break;
        case CANCELLED:
          cancelled++;
          break;
        default:
          throw new IllegalArgumentException("Invalid entry type: " + type);
      }
    }

    // help garbage collector
    this.states = null;
    this.values = null;

    return new Results(results, errors, cancelled);
  }

  // Snapshot of a finished collection round; inner (non-static) so it can use T.
  @RequiredArgsConstructor
  class Results {
    private final List<T> results;
    private final List<Throwable> errors;
    private final int cancelled;
  }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jdbi.v3.core.mapper.reflect; import java.beans.ConstructorProperties; import java.lang.annotation.Annotation; import java.lang.reflect.Constructor; import java.lang.reflect.Parameter; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.OptionalInt; import java.util.concurrent.ConcurrentHashMap; import java.util.stream.Stream; import org.jdbi.v3.core.mapper.Nested; import org.jdbi.v3.core.mapper.PropagateNull; import org.jdbi.v3.core.mapper.RowMapper; import org.jdbi.v3.core.mapper.RowMapperFactory; import org.jdbi.v3.core.mapper.SingleColumnMapper; import org.jdbi.v3.core.mapper.reflect.internal.PojoMapper; import org.jdbi.v3.core.qualifier.QualifiedType; import org.jdbi.v3.core.qualifier.Qualifiers; import org.jdbi.v3.core.statement.StatementContext; import static org.jdbi.v3.core.mapper.reflect.JdbiConstructors.findFactoryFor; import static org.jdbi.v3.core.mapper.reflect.ReflectionMapperUtil.anyColumnsStartWithPrefix; import static org.jdbi.v3.core.mapper.reflect.ReflectionMapperUtil.findColumnIndex; import static org.jdbi.v3.core.mapper.reflect.ReflectionMapperUtil.getColumnNames; /** * A row mapper which maps the fields in a result set into a constructor. 
The default implementation will perform a * case insensitive mapping between the constructor parameter names and the column labels, * also considering camel-case to underscores conversion. * <p> * This mapper respects {@link Nested} annotations on constructor parameters. * <p> * Constructor parameters annotated as {@code @Nullable} may be omitted from the result set without * error. Any annotation named "Nullable" is respected--nay, worshipped--no matter which package it is from. */ public class ConstructorMapper<T> implements RowMapper<T> { private static final String DEFAULT_PREFIX = ""; private static final String UNMATCHED_CONSTRUCTOR_PARAMETERS = "Instance factory '%s' could not match any parameter to any columns in the result set. " + "Verify that the Java compiler is configured to emit parameter names, " + "that your result set has the columns expected, annotate the " + "parameter names explicitly with @ColumnName, or annotate nullable parameters as @Nullable"; private static final String UNMATCHED_CONSTRUCTOR_PARAMETER = "Instance factory '%s' parameter '%s' has no matching columns in the result set. " + "Verify that the Java compiler is configured to emit parameter names, " + "that your result set has the columns expected, annotate the " + "parameter names explicitly with @ColumnName, or annotate nullable parameters as @Nullable"; private static final String UNMATCHED_COLUMNS_STRICT = "Mapping instance factory %s could not match parameters for columns: %s"; private static final String MISSING_COLUMN_MAPPER = "Could not find column mapper for type '%s' of parameter '%s' for instance factory '%s'"; /** * Use the only declared constructor to map a class. * * @param clazz the class to find a constructor of * @return the factory */ public static RowMapperFactory factory(Class<?> clazz) { return RowMapperFactory.of(clazz, ConstructorMapper.of(clazz)); } /** * Use the only declared constructor to map a class. 
* * @param clazz the class to find a constructor of * @param prefix a prefix for the parameter names * @return the factory */ public static RowMapperFactory factory(Class<?> clazz, String prefix) { return RowMapperFactory.of(clazz, ConstructorMapper.of(clazz, prefix)); } /** * Use a {@code Constructor<T>} to map its declaring type. * * @param constructor the constructor to invoke * @return the factory */ public static RowMapperFactory factory(Constructor<?> constructor) { return RowMapperFactory.of(constructor.getDeclaringClass(), ConstructorMapper.of(constructor)); } /** * Use a {@code Constructor<T>} to map its declaring type. * * @param constructor the constructor to invoke * @param prefix a prefix to the constructor parameter names * @return the factory */ public static RowMapperFactory factory(Constructor<?> constructor, String prefix) { return RowMapperFactory.of(constructor.getDeclaringClass(), ConstructorMapper.of(constructor, prefix)); } /** * Return a ConstructorMapper for the given type. * * @param <T> the type to map * @param type the mapped type * @return the mapper */ public static <T> RowMapper<T> of(Class<T> type) { return ConstructorMapper.of(type, DEFAULT_PREFIX); } /** * Return a ConstructorMapper for the given type and prefix. 
* * @param <T> the type to map * @param type the mapped type * @param prefix the column name prefix * @return the mapper */ public static <T> RowMapper<T> of(Class<T> type, String prefix) { return new ConstructorMapper<>(findFactoryFor(type), prefix); } /** * Return a ConstructorMapper using the given constructor * * @param <T> the type to map * @param constructor the constructor to be used in mapping * @return the mapper */ public static <T> RowMapper<T> of(Constructor<T> constructor) { return ConstructorMapper.of(constructor, DEFAULT_PREFIX); } /** * Instantiate a ConstructorMapper using the given constructor and prefix * * @param <T> the type to map * @param constructor the constructor to be used in mapping * @param prefix the column name prefix * @return the mapper */ public static <T> RowMapper<T> of(Constructor<T> constructor, String prefix) { return new ConstructorMapper<>(new ConstructorInstanceFactory<>(constructor), prefix); } private final InstanceFactory<T> factory; private final String prefix; private final ConstructorProperties constructorProperties; private final Map<Parameter, ConstructorMapper<?>> nestedMappers = new ConcurrentHashMap<>(); private ConstructorMapper(InstanceFactory<T> factory, String prefix) { this.factory = factory; this.prefix = prefix.toLowerCase(); this.constructorProperties = factory.getAnnotation(ConstructorProperties.class); } @Override public T map(ResultSet rs, StatementContext ctx) throws SQLException { return specialize(rs, ctx).map(rs, ctx); } @Override public RowMapper<T> specialize(ResultSet rs, StatementContext ctx) throws SQLException { final List<String> columnNames = getColumnNames(rs); final List<ColumnNameMatcher> columnNameMatchers = ctx.getConfig(ReflectionMappers.class).getColumnNameMatchers(); final List<String> unmatchedColumns = new ArrayList<>(columnNames); RowMapper<T> mapper = specialize0(ctx, columnNames, columnNameMatchers, unmatchedColumns) .orElseThrow(() -> new 
IllegalArgumentException(String.format( UNMATCHED_CONSTRUCTOR_PARAMETERS, factory))); if (ctx.getConfig(ReflectionMappers.class).isStrictMatching() && anyColumnsStartWithPrefix(unmatchedColumns, prefix, columnNameMatchers)) { throw new IllegalArgumentException( String.format(UNMATCHED_COLUMNS_STRICT, factory, unmatchedColumns)); } return mapper; } private Optional<RowMapper<T>> specialize0(StatementContext ctx, List<String> columnNames, List<ColumnNameMatcher> columnNameMatchers, List<String> unmatchedColumns) { final int count = factory.getParameterCount(); final Parameter[] parameters = factory.getParameters(); boolean matchedColumns = false; final List<String> unmatchedParameters = new ArrayList<>(); final List<ParameterData> paramData = new ArrayList<>(); for (int i = 0; i < count; i++) { final Parameter parameter = parameters[i]; boolean nullable = isNullable(parameter); Nested anno = parameter.getAnnotation(Nested.class); if (anno == null) { final String paramName = prefix + paramName(parameters, i, constructorProperties); final OptionalInt columnIndex = findColumnIndex(paramName, columnNames, columnNameMatchers, () -> debugName(parameter)); if (columnIndex.isPresent()) { int colIndex = columnIndex.getAsInt(); final QualifiedType<?> type = QualifiedType.of(parameter.getParameterizedType()) .withAnnotations(ctx.getConfig(Qualifiers.class).findFor(parameter)); paramData.add(new ParameterData(i, parameter, ctx.findColumnMapperFor(type) .map(mapper -> new SingleColumnMapper<>(mapper, colIndex + 1)) .orElseThrow(() -> new IllegalArgumentException( String.format(MISSING_COLUMN_MAPPER, type, paramName, factory))))); matchedColumns = true; unmatchedColumns.remove(columnNames.get(colIndex)); } else if (nullable) { paramData.add(new ParameterData(i, parameter, (r, c) -> null)); } else { unmatchedParameters.add(paramName); } } else { final String nestedPrefix = prefix + anno.value(); final Optional<? 
extends RowMapper<?>> nestedMapper = nestedMappers .computeIfAbsent(parameter, p -> new ConstructorMapper<>(findFactoryFor(p.getType()), nestedPrefix)) .specialize0(ctx, columnNames, columnNameMatchers, unmatchedColumns); if (nestedMapper.isPresent()) { paramData.add(new ParameterData(i, parameter, nestedMapper.get())); matchedColumns = true; } else if (nullable) { paramData.add(new ParameterData(i, parameter, (r, c) -> null)); } else { unmatchedParameters.add(paramName(parameters, i, constructorProperties)); } } } if (!matchedColumns) { return Optional.empty(); } Collections.sort(paramData, Comparator.comparing( p -> p.propagateNull ? 1 : 0)); if (!unmatchedParameters.isEmpty()) { throw new IllegalArgumentException(String.format( UNMATCHED_CONSTRUCTOR_PARAMETER, factory, unmatchedParameters)); } final Optional<String> nullMarkerColumn = Optional.ofNullable(factory.getAnnotationIncludingType(PropagateNull.class)) .map(PropagateNull::value); return Optional.of((r, c) -> { if (PojoMapper.propagateNull(r, nullMarkerColumn)) { return null; } final Object[] params = new Object[count]; for (ParameterData p : paramData) { params[p.index] = p.mapper.map(r, c); if (p.propagateNull && (params[p.index] == null || p.isPrimitive && r.wasNull())) { return null; } } return factory.newInstance(params); }); } private boolean isNullable(Parameter parameter) { // Any annotation named @Nullable is honored. We're nice that way. 
return Stream.of(parameter.getAnnotations()) .map(Annotation::annotationType) .map(Class::getSimpleName) .anyMatch("Nullable"::equals); } private static String paramName(Parameter[] parameters, int position, ConstructorProperties parameterNames) { final Parameter parameter = parameters[position]; ColumnName dbName = parameter.getAnnotation(ColumnName.class); if (dbName != null) { return dbName.value(); } if (parameterNames != null) { return parameterNames.value()[position]; } return parameter.getName(); } private String debugName(Parameter parameter) { return String.format("%s constructor parameter %s", factory.getDeclaringClass().getSimpleName(), parameter.getName()); } private static class ParameterData { ParameterData(int index, Parameter parameter, RowMapper<?> mapper) { this.index = index; this.parameter = parameter; this.mapper = mapper; propagateNull = parameter.getAnnotation(PropagateNull.class) != null; isPrimitive = parameter.getType().isPrimitive(); } final int index; final Parameter parameter; final RowMapper<?> mapper; final boolean propagateNull; final boolean isPrimitive; } }
/* * Copyright (C) 2012-2015 DataStax Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.datastax.driver.core; import com.codahale.metrics.Gauge; import com.datastax.driver.core.exceptions.BusyConnectionException; import com.datastax.driver.core.exceptions.ConnectionException; import com.datastax.driver.core.policies.ConstantReconnectionPolicy; import com.google.common.util.concurrent.Uninterruptibles; import org.scassandra.cql.PrimitiveType; import org.scassandra.http.client.PrimingRequest; import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; import java.net.InetSocketAddress; import java.util.Iterator; import java.util.List; import java.util.concurrent.*; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import static com.datastax.driver.core.Assertions.assertThat; import static com.google.common.collect.Lists.newArrayList; import static java.util.Collections.singletonList; import static java.util.concurrent.TimeUnit.SECONDS; import static org.mockito.Mockito.*; import static org.scassandra.http.client.ClosedConnectionReport.CloseType.CLOSE; import static org.scassandra.http.client.PrimingRequest.then; import static org.testng.Assert.fail; public class HostConnectionPoolTest extends ScassandraTestBase.PerClassCluster { @BeforeClass(groups = {"short", "long"}) public void reinitializeCluster() { // Don't use the provided cluster, each test will create its own instead. 
cluster.close(); } /** * Sends 101 requests on pool, asserting that the connection used is in expectedConnections, except for the last request. */ private List<MockRequest> fillConnectionToThreshold(HostConnectionPool pool, List<Connection> expectedConnections) throws ConnectionException, BusyConnectionException, TimeoutException { List<MockRequest> requests = sendRequests(100, pool, expectedConnections); requests.add(MockRequest.send(pool)); return requests; } /** * Sends count requests on pool. */ private List<MockRequest> sendRequests(int count, HostConnectionPool pool) throws ConnectionException, BusyConnectionException, TimeoutException { return sendRequests(count, pool, null); } /** * Sends count requests on pool, asserting that the connection used is in expectedConnections. */ private List<MockRequest> sendRequests(int count, HostConnectionPool pool, List<Connection> expectedConnections) throws ConnectionException, BusyConnectionException, TimeoutException { List<MockRequest> requests = newArrayList(); for (int i = 0; i < count; i++) { MockRequest request = MockRequest.send(pool); requests.add(request); if (expectedConnections != null) assertThat(expectedConnections).contains(request.connection); } return requests; } @Override protected Cluster.Builder configure(Cluster.Builder builder) { // Use version 2 at highest. ProtocolVersion versionToUse = TestUtils.getDesiredProtocolVersion(ProtocolVersion.V2); return builder.withProtocolVersion(versionToUse); } /** * Completes count requests by simulating a successful response. */ private void completeRequests(int count, List<MockRequest> requests) { Iterator<MockRequest> requestIt = requests.iterator(); for (int i = 0; i < count; i++) { if (requestIt.hasNext()) { MockRequest request = requestIt.next(); request.simulateSuccessResponse(); requestIt.remove(); } else { break; } } } /** * Completes all requests by simulating a successful response. 
*/ private void completeRequests(List<MockRequest> requests) { for (MockRequest request : requests) { request.simulateSuccessResponse(); } } /** * Ensures that if a fixed-sized pool has filled its core connections that borrowConnection will timeout instead * of creating a new connection. * * @jira_ticket JAVA-419 * @test_category connection:connection_pool * @since 2.0.10, 2.1.6 */ @Test(groups = "short") public void fixed_size_pool_should_fill_its_core_connections_and_then_timeout() throws ConnectionException, TimeoutException, BusyConnectionException { Cluster cluster = createClusterBuilder().build(); List<MockRequest> requests = newArrayList(); try { HostConnectionPool pool = createPool(cluster, 2, 2); assertThat(pool.connections.size()).isEqualTo(2); List<Connection> coreConnections = newArrayList(pool.connections); requests.addAll(sendRequests(256, pool, coreConnections)); try { MockRequest.send(pool); Assertions.fail("Expected a TimeoutException"); } catch (TimeoutException e) { /*expected*/} } finally { completeRequests(requests); cluster.close(); } } /** * Ensures that if a variable-sized pool has filled up to its maximum connections that borrowConnection will * timeout instead of creating a new connection. * * @jira_ticket JAVA-419 * @test_category connection:connection_pool * @since 2.0.10, 2.1.6 */ @Test(groups = "short") public void variable_size_pool_should_fill_its_connections_and_then_timeout() throws Exception { Cluster cluster = createClusterBuilder().build(); List<MockRequest> requests = newArrayList(); try { HostConnectionPool pool = createPool(cluster, 1, 2); Connection.Factory factory = spy(cluster.manager.connectionFactory); cluster.manager.connectionFactory = factory; assertThat(pool.connections.size()).isEqualTo(1); List<Connection> coreConnections = newArrayList(pool.connections); // Fill enough connections to hit the threshold. 
requests.addAll(fillConnectionToThreshold(pool, coreConnections)); // Allow time for new connection to be spawned. verify(factory, after(1000).times(1)).open(any(HostConnectionPool.class)); // Borrow more and ensure the connection returned is a non-core connection. for (int i = 0; i < 100; i++) { MockRequest request = MockRequest.send(pool); assertThat(coreConnections).doesNotContain(request.connection); requests.add(request); } // Fill remaining connections (28 + 28) - 1 requests.addAll(sendRequests(55, pool)); boolean timedOut = false; try { MockRequest.send(pool); } catch (TimeoutException e) { timedOut = true; } assertThat(timedOut).isTrue(); } finally { completeRequests(requests); cluster.close(); } } /** * Ensures that if the core connection pool is full that borrowConnection will create and use a new connection. * * @jira_ticket JAVA-419 * @test_category connection:connection_pool * @since 2.0.10, 2.1.6 */ @Test(groups = "short") public void should_add_extra_connection_when_core_full() throws Exception { Cluster cluster = createClusterBuilder().build(); List<MockRequest> requests = newArrayList(); try { HostConnectionPool pool = createPool(cluster, 1, 2); Connection.Factory factory = spy(cluster.manager.connectionFactory); cluster.manager.connectionFactory = factory; Connection core = pool.connections.get(0); // Fill core connection + 1 requests.addAll(fillConnectionToThreshold(pool, singletonList(core))); // Reaching the threshold should have triggered the creation of an extra one verify(factory, after(1000).times(1)).open(any(HostConnectionPool.class)); assertThat(pool.connections).hasSize(2); } finally { completeRequests(requests); cluster.close(); } } /** * Ensures that a trashed connection that has not been timed out should be resurrected into the connection pool if * borrowConnection is called and a new connection is needed. 
* * @jira_ticket JAVA-419 * @test_category connection:connection_pool * @since 2.0.10, 2.1.6 */ @Test(groups = "long") public void should_resurrect_trashed_connection_within_idle_timeout() throws Exception { Cluster cluster = createClusterBuilder().withPoolingOptions(new PoolingOptions().setIdleTimeoutSeconds(20)).build(); List<MockRequest> requests = newArrayList(); try { HostConnectionPool pool = createPool(cluster, 1, 2); Connection.Factory factory = spy(cluster.manager.connectionFactory); cluster.manager.connectionFactory = factory; Connection connection1 = pool.connections.get(0); requests.addAll(fillConnectionToThreshold(pool, singletonList(connection1))); verify(factory, after(1000).times(1)).open(any(HostConnectionPool.class)); assertThat(pool.connections).hasSize(2); Connection connection2 = pool.connections.get(1); assertThat(connection1.inFlight.get()).isEqualTo(101); assertThat(connection2.inFlight.get()).isEqualTo(0); // Go back under the capacity of 1 connection completeRequests(51, requests); assertThat(connection1.inFlight.get()).isEqualTo(50); assertThat(connection2.inFlight.get()).isEqualTo(0); // Given enough time, one connection gets trashed (and the implementation picks the first one) Uninterruptibles.sleepUninterruptibly(20, TimeUnit.SECONDS); assertThat(pool.connections).containsExactly(connection2); assertThat(pool.trash).containsExactly(connection1); // Now borrow enough to go just under the 1 connection threshold requests.addAll(sendRequests(50, pool)); assertThat(pool.connections).containsExactly(connection2); assertThat(pool.trash).containsExactly(connection1); assertThat(connection1.inFlight.get()).isEqualTo(50); assertThat(connection2.inFlight.get()).isEqualTo(50); // Borrowing one more time should resurrect the trashed connection requests.addAll(sendRequests(1, pool)); verify(factory, after(1000).times(1)).open(any(HostConnectionPool.class)); assertThat(pool.connections).containsExactly(connection2, connection1); 
assertThat(pool.trash).isEmpty(); assertThat(connection1.inFlight.get()).isEqualTo(50); assertThat(connection2.inFlight.get()).isEqualTo(51); } finally { completeRequests(requests); cluster.close(); } } /** * Ensures that a trashed connection that has been timed out should not be resurrected into the connection pool if * borrowConnection is called and a new connection is needed. * * @jira_ticket JAVA-419 * @test_category connection:connection_pool * @since 2.0.10, 2.1.6 */ @Test(groups = "long") public void should_not_resurrect_trashed_connection_after_idle_timeout() throws Exception { Cluster cluster = createClusterBuilder().withPoolingOptions(new PoolingOptions().setIdleTimeoutSeconds(20)).build(); List<MockRequest> requests = newArrayList(); try { HostConnectionPool pool = createPool(cluster, 1, 2); Connection.Factory factory = spy(cluster.manager.connectionFactory); cluster.manager.connectionFactory = factory; Connection connection1 = pool.connections.get(0); requests.addAll(fillConnectionToThreshold(pool, singletonList(connection1))); verify(factory, after(1000).times(1)).open(any(HostConnectionPool.class)); reset(factory); assertThat(pool.connections).hasSize(2); Connection connection2 = pool.connections.get(1); assertThat(connection1.inFlight.get()).isEqualTo(101); assertThat(connection2.inFlight.get()).isEqualTo(0); // Go back under the capacity of 1 connection completeRequests(51, requests); assertThat(connection1.inFlight.get()).isEqualTo(50); assertThat(connection2.inFlight.get()).isEqualTo(0); // Given enough time, one connection gets trashed (and the implementation picks the first one) Uninterruptibles.sleepUninterruptibly(20, TimeUnit.SECONDS); assertThat(pool.connections).containsExactly(connection2); assertThat(pool.trash).containsExactly(connection1); // Return trashed connection down to 0 inFlight completeRequests(50, requests); assertThat(connection1.inFlight.get()).isEqualTo(0); // Give enough time for trashed connection to be cleaned up from 
the trash: Uninterruptibles.sleepUninterruptibly(30, TimeUnit.SECONDS); assertThat(pool.connections).containsExactly(connection2); assertThat(pool.trash).isEmpty(); assertThat(connection1.isClosed()).isTrue(); // Fill the live connection to go over the threshold where a second one is needed requests.addAll(fillConnectionToThreshold(pool, singletonList(connection2))); assertThat(connection2.inFlight.get()).isEqualTo(101); verify(factory, after(1000).times(1)).open(any(HostConnectionPool.class)); // Borrow again to get the new connection MockRequest request = MockRequest.send(pool); requests.add(request); assertThat(request.connection) .isNotEqualTo(connection2) // should not be the full connection .isNotEqualTo(connection1); // should not be the previously trashed one } finally { completeRequests(requests); cluster.close(); } } /** * Ensures that a trashed connection that has been timed out should not be closed until it has 0 in flight requests. * * @jira_ticket JAVA-419 * @test_category connection:connection_pool * @since 2.0.10, 2.1.6 */ @Test(groups = "long") public void should_not_close_trashed_connection_until_no_in_flight() throws Exception { Cluster cluster = createClusterBuilder().withPoolingOptions(new PoolingOptions().setIdleTimeoutSeconds(20)).build(); List<MockRequest> requests = newArrayList(); try { HostConnectionPool pool = createPool(cluster, 1, 2); Connection.Factory factory = spy(cluster.manager.connectionFactory); cluster.manager.connectionFactory = factory; Connection connection1 = pool.connections.get(0); // Fill core connection enough to trigger creation of another one requests.addAll(fillConnectionToThreshold(pool, singletonList(connection1))); verify(factory, after(1000).times(1)).open(any(HostConnectionPool.class)); assertThat(pool.connections).hasSize(2); // Return enough times to get back under the threshold where one connection is enough completeRequests(50, requests); // Give enough time for one connection to be trashed. 
Due to the implementation, this will be the first one. // It still has in-flight requests so should not get closed. Uninterruptibles.sleepUninterruptibly(30, TimeUnit.SECONDS); assertThat(pool.trash).containsExactly(connection1); assertThat(connection1.inFlight.get()).isEqualTo(51); assertThat(connection1.isClosed()).isFalse(); // Consume all inFlight requests on the trashed connection. completeRequests(51, requests); // Sleep enough time for the connection to be consider idled and closed. Uninterruptibles.sleepUninterruptibly(30, TimeUnit.SECONDS); // The connection should be now closed. // The trashed connection should be closed and not in the pool or trash. assertThat(connection1.isClosed()).isTrue(); assertThat(pool.connections).doesNotContain(connection1); assertThat(pool.trash).doesNotContain(connection1); } finally { completeRequests(requests); cluster.close(); } } /** * Ensures that if a connection that has less than the minimum available stream ids is returned to the pool that * the connection is put in the trash. * * @jira_ticket JAVA-419 * @test_category connection:connection_pool * @since 2.0.10, 2.1.6 */ @Test(groups = "short") public void should_trash_on_returning_connection_with_insufficient_streams() throws Exception { Cluster cluster = createClusterBuilder().build(); List<MockRequest> requests = newArrayList(); try { HostConnectionPool pool = createPool(cluster, 1, 2); Connection.Factory factory = spy(cluster.manager.connectionFactory); cluster.manager.connectionFactory = factory; Connection core = pool.connections.get(0); requests.addAll(fillConnectionToThreshold(pool, singletonList(core))); verify(factory, after(1000).times(1)).open(any(HostConnectionPool.class)); assertThat(pool.connections).hasSize(2); // Grab the new non-core connection and replace it with a spy. Connection extra1 = spy(pool.connections.get(1)); pool.connections.set(1, extra1); // Borrow 10 times to ensure pool is utilized. 
requests.addAll(sendRequests(10, pool)); assertThat(pool.connections).hasSize(2); // stub the maxAvailableStreams method to return 0, indicating there are no remaining streams. // this should cause the connection to be replaced and trashed on returnConnection. doReturn(0).when(extra1).maxAvailableStreams(); // On returning of the connection, should detect that there are no available streams and trash it. assertThat(pool.trash).hasSize(0); pool.returnConnection(extra1); assertThat(pool.connections).hasSize(1); assertThat(pool.trash).hasSize(1); } finally { completeRequests(requests); cluster.close(); } } /** * Ensures that if a connection on a host is lost but other connections remain intact in the Pool that the * host is not marked down. * * @jira_ticket JAVA-544 * @test_category connection:connection_pool * @since 2.0.11 */ @Test(groups = "short") public void should_keep_host_up_when_one_connection_lost() throws Exception { Cluster cluster = createClusterBuilder().build(); try { HostConnectionPool pool = createPool(cluster, 2, 2); Connection core0 = pool.connections.get(0); Connection core1 = pool.connections.get(1); // Drop a connection and ensure the host stays up. currentClient.disableListener(); currentClient.closeConnection(CLOSE, ((InetSocketAddress) core0.channel.localAddress())); Uninterruptibles.sleepUninterruptibly(100, TimeUnit.MILLISECONDS); // connection 0 should be down, while connection 1 and the Host should remain up. assertThat(core0.isClosed()).isTrue(); assertThat(core1.isClosed()).isFalse(); assertThat(pool.connections).doesNotContain(core0); assertThat(cluster).host(1).hasState(Host.State.UP); assertThat(cluster).hasOpenControlConnection(); } finally { cluster.close(); } } /** * Ensures that if all connections on a host are closed that the host is marked * down and the control connection is notified of that fact and re-established * itself. 
* * @jira_ticket JAVA-544 * @test_category connection:connection_pool * @since 2.0.11 */ @Test(groups = "short") public void should_mark_host_down_when_no_connections_remaining() throws Exception { int readTimeout = 1000; int reconnectInterval = 1000; Cluster cluster = this.createClusterBuilder() .withSocketOptions(new SocketOptions() .setConnectTimeoutMillis(readTimeout) .setReadTimeoutMillis(reconnectInterval)) .withReconnectionPolicy(new ConstantReconnectionPolicy(1000)).build(); try { cluster.init(); Connection.Factory factory = spy(cluster.manager.connectionFactory); cluster.manager.connectionFactory = factory; HostConnectionPool pool = createPool(cluster, 8, 8); // copy list to track these connections. List<Connection> connections = newArrayList(pool.connections); reset(factory); // Drop all connections. currentClient.disableListener(); currentClient.closeConnections(CLOSE); // The host should be marked down and the control connection closed. assertThat(cluster).host(1).goesDownWithin(10, TimeUnit.SECONDS); assertThat(cluster).hasClosedControlConnection(); // Ensure all connections are closed. for (Connection connection : connections) { assertThat(connection.isClosed()).isTrue(); } // Expect a reconnect attempt on host after reconnect interval // on behalf of the control connection. verify(factory, timeout(reconnectInterval * 2).atLeastOnce()).open(host); // Sleep for a bit to allow reconnect to fail. Uninterruptibles.sleepUninterruptibly(readTimeout * 2, TimeUnit.MILLISECONDS); // Ensure control connection is still closed. assertThat(cluster).hasClosedControlConnection(); // Reenable connectivity. currentClient.enableListener(); // Reconnect attempt should have been connected for control connection // and pool. // 2 attempts for connection.open (reconnect control connection and initial connection for host state). verify(factory, after(reconnectInterval * 2).atLeast(2)).open(host); // 7 attempts for core connections after first initial connection. 
verify(factory, timeout(reconnectInterval * 2)).newConnections(any(HostConnectionPool.class), eq(7)); // Wait some reasonable amount of time for connection to reestablish. Uninterruptibles.sleepUninterruptibly(readTimeout, TimeUnit.MILLISECONDS); // Control Connection should now be open. assertThat(cluster).hasOpenControlConnection(); assertThat(cluster).host(1).hasState(Host.State.UP); } finally { cluster.close(); } } /** * Ensures that if a connection on a host is lost that brings the number of active connections in a pool * under core connection count that up to core connections are re-established, but only after the * next reconnect schedule has elapsed. * * @jira_ticket JAVA-544 * @test_category connection:connection_pool * @since 2.0.11 */ @Test(groups = "short") public void should_create_new_connections_when_connection_lost_and_under_core_connections() throws Exception { int readTimeout = 1000; int reconnectInterval = 1000; Cluster cluster = this.createClusterBuilder() .withSocketOptions(new SocketOptions() .setConnectTimeoutMillis(readTimeout) .setReadTimeoutMillis(reconnectInterval)) .withReconnectionPolicy(new ConstantReconnectionPolicy(1000)).build(); List<MockRequest> requests = newArrayList(); try { cluster.init(); Connection.Factory factory = spy(cluster.manager.connectionFactory); cluster.manager.connectionFactory = factory; HostConnectionPool pool = createPool(cluster, 3, 3); Connection core0 = pool.connections.get(0); Connection core1 = pool.connections.get(1); Connection core2 = pool.connections.get(2); // Drop two core connections. // Disable new connections initially and we'll eventually reenable it. currentClient.disableListener(); currentClient.closeConnection(CLOSE, ((InetSocketAddress) core0.channel.localAddress())); currentClient.closeConnection(CLOSE, ((InetSocketAddress) core2.channel.localAddress())); Uninterruptibles.sleepUninterruptibly(100, TimeUnit.MILLISECONDS); // Since we have a connection left the host should remain up. 
assertThat(cluster).host(1).hasState(Host.State.UP); assertThat(pool.connections).hasSize(1); // The borrowed connection should be the open one. MockRequest request = MockRequest.send(pool); requests.add(request); assertThat(request.connection).isEqualTo(core1); // Should not have tried to create a new core connection since reconnection time had not elapsed. verify(factory, never()).open(any(HostConnectionPool.class)); // Sleep to elapse the Reconnection Policy. Uninterruptibles.sleepUninterruptibly(reconnectInterval, TimeUnit.MILLISECONDS); // Attempt to borrow connection, this should trigger ensureCoreConnections thus spawning a new connection. request = MockRequest.send(pool); requests.add(request); assertThat(request.connection).isEqualTo(core1); // Should have tried to open up to core connections as result of borrowing a connection past reconnect time and not being at core. verify(factory, timeout(reconnectInterval).times(1)).open(any(HostConnectionPool.class)); reset(factory); // Sleep for reconnect interval to allow reconnection time to elapse. Uninterruptibles.sleepUninterruptibly((readTimeout + reconnectInterval) * 2, TimeUnit.MILLISECONDS); // Enable listening so new connections succeed. currentClient.enableListener(); // Sleep to elapse the Reconnection Policy. Uninterruptibles.sleepUninterruptibly(reconnectInterval, TimeUnit.MILLISECONDS); // Try to borrow a connection, the pool should grow. requests.add(MockRequest.send(pool)); verify(factory, timeout((reconnectInterval + readTimeout) * 2).times(1)).open(any(HostConnectionPool.class)); reset(factory); // Another core connection should be opened as result of another request to get us up to core connections. requests.add(MockRequest.send(pool)); verify(factory, timeout((reconnectInterval + readTimeout) * 2).times(1)).open(any(HostConnectionPool.class)); reset(factory); // Sending another request should not grow the pool any more, since we are now at core connections. 
requests.add(MockRequest.send(pool)); verify(factory, after((reconnectInterval + readTimeout) * 2).never()).open(any(HostConnectionPool.class)); } finally { completeRequests(requests); cluster.close(); } } /** * Ensures that if a connection on a host is lost and the number of remaining connections is at * core connection count that no connections are re-established until after there are enough * inflight requests to justify creating one and the reconnection interval has elapsed. * * @jira_ticket JAVA-544 * @test_category connection:connection_pool * @since 2.0.11 */ @Test(groups = "short") public void should_not_schedule_reconnect_when_connection_lost_and_at_core_connections() throws Exception { int readTimeout = 1000; int reconnectInterval = 1000; Cluster cluster = this.createClusterBuilder() .withSocketOptions(new SocketOptions() .setConnectTimeoutMillis(readTimeout) .setReadTimeoutMillis(reconnectInterval)) .withReconnectionPolicy(new ConstantReconnectionPolicy(1000)).build(); List<MockRequest> requests = newArrayList(); try { cluster.init(); Connection.Factory factory = spy(cluster.manager.connectionFactory); cluster.manager.connectionFactory = factory; HostConnectionPool pool = createPool(cluster, 1, 2); Connection core0 = pool.connections.get(0); // Create enough inFlight requests to spawn another connection. List<MockRequest> core0requests = newArrayList(); for (int i = 0; i < 101; i++) { MockRequest request = MockRequest.send(pool); assertThat(request.connection).isEqualTo(core0); core0requests.add(request); } // Pool should grow by 1. verify(factory, after(1000).times(1)).open(any(HostConnectionPool.class)); assertThat(pool.connections).hasSize(2); // Reset factory mock as we'll be checking for new open() invokes later. reset(factory); // Grab the new non-core connection. Connection extra1 = pool.connections.get(1); // Drop a connection and disable listening. 
currentClient.closeConnection(CLOSE, ((InetSocketAddress) core0.channel.localAddress())); Uninterruptibles.sleepUninterruptibly(100, TimeUnit.MILLISECONDS); currentClient.disableListener(); // Since core0 was closed, all of it's requests should have errored. for (MockRequest request : core0requests) { verify(request, times(1)).onException(any(Connection.class), any(Exception.class), anyLong(), anyInt()); } assertThat(cluster).host(1).hasState(Host.State.UP); // Create enough inFlight requests to fill connection. requests.addAll(sendRequests(100, pool, singletonList(extra1))); assertThat(pool.connections).hasSize(1); // A new connection should never have been spawned since we didn't max out core. verify(factory, after(readTimeout).never()).open(any(HostConnectionPool.class)); // Borrow another connection, since we exceed max another connection should be opened. MockRequest request = MockRequest.send(pool); requests.add(request); assertThat(request.connection).isEqualTo(extra1); // After some time the a connection should attempt to be opened (but will fail). verify(factory, timeout(readTimeout)).open(any(HostConnectionPool.class)); assertThat(pool.connections).hasSize(1); // Wait some reasonable amount of time for connection to reestablish then check pool size. Uninterruptibles.sleepUninterruptibly(readTimeout * 2, TimeUnit.MILLISECONDS); // Reconnecting failed since listening was enabled. assertThat(pool.connections).hasSize(1); // Re enable listening then wait for reconnect. currentClient.enableListener(); Uninterruptibles.sleepUninterruptibly(reconnectInterval, TimeUnit.MILLISECONDS); // Borrow another connection, since we exceed max another connection should be opened. request = MockRequest.send(pool); requests.add(request); assertThat(request.connection).isEqualTo(extra1); // Wait some reasonable amount of time for connection to reestablish then check pool size. 
Uninterruptibles.sleepUninterruptibly(readTimeout, TimeUnit.MILLISECONDS); // Reconnecting should have exceeded and pool will have grown. assertThat(pool.connections).hasSize(2); // Borrowed connection should be the newly spawned connection since the other one has some inflight requests. request = MockRequest.send(pool); requests.add(request); assertThat(request.connection).isNotEqualTo(core0).isNotEqualTo(extra1); } finally { completeRequests(requests); cluster.close(); } } /** * Ensures that if some connections fail on pool init that the host and subsequently the * control connection is not marked down. The test also ensures that when making requests * on the pool that connections are brought up to core. * * @jira_ticket JAVA-544 * @test_category connection:connection_pool * @since 2.0.11 */ @Test(groups = "short") public void should_not_mark_host_down_if_some_connections_fail_on_init() throws Exception { int readTimeout = 1000; int reconnectInterval = 1000; Cluster cluster = this.createClusterBuilder() .withSocketOptions(new SocketOptions() .setConnectTimeoutMillis(readTimeout) .setReadTimeoutMillis(reconnectInterval)) .withReconnectionPolicy(new ConstantReconnectionPolicy(1000)).build(); List<MockRequest> requests = newArrayList(); try { cluster.init(); Connection.Factory factory = spy(cluster.manager.connectionFactory); cluster.manager.connectionFactory = factory; // Allow the first 4 connections to establish, but disable after that. currentClient.disableListener(4); HostConnectionPool pool = createPool(cluster, 8, 8); reset(factory); // Pool size should show all successful connections. assertThat(pool.connections).hasSize(4); // Control connection should remain up in addition to to host. assertThat(cluster).host(1).hasState(Host.State.UP); assertThat(cluster).hasOpenControlConnection(); // Reenable listener, wait reconnectInterval and then try borrowing a connection. 
currentClient.enableListener(); Uninterruptibles.sleepUninterruptibly(reconnectInterval, TimeUnit.MILLISECONDS); // Should open up to core connections, however it will only spawn up to 1 connection // per request, so we need to make enough requests to make up the deficit. Additionally // we need to wait for connections to be established between requests for the pool // to spawn new connections (since it only allows one simultaneous creation). for (int i = 5; i <= 8; i++) { requests.add(MockRequest.send(pool)); verify(factory, timeout(readTimeout)).open(any(HostConnectionPool.class)); reset(factory); Uninterruptibles.sleepUninterruptibly(readTimeout, TimeUnit.MILLISECONDS); assertThat(pool.connections).hasSize(i); } } finally { completeRequests(requests); cluster.close(); } } /** * Ensures that if all connections fail on pool init that the host and subsequently the * control connection is not marked down since the control connection is still active. * on the pool that a TimeoutException is yielded if we are still in the reconnection window * according to the ConvictionPolicy. * * @jira_ticket JAVA-544 * @test_category connection:connection_pool * @since 2.0.11 */ @Test(groups = "short", expectedExceptions = TimeoutException.class) public void should_throw_exception_if_convicted_and_no_connections_available() throws Exception { int readTimeout = 1000; int reconnectInterval = 1000; Cluster cluster = this.createClusterBuilder() .withSocketOptions(new SocketOptions() .setConnectTimeoutMillis(readTimeout) .setReadTimeoutMillis(reconnectInterval)) .withReconnectionPolicy(new ConstantReconnectionPolicy(1000)).build(); try { // Init cluster so control connection is created. cluster.init(); assertThat(cluster).hasOpenControlConnection(); Connection.Factory factory = spy(cluster.manager.connectionFactory); cluster.manager.connectionFactory = factory; // Disable listener so all connections on pool fail. 
currentClient.disableListener(); HostConnectionPool pool = createPool(cluster, 8, 8); reset(factory); // Pool should be empty. assertThat(pool.connections).hasSize(0); // Control connection should stay up with the host. assertThat(cluster).host(1).hasState(Host.State.UP); assertThat(cluster).hasOpenControlConnection(); MockRequest.send(pool); } finally { cluster.close(); } } /** * Ensures that if all connections fail on pool init that the host and subsequently the * control connection is not marked down. The test also ensures that when making requests * on the pool after the conviction period that all core connections are created. * * @jira_ticket JAVA-544 * @test_category connection:connection_pool * @since 2.0.11 */ @Test(groups = "short") public void should_wait_on_connection_if_not_convicted_and_no_connections_available() throws Exception { int readTimeout = 1000; int reconnectInterval = 1000; Cluster cluster = this.createClusterBuilder() .withSocketOptions(new SocketOptions() .setConnectTimeoutMillis(readTimeout) .setReadTimeoutMillis(reconnectInterval)) .withReconnectionPolicy(new ConstantReconnectionPolicy(1000)).build(); try { // Init cluster so control connection is created. cluster.init(); assertThat(cluster).hasOpenControlConnection(); Connection.Factory factory = spy(cluster.manager.connectionFactory); cluster.manager.connectionFactory = factory; // Disable listener so all connections on pool fail. currentClient.disableListener(); HostConnectionPool pool = createPool(cluster, 8, 8); // Pool should be empty. assertThat(pool.connections).hasSize(0); // Control connection should stay up with the host. assertThat(cluster).host(1).hasState(Host.State.UP); assertThat(cluster).hasOpenControlConnection(); currentClient.enableListener(); // Wait for reconnectInterval so ConvictionPolicy allows connection to be created. 
Uninterruptibles.sleepUninterruptibly(reconnectInterval, TimeUnit.MILLISECONDS); reset(factory); MockRequest request = MockRequest.send(pool); request.simulateSuccessResponse(); // Should create up to core connections. verify(factory, timeout(readTimeout * 8).times(8)).open(any(HostConnectionPool.class)); Uninterruptibles.sleepUninterruptibly(readTimeout, TimeUnit.MILLISECONDS); assertThat(pool.connections).hasSize(8); } finally { cluster.close(); } } /** * Ensures that if a pool is created with zero core connections that when a request * is first sent that one and only one connection is created and that it waits on availability * of that connection and returns it. * * @jira_ticket JAVA-544 * @test_category connection:connection_pool * @since 2.0.11 */ @Test(groups = "short") public void should_wait_on_connection_if_zero_core_connections() throws Exception { int readTimeout = 1000; int reconnectInterval = 1000; Cluster cluster = this.createClusterBuilder() .withSocketOptions(new SocketOptions() .setConnectTimeoutMillis(readTimeout) .setReadTimeoutMillis(reconnectInterval)) .withReconnectionPolicy(new ConstantReconnectionPolicy(1000)).build(); try { // Init cluster so control connection is created. cluster.init(); assertThat(cluster).hasOpenControlConnection(); HostConnectionPool pool = createPool(cluster, 0, 2); // Pool should be empty. assertThat(pool.connections).hasSize(0); // Control connection should stay up with the host. assertThat(cluster).host(1).hasState(Host.State.UP); assertThat(cluster).hasOpenControlConnection(); // Send a request, this should create a connection. MockRequest request = MockRequest.send(pool); request.simulateSuccessResponse(); // Should create up to core connections. 
assertThat(pool.connections).hasSize(1); } finally { cluster.close(); } } private HostConnectionPool createPool(Cluster cluster, int coreConnections, int maxConnections) { cluster.getConfiguration().getPoolingOptions() .setMaxConnectionsPerHost(HostDistance.LOCAL, maxConnections) .setCoreConnectionsPerHost(HostDistance.LOCAL, coreConnections); Session session = cluster.connect(); Host host = TestUtils.findHost(cluster, 1); // Replace the existing pool with a spy pool and return it. SessionManager sm = ((SessionManager) session); return sm.pools.get(host); } /** * <p/> * This test uses a table named "Java349" with 1000 column and performs asynchronously 100k insertions. While the * insertions are being executed, the number of opened connection is monitored. * <p/> * If at anytime, the number of opened connections is negative, this test will fail. * * @jira_ticket JAVA-349 * @test_category connection:connection_pool * @since 2.0.6, 2.1.1 */ @Test(groups = "long", enabled = false /* this test causes timeouts on Jenkins */) public void open_connections_metric_should_always_be_positive() throws InterruptedException { // Track progress in a dedicated thread int numberOfInserts = 100 * 1000; final CountDownLatch pendingInserts = new CountDownLatch(numberOfInserts); ExecutorService progressReportExecutor = Executors.newSingleThreadExecutor(); final Runnable progressReporter = new Runnable() { @Override public void run() { pendingInserts.countDown(); } }; // Track opened connections in a dedicated thread every one second final AtomicBoolean negativeOpenConnectionCountSpotted = new AtomicBoolean(false); final Gauge<Integer> openConnections = cluster.getMetrics().getOpenConnections(); ScheduledExecutorService openConnectionsWatcherExecutor = Executors.newScheduledThreadPool(1); final Runnable openConnectionsWatcher = new Runnable() { @Override public void run() { Integer value = openConnections.getValue(); if (value < 0) { System.err.println("Negative value spotted for 
openConnection metric: " + value); negativeOpenConnectionCountSpotted.set(true); } } }; openConnectionsWatcherExecutor.scheduleAtFixedRate(openConnectionsWatcher, 1, 1, SECONDS); // Insert 100k lines in a newly created 1k columns table PreparedStatement insertStatement = session.prepare(generateJava349InsertStatement()); for (int key = 0; key < numberOfInserts; key++) { ResultSetFuture future = session.executeAsync(insertStatement.bind(key)); future.addListener(progressReporter, progressReportExecutor); } // Wait for all inserts to happen and stop connections and progress tracking pendingInserts.await(); openConnectionsWatcherExecutor.shutdownNow(); progressReportExecutor.shutdownNow(); if (negativeOpenConnectionCountSpotted.get()) { fail("Negative value spotted for open connection count"); } } private String generateJava349InsertStatement() { StringBuilder sb = new StringBuilder("INSERT INTO Java349 (mykey"); for (int i = 0; i < 1000; i++) { sb.append(", column").append(i); } sb.append(") VALUES (?"); for (int i = 0; i < 1000; i++) { sb.append(", ").append(i); } sb.append(");"); PrimingRequest preparedStatementPrime = PrimingRequest.preparedStatementBuilder() .withQuery(sb.toString()) .withThen(then().withVariableTypes(PrimitiveType.INT)) .build(); primingClient.prime(preparedStatementPrime); return sb.toString(); } /** * Mock ResponseCallback that simulates the behavior of SpeculativeExecution (in terms of borrowing/releasing connections). */ static class MockRequest implements Connection.ResponseCallback { enum State {START, COMPLETED, FAILED, TIMED_OUT} final Connection connection; private Connection.ResponseHandler responseHandler; private final AtomicReference<State> state = new AtomicReference<State>(State.START); static MockRequest send(HostConnectionPool pool) throws ConnectionException, BusyConnectionException, TimeoutException { // Create a MockRequest and spy on it. Create a response handler and add it to the connection's dispatcher. 
MockRequest request = spy(new MockRequest(pool)); request.responseHandler = new Connection.ResponseHandler(request.connection, request); request.connection.dispatcher.add(request.responseHandler); return request; } private MockRequest(HostConnectionPool pool) throws ConnectionException, TimeoutException, BusyConnectionException { connection = pool.borrowConnection(500, TimeUnit.MILLISECONDS); } void simulateSuccessResponse() { onSet(connection, null, 0, 0); } @SuppressWarnings("unused") void simulateErrorResponse() { onException(connection, null, 0, 0); } @SuppressWarnings("unused") void simulateTimeout() { if (onTimeout(connection, 0, 0)) responseHandler.cancelHandler(); } @Override public void onSet(Connection connection, Message.Response response, long latency, int retryCount) { if (state.compareAndSet(State.START, State.COMPLETED)) { connection.release(); connection.dispatcher.removeHandler(responseHandler, true); } } @Override public void onException(Connection connection, Exception exception, long latency, int retryCount) { if (state.compareAndSet(State.START, State.FAILED)) { connection.release(); connection.dispatcher.removeHandler(responseHandler, true); } } @Override public boolean onTimeout(Connection connection, long latency, int retryCount) { return state.compareAndSet(State.START, State.TIMED_OUT); } @Override public Message.Request request() { return null; // not needed for this test class } @Override public int retryCount() { return 0; // value not important for this test class } } }
/*
 * Copyright (c) 2015-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 */

package com.facebook.imagepipeline.producers;

import android.os.SystemClock;

import com.facebook.common.internal.VisibleForTesting;
import com.facebook.imagepipeline.image.EncodedImage;

import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

import javax.annotation.concurrent.GuardedBy;

/**
 * Manages jobs so that only one can be executed at a time and no more often than once in
 * <code>mMinimumJobIntervalMs</code> milliseconds.
 */
public class JobScheduler {

  static final String QUEUE_TIME_KEY = "queueTime";

  @VisibleForTesting
  static class JobStartExecutorSupplier {

    private static ScheduledExecutorService sJobStarterExecutor;

    // Synchronized: enqueueJob() can run concurrently from client threads (via scheduleJob)
    // and from mExecutor threads (via doJob -> onJobFinished -> enqueueJob). Without the
    // lock, two threads racing through the null check could each create an executor,
    // leaking a thread. Contention here is negligible, so a plain synchronized is enough.
    static synchronized ScheduledExecutorService get() {
      if (sJobStarterExecutor == null) {
        sJobStarterExecutor = Executors.newSingleThreadScheduledExecutor();
      }
      return sJobStarterExecutor;
    }
  }

  /** Callback that performs the actual work for the currently set job. */
  public interface JobRunnable {
    void run(EncodedImage encodedImage, @Consumer.Status int status);
  }

  private final Executor mExecutor;
  private final JobRunnable mJobRunnable;
  private final Runnable mDoJobRunnable;
  private final Runnable mSubmitJobRunnable;
  private final int mMinimumJobIntervalMs;

  @VisibleForTesting
  enum JobState { IDLE, QUEUED, RUNNING, RUNNING_AND_PENDING }

  // job data
  @GuardedBy("this")
  @VisibleForTesting
  EncodedImage mEncodedImage;
  @GuardedBy("this")
  @VisibleForTesting
  @Consumer.Status int mStatus;

  // job state
  @GuardedBy("this")
  @VisibleForTesting
  JobState mJobState;
  @GuardedBy("this")
  @VisibleForTesting
  long mJobSubmitTime;
  @GuardedBy("this")
  @VisibleForTesting
  long mJobStartTime;

  /**
   * @param executor executor on which the job itself is run
   * @param jobRunnable callback invoked with the current (image, status) pair when the job fires
   * @param minimumJobIntervalMs minimum time between two job starts, in milliseconds
   */
  public JobScheduler(Executor executor, JobRunnable jobRunnable, int minimumJobIntervalMs) {
    mExecutor = executor;
    mJobRunnable = jobRunnable;
    mMinimumJobIntervalMs = minimumJobIntervalMs;
    mDoJobRunnable = new Runnable() {
      @Override
      public void run() {
        doJob();
      }
    };
    mSubmitJobRunnable = new Runnable() {
      @Override
      public void run() {
        submitJob();
      }
    };
    mEncodedImage = null;
    mStatus = 0;
    mJobState = JobState.IDLE;
    mJobSubmitTime = 0;
    mJobStartTime = 0;
  }

  /**
   * Clears the currently set job.
   *
   * <p> In case the currently set job has been scheduled but not started yet, the job won't be
   * executed.
   */
  public void clearJob() {
    EncodedImage oldEncodedImage;
    synchronized (this) {
      oldEncodedImage = mEncodedImage;
      mEncodedImage = null;
      mStatus = 0;
    }
    // Close outside the monitor so we never hold the lock during the close.
    EncodedImage.closeSafely(oldEncodedImage);
  }

  /**
   * Updates the job.
   *
   * <p> This just updates the job, but it doesn't schedule it. In order to be executed, the job has
   * to be scheduled after being set. In case there was a previous job scheduled that has not yet
   * started, this new job will be executed instead.
   *
   * @return whether the job was successfully updated.
   */
  public boolean updateJob(EncodedImage encodedImage, @Consumer.Status int status) {
    if (!shouldProcess(encodedImage, status)) {
      return false;
    }
    EncodedImage oldEncodedImage;
    synchronized (this) {
      oldEncodedImage = mEncodedImage;
      // Clone so the caller keeps ownership of its reference; we close our copy ourselves.
      mEncodedImage = EncodedImage.cloneOrNull(encodedImage);
      mStatus = status;
    }
    EncodedImage.closeSafely(oldEncodedImage);
    return true;
  }

  /**
   * Schedules the currently set job (if any).
   *
   * <p> This method can be called multiple times. It is guaranteed that each job set will be
   * executed no more than once. It is guaranteed that the last job set will be executed, unless
   * the job was cleared first.
   * <p> The job will be scheduled no sooner than <code>minimumJobIntervalMs</code> milliseconds
   * since the last job started.
   *
   * @return true if the job was scheduled, false if there was no valid job to be scheduled
   */
  public boolean scheduleJob() {
    long now = SystemClock.uptimeMillis();
    long when = 0;
    boolean shouldEnqueue = false;
    synchronized (this) {
      if (!shouldProcess(mEncodedImage, mStatus)) {
        return false;
      }
      switch (mJobState) {
        case IDLE:
          // Respect the minimum interval relative to the last job start.
          when = Math.max(mJobStartTime + mMinimumJobIntervalMs, now);
          shouldEnqueue = true;
          mJobSubmitTime = now;
          mJobState = JobState.QUEUED;
          break;
        case QUEUED:
          // do nothing, the job is already queued
          break;
        case RUNNING:
          // remember that another run is wanted; onJobFinished() will re-enqueue
          mJobState = JobState.RUNNING_AND_PENDING;
          break;
        case RUNNING_AND_PENDING:
          // do nothing, the next job is already pending
          break;
      }
    }
    if (shouldEnqueue) {
      // enqueue outside the monitor; delay may be zero (run immediately)
      enqueueJob(when - now);
    }
    return true;
  }

  private void enqueueJob(long delay) {
    // If we make mExecutor be a {@link ScheduledExecutorService}, we could just have
    // `mExecutor.schedule(mDoJobRunnable, delay)` and avoid mSubmitJobRunnable and
    // JobStartExecutorSupplier altogether. That would require some refactoring though.
    if (delay > 0) {
      JobStartExecutorSupplier.get().schedule(mSubmitJobRunnable, delay, TimeUnit.MILLISECONDS);
    } else {
      mSubmitJobRunnable.run();
    }
  }

  /** Hands the job over to the client-provided executor. */
  private void submitJob() {
    mExecutor.execute(mDoJobRunnable);
  }

  /** Runs the job: snapshots and clears the pending data, invokes the callback, cleans up. */
  private void doJob() {
    long now = SystemClock.uptimeMillis();
    EncodedImage input;
    int status;
    synchronized (this) {
      input = mEncodedImage;
      status = mStatus;
      mEncodedImage = null;
      mStatus = 0;
      mJobState = JobState.RUNNING;
      mJobStartTime = now;
    }
    try {
      // we need to do a check in case the job got cleared in the meantime
      if (shouldProcess(input, status)) {
        mJobRunnable.run(input, status);
      }
    } finally {
      // always release the image and advance the state machine, even if the callback throws
      EncodedImage.closeSafely(input);
      onJobFinished();
    }
  }

  /** Transitions out of RUNNING; re-enqueues if another run was requested meanwhile. */
  private void onJobFinished() {
    long now = SystemClock.uptimeMillis();
    long when = 0;
    boolean shouldEnqueue = false;
    synchronized (this) {
      if (mJobState == JobState.RUNNING_AND_PENDING) {
        when = Math.max(mJobStartTime + mMinimumJobIntervalMs, now);
        shouldEnqueue = true;
        mJobSubmitTime = now;
        mJobState = JobState.QUEUED;
      } else {
        mJobState = JobState.IDLE;
      }
    }
    if (shouldEnqueue) {
      enqueueJob(when - now);
    }
  }

  private static boolean shouldProcess(EncodedImage encodedImage, @Consumer.Status int status) {
    // the last result should always be processed, whereas
    // an intermediate result should be processed only if valid
    return BaseConsumer.isLast(status) ||
        BaseConsumer.statusHasFlag(status, Consumer.IS_PLACEHOLDER) ||
        EncodedImage.isValid(encodedImage);
  }

  /**
   * Gets the queued time in milliseconds for the currently running job.
   *
   * <p> The result is only valid if called from {@link JobRunnable#run}.
   */
  public synchronized long getQueuedTime() {
    return mJobStartTime - mJobSubmitTime;
  }
}
package com.shonshampain.streamrecorder.helpers;

import android.content.ContentResolver;
import android.content.Context;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.View;
import android.widget.RelativeLayout;

import com.shonshampain.streamrecorder.adapters.FileAdapter;
import com.shonshampain.streamrecorder.adapters.PlaylistAdapter;
import com.shonshampain.streamrecorder.adapters.PlaylistDetailAdapter;
import com.shonshampain.streamrecorder.adapters.StreamAdapter;
import com.shonshampain.streamrecorder.application.StreamRecorderApplication;
import com.shonshampain.streamrecorder.dao.Stream;
import com.shonshampain.streamrecorder.db.StreamHelper;
import com.shonshampain.streamrecorder.events.FabEvent;
import com.shonshampain.streamrecorder.events.PausePlaybackEvent;
import com.shonshampain.streamrecorder.events.SwitchToDetailsView;
import com.shonshampain.streamrecorder.events.SwitchToFileDetailsView;
import com.shonshampain.streamrecorder.events.TrackMetaData;
import com.shonshampain.streamrecorder.managers.PlaylistCursorManager;
import com.shonshampain.streamrecorder.managers.SongCursorManager;
import com.shonshampain.streamrecorder.managers.StreamCursorManager;
import com.shonshampain.streamrecorder.views.StreamRecorderFab;

import java.util.ArrayList;

import de.greenrobot.event.EventBus;

/**
 * Manages which of four content views (streams, playlists, playlist details, playlist file
 * details) is shown inside a single {@code RelativeLayout} container, lazily building each
 * RecyclerView on first use and animating transitions via {@code AnimationHelper}.
 *
 * <p>Registers itself on the EventBus in the constructor; callers must invoke {@link #close()}
 * to unregister and release the cursor managers.
 */
public class ViewHelper {
    private static final String TAG = "ViewHelper";
    private static final boolean DBG = false;

    // The four top-level screens this helper can display.
    enum ContainerViews {Streams, Playlists, PlaylistDetails, PlaylistFileDetails}

    // Container whose single child is swapped to switch screens.
    RelativeLayout baseView;
    // Lazily-created RecyclerViews, one per screen (null until first shown).
    View streamsView, playlistsView, playlistDetailsView, playlistFileDetailsView;
    StreamCursorManager streamCursorManager;
    PlaylistCursorManager playlistCursorManager;
    SongCursorManager songCursorManager;
    StreamAdapter streamAdapter;
    PlaylistAdapter playlistAdapter;
    PlaylistDetailAdapter playlistDetailAdapter;
    FileAdapter playlistFileDetailAdapter;
    LinearLayoutManager playlistDetailRecyclerviewLayoutManager;
    LinearLayoutManager playlistFileDetailRecyclerviewLayoutManager;
    // Current/previous selection; "old" values detect whether the detail view must be rebuilt.
    int playlistId, oldPlaylistId;
    String playlistName, oldPlaylistName;
    ContainerViews currentView;

    /**
     * Wires up cursor managers and adapters, seeds three default streams when the stream table
     * is empty, and shows the Streams screen.
     *
     * @param baseView container whose child views this helper will swap
     */
    public ViewHelper(RelativeLayout baseView) {
        EventBus.getDefault().register(this);
        this.baseView = baseView;
        ContentResolver cr = baseView.getContext().getContentResolver();
        streamCursorManager = new StreamCursorManager(cr);
        streamCursorManager.open();
        playlistCursorManager = new PlaylistCursorManager(cr);
        playlistCursorManager.open();
        streamAdapter = new StreamAdapter(streamCursorManager);
        playlistAdapter = new PlaylistAdapter(playlistCursorManager);
        ArrayList<Stream> streams = StreamHelper.getInstance().fetch(baseView.getContext());
        if (streams.size() == 0) {
            // First launch: seed a few well-known stations so the list is not empty.
            StreamHelper.getInstance().insert(baseView.getContext(),
                    new Stream("KCRW Eclectic 24", "http://kcrw.ic.llnwd.net/stream/kcrw_music", true));
            StreamHelper.getInstance().insert(baseView.getContext(),
                    new Stream("KCRW Live Feed", "http://kcrw.ic.llnwd.net/stream/kcrw_live", true));
            StreamHelper.getInstance().insert(baseView.getContext(),
                    new Stream("KJazz Live Feed", "http://1.ice1.firststreaming.com/kkjz_fm.aac", true));
            // Re-open so the cursor picks up the freshly inserted rows.
            streamCursorManager.open();
            streamAdapter.notifyDataSetChanged();
        }
        switchViews(baseView.getContext(), ContainerViews.Streams);
    }

    // Lazily builds the streams RecyclerView. NOTE(review): name has a typo ("Steams");
    // kept as-is here since this pass only adds documentation.
    private void prepareSteams(Context context) {
        if (streamsView == null) {
            LinearLayoutManager llm =
                    new LinearLayoutManager(context, LinearLayoutManager.VERTICAL, false);
            RecyclerView recyclerview = new RecyclerView(context);
            recyclerview.setLayoutManager(llm);
            recyclerview.setAdapter(streamAdapter);
            streamsView = recyclerview;
        }
    }

    // Lazily builds the playlists RecyclerView.
    private void preparePlaylists(Context context) {
        if (playlistsView == null) {
            LinearLayoutManager llm =
                    new LinearLayoutManager(context, LinearLayoutManager.VERTICAL, false);
            RecyclerView recyclerview = new RecyclerView(context);
            recyclerview.setLayoutManager(llm);
            recyclerview.setAdapter(playlistsView == null ? playlistAdapter : playlistAdapter);
            playlistsView = recyclerview;
        }
    }

    // Rebuilds the playlist-detail RecyclerView for the current playlistId (always rebuilt,
    // unlike the two lazy views above, because the backing cursor depends on the playlist).
    private void preparePlaylistDetail(Context context) {
        ContentResolver cr = context.getContentResolver();
        songCursorManager = new SongCursorManager(cr);
        songCursorManager.open(playlistId);
        if (DBG) Log.d(TAG, "Loading playlist: " + playlistId);
        if (DBG) Log.d(TAG, "Num songs in this playlist: " + songCursorManager.getCursor().getCount());
        playlistDetailAdapter = new PlaylistDetailAdapter(playlistId, songCursorManager);
        playlistDetailRecyclerviewLayoutManager =
                new LinearLayoutManager(context, LinearLayoutManager.VERTICAL, false);
        RecyclerView recyclerview = new RecyclerView(context);
        recyclerview.setLayoutManager(playlistDetailRecyclerviewLayoutManager);
        recyclerview.setAdapter(playlistDetailAdapter);
        playlistDetailsView = recyclerview;
    }

    // Rebuilds the file-detail RecyclerView for the current playlistName.
    private void preparePlaylistFileDetail(Context context) {
        playlistFileDetailAdapter = new FileAdapter(playlistName);
        playlistFileDetailRecyclerviewLayoutManager =
                new LinearLayoutManager(context, LinearLayoutManager.VERTICAL, false);
        RecyclerView recyclerview = new RecyclerView(context);
        recyclerview.setLayoutManager(playlistFileDetailRecyclerviewLayoutManager);
        recyclerview.setAdapter(playlistFileDetailAdapter);
        playlistFileDetailsView = recyclerview;
    }

    private void returnToStreams(Context context) {
        switchViews(context, ContainerViews.Streams);
    }

    private void returnToPlaylists(Context context) {
        switchViews(context, ContainerViews.Playlists);
    }

    /**
     * Core screen-switching logic. Only animates (and only updates {@link #currentView}) when
     * the target actually differs from what is shown; leaving a detail view for the playlists
     * screen also pauses playback.
     */
    private void switchViews(Context context, ContainerViews which) {
        View view = baseView.getChildAt(0);
        // Non-null only when an animation was actually started; gates the currentView update.
        View status = null;

        if (view == null || which == ContainerViews.Streams) {
            if (currentView != ContainerViews.Streams) {
                prepareSteams(context);
                status = AnimationHelper.animate(baseView, view, streamsView);
                EventBus.getDefault().post(new FabEvent(StreamRecorderFab.FabStates.AddStream));
            }
        } else if (which == ContainerViews.Playlists) {
            if (currentView != ContainerViews.Playlists) {
                if (currentView == ContainerViews.PlaylistDetails
                        || currentView == ContainerViews.PlaylistFileDetails) {
                    // Backing out of a detail view stops whatever was playing there.
                    EventBus.getDefault().post(new PausePlaybackEvent());
                }
                preparePlaylists(context);
                status = AnimationHelper.animate(baseView, view, playlistsView);
                EventBus.getDefault().post(new FabEvent(StreamRecorderFab.FabStates.AddFile));
            }
        } else if (which == ContainerViews.PlaylistDetails) {
            // Rebuild when entering from another screen OR when a different playlist was picked.
            if (currentView != ContainerViews.PlaylistDetails || playlistId != oldPlaylistId) {
                preparePlaylistDetail(context);
                status = AnimationHelper.animate(baseView, view, playlistDetailsView);
            }
        } else if (which == ContainerViews.PlaylistFileDetails) {
            if (currentView != ContainerViews.PlaylistFileDetails
                    || !playlistName.equals(oldPlaylistName)) {
                preparePlaylistFileDetail(context);
                status = AnimationHelper.animate(baseView, view, playlistFileDetailsView);
            }
        }
        if (status != null) {
            currentView = which;
            if (DBG) Log.d(TAG, "Switching to: " + currentView);
        }
    }

    /** Shows the detail view for the playlist identified by {@code id}. */
    public void switchToPlaylistDetail(Context context, int id) {
        oldPlaylistId = playlistId;
        playlistId = id;
        AnimationHelper.setDirectionRight(false);
        switchViews(context, ContainerViews.PlaylistDetails);
    }

    /**
     * Shows the FILE detail view for the playlist named {@code name}.
     * NOTE(review): despite the shared name with the int overload, this targets
     * {@code PlaylistFileDetails}, not {@code PlaylistDetails}.
     */
    public void switchToPlaylistDetail(Context context, String name) {
        oldPlaylistName = playlistName;
        playlistName = name;
        AnimationHelper.setDirectionRight(false);
        switchViews(context, ContainerViews.PlaylistFileDetails);
    }

    /**
     * Handles a horizontal swipe. {@code right == true} means a rightward swipe; swipes that
     * have no target screen (e.g. right from Streams) are ignored.
     */
    public void swipe(Context context, boolean right) {
        if (currentView == ContainerViews.Streams && right) { return; }
        if (currentView == ContainerViews.Playlists && !right) { return; }
        if (currentView == ContainerViews.PlaylistDetails && !right) { return; }
        if (currentView == ContainerViews.PlaylistFileDetails && !right) { return; }
        if (currentView == ContainerViews.Streams) {
            switchViews(context, ContainerViews.Playlists);
        } else if (currentView == ContainerViews.Playlists) {
            returnToStreams(context);
        } else if (currentView == ContainerViews.PlaylistDetails
                || currentView == ContainerViews.PlaylistFileDetails) {
            returnToPlaylists(context);
        }
    }

    /** Re-reads the stream table and refreshes the stream list. */
    public void resetStream() {
        streamCursorManager.open();
        streamAdapter.notifyDataSetChanged();
    }

    /** Re-reads the playlist table and refreshes the playlist list. */
    public void resetPlaylist() {
        playlistCursorManager.open();
        playlistAdapter.notifyDataSetChanged();
    }

    // EventBus: new track metadata — refresh the detail list and scroll to the newest entry.
    @SuppressWarnings("unused")
    public void onEventMainThread(TrackMetaData unused) {
        if (currentView == ContainerViews.PlaylistDetails) {
            songCursorManager.open(playlistId);
            playlistDetailAdapter.notifyDataSetChanged();
            int max = playlistDetailRecyclerviewLayoutManager.getItemCount();
            playlistDetailRecyclerviewLayoutManager.scrollToPosition(max - 1);
        }
    }

    // EventBus: request to open a playlist's detail view by id.
    @SuppressWarnings("unused")
    public void onEventMainThread(SwitchToDetailsView event) {
        switchToPlaylistDetail(StreamRecorderApplication.getContext(), event.id);
    }

    // EventBus: request to open a playlist's file-detail view by name.
    @SuppressWarnings("unused")
    public void onEventMainThread(SwitchToFileDetailsView event) {
        switchToPlaylistDetail(StreamRecorderApplication.getContext(), event.name);
    }

    /** Releases cursors and unregisters from the EventBus. Call when the UI is torn down. */
    public void close() {
        streamCursorManager.close();
        playlistCursorManager.close();
        if (songCursorManager != null) {
            songCursorManager.close();
        }
        EventBus.getDefault().unregister(this);
    }
}
package fr.elefort.piratesremastered.model.gravity; import fr.elefort.piratesremastered.maths.Geometry; import fr.elefort.piratesremastered.model.BaseObject; import fr.elefort.piratesremastered.model.Movable; import fr.elefort.piratesremastered.model.Obstacle; /** * Created by Eric on 15/09/2015. */ public enum Gravity implements GravityEngine { NONE { @Override public int getOrientation() { return 0; } @Override public void update(Movable sender, long timeDelta) { } @Override public boolean intersect(Movable sender, BaseObject obj) { return false; } @Override public void checkBottom(Movable sender) { } }, UP { @Override public int getOrientation() { return 180; } @Override public void update(Movable sender, long timeDelta) { int x = sender.getPosition().x + sender.velocity * (-1); int y = sender.getPosition().y + sender.impulsion; sender.setPosition(x, y); checkBottom(sender); } @Override public boolean intersect(Movable sender, BaseObject obj) { return Geometry.intersect(sender.getHitBox().left, sender.getHitBox().top - 1, sender.getHitBox().right, sender.getHitBox().bottom, obj.getHitBox().left, obj.getHitBox().top, obj.getHitBox().right, obj.getHitBox().bottom); } @Override public void checkBottom(Movable sender) { Obstacle obstacle = sender.getObstacle(); if(sender.impulsion >= 0) sender.impulsion -= 1; if(obstacle != null) { if (intersect(sender, obstacle)) { sender.setPosition(sender.getPosition().x, (int)(obstacle.getHitBox().bottom + (sender.getHitBox().height() * 0.5f) )); sender.impulsion = 0; return; } else { sender.setObstacle(null); } } } }, DOWN { @Override public int getOrientation() { return 0; } @Override public void update(Movable sender, long timeDelta) { int x = sender.getPosition().x + sender.velocity; int y = sender.getPosition().y + sender.impulsion * (-1); sender.setPosition(x, y); checkBottom(sender); } @Override public boolean intersect(Movable sender, BaseObject obj) { return Geometry.intersect(sender.getHitBox().left, 
sender.getHitBox().top, sender.getHitBox().right, sender.getHitBox().bottom + 1, obj.getHitBox().left, obj.getHitBox().top, obj.getHitBox().right, obj.getHitBox().bottom); } @Override public void checkBottom(Movable sender) { Obstacle obstacle = sender.getObstacle(); if(sender.impulsion >= 0) sender.impulsion -= 1; if(obstacle != null) { if (intersect(sender, obstacle)) { sender.setPosition(sender.getPosition().x, (int)(obstacle.getHitBox().top - (sender.getHitBox().height() * 0.5f) )); sender.impulsion = 0; return; } else { sender.setObstacle(null); } } } }, LEFT { @Override public int getOrientation() { return 90; } @Override public void update(Movable sender, long timeDelta) { int y = sender.getPosition().y + sender.velocity; int x = sender.getPosition().x + sender.impulsion; sender.setPosition(x, y); checkBottom(sender); } @Override public boolean intersect(Movable sender, BaseObject obj) { return Geometry.intersect(sender.getHitBox().left - 1, sender.getHitBox().top, sender.getHitBox().right, sender.getHitBox().bottom, obj.getHitBox().left, obj.getHitBox().top, obj.getHitBox().right, obj.getHitBox().bottom); } @Override public void checkBottom(Movable sender) { Obstacle obstacle = sender.getObstacle(); if(sender.impulsion >= 0) sender.impulsion -= 1; if(obstacle != null) { if (intersect(sender, obstacle)) { sender.setPosition((int)(obstacle.getHitBox().right + (sender.getHitBox().width() * 0.5f)), sender.getPosition().y); sender.impulsion = 0; return; } else { sender.setObstacle(null); } } } }, RIGHT { @Override public int getOrientation() { return 270; } @Override public void update(Movable sender, long timeDelta) { int y = sender.getPosition().y + sender.velocity * (-1); int x = sender.getPosition().x + sender.impulsion * (-1); sender.setPosition(x, y); checkBottom(sender); } @Override public boolean intersect(Movable sender, BaseObject obj) { return Geometry.intersect(sender.getHitBox().left, sender.getHitBox().top, sender.getHitBox().right + 1, 
sender.getHitBox().bottom, obj.getHitBox().left, obj.getHitBox().top, obj.getHitBox().right, obj.getHitBox().bottom); } @Override public void checkBottom(Movable sender) { Obstacle obstacle = sender.getObstacle(); if(sender.impulsion >= 0) sender.impulsion -= 1; if(obstacle != null) { if (intersect(sender, obstacle)) { sender.setPosition((int)(obstacle.getHitBox().left - (sender.getHitBox().width() * 0.5f)), sender.getPosition().y); sender.impulsion = 0; return; } else { sender.setObstacle(null); } } } } }
/* * Copyright 2014 Robin Stuart * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package uk.org.okapibarcode.backend; import java.awt.geom.Rectangle2D; import java.nio.charset.StandardCharsets; /** * <p>Implements Codablock-F according to AIM Europe "Uniform Symbology Specification - Codablock F", 1995. * * <p>Codablock-F is a multi-row symbology using Code 128 encoding. It can encode any 8-bit ISO 8859-1 (Latin-1) * data up to approximately 1000 alpha-numeric characters or 2000 numeric digits in length. 
 * @author <a href="mailto:rstuart114@gmail.com">Robin Stuart</a>
 */
public class CodablockF extends Symbol {

    // Character-subset classification of a single input byte (see findSubset).
    private enum Mode {
        SHIFTA, LATCHA, SHIFTB, LATCHB, SHIFTC, LATCHC, AORB, ABORC, CANDB, CANDBB
    }

    // Code 128 subset active in a row.
    private enum CfMode {
        MODEA, MODEB, MODEC
    }

    /* Annex A Table A.1 */
    private static final String[] C_128_TABLE = {
        "212222", "222122", "222221", "121223", "121322", "131222", "122213", "122312", "132212",
        "221213", "221312", "231212", "112232", "122132", "122231", "113222", "123122", "123221",
        "223211", "221132", "221231", "213212", "223112", "312131", "311222", "321122", "321221",
        "312212", "322112", "322211", "212123", "212321", "232121", "111323", "131123", "131321",
        "112313", "132113", "132311", "211313", "231113", "231311", "112133", "112331", "132131",
        "113123", "113321", "133121", "313121", "211331", "231131", "213113", "213311", "213131",
        "311123", "311321", "331121", "312113", "312311", "332111", "314111", "221411", "431111",
        "111224", "111422", "121124", "121421", "141122", "141221", "112214", "112412", "122114",
        "122411", "142112", "142211", "241211", "221114", "413111", "241112", "134111", "111242",
        "121142", "121241", "114212", "124112", "124211", "411212", "421112", "421211", "212141",
        "214121", "412121", "111143", "111341", "131141", "114113", "114311", "411113", "411311",
        "113141", "114131", "311141", "411131", "211412", "211214", "211232", "2331112"};

    // Codeword grid: up to 44 rows x 62 data columns.
    private int[][] blockmatrix = new int[44][62];
    private int columns_needed;
    private int rows_needed;
    // Subset in effect after the last data codeword (used for the K1/K2 check digits).
    private CfMode final_mode;
    // Subset selector chosen for each row.
    private CfMode[] subset_selector = new CfMode[44];

    /**
     * TODO: It doesn't appear that this symbol should support GS1 (it's not in the GS1 spec and Zint doesn't
     * support GS1 with this type of symbology). However, the code below does contain GS1 checks, so we'll mark
     * it as supported for now. It's very possible that the code below which supports GS1 only does so because
     * it was originally copied from the Code 128 source code (just a suspicion, though).
     */
    @Override
    protected boolean gs1Supported() {
        return true;
    }

    /**
     * Encodes the content as a Codablock-F symbol: validates the input, estimates the grid
     * size, fills the codeword matrix, appends the K1/K2 check digits (Annex F) and per-row
     * check digits (Annex E), and renders each row as a Code 128 bar pattern.
     */
    @Override
    protected void encode() {
        int input_length, i, j, k;
        int min_module_height;
        Mode last_mode, this_mode;
        double estimate_codelength;
        String row_pattern;
        int[] row_indicator = new int[44];
        int[] row_check = new int[44];
        int k1_sum, k2_sum;
        int k1_check, k2_check;

        final_mode = CfMode.MODEA;

        // Codablock-F is limited to 8-bit Latin-1 data.
        if (!content.matches("[\u0000-\u00FF]+")) {
            throw new OkapiException("Invalid characters in input data");
        }

        inputData = toBytes(content, StandardCharsets.ISO_8859_1, 0x00);
        input_length = inputData.length - 1;

        if (input_length > 5450) {
            throw new OkapiException("Input data too long");
        }

        /* Make a guess at how many characters will be needed to encode the data */
        estimate_codelength = 0.0;
        last_mode = Mode.AORB; /* Codablock always starts with Code A */
        for (i = 0; i < input_length; i++) {
            this_mode = findSubset(inputData[i]);
            if (this_mode != last_mode) {
                estimate_codelength += 1.0;
            }
            if (this_mode != Mode.ABORC) {
                estimate_codelength += 1.0;
            } else {
                // Digits pair up in subset C, so each costs half a codeword.
                estimate_codelength += 0.5;
            }
            if (inputData[i] > 127) {
                // Extended ASCII needs an FNC4 prefix.
                estimate_codelength += 1.0;
            }
            last_mode = this_mode;
        }

        /* Decide symbol size based on the above guess */
        rows_needed = (int) (0.5 + Math.sqrt((estimate_codelength + 2) / 1.45));
        if (rows_needed < 2) {
            rows_needed = 2;
        }
        if (rows_needed > 44) {
            rows_needed = 44;
        }
        columns_needed = (int) (estimate_codelength + 2) / rows_needed;
        if (columns_needed < 4) {
            columns_needed = 4;
        }
        if (columns_needed > 62) {
            throw new OkapiException("Input data too long");
        }

        /* Encode the data */
        data_encode_blockf();

        /* Add check digits - Annex F */
        k1_sum = 0;
        k2_sum = 0;
        for (i = 0; i < input_length; i++) {
            if (inputData[i] == FNC1) {
                k1_sum += (i + 1) * 29; /* GS */
                k2_sum += i * 29;
            } else {
                k1_sum += (i + 1) * inputData[i];
                k2_sum += i * inputData[i];
            }
        }
        k1_check = k1_sum % 86;
        k2_check = k2_sum % 86;
        if ((final_mode == CfMode.MODEA) || (final_mode == CfMode.MODEB)) {
            // Check digits are expressed in the final subset's value range.
            k1_check = k1_check + 64;
            if (k1_check > 95) {
                k1_check -= 96;
            }
            k2_check = k2_check + 64;
            if (k2_check > 95) {
                k2_check -= 96;
            }
        }
        blockmatrix[rows_needed - 1][columns_needed - 2] = k1_check;
        blockmatrix[rows_needed - 1][columns_needed - 1] = k2_check;

        /* Calculate row height (4.6.1.a) */
        // NOTE(review): min_module_height is computed but never applied; rows are fixed at 15 below.
        min_module_height = (int) (0.55 * (columns_needed + 3)) + 3;
        if (min_module_height < 8) {
            min_module_height = 8;
        }

        /* Encode the Row Indicator in the First Row of the Symbol - Table D2 */
        if (subset_selector[0] == CfMode.MODEC) {
            /* Code C */
            row_indicator[0] = rows_needed - 2;
        } else {
            /* Code A or B */
            row_indicator[0] = rows_needed + 62;
            if (row_indicator[0] > 95) {
                row_indicator[0] -= 95;
            }
        }

        /* Encode the Row Indicator in the Second and Subsequent Rows of the Symbol - Table D3 */
        for (i = 1; i < rows_needed; i++) {
            /* Note that the second row is row number 1 because counting starts from 0 */
            if (subset_selector[i] == CfMode.MODEC) {
                /* Code C */
                row_indicator[i] = i + 42;
            } else {
                /* Code A or B */
                if (i < 6)
                    row_indicator[i] = i + 10;
                else
                    row_indicator[i] = i + 20;
            }
        }

        /* Calculate row check digits - Annex E */
        for (i = 0; i < rows_needed; i++) {
            k = 103;
            switch (subset_selector[i]) {
                case MODEA:
                    k += 98;
                    break;
                case MODEB:
                    k += 100;
                    break;
                case MODEC:
                    k += 99;
                    break;
            }
            k += 2 * row_indicator[i];
            for (j = 0; j < columns_needed; j++) {
                k += (j + 3) * blockmatrix[i][j];
            }
            row_check[i] = k % 103;
        }

        readable = "";
        row_count = rows_needed;
        pattern = new String[row_count];
        row_height = new int[row_count];

        infoLine("Grid Size: " + columns_needed + " X " + rows_needed);
        infoLine("K1 Check Digit: " + k1_check);
        infoLine("K2 Check Digit: " + k2_check);

        /* Resolve the data into patterns and place in symbol structure */
        info("Encoding: ");
        for (i = 0; i < rows_needed; i++) {
            row_pattern = "";
            /* Start character */
            row_pattern += C_128_TABLE[103]; /* Always Start A */

            switch (subset_selector[i]) {
                case MODEA:
                    row_pattern += C_128_TABLE[98];
                    info("MODEA ");
                    break;
                case MODEB:
                    row_pattern += C_128_TABLE[100];
                    info("MODEB ");
                    break;
                case MODEC:
                    row_pattern += C_128_TABLE[99];
                    info("MODEC ");
                    break;
            }
            row_pattern += C_128_TABLE[row_indicator[i]];
            infoSpace(row_indicator[i]);

            for (j = 0; j < columns_needed; j++) {
                row_pattern += C_128_TABLE[blockmatrix[i][j]];
                infoSpace(blockmatrix[i][j]);
            }

            row_pattern += C_128_TABLE[row_check[i]];
            info("(" + row_check[i] + ") ");

            /* Stop character */
            row_pattern += C_128_TABLE[106];

            /* Write the information into the symbol */
            pattern[i] = row_pattern;
            row_height[i] = 15;
        }
        infoLine();

        symbol_height = rows_needed * 15;
    }

    /** Classifies one input byte into the Code 128 subset(s) that can encode it. */
    private Mode findSubset(int letter) {
        Mode mode;

        if (letter == FNC1) {
            mode = Mode.AORB;
        } else if (letter <= 31) {
            mode = Mode.SHIFTA;
        } else if ((letter >= 48) && (letter <= 57)) {
            mode = Mode.ABORC;
        } else if (letter <= 95) {
            mode = Mode.AORB;
        } else if (letter <= 127) {
            mode = Mode.SHIFTB;
        } else if (letter <= 159) {
            mode = Mode.SHIFTA;
        } else if (letter <= 223) {
            mode = Mode.AORB;
        } else {
            mode = Mode.SHIFTB;
        }

        return mode;
    }

    /**
     * Fills {@link #blockmatrix} and {@link #subset_selector} row by row, applying the
     * subset-switching rules of Annex B (rules 1-8) in order for each input character,
     * then pads the final row(s). Updates {@link #rows_needed} and {@link #final_mode}.
     */
    private void data_encode_blockf() {

        int i, j, input_position, current_row;
        int column_position, c;
        CfMode current_mode;
        boolean done, exit_status;
        int input_length = inputData.length - 1;

        exit_status = false;
        current_row = 0;
        current_mode = CfMode.MODEA;
        column_position = 0;
        input_position = 0;
        c = 0;

        do {
            done = false;
            /* 'done' ensures that the instructions are followed in the correct order for each input character */

            if (column_position == 0) {
                /* The Beginning of a row */
                c = columns_needed;
                current_mode = character_subset_select(input_position);
                subset_selector[current_row] = current_mode;
                if ((current_row == 0) && (inputDataType == DataType.GS1)) {
                    /* Section 4.4.7.1 */
                    blockmatrix[current_row][column_position] = 102; /* FNC1 */
                    column_position++;
                    c--;
                }
            }

            if (inputData[input_position] == FNC1) {
                blockmatrix[current_row][column_position] = 102; /* FNC1 */
                column_position++;
                c--;
                input_position++;
                done = true;
            }

            if (!done) {
                if (c <= 2) {
                    /* Annex B section 1 rule 1 */
                    /* Ensure that there is sufficient encodation capacity to continue
                       (using the rules of Annex B.2). */
                    switch (current_mode) {
                        case MODEA: /* Table B1 applies */
                            if (findSubset(inputData[input_position]) == Mode.ABORC) {
                                blockmatrix[current_row][column_position] = a3_convert(inputData[input_position]);
                                column_position++;
                                c--;
                                input_position++;
                                done = true;
                            }

                            if ((findSubset(inputData[input_position]) == Mode.SHIFTB) && (c == 1)) {
                                /* Needs two symbols */
                                blockmatrix[current_row][column_position] = 100; /* Code B */
                                column_position++;
                                c--;
                                done = true;
                            }

                            if ((inputData[input_position] >= 244) && (!done)) {
                                /* Needs three symbols */
                                blockmatrix[current_row][column_position] = 100; /* Code B */
                                column_position++;
                                c--;
                                if (c == 1) {
                                    blockmatrix[current_row][column_position] = 101; /* Code A */
                                    column_position++;
                                    c--;
                                }
                                done = true;
                            }

                            if ((inputData[input_position] >= 128) && (!done) && c == 1) {
                                /* Needs two symbols */
                                blockmatrix[current_row][column_position] = 100; /* Code B */
                                column_position++;
                                c--;
                                done = true;
                            }
                            break;
                        case MODEB: /* Table B2 applies */
                            if (findSubset(inputData[input_position]) == Mode.ABORC) {
                                blockmatrix[current_row][column_position] = a3_convert(inputData[input_position]);
                                column_position++;
                                c--;
                                input_position++;
                                done = true;
                            }

                            if ((findSubset(inputData[input_position]) == Mode.SHIFTA) && (c == 1)) {
                                /* Needs two symbols */
                                blockmatrix[current_row][column_position] = 101; /* Code A */
                                column_position++;
                                c--;
                                done = true;
                            }

                            if (((inputData[input_position] >= 128) && (inputData[input_position] <= 159)) && (!done)) {
                                /* Needs three symbols */
                                blockmatrix[current_row][column_position] = 101; /* Code A */
                                column_position++;
                                c--;
                                if (c == 1) {
                                    blockmatrix[current_row][column_position] = 100; /* Code B */
                                    column_position++;
                                    c--;
                                }
                                done = true;
                            }

                            if ((inputData[input_position] >= 160) && (!done) && c == 1) {
                                /* Needs two symbols */
                                blockmatrix[current_row][column_position] = 101; /* Code A */
                                column_position++;
                                c--;
                                done = true;
                            }
                            break;
                        case MODEC: /* Table B3 applies */
                            if ((findSubset(inputData[input_position]) != Mode.ABORC) && (c == 1)) {
                                /* Needs two symbols */
                                blockmatrix[current_row][column_position] = 101; /* Code A */
                                column_position++;
                                c--;
                                done = true;
                            }

                            if (((findSubset(inputData[input_position]) == Mode.ABORC)
                                    && (findSubset(inputData[input_position + 1]) != Mode.ABORC))
                                    && (c == 1)) {
                                /* Needs two symbols */
                                blockmatrix[current_row][column_position] = 101; /* Code A */
                                column_position++;
                                c--;
                                done = true;
                            }

                            if (inputData[input_position] >= 128) {
                                /* Needs three symbols */
                                blockmatrix[current_row][column_position] = 101; /* Code A */
                                column_position++;
                                c--;
                                if (c == 1) {
                                    blockmatrix[current_row][column_position] = 100; /* Code B */
                                    column_position++;
                                    c--;
                                }
                            }
                            break;
                    }
                }
            }

            if (!done) {
                if (((findSubset(inputData[input_position]) == Mode.AORB)
                        || (findSubset(inputData[input_position]) == Mode.SHIFTA))
                        && (current_mode == CfMode.MODEA)) {
                    /* Annex B section 1 rule 2 */
                    /* If in Code Subset A and the next data character can be encoded in Subset A
                       encode the next character. */
                    if (inputData[input_position] >= 128) {
                        /* Extended ASCII character */
                        blockmatrix[current_row][column_position] = 101; /* FNC4 */
                        column_position++;
                        c--;
                    }
                    blockmatrix[current_row][column_position] = a3_convert(inputData[input_position]);
                    column_position++;
                    c--;
                    input_position++;
                    done = true;
                }
            }

            if (!done) {
                if (((findSubset(inputData[input_position]) == Mode.AORB)
                        || (findSubset(inputData[input_position]) == Mode.SHIFTB))
                        && (current_mode == CfMode.MODEB)) {
                    /* Annex B section 1 rule 3 */
                    /* If in Code Subset B and the next data character can be encoded in subset B,
                       encode the next character. */
                    if (inputData[input_position] >= 128) {
                        /* Extended ASCII character */
                        blockmatrix[current_row][column_position] = 100; /* FNC4 */
                        column_position++;
                        c--;
                    }
                    blockmatrix[current_row][column_position] = a3_convert(inputData[input_position]);
                    column_position++;
                    c--;
                    input_position++;
                    done = true;
                }
            }

            if (!done) {
                if (((findSubset(inputData[input_position]) == Mode.ABORC)
                        && (findSubset(inputData[input_position + 1]) == Mode.ABORC))
                        && (current_mode == CfMode.MODEC)) {
                    /* Annex B section 1 rule 4 */
                    /* If in Code Subset C and the next data are 2 digits, encode them. */
                    blockmatrix[current_row][column_position]
                            = ((inputData[input_position] - '0') * 10)
                            + (inputData[input_position + 1] - '0');
                    column_position++;
                    c--;
                    input_position += 2;
                    done = true;
                }
            }

            if (!done) {
                if (((current_mode == CfMode.MODEA) || (current_mode == CfMode.MODEB))
                        && ((findSubset(inputData[input_position]) == Mode.ABORC)
                        || (inputData[input_position] == FNC1))) {
                    // Count the number of numeric digits
                    // If 4 or more numeric data characters occur together when in subsets A or B:
                    // a. If there is an even number of numeric data characters, insert a Code C character before the
                    // first numeric digit to change to subset C.
                    // b. If there is an odd number of numeric data characters, insert a Code Set C character immediately
                    // after the first numeric digit to change to subset C.
                    i = 0;
                    j = 0;
                    do {
                        i++;
                        if (inputData[input_position + j] == FNC1) {
                            i++;
                        }
                        j++;
                    } while ((findSubset(inputData[input_position + j]) == Mode.ABORC)
                            || (inputData[input_position + j] == FNC1));
                    i--;

                    if (i >= 4) {
                        /* Annex B section 1 rule 5 */
                        if ((i % 2) == 1) {
                            /* Annex B section 1 rule 5a */
                            blockmatrix[current_row][column_position] = 99; /* Code C */
                            column_position++;
                            c--;
                            blockmatrix[current_row][column_position]
                                    = ((inputData[input_position] - '0') * 10)
                                    + (inputData[input_position + 1] - '0');
                            column_position++;
                            c--;
                            input_position += 2;
                            current_mode = CfMode.MODEC;
                        } else {
                            /* Annex B section 1 rule 5b */
                            blockmatrix[current_row][column_position] = a3_convert(inputData[input_position]);
                            column_position++;
                            c--;
                            input_position++;
                        }
                        done = true;
                    } else {
                        blockmatrix[current_row][column_position] = a3_convert(inputData[input_position]);
                        column_position++;
                        c--;
                        input_position++;
                        done = true;
                    }
                }
            }

            if (!done) {
                if ((current_mode == CfMode.MODEB) && (findSubset(inputData[input_position]) == Mode.SHIFTA)) {
                    /* Annex B section 1 rule 6 */
                    /* When in subset B and an ASCII control character occurs in the data:
                       a. If there is a lower case character immediately following the control character,
                       insert a Shift character before the control character.
                       b. Otherwise, insert a Code A character before the control character to change to subset A. */
                    if ((inputData[input_position + 1] >= 96) && (inputData[input_position + 1] <= 127)) {
                        /* Annex B section 1 rule 6a */
                        blockmatrix[current_row][column_position] = 98; /* Shift */
                        column_position++;
                        c--;
                        if (inputData[input_position] >= 128) {
                            /* Extended ASCII character */
                            blockmatrix[current_row][column_position] = 100; /* FNC4 */
                            column_position++;
                            c--;
                        }
                        blockmatrix[current_row][column_position] = a3_convert(inputData[input_position]);
                        column_position++;
                        c--;
                        input_position++;
                    } else {
                        /* Annex B section 1 rule 6b */
                        blockmatrix[current_row][column_position] = 101; /* Code A */
                        column_position++;
                        c--;
                        if (inputData[input_position] >= 128) {
                            /* Extended ASCII character */
                            blockmatrix[current_row][column_position] = 100; /* FNC4 */
                            column_position++;
                            c--;
                        }
                        blockmatrix[current_row][column_position] = a3_convert(inputData[input_position]);
                        column_position++;
                        c--;
                        input_position++;
                        current_mode = CfMode.MODEA;
                    }
                    done = true;
                }
            }

            if (!done) {
                if ((current_mode == CfMode.MODEA) && (findSubset(inputData[input_position]) == Mode.SHIFTB)) {
                    /* Annex B section 1 rule 7 */
                    /* When in subset A and a lower case character occurs in the data:
                       a. If following that character, a control character occurs in the data before
                       the occurrence of another lower case character, insert a Shift character
                       before the lower case character.
                       b. Otherwise, insert a Code B character before the lower case character to
                       change to subset B. */
                    if ((findSubset(inputData[input_position + 1]) == Mode.SHIFTA)
                            && (findSubset(inputData[input_position + 2]) == Mode.SHIFTB)) {
                        /* Annex B section 1 rule 7a */
                        blockmatrix[current_row][column_position] = 98; /* Shift */
                        column_position++;
                        c--;
                        if (inputData[input_position] >= 128) {
                            /* Extended ASCII character */
                            blockmatrix[current_row][column_position] = 101; /* FNC4 */
                            column_position++;
                            c--;
                        }
                        blockmatrix[current_row][column_position] = a3_convert(inputData[input_position]);
                        column_position++;
                        c--;
                        input_position++;
                    } else {
                        /* Annex B section 1 rule 7b */
                        blockmatrix[current_row][column_position] = 100; /* Code B */
                        column_position++;
                        c--;
                        if (inputData[input_position] >= 128) {
                            /* Extended ASCII character */
                            blockmatrix[current_row][column_position] = 101; /* FNC4 */
                            column_position++;
                            c--;
                        }
                        blockmatrix[current_row][column_position] = a3_convert(inputData[input_position]);
                        column_position++;
                        c--;
                        input_position++;
                        current_mode = CfMode.MODEB;
                    }
                    done = true;
                }
            }

            if (!done) {
                if ((current_mode == CfMode.MODEC)
                        && ((findSubset(inputData[input_position]) != Mode.ABORC)
                        || (findSubset(inputData[input_position + 1]) != Mode.ABORC))) {
                    /* Annex B section 1 rule 8 */
                    /* When in subset C and a non-numeric character (or a single digit) occurs in
                       the data, insert a Code A or Code B character before that character, following
                       rules 8a and 8b to determine between code subsets A and B.
                       a. If an ASCII control character (eg NUL) occurs in the data before any
                       lower case character, use Code A.
                       b. Otherwise use Code B. */
                    if (findSubset(inputData[input_position]) == Mode.SHIFTA) {
                        /* Annex B section 1 rule 8a */
                        blockmatrix[current_row][column_position] = 101; /* Code A */
                        column_position++;
                        c--;
                        if (inputData[input_position] >= 128) {
                            /* Extended ASCII character */
                            blockmatrix[current_row][column_position] = 101; /* FNC4 */
                            column_position++;
                            c--;
                        }
                        blockmatrix[current_row][column_position] = a3_convert(inputData[input_position]);
                        column_position++;
                        c--;
                        input_position++;
                        current_mode = CfMode.MODEA;
                    } else {
                        /* Annex B section 1 rule 8b */
                        blockmatrix[current_row][column_position] = 100; /* Code B */
                        column_position++;
                        c--;
                        if (inputData[input_position] >= 128) {
                            /* Extended ASCII character */
                            blockmatrix[current_row][column_position] = 100; /* FNC4 */
                            column_position++;
                            c--;
                        }
                        blockmatrix[current_row][column_position] = a3_convert(inputData[input_position]);
                        column_position++;
                        c--;
                        input_position++;
                        current_mode = CfMode.MODEB;
                    }
                    done = true;
                }
            }

            if (input_position == input_length) {
                /* End of data - Annex B rule 5a */
                if (c == 1) {
                    if (current_mode == CfMode.MODEA) {
                        blockmatrix[current_row][column_position] = 100; /* Code B */
                        current_mode = CfMode.MODEB;
                    } else {
                        blockmatrix[current_row][column_position] = 101; /* Code A */
                        current_mode = CfMode.MODEA;
                    }
                    column_position++;
                    c--;
                }

                if (c == 0) {
                    /* Another row is needed */
                    column_position = 0;
                    c = columns_needed;
                    current_row++;
                    subset_selector[current_row] = CfMode.MODEA;
                    current_mode = CfMode.MODEA;
                }

                if (c > 2) {
                    /* Fill up the last row */
                    do {
                        if (current_mode == CfMode.MODEA) {
                            blockmatrix[current_row][column_position] = 100; /* Code B */
                            current_mode = CfMode.MODEB;
                        } else {
                            blockmatrix[current_row][column_position] = 101; /* Code A */
                            current_mode = CfMode.MODEA;
                        }
                        column_position++;
                        c--;
                    } while (c > 2);
                }

                /* If (c == 2) { do nothing } */

                exit_status = true;
                final_mode = current_mode;
            } else {
                if (c <= 0) {
                    /* Start new row - Annex B rule 5b */
                    column_position = 0;
                    current_row++;
                    if (current_row > 43) {
                        throw new OkapiException("Too many rows.");
                    }
                }
            }

        } while (!exit_status);

        if (current_row == 0) {
            /* fill up the first row */
            for (c = column_position; c <= columns_needed; c++) {
                if (current_mode == CfMode.MODEA) {
                    blockmatrix[current_row][c] = 100; /* Code B */
                    current_mode = CfMode.MODEB;
                } else {
                    blockmatrix[current_row][c] = 101; /* Code A */
                    current_mode = CfMode.MODEA;
                }
            }

            current_row++;
            /* add a second row */
            subset_selector[current_row] = CfMode.MODEA;
            current_mode = CfMode.MODEA;
            for (c = 0; c <= columns_needed - 2; c++) {
                if (current_mode == CfMode.MODEA) {
                    blockmatrix[current_row][c] = 100; /* Code B */
                    current_mode = CfMode.MODEB;
                } else {
                    blockmatrix[current_row][c] = 101; /* Code A */
                    current_mode = CfMode.MODEA;
                }
            }
        }
        rows_needed = current_row + 1;
    }

    /** Picks the subset selector for a row from its first data character (Section 4.5.2). */
    private CfMode character_subset_select(int input_position) {
        /* Section 4.5.2 - Determining the Character Subset Selector in a Row */

        if ((inputData[input_position] >= '0') && (inputData[input_position] <= '9')) {
            /* Rule 1 */
            return CfMode.MODEC;
        }

        if ((inputData[input_position] >= 128) && (inputData[input_position] <= 160)) {
            /* Rule 2 (i) */
            return CfMode.MODEA;
        }

        if ((inputData[input_position] >= 0) && (inputData[input_position] <= 31)) {
            /* Rule 3 */
            return CfMode.MODEA;
        }

        /* Rule 4 */
        return CfMode.MODEB;
    }

    /** Maps a byte to its Code 128 symbol value (Annex A section 3); FNC4 handled by callers. */
    private int a3_convert(int source) {
        /* Annex A section 3 */
        if (source < 32) {
            return source + 64;
        }
        if ((source >= 32) && (source <= 127)) {
            return source - 32;
        }
        if ((source >= 128) && (source <= 159)) {
            return (source - 128) + 64;
        }
        /* if source >= 160 */
        return (source - 128) - 32;
    }

    /**
     * Converts the per-row bar/space width patterns into rectangles, adding the horizontal
     * separator bars between rows and the top/bottom binding bars.
     */
    @Override
    protected void plotSymbol() {
        int xBlock, yBlock;
        int x, y, w, h;
        boolean black;

        rectangles.clear();
        y = 1;
        h = 1;
        for (yBlock = 0; yBlock < row_count; yBlock++) {
            black = true;
            x = 0;
            for (xBlock = 0; xBlock < pattern[yBlock].length(); xBlock++) {
                if (black) {
                    black = false;
                    // Each pattern digit is a module width; alternating bar/space.
                    w = pattern[yBlock].charAt(xBlock) - '0';
                    if (row_height[yBlock] == -1) {
                        h = default_height;
                    } else {
                        h = row_height[yBlock];
                    }
                    if (w != 0 && h != 0) {
                        Rectangle2D.Double rect = new Rectangle2D.Double(x, y, w, h);
                        rectangles.add(rect);
                    }
                    if ((x + w) > symbol_width) {
                        symbol_width = x + w;
                    }
                } else {
                    black = true;
                }
                x += pattern[yBlock].charAt(xBlock) - '0';
            }
            y += h;
            if (y > symbol_height) {
                symbol_height = y;
            }
            /* Add bars between rows */
            if (yBlock != (row_count - 1)) {
                Rectangle2D.Double rect = new Rectangle2D.Double(11, y - 1, (symbol_width - 24), 2);
                rectangles.add(rect);
            }
        }

        /* Add top and bottom binding bars */
        Rectangle2D.Double top = new Rectangle2D.Double(0, 0, symbol_width, 2);
        rectangles.add(top);
        Rectangle2D.Double bottom = new Rectangle2D.Double(0, y - 1, symbol_width, 2);
        rectangles.add(bottom);
        symbol_height += 1;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with this * work for additional information regarding copyright ownership. The ASF * licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package org.apache.sling.jcr.webconsole.internal; import java.io.PrintWriter; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; import javax.jcr.RepositoryException; import javax.jcr.Session; import javax.jcr.Value; import javax.jcr.nodetype.NodeDefinition; import javax.jcr.nodetype.NodeType; import javax.jcr.nodetype.NodeTypeIterator; import javax.jcr.nodetype.NodeTypeManager; import javax.jcr.nodetype.PropertyDefinition; import javax.jcr.version.OnParentVersionAction; import org.apache.felix.scr.annotations.Component; import org.apache.felix.scr.annotations.Properties; import org.apache.felix.scr.annotations.Property; import org.apache.felix.scr.annotations.Reference; import org.apache.felix.scr.annotations.ReferencePolicy; import org.apache.felix.scr.annotations.Service; import org.apache.felix.webconsole.ConfigurationPrinter; import org.apache.felix.webconsole.ModeAwareConfigurationPrinter; import org.apache.sling.jcr.api.SlingRepository; /** * A Felix WebConsole ConfigurationPrinter which outputs the current JCR * nodetypes. 
*/ @Component @Service(ConfigurationPrinter.class) @Properties({ @Property(name = "service.description", value = "JCR Nodetype Configuration Printer"), @Property(name = "service.vendor", value = "The Apache Software Foundation"), @Property(name = "felix.webconsole.configprinter.web.unescaped", boolValue = true) }) public class NodeTypeConfigurationPrinter implements ModeAwareConfigurationPrinter { @Reference(policy = ReferencePolicy.DYNAMIC) private volatile SlingRepository slingRepository; /** * Get the title of the configuration status page. * * @return the title */ public String getTitle() { return "JCR NodeTypes"; } /** * {@inheritDoc} */ public void printConfiguration(PrintWriter pw, String mode) { if (slingRepository != null) { Session session = null; try { session = slingRepository.loginAdministrative(null); NodeTypeManager ntm = session.getWorkspace().getNodeTypeManager(); NodeTypeIterator it = ntm.getAllNodeTypes(); List<NodeType> sortedTypes = sortTypes(it); for (NodeType nt : sortedTypes) { pw.printf("[%s]", nt.getName()); printSuperTypes(pw, nt); if (nt.hasOrderableChildNodes()) { pw.print(" orderable"); } if (nt.isMixin()) { pw.print(" mixin"); } linebreak(pw, mode); for (PropertyDefinition prop : nt.getPropertyDefinitions()) { if (prop.getDeclaringNodeType() == nt) { startBold(pw, mode); } pw.printf("- %s", prop.getName()); printDefaultValues(pw, prop); if (prop.getName().equals(nt.getPrimaryItemName())) { pw.print(" primary"); } if (prop.isMandatory()) { pw.print(" mandatory"); } if (prop.isAutoCreated()) { pw.print(" autocreated"); } if (prop.isProtected()) { pw.print(" protected"); } if (prop.isMultiple()) { pw.print(" multiple"); } pw.printf(" %s", OnParentVersionAction.nameFromValue(prop.getOnParentVersion())); printConstraints(pw, prop); if (prop.getDeclaringNodeType() == nt) { stopBold(pw, mode); } linebreak(pw, mode); } for (NodeDefinition child : nt.getChildNodeDefinitions()) { if (child.getDeclaringNodeType() == nt) { startBold(pw, mode); } 
pw.printf("+ %s", child.getName()); printRequiredChildTypes(pw, child); if (child.getDefaultPrimaryType() != null) { pw.printf(" = %s", child.getDefaultPrimaryType().getName()); } if (child.isMandatory()) { pw.print(" mandatory"); } if (child.isAutoCreated()) { pw.print(" autocreated"); } if (child.isProtected()) { pw.print(" protected"); } if (child.allowsSameNameSiblings()) { pw.print(" multiple"); } pw.printf(" %s", OnParentVersionAction.nameFromValue(child.getOnParentVersion())); if (child.getDeclaringNodeType() == nt) { stopBold(pw, mode); } linebreak(pw, mode); } linebreak(pw, mode); } } catch (RepositoryException e) { pw.println("Unable to output namespace mappings."); e.printStackTrace(pw); } finally { if (session != null) { session.logout(); } } } else { pw.println("SlingRepository is not available."); } } /** * Output a list of node types from the NamespaceRegistry. * * @param pw a PrintWriter */ public void printConfiguration(PrintWriter pw) { printConfiguration(pw, ConfigurationPrinter.MODE_TXT); } private List<NodeType> sortTypes(NodeTypeIterator it) { List<NodeType> types = new ArrayList<NodeType>(); while (it.hasNext()) { NodeType nt = it.nextNodeType(); types.add(nt); } Collections.sort(types, new Comparator<NodeType>(){ public int compare(NodeType o1, NodeType o2) { return o1.getName().compareTo(o2.getName()); } }); return types; } private void linebreak(PrintWriter pw, String mode) { if (ConfigurationPrinter.MODE_WEB.equals(mode)) { pw.println("<br/>"); } else { pw.println(); } } private void stopBold(PrintWriter pw, String mode) { if (ConfigurationPrinter.MODE_WEB.equals(mode)) { pw.print("</b>"); } } private void startBold(PrintWriter pw, String mode) { if (ConfigurationPrinter.MODE_WEB.equals(mode)) { pw.print("<b>"); } } private void printRequiredChildTypes(PrintWriter pw, NodeDefinition child) { if (child.getRequiredPrimaryTypes() != null && child.getRequiredPrimaryTypes().length > 0) { pw.print(" ("); boolean first = true; for (NodeType 
required : child.getRequiredPrimaryTypes()) { if (!first) { pw.print(", "); } pw.print(required.getName()); first = false; } pw.print(")"); } } private void printDefaultValues(PrintWriter pw, PropertyDefinition prop) throws RepositoryException { if (prop.getDefaultValues() != null && prop.getDefaultValues().length > 0) { pw.print(" = "); boolean first = true; for (Value v : prop.getDefaultValues()) { if (!first) { pw.print(", "); } pw.print(v.getString()); first = false; } } } private void printConstraints(PrintWriter pw, PropertyDefinition prop) throws RepositoryException { if (prop.getValueConstraints() != null && prop.getValueConstraints().length > 0) { pw.print(" < "); boolean first = true; for (String s : prop.getValueConstraints()) { if (!first) { pw.print(", "); } pw.print(s); first = false; } } } private void printSuperTypes(PrintWriter pw, NodeType nt) { pw.print(" > "); boolean first = true; for (NodeType st : nt.getSupertypes()) { if (!first) { pw.print(", "); } pw.print(st.getName()); first = false; } } }
/*
 * Copyright 2010-2015 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific language
 * governing permissions and limitations under the License.
 */
package org.powertac.accounting;

import static org.powertac.util.MessageDispatcher.dispatch;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import org.joda.time.Instant;
import org.powertac.common.*;
import org.powertac.common.TariffTransaction.Type;
import org.powertac.common.config.ConfigurableValue;
import org.powertac.common.interfaces.*;
import org.powertac.common.msg.BalancingControlEvent;
import org.powertac.common.msg.DistributionReport;
import org.powertac.common.repo.BrokerRepo;
import org.powertac.common.repo.RandomSeedRepo;
import org.powertac.common.repo.TariffRepo;
import org.powertac.common.repo.TimeslotRepo;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

/**
 * Implementation of {@link org.powertac.common.interfaces.Accounting}.
 * Collects broker transactions during a timeslot, then in {@link #activate}
 * processes them, applies bank interest at midnight, and sends per-broker
 * message batches plus a distribution report.
 * @author John Collins
 */
@Service
public class AccountingService
  extends TimeslotPhaseProcessor
  implements Accounting, InitializationService
{
  static private Logger log =
      LogManager.getLogger(AccountingService.class.getSimpleName());

  @Autowired
  private TimeService timeService;

  @Autowired
  private TariffRepo tariffRepo;

  @Autowired
  private TimeslotRepo timeslotRepo;

  @Autowired
  private BrokerRepo brokerRepo;

  @Autowired
  private BrokerProxy brokerProxyService;

  @Autowired
  private RandomSeedRepo randomSeedService;

  @Autowired
  private TransactionFactory txFactory;

  @Autowired
  private ServerConfiguration serverProps;

  // transactions accumulated during the current timeslot, drained by activate()
  private ArrayList<BrokerTransaction> pendingTransactions;

  private DistributionReport distributionReport;
  private double totalConsumption;
  private double totalProduction;

  // market transactions deferred to their delivery timeslot
  private HashMap<Timeslot, ArrayList<MarketTransaction>> pendingMarketTransactions;

  // read this from configuration
  @ConfigurableValue(valueType = "Double",
      description = "low end of bank interest rate range")
  private double minInterest = 0.04;

  @ConfigurableValue(valueType = "Double",
      description = "high end of bank interest rate range")
  private double maxInterest = 0.12;

  @ConfigurableValue(valueType = "Double",
      publish = true,
      description = "override random setting of bank interest rate")
  private Double bankInterest = null;

  public AccountingService ()
  {
    super();
    pendingTransactions = new ArrayList<BrokerTransaction>();
    pendingMarketTransactions =
        new HashMap<Timeslot, ArrayList<MarketTransaction>>();
  }

  /**
   * Clears accumulated state, applies configuration, and picks a bank
   * interest rate (random within [minInterest, maxInterest] unless
   * overridden in the configuration).
   */
  @Override
  public String initialize (Competition competition, List<String> completedInits)
  {
    pendingTransactions.clear();
    pendingMarketTransactions.clear();
    super.init();
    bankInterest = null;
    serverProps.configureMe(this);
    RandomSeed random =
        randomSeedService.getRandomSeed("AccountingService", 0L, "interest");
    if (bankInterest == null) {
      // interest will be non-null in case it was overridden in the config
      bankInterest = (minInterest +
                      (random.nextDouble() * (maxInterest - minInterest)));
      log.info("bank interest: " + bankInterest);
    }
    serverProps.publishConfiguration(this);
    return "AccountingService";
  }

  /**
   * Records a market transaction. The broker's market position is updated
   * immediately; the cash posting is deferred to the delivery timeslot.
   */
  @Override
  public synchronized MarketTransaction
  addMarketTransaction (Broker broker, Timeslot timeslot,
                        double mWh, double price)
  {
    MarketTransaction mtx =
        txFactory.makeMarketTransaction(broker, timeslot, mWh, price);
    // post pending tx so it gets sent to broker
    pendingTransactions.add(mtx);
    updateBrokerMarketPosition(mtx);
    // defer posting to delivery timeslot
    ArrayList<MarketTransaction> theList =
        pendingMarketTransactions.get(timeslot);
    if (null == theList) {
      theList = new ArrayList<MarketTransaction>();
      pendingMarketTransactions.put(timeslot, theList);
    }
    theList.add(mtx);
    return mtx;
  }

  /**
   * Records a tariff transaction of the given type for later processing.
   */
  @Override
  public synchronized TariffTransaction
  addTariffTransaction (TariffTransaction.Type txType,
                        Tariff tariff, CustomerInfo customer,
                        int customerCount, double kWh, double charge)
  {
    TariffTransaction ttx =
        txFactory.makeTariffTransaction(tariff.getBroker(), txType,
            tariffRepo.findSpecificationById(tariff.getSpecId()),
            customer, customerCount, kWh, charge);
    if (null == ttx.getTariffSpec())
      log.error("Null tariff spec in addTariffTx()");
    pendingTransactions.add(ttx);
    return ttx;
  }

  /**
   * Records a regulation transaction; type is PRODUCE for positive kWh,
   * CONSUME otherwise.
   */
  @Override
  public synchronized TariffTransaction
  addRegulationTransaction (Tariff tariff, CustomerInfo customer,
                            int customerCount, double kWh, double charge)
  {
    TariffTransaction.Type txType = TariffTransaction.Type.CONSUME;
    if (kWh > 0.0)
      txType = TariffTransaction.Type.PRODUCE;
    TariffTransaction ttx =
        txFactory.makeTariffTransaction(tariff.getBroker(), txType,
            tariffRepo.findSpecificationById(tariff.getSpecId()),
            customer, customerCount, kWh, charge, true);
    if (null == ttx.getTariffSpec())
      log.error("Null tariff spec in addTariffTx()");
    pendingTransactions.add(ttx);
    return ttx;
  }

  /** Records a distribution transaction for later processing. */
  @Override
  public synchronized DistributionTransaction
  addDistributionTransaction (Broker broker, int nSmall, int nLarge,
                              double transport, double distroCharge)
  {
    DistributionTransaction dtx =
        txFactory.makeDistributionTransaction(broker, nSmall, nLarge,
                                              transport, distroCharge);
    pendingTransactions.add(dtx);
    return dtx;
  }

  /** Records a balancing transaction for later processing. */
  @Override
  public synchronized BalancingTransaction
  addBalancingTransaction (Broker broker, double kWh, double charge)
  {
    BalancingTransaction btx =
        txFactory.makeBalancingTransaction(broker, kWh, charge);
    pendingTransactions.add(btx);
    return btx;
  }

  /** Records a capacity transaction for later processing. */
  @Override
  public synchronized CapacityTransaction
  addCapacityTransaction (Broker broker, int peakTimeslot,
                          double threshold, double kWh, double fee)
  {
    CapacityTransaction ctx =
        txFactory.makeCapacityTransaction(broker, peakTimeslot,
                                          threshold, kWh, fee);
    pendingTransactions.add(ctx);
    return ctx;
  }

  /** Posts a balancing-control payment directly to the broker's cash. */
  @Override
  public synchronized void postBalancingControl (BalancingControlEvent bce)
  {
    log.info("post balancing control for {}, payment={}, kWh={}, tariff={}, ts={}",
             bce.getBroker().getUsername(), bce.getPayment(), bce.getKwh(),
             bce.getTariffId(), bce.getTimeslotIndex());
    updateCash(bce.getBroker(), bce.getPayment());
  }

  /**
   * Returns the net load for the given broker in the current timeslot.
   * Note that this only works AFTER the customer models have run, and
   * BEFORE the day's transactions have been processed. The value will be
   * negative if the broker's customers are consuming more than they produce
   * in the current timeslot.
   */
  @Override
  public synchronized double getCurrentNetLoad (Broker broker)
  {
    double netLoad = 0.0;
    for (BrokerTransaction btx : pendingTransactions) {
      if (btx instanceof TariffTransaction) {
        TariffTransaction ttx = (TariffTransaction) btx;
        if (ttx.getBroker().getUsername().equals(broker.getUsername())) {
          if (ttx.getTxType() == TariffTransaction.Type.CONSUME
              || ttx.getTxType() == TariffTransaction.Type.PRODUCE) {
            netLoad += ttx.getKWh();
          }
        }
      }
    }
    log.info("net load for " + broker.getUsername() + ": " + netLoad);
    return netLoad;
  }

  /**
   * Returns a mapping of brokers to total supply and demand among subscribed
   * customers.
   */
  @Override
  public Map<Broker, Map<Type, Double>> getCurrentSupplyDemandByBroker ()
  {
    HashMap<Broker, Map<Type, Double>> result =
        new HashMap<Broker, Map<Type, Double>>();
    for (BrokerTransaction btx : pendingTransactions) {
      if (btx instanceof TariffTransaction) {
        TariffTransaction ttx = (TariffTransaction) btx;
        Broker broker = ttx.getBroker();
        Map<Type, Double> record = result.get(broker);
        if (null == record) {
          record = new HashMap<Type, Double>();
          result.put(broker, record);
          record.put(Type.CONSUME, 0.0);
          record.put(Type.PRODUCE, 0.0);
        }
        if (ttx.getTxType() == Type.CONSUME)
          record.put(Type.CONSUME, record.get(Type.CONSUME) + ttx.getKWh());
        else if (ttx.getTxType() == Type.PRODUCE)
          record.put(Type.PRODUCE, record.get(Type.PRODUCE) + ttx.getKWh());
      }
    }
    return result;
  }

  /**
   * Gets the net market position for the current timeslot. This only works on
   * processed transactions, but it can be used before activation in case there
   * can be no new market transactions for the current timeslot. This is the
   * normal case. The value will be positive if the broker is importing power
   * during the current timeslot.
   */
  @Override
  public synchronized double getCurrentMarketPosition (Broker broker)
  {
    Timeslot current = timeslotRepo.currentTimeslot();
    log.debug("current timeslot: " + current.getSerialNumber());
    MarketPosition position =
        broker.findMarketPositionByTimeslot(current.getSerialNumber());
    if (position == null) {
      log.debug("null position for ts " + current.getSerialNumber());
      return 0.0;
    }
    log.info("market position for " + broker.getUsername()
             + ": " + position.getOverallBalance());
    return position.getOverallBalance();
  }

  /**
   * Processes the pending transaction list, computes interest, sends
   * updates to brokers
   */
  @Override
  public void activate (Instant time, int phaseNumber)
  {
    log.info("Activate: " + pendingTransactions.size() + " messages");
    totalConsumption = 0.0;
    totalProduction = 0.0;
    HashMap<Broker, List<Object>> brokerMsg =
        new HashMap<Broker, List<Object>>();
    for (Broker broker : brokerRepo.list()) {
      brokerMsg.put(broker, new ArrayList<Object>());
    }

    // walk through the pending transactions and run the updates
    for (BrokerTransaction tx : getPendingTransactionList()) {
      Broker txBroker = tx.getBroker();
      if (txBroker == null) {
        // previously this was logged and then dereferenced anyway,
        // guaranteeing an NPE; skip the unroutable transaction instead
        log.error("tx " + tx.getClass().getName() + ":" + tx.getId()
                  + " has null broker");
        continue;
      }
      List<Object> messages = brokerMsg.get(txBroker);
      if (messages == null) {
        log.error("tx " + tx.getClass().getName() + ":" + tx.getId()
                  + " has unknown broker " + txBroker.getUsername());
        continue;
      }
      messages.add(tx);
      // process transactions by method lookup
      dispatch(this, "processTransaction", tx, messages);
    }
    // handle the backed-up mkt transactions for this timeslot
    handleMarketTransactionsForTimeslot(timeslotRepo.currentTimeslot());

    // for each broker, compute interest and send messages
    double rate = bankInterest / 365.0;
    for (Broker broker : brokerRepo.list()) {
      // run interest payments at midnight
      if (timeService.getHourOfDay() == 0) {
        double brokerRate = rate;
        double cash = broker.getCashBalance();
        if (cash >= 0.0) {
          // rate on positive balance is 1/2 of negative
          brokerRate /= 2.0;
        }
        double interest = cash * brokerRate;
        brokerMsg.get(broker).add(txFactory.makeBankTransaction(broker,
                                                                interest));
        broker.updateCash(interest);
      }
      // add the cash position to the list and send messages
      brokerMsg.get(broker).add(txFactory.makeCashPosition(broker,
                                                           broker.getCashBalance()));
      log.info("Broker {} balance = {}",
               broker.getUsername(), broker.getCashBalance());
      log.info("Sending " + brokerMsg.get(broker).size()
               + " messages to " + broker.getUsername());
      brokerProxyService.sendMessages(broker, brokerMsg.get(broker));
    }

    // build and send the distribution report
    distributionReport =
        new DistributionReport(timeslotRepo.currentSerialNumber(),
                               totalConsumption, totalProduction);
    brokerProxyService.broadcastMessage(distributionReport);
  }

  /**
   * Copies out the pending transaction list with concurrency protection,
   * clears the pending transaction list, and returns the copy.
   */
  private synchronized List<BrokerTransaction> getPendingTransactionList ()
  {
    ArrayList<BrokerTransaction> result =
        new ArrayList<BrokerTransaction>(pendingTransactions);
    pendingTransactions.clear();
    return result;
  }

  /**
   * Processes a tariff transaction, updating the broker's cash position
   * and the consumption, production data in the distribution report.
   */
  public void processTransaction (TariffTransaction tx,
                                  ArrayList<Object> messages)
  {
    //log.info("processing tariff tx " + tx.toString());
    updateCash(tx.getBroker(), tx.getCharge());
    // update the distribution report
    if (TariffTransaction.Type.CONSUME == tx.getTxType())
      totalConsumption -= tx.getKWh();
    else if (TariffTransaction.Type.PRODUCE == tx.getTxType())
      totalProduction += tx.getKWh();
  }

  /**
   * Processes a balancing transaction by updating the broker's cash position.
   */
  public void processTransaction (BalancingTransaction tx,
                                  ArrayList<Object> messages)
  {
    updateCash(tx.getBroker(), tx.getCharge());
  }

  /**
   * Processes a distribution transaction by updating the
   * broker's cash position.
   */
  public void processTransaction (DistributionTransaction tx,
                                  ArrayList<Object> messages)
  {
    updateCash(tx.getBroker(), tx.getCharge());
  }

  /**
   * Processes a capacity transaction by updating the broker's cash position.
   */
  public void processTransaction (CapacityTransaction tx,
                                  ArrayList<Object> messages)
  {
    updateCash(tx.getBroker(), tx.getCharge());
  }

  /**
   * Processes a market transaction by ensuring that the market position
   * will be sent to the broker.
   * Actual transaction posting is deferred to delivery time
   */
  public void processTransaction (MarketTransaction tx,
                                  ArrayList<Object> messages)
  {
    MarketPosition mkt =
        tx.getBroker().findMarketPositionByTimeslot(tx.getTimeslotIndex());
    if (!messages.contains(mkt))
      messages.add(mkt);
  }

  /**
   * Processes deferred market transactions for the current timeslot
   * by updating the broker's cash position.
   */
  public void handleMarketTransactionsForTimeslot (Timeslot ts)
  {
    ArrayList<MarketTransaction> pending = pendingMarketTransactions.get(ts);
    if (null == pending)
      return;
    for (MarketTransaction tx : pending) {
      Broker broker = tx.getBroker();
      updateCash(broker, tx.getPrice() * Math.abs(tx.getMWh()));
    }
  }

  // pre-process a market transaction: create or update the broker's
  // market position for the delivery timeslot
  private void updateBrokerMarketPosition (MarketTransaction tx)
  {
    Broker broker = tx.getBroker();
    MarketPosition mkt =
        broker.findMarketPositionByTimeslot(tx.getTimeslotIndex());
    if (mkt == null) {
      mkt = new MarketPosition(broker, tx.getTimeslot(), tx.getMWh());
      log.debug("New MarketPosition(" + broker.getUsername()
                + ", " + tx.getTimeslot().getSerialNumber()
                + "): " + mkt.getId());
      broker.addMarketPosition(mkt, tx.getTimeslotIndex());
    }
    else {
      mkt.updateBalance(tx.getMWh());
    }
  }

  private void updateCash (Broker broker, double amount)
  {
    broker.updateCash(amount);
  }

  /**
   * Complains if a bank transaction is among the transactions to be
   * handled. These should be generated locally and sent directly to
   * brokers.
   */
  public void processTransaction (BankTransaction tx,
                                  ArrayList<Object> messages)
  {
    log.error("tx {} calls processTransaction - should not happen",
              tx.toString());
  }

  /**
   * Returns the current list of pending tariff transactions. This will be
   * non-empty only after the customer model has run and before accounting
   * has run in the current timeslot.
   */
  @Override
  public synchronized List<TariffTransaction> getPendingTariffTransactions ()
  {
    List<TariffTransaction> result = new ArrayList<TariffTransaction>();
    for (BrokerTransaction tx : pendingTransactions) {
      if (tx instanceof TariffTransaction)
        result.add((TariffTransaction) tx);
    }
    return result;
  }

  // test support
  List<BrokerTransaction> getPendingTransactions ()
  {
    return pendingTransactions;
  }

  /**
   * Returns the low end of the bank interest range.
   */
  public double getMinInterest ()
  {
    return minInterest;
  }

  /**
   * Returns the high end of the bank interest range.
   */
  public double getMaxInterest ()
  {
    return maxInterest;
  }

  /**
   * Returns the actual bank interest once configuration is complete.
   */
  public Double getBankInterest ()
  {
    return bankInterest;
  }

  // test support
  void setBankInterest (Double interest)
  {
    bankInterest = interest;
  }
}
package com.justjames.beertour.user;

import java.util.Collection;
import java.util.Objects;

import javax.persistence.EntityNotFoundException;
import javax.persistence.PersistenceException;
import javax.transaction.Transactional;

import org.apache.commons.lang3.RandomStringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import com.justjames.beertour.InvalidPostDataException;
import com.justjames.beertour.ResourceException;
import com.justjames.beertour.beerlist.BeerListSvc;
import com.justjames.beertour.security.ActiveUser;
import com.justjames.beertour.security.LoginSvc;
import com.justjames.beertour.security.NotAuthorizedException;
import com.justjames.beertour.security.Role;
import com.justjames.beertour.security.UserUtils;

/**
 * Service facade for user management: lookup, signup, self-service and
 * admin edits, and login-token handling.
 */
@Component
public class UserSvc {

	private Log log = LogFactory.getLog(UserSvc.class);

	@Autowired
	UserRepository userRepo;

	@Autowired
	BeerListSvc listSvc;

	@Autowired
	LoginSvc loginSvc;

	/**
	 * List all users, sorted by name. Admin only.
	 *
	 * @return all users ordered by name
	 * @throws NotAuthorizedException if the caller is not an admin
	 */
	public Collection<User> getAll() {
		if (!UserUtils.isAdmin()) {
			throw new NotAuthorizedException("Only Admin can list all users:" + UserUtils.getActiveUser());
		}
		return userRepo.findAllByOrderByNameAsc();
	}

	/**
	 * Look up a user by id.
	 *
	 * @param id the user id
	 * @return the user, or null if not found
	 */
	public User getUser(Integer id) {
		User user = null;
		try {
			user = userRepo.findOne(id);
		} catch (EntityNotFoundException enf) {
			log.info("No user found for id=" + id);
		}
		return user;
	}

	/**
	 * Validate and persist a new user, defaulting the completed-list count
	 * to zero.
	 *
	 * @param u the user to add
	 * @return the persisted user
	 */
	public User addUser(User u) {
		if (u.getNumListsCompleted() == null) {
			u.setNumListsCompleted(0);
		}
		validateUser(u);
		return userRepo.saveAndFlush(u);
	}

	/**
	 * Update a user's own profile (name, password, nickname). Allowed for
	 * the user themself or an admin.
	 *
	 * @param user the incoming profile data
	 * @return the updated user
	 * @throws NotAuthorizedException if the caller is neither admin nor the user
	 * @throws InvalidPostDataException if the user does not exist
	 */
	@Transactional
	public User selfUpdate(User user) {
		log.info("Updating user: " + user);

		ActiveUser loggedInUser = UserUtils.getActiveUser();
		// Objects.equals instead of '!=' : these ids are boxed Integers, and
		// reference comparison silently fails for values outside the
		// Integer cache (-128..127)
		if (!UserUtils.isAdmin() && !Objects.equals(loggedInUser.getUserId(), user.getId())) {
			throw new NotAuthorizedException("Only admin or user can update account.");
		}

		User currentUser = null;
		currentUser = userRepo.findByEmail(user.getEmail());
		if (currentUser == null) {
			throw new InvalidPostDataException("User does not exist:" + user);
		}

		// These are the only attributes that the user can actually update
		currentUser.setName(user.getName());
		currentUser.setPassword(user.getPassword());
		currentUser.setNickName(user.getNickName());

		User updatedUser = userRepo.saveAndFlush(currentUser);
		return updatedUser;
	}

	/**
	 * Admin-side edit of a user; only the role may be changed.
	 *
	 * @param userId the id of the user to edit
	 * @param editedUser the edit payload
	 * @return the updated user
	 * @throws NotAuthorizedException if the caller is neither admin nor the user
	 * @throws InvalidPostDataException if the user does not exist
	 * @throws ResourceException on persistence failure
	 */
	@Transactional
	public User adminEdit(Integer userId, UserEditTO editedUser) {
		log.info("adminEdit: " + editedUser);

		ActiveUser loggedInUser = UserUtils.getActiveUser();
		// Objects.equals instead of '!=' — boxed Integer identity comparison bug
		if (!UserUtils.isAdmin() && !Objects.equals(loggedInUser.getUserId(), userId)) {
			throw new NotAuthorizedException("Only admin or user can update account.");
		}

		User currentUser = null;
		User updatedUser = null;
		try {
			currentUser = userRepo.getOne(userId);
			if (currentUser == null) {
				throw new InvalidPostDataException("User does not exist:" + editedUser);
			}

			if (currentUser.getRole() != editedUser.getRole()) {
				log.info(String.format("%s assigned to role '%s' by %s",
						currentUser.getName(), editedUser.getRole(), loggedInUser.getUserTO().getName()));
			}

			// These are the only attributes that the user can actually update
			currentUser.setRole(editedUser.getRole());
			updatedUser = userRepo.saveAndFlush(currentUser);
		} catch (EntityNotFoundException enf) {
			throw new InvalidPostDataException("User not found");
		} catch (PersistenceException pe) {
			throw new ResourceException("Error, can't update user");
		}

		return updatedUser;
	}

	/**
	 * Increment the finished-list counter for the given user.
	 *
	 * @param finisher the user who completed a list
	 */
	public void addToListFinished(User finisher) {
		User u = userRepo.getOne(finisher.getId());
		Integer listsCompleted = u.getNumListsCompleted() + 1;
		u.setNumListsCompleted(listsCompleted);
		userRepo.saveAndFlush(u);
	}

	/**
	 * Sign up a new customer and log them in.
	 *
	 * @param newUser the signup data
	 * @return the persisted user
	 * @throws UserExistsException if the email is already registered
	 */
	@Transactional
	public User loginSignup(User newUser) {
		newUser.setRole(Role.CUSTOMER);
		log.info("Adding new user " + newUser);

		if (emailExists(newUser.getEmail())) {
			String msg = String.format("'%s' already exists", newUser.getEmail());
			log.warn(msg);
			throw new UserExistsException(msg);
		}

		User savedUser = addUser(newUser);

		// Login their session
		loginSvc.login(savedUser.getEmail(), savedUser.getPassword());

		return savedUser;
	}

	/**
	 * Look up a user by email (trimmed).
	 *
	 * @param email the email address
	 * @return the user, or null if not found
	 */
	public User findByEmail(String email) {
		User user = null;
		user = userRepo.findByEmail(email.trim());
		return user;
	}

	/**
	 * @param email the email address to check
	 * @return true if a user with this email exists
	 */
	public boolean emailExists(String email) {
		if (StringUtils.isEmpty(email)) {
			return false;
		}
		User user = findByEmail(email.trim());
		return user != null;
	}

	// Rejects null users and blank email/name/password.
	private boolean validateUser(User user) {
		if (user == null) {
			throw new InvalidPostDataException("User can't be null.");
		}
		if (StringUtils.isBlank(user.getEmail())) {
			throw new InvalidPostDataException("Email is required!");
		}
		if (StringUtils.isBlank(user.getName())) {
			throw new InvalidPostDataException("Name is required!");
		}
		if (StringUtils.isBlank(user.getPassword())) {
			throw new InvalidPostDataException("Password is required!");
		}
		return true;
	}

	/**
	 * Retrieves the user owning the given login token.
	 *
	 * @param token the login token
	 * @return the matching user, or null if the token is unknown
	 */
	public User findByToken(String token) {
		User u = userRepo.findByToken(token);
		if (u == null) {
			log.warn("Token not found : [" + token + "]");
		}
		return u;
	}

	/**
	 * Assign a fresh random 32-character alphanumeric login token to the user.
	 *
	 * @param u the user to update
	 */
	@Transactional
	public void setNewToken(User u) {
		String token = RandomStringUtils.randomAlphanumeric(32);
		u.setToken(token);
		log.debug("Token assigned " + u);
		userRepo.save(u);
	}
}
/* * Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ /* * This file is available under and governed by the GNU General Public * License version 2 only, as published by the Free Software Foundation. * However, the following notice accompanied the original version of this * file: * * Copyright (c) 2008-2012, Stephen Colebourne & Michael Nascimento Santos * * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. 
* * * Neither the name of JSR-310 nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package test.java.time; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import java.lang.reflect.Field; import java.lang.reflect.Modifier; import java.time.DateTimeException; import java.time.Instant; import java.time.LocalDateTime; import java.time.ZoneId; import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.time.format.TextStyle; import java.time.zone.ZoneOffsetTransition; import java.time.zone.ZoneRules; import java.time.zone.ZoneRulesException; import java.util.List; import java.util.Locale; import java.util.SimpleTimeZone; import java.util.TimeZone; import org.junit.Ignore; import org.junit.Test; /** * Test ZoneId. 
*/ public class TestZoneId extends AbstractTest { private static final int OVERLAP = 2; private static final int GAP = 0; //----------------------------------------------------------------------- // Basics //----------------------------------------------------------------------- @Ignore("J2ObjC: requires reflection metadata.") @Test public void test_immutable() { // cannot use standard test as ZoneId is abstract Class<ZoneId> cls = ZoneId.class; assertTrue(Modifier.isPublic(cls.getModifiers())); Field[] fields = cls.getDeclaredFields(); for (Field field : fields) { if (Modifier.isStatic(field.getModifiers()) == false) { assertTrue(Modifier.isPrivate(field.getModifiers())); assertTrue(Modifier.isFinal(field.getModifiers()) || (Modifier.isVolatile(field.getModifiers()) && Modifier.isTransient(field.getModifiers()))); } } } //----------------------------------------------------------------------- // UTC //----------------------------------------------------------------------- @Test public void test_constant_UTC() { ZoneId test = ZoneOffset.UTC; assertEquals(test.getId(), "Z"); assertEquals(test.getDisplayName(TextStyle.FULL, Locale.UK), "Z"); assertEquals(test.getRules().isFixedOffset(), true); assertEquals(test.getRules().getOffset(Instant.ofEpochSecond(0L)), ZoneOffset.UTC); checkOffset(test.getRules(), createLDT(2008, 6, 30), ZoneOffset.UTC, 1); } //----------------------------------------------------------------------- // system default //----------------------------------------------------------------------- @Test public void test_systemDefault() { ZoneId test = ZoneId.systemDefault(); assertEquals(test.getId(), TimeZone.getDefault().getID()); } @Test(expected = DateTimeException.class) public void test_systemDefault_unableToConvert_badFormat() { TimeZone current = TimeZone.getDefault(); try { TimeZone.setDefault(new SimpleTimeZone(127, "Something Weird")); ZoneId.systemDefault(); } finally { TimeZone.setDefault(current); } } @Test(expected = 
ZoneRulesException.class) public void test_systemDefault_unableToConvert_unknownId() { TimeZone current = TimeZone.getDefault(); try { TimeZone.setDefault(new SimpleTimeZone(127, "SomethingWeird")); ZoneId.systemDefault(); } finally { TimeZone.setDefault(current); } } //----------------------------------------------------------------------- // Europe/London //----------------------------------------------------------------------- @Test public void test_London() { ZoneId test = ZoneId.of("Europe/London"); assertEquals(test.getId(), "Europe/London"); assertEquals(test.getRules().isFixedOffset(), false); } @Test public void test_London_getOffset() { ZoneId test = ZoneId.of("Europe/London"); assertEquals(test.getRules().getOffset(createInstant(2008, 1, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(0)); assertEquals(test.getRules().getOffset(createInstant(2008, 2, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(0)); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(0)); assertEquals(test.getRules().getOffset(createInstant(2008, 4, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(1)); assertEquals(test.getRules().getOffset(createInstant(2008, 5, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(1)); assertEquals(test.getRules().getOffset(createInstant(2008, 6, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(1)); assertEquals(test.getRules().getOffset(createInstant(2008, 7, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(1)); assertEquals(test.getRules().getOffset(createInstant(2008, 8, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(1)); assertEquals(test.getRules().getOffset(createInstant(2008, 9, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(1)); assertEquals(test.getRules().getOffset(createInstant(2008, 10, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(1)); assertEquals(test.getRules().getOffset(createInstant(2008, 11, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(0)); assertEquals(test.getRules().getOffset(createInstant(2008, 12, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(0)); } @Test public 
void test_London_getOffset_toDST() { ZoneId test = ZoneId.of("Europe/London"); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 24, ZoneOffset.UTC)), ZoneOffset.ofHours(0)); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 25, ZoneOffset.UTC)), ZoneOffset.ofHours(0)); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 26, ZoneOffset.UTC)), ZoneOffset.ofHours(0)); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 27, ZoneOffset.UTC)), ZoneOffset.ofHours(0)); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 28, ZoneOffset.UTC)), ZoneOffset.ofHours(0)); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 29, ZoneOffset.UTC)), ZoneOffset.ofHours(0)); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 30, ZoneOffset.UTC)), ZoneOffset.ofHours(0)); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 31, ZoneOffset.UTC)), ZoneOffset.ofHours(1)); // cutover at 01:00Z assertEquals(test.getRules().getOffset(createInstant(2008, 3, 30, 0, 59, 59, 999999999, ZoneOffset.UTC)), ZoneOffset.ofHours(0)); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 30, 1, 0, 0, 0, ZoneOffset.UTC)), ZoneOffset.ofHours(1)); } @Test public void test_London_getOffset_fromDST() { ZoneId test = ZoneId.of("Europe/London"); assertEquals(test.getRules().getOffset(createInstant(2008, 10, 24, ZoneOffset.UTC)), ZoneOffset.ofHours(1)); assertEquals(test.getRules().getOffset(createInstant(2008, 10, 25, ZoneOffset.UTC)), ZoneOffset.ofHours(1)); assertEquals(test.getRules().getOffset(createInstant(2008, 10, 26, ZoneOffset.UTC)), ZoneOffset.ofHours(1)); assertEquals(test.getRules().getOffset(createInstant(2008, 10, 27, ZoneOffset.UTC)), ZoneOffset.ofHours(0)); assertEquals(test.getRules().getOffset(createInstant(2008, 10, 28, ZoneOffset.UTC)), ZoneOffset.ofHours(0)); assertEquals(test.getRules().getOffset(createInstant(2008, 10, 29, ZoneOffset.UTC)), ZoneOffset.ofHours(0)); 
assertEquals(test.getRules().getOffset(createInstant(2008, 10, 30, ZoneOffset.UTC)), ZoneOffset.ofHours(0)); assertEquals(test.getRules().getOffset(createInstant(2008, 10, 31, ZoneOffset.UTC)), ZoneOffset.ofHours(0)); // cutover at 01:00Z assertEquals(test.getRules().getOffset(createInstant(2008, 10, 26, 0, 59, 59, 999999999, ZoneOffset.UTC)), ZoneOffset.ofHours(1)); assertEquals(test.getRules().getOffset(createInstant(2008, 10, 26, 1, 0, 0, 0, ZoneOffset.UTC)), ZoneOffset.ofHours(0)); } @Test public void test_London_getOffsetInfo() { ZoneId test = ZoneId.of("Europe/London"); checkOffset(test.getRules(), createLDT(2008, 1, 1), ZoneOffset.ofHours(0), 1); checkOffset(test.getRules(), createLDT(2008, 2, 1), ZoneOffset.ofHours(0), 1); checkOffset(test.getRules(), createLDT(2008, 3, 1), ZoneOffset.ofHours(0), 1); checkOffset(test.getRules(), createLDT(2008, 4, 1), ZoneOffset.ofHours(1), 1); checkOffset(test.getRules(), createLDT(2008, 5, 1), ZoneOffset.ofHours(1), 1); checkOffset(test.getRules(), createLDT(2008, 6, 1), ZoneOffset.ofHours(1), 1); checkOffset(test.getRules(), createLDT(2008, 7, 1), ZoneOffset.ofHours(1), 1); checkOffset(test.getRules(), createLDT(2008, 8, 1), ZoneOffset.ofHours(1), 1); checkOffset(test.getRules(), createLDT(2008, 9, 1), ZoneOffset.ofHours(1), 1); checkOffset(test.getRules(), createLDT(2008, 10, 1), ZoneOffset.ofHours(1), 1); checkOffset(test.getRules(), createLDT(2008, 11, 1), ZoneOffset.ofHours(0), 1); checkOffset(test.getRules(), createLDT(2008, 12, 1), ZoneOffset.ofHours(0), 1); } @Test public void test_London_getOffsetInfo_toDST() { ZoneId test = ZoneId.of("Europe/London"); checkOffset(test.getRules(), createLDT(2008, 3, 24), ZoneOffset.ofHours(0), 1); checkOffset(test.getRules(), createLDT(2008, 3, 25), ZoneOffset.ofHours(0), 1); checkOffset(test.getRules(), createLDT(2008, 3, 26), ZoneOffset.ofHours(0), 1); checkOffset(test.getRules(), createLDT(2008, 3, 27), ZoneOffset.ofHours(0), 1); checkOffset(test.getRules(), createLDT(2008, 3, 
28), ZoneOffset.ofHours(0), 1); checkOffset(test.getRules(), createLDT(2008, 3, 29), ZoneOffset.ofHours(0), 1); checkOffset(test.getRules(), createLDT(2008, 3, 30), ZoneOffset.ofHours(0), 1); checkOffset(test.getRules(), createLDT(2008, 3, 31), ZoneOffset.ofHours(1), 1); // cutover at 01:00Z checkOffset(test.getRules(), LocalDateTime.of(2008, 3, 30, 0, 59, 59, 999999999), ZoneOffset.ofHours(0), 1); checkOffset(test.getRules(), LocalDateTime.of(2008, 3, 30, 1, 30, 0, 0), ZoneOffset.ofHours(0), GAP); checkOffset(test.getRules(), LocalDateTime.of(2008, 3, 30, 2, 0, 0, 0), ZoneOffset.ofHours(1), 1); } @Test public void test_London_getOffsetInfo_fromDST() { ZoneId test = ZoneId.of("Europe/London"); checkOffset(test.getRules(), createLDT(2008, 10, 24), ZoneOffset.ofHours(1), 1); checkOffset(test.getRules(), createLDT(2008, 10, 25), ZoneOffset.ofHours(1), 1); checkOffset(test.getRules(), createLDT(2008, 10, 26), ZoneOffset.ofHours(1), 1); checkOffset(test.getRules(), createLDT(2008, 10, 27), ZoneOffset.ofHours(0), 1); checkOffset(test.getRules(), createLDT(2008, 10, 28), ZoneOffset.ofHours(0), 1); checkOffset(test.getRules(), createLDT(2008, 10, 29), ZoneOffset.ofHours(0), 1); checkOffset(test.getRules(), createLDT(2008, 10, 30), ZoneOffset.ofHours(0), 1); checkOffset(test.getRules(), createLDT(2008, 10, 31), ZoneOffset.ofHours(0), 1); // cutover at 01:00Z checkOffset(test.getRules(), LocalDateTime.of(2008, 10, 26, 0, 59, 59, 999999999), ZoneOffset.ofHours(1), 1); checkOffset(test.getRules(), LocalDateTime.of(2008, 10, 26, 1, 30, 0, 0), ZoneOffset.ofHours(1), OVERLAP); checkOffset(test.getRules(), LocalDateTime.of(2008, 10, 26, 2, 0, 0, 0), ZoneOffset.ofHours(0), 1); } @Test public void test_London_getOffsetInfo_gap() { ZoneId test = ZoneId.of("Europe/London"); final LocalDateTime dateTime = LocalDateTime.of(2008, 3, 30, 1, 0, 0, 0); ZoneOffsetTransition trans = checkOffset(test.getRules(), dateTime, ZoneOffset.ofHours(0), GAP); assertEquals(trans.isGap(), true); 
assertEquals(trans.isOverlap(), false); assertEquals(trans.getOffsetBefore(), ZoneOffset.ofHours(0)); assertEquals(trans.getOffsetAfter(), ZoneOffset.ofHours(1)); assertEquals(trans.getInstant(), dateTime.toInstant(ZoneOffset.UTC)); assertEquals(trans.getDateTimeBefore(), LocalDateTime.of(2008, 3, 30, 1, 0)); assertEquals(trans.getDateTimeAfter(), LocalDateTime.of(2008, 3, 30, 2, 0)); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(-1)), false); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(0)), false); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(1)), false); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(2)), false); assertEquals(trans.toString(), "Transition[Gap at 2008-03-30T01:00Z to +01:00]"); assertFalse(trans.equals(null)); assertFalse(trans.equals(ZoneOffset.ofHours(0))); assertTrue(trans.equals(trans)); final ZoneOffsetTransition otherTrans = test.getRules().getTransition(dateTime); assertTrue(trans.equals(otherTrans)); assertEquals(trans.hashCode(), otherTrans.hashCode()); } @Test public void test_London_getOffsetInfo_overlap() { ZoneId test = ZoneId.of("Europe/London"); final LocalDateTime dateTime = LocalDateTime.of(2008, 10, 26, 1, 0, 0, 0); ZoneOffsetTransition trans = checkOffset(test.getRules(), dateTime, ZoneOffset.ofHours(1), OVERLAP); assertEquals(trans.isGap(), false); assertEquals(trans.isOverlap(), true); assertEquals(trans.getOffsetBefore(), ZoneOffset.ofHours(1)); assertEquals(trans.getOffsetAfter(), ZoneOffset.ofHours(0)); assertEquals(trans.getInstant(), dateTime.toInstant(ZoneOffset.UTC)); assertEquals(trans.getDateTimeBefore(), LocalDateTime.of(2008, 10, 26, 2, 0)); assertEquals(trans.getDateTimeAfter(), LocalDateTime.of(2008, 10, 26, 1, 0)); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(-1)), false); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(0)), true); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(1)), true); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(2)), false); 
assertEquals(trans.toString(), "Transition[Overlap at 2008-10-26T02:00+01:00 to Z]"); assertFalse(trans.equals(null)); assertFalse(trans.equals(ZoneOffset.ofHours(1))); assertTrue(trans.equals(trans)); final ZoneOffsetTransition otherTrans = test.getRules().getTransition(dateTime); assertTrue(trans.equals(otherTrans)); assertEquals(trans.hashCode(), otherTrans.hashCode()); } //----------------------------------------------------------------------- // Europe/Paris //----------------------------------------------------------------------- @Test public void test_Paris() { ZoneId test = ZoneId.of("Europe/Paris"); assertEquals(test.getId(), "Europe/Paris"); assertEquals(test.getRules().isFixedOffset(), false); } @Test public void test_Paris_getOffset() { ZoneId test = ZoneId.of("Europe/Paris"); assertEquals(test.getRules().getOffset(createInstant(2008, 1, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(1)); assertEquals(test.getRules().getOffset(createInstant(2008, 2, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(1)); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(1)); assertEquals(test.getRules().getOffset(createInstant(2008, 4, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(2)); assertEquals(test.getRules().getOffset(createInstant(2008, 5, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(2)); assertEquals(test.getRules().getOffset(createInstant(2008, 6, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(2)); assertEquals(test.getRules().getOffset(createInstant(2008, 7, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(2)); assertEquals(test.getRules().getOffset(createInstant(2008, 8, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(2)); assertEquals(test.getRules().getOffset(createInstant(2008, 9, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(2)); assertEquals(test.getRules().getOffset(createInstant(2008, 10, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(2)); assertEquals(test.getRules().getOffset(createInstant(2008, 11, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(1)); 
assertEquals(test.getRules().getOffset(createInstant(2008, 12, 1, ZoneOffset.UTC)), ZoneOffset.ofHours(1)); } @Test public void test_Paris_getOffset_toDST() { ZoneId test = ZoneId.of("Europe/Paris"); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 24, ZoneOffset.UTC)), ZoneOffset.ofHours(1)); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 25, ZoneOffset.UTC)), ZoneOffset.ofHours(1)); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 26, ZoneOffset.UTC)), ZoneOffset.ofHours(1)); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 27, ZoneOffset.UTC)), ZoneOffset.ofHours(1)); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 28, ZoneOffset.UTC)), ZoneOffset.ofHours(1)); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 29, ZoneOffset.UTC)), ZoneOffset.ofHours(1)); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 30, ZoneOffset.UTC)), ZoneOffset.ofHours(1)); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 31, ZoneOffset.UTC)), ZoneOffset.ofHours(2)); // cutover at 01:00Z assertEquals(test.getRules().getOffset(createInstant(2008, 3, 30, 0, 59, 59, 999999999, ZoneOffset.UTC)), ZoneOffset.ofHours(1)); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 30, 1, 0, 0, 0, ZoneOffset.UTC)), ZoneOffset.ofHours(2)); } @Test public void test_Paris_getOffset_fromDST() { ZoneId test = ZoneId.of("Europe/Paris"); assertEquals(test.getRules().getOffset(createInstant(2008, 10, 24, ZoneOffset.UTC)), ZoneOffset.ofHours(2)); assertEquals(test.getRules().getOffset(createInstant(2008, 10, 25, ZoneOffset.UTC)), ZoneOffset.ofHours(2)); assertEquals(test.getRules().getOffset(createInstant(2008, 10, 26, ZoneOffset.UTC)), ZoneOffset.ofHours(2)); assertEquals(test.getRules().getOffset(createInstant(2008, 10, 27, ZoneOffset.UTC)), ZoneOffset.ofHours(1)); assertEquals(test.getRules().getOffset(createInstant(2008, 10, 28, ZoneOffset.UTC)), ZoneOffset.ofHours(1)); 
assertEquals(test.getRules().getOffset(createInstant(2008, 10, 29, ZoneOffset.UTC)), ZoneOffset.ofHours(1)); assertEquals(test.getRules().getOffset(createInstant(2008, 10, 30, ZoneOffset.UTC)), ZoneOffset.ofHours(1)); assertEquals(test.getRules().getOffset(createInstant(2008, 10, 31, ZoneOffset.UTC)), ZoneOffset.ofHours(1)); // cutover at 01:00Z assertEquals(test.getRules().getOffset(createInstant(2008, 10, 26, 0, 59, 59, 999999999, ZoneOffset.UTC)), ZoneOffset.ofHours(2)); assertEquals(test.getRules().getOffset(createInstant(2008, 10, 26, 1, 0, 0, 0, ZoneOffset.UTC)), ZoneOffset.ofHours(1)); } @Test public void test_Paris_getOffsetInfo() { ZoneId test = ZoneId.of("Europe/Paris"); checkOffset(test.getRules(), createLDT(2008, 1, 1), ZoneOffset.ofHours(1), 1); checkOffset(test.getRules(), createLDT(2008, 2, 1), ZoneOffset.ofHours(1), 1); checkOffset(test.getRules(), createLDT(2008, 3, 1), ZoneOffset.ofHours(1), 1); checkOffset(test.getRules(), createLDT(2008, 4, 1), ZoneOffset.ofHours(2), 1); checkOffset(test.getRules(), createLDT(2008, 5, 1), ZoneOffset.ofHours(2), 1); checkOffset(test.getRules(), createLDT(2008, 6, 1), ZoneOffset.ofHours(2), 1); checkOffset(test.getRules(), createLDT(2008, 7, 1), ZoneOffset.ofHours(2), 1); checkOffset(test.getRules(), createLDT(2008, 8, 1), ZoneOffset.ofHours(2), 1); checkOffset(test.getRules(), createLDT(2008, 9, 1), ZoneOffset.ofHours(2), 1); checkOffset(test.getRules(), createLDT(2008, 10, 1), ZoneOffset.ofHours(2), 1); checkOffset(test.getRules(), createLDT(2008, 11, 1), ZoneOffset.ofHours(1), 1); checkOffset(test.getRules(), createLDT(2008, 12, 1), ZoneOffset.ofHours(1), 1); } @Test public void test_Paris_getOffsetInfo_toDST() { ZoneId test = ZoneId.of("Europe/Paris"); checkOffset(test.getRules(), createLDT(2008, 3, 24), ZoneOffset.ofHours(1), 1); checkOffset(test.getRules(), createLDT(2008, 3, 25), ZoneOffset.ofHours(1), 1); checkOffset(test.getRules(), createLDT(2008, 3, 26), ZoneOffset.ofHours(1), 1); 
checkOffset(test.getRules(), createLDT(2008, 3, 27), ZoneOffset.ofHours(1), 1); checkOffset(test.getRules(), createLDT(2008, 3, 28), ZoneOffset.ofHours(1), 1); checkOffset(test.getRules(), createLDT(2008, 3, 29), ZoneOffset.ofHours(1), 1); checkOffset(test.getRules(), createLDT(2008, 3, 30), ZoneOffset.ofHours(1), 1); checkOffset(test.getRules(), createLDT(2008, 3, 31), ZoneOffset.ofHours(2), 1); // cutover at 01:00Z which is 02:00+01:00(local Paris time) checkOffset(test.getRules(), LocalDateTime.of(2008, 3, 30, 1, 59, 59, 999999999), ZoneOffset.ofHours(1), 1); checkOffset(test.getRules(), LocalDateTime.of(2008, 3, 30, 2, 30, 0, 0), ZoneOffset.ofHours(1), GAP); checkOffset(test.getRules(), LocalDateTime.of(2008, 3, 30, 3, 0, 0, 0), ZoneOffset.ofHours(2), 1); } @Test public void test_Paris_getOffsetInfo_fromDST() { ZoneId test = ZoneId.of("Europe/Paris"); checkOffset(test.getRules(), createLDT(2008, 10, 24), ZoneOffset.ofHours(2), 1); checkOffset(test.getRules(), createLDT(2008, 10, 25), ZoneOffset.ofHours(2), 1); checkOffset(test.getRules(), createLDT(2008, 10, 26), ZoneOffset.ofHours(2), 1); checkOffset(test.getRules(), createLDT(2008, 10, 27), ZoneOffset.ofHours(1), 1); checkOffset(test.getRules(), createLDT(2008, 10, 28), ZoneOffset.ofHours(1), 1); checkOffset(test.getRules(), createLDT(2008, 10, 29), ZoneOffset.ofHours(1), 1); checkOffset(test.getRules(), createLDT(2008, 10, 30), ZoneOffset.ofHours(1), 1); checkOffset(test.getRules(), createLDT(2008, 10, 31), ZoneOffset.ofHours(1), 1); // cutover at 01:00Z which is 02:00+01:00(local Paris time) checkOffset(test.getRules(), LocalDateTime.of(2008, 10, 26, 1, 59, 59, 999999999), ZoneOffset.ofHours(2), 1); checkOffset(test.getRules(), LocalDateTime.of(2008, 10, 26, 2, 30, 0, 0), ZoneOffset.ofHours(2), OVERLAP); checkOffset(test.getRules(), LocalDateTime.of(2008, 10, 26, 3, 0, 0, 0), ZoneOffset.ofHours(1), 1); } @Test public void test_Paris_getOffsetInfo_gap() { ZoneId test = ZoneId.of("Europe/Paris"); final 
LocalDateTime dateTime = LocalDateTime.of(2008, 3, 30, 2, 0, 0, 0); ZoneOffsetTransition trans = checkOffset(test.getRules(), dateTime, ZoneOffset.ofHours(1), GAP); assertEquals(trans.isGap(), true); assertEquals(trans.isOverlap(), false); assertEquals(trans.getOffsetBefore(), ZoneOffset.ofHours(1)); assertEquals(trans.getOffsetAfter(), ZoneOffset.ofHours(2)); assertEquals(trans.getInstant(), createInstant(2008, 3, 30, 1, 0, 0, 0, ZoneOffset.UTC)); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(0)), false); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(1)), false); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(2)), false); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(3)), false); assertEquals(trans.toString(), "Transition[Gap at 2008-03-30T02:00+01:00 to +02:00]"); assertFalse(trans.equals(null)); assertFalse(trans.equals(ZoneOffset.ofHours(1))); assertTrue(trans.equals(trans)); final ZoneOffsetTransition otherDis = test.getRules().getTransition(dateTime); assertTrue(trans.equals(otherDis)); assertEquals(trans.hashCode(), otherDis.hashCode()); } @Test public void test_Paris_getOffsetInfo_overlap() { ZoneId test = ZoneId.of("Europe/Paris"); final LocalDateTime dateTime = LocalDateTime.of(2008, 10, 26, 2, 0, 0, 0); ZoneOffsetTransition trans = checkOffset(test.getRules(), dateTime, ZoneOffset.ofHours(2), OVERLAP); assertEquals(trans.isGap(), false); assertEquals(trans.isOverlap(), true); assertEquals(trans.getOffsetBefore(), ZoneOffset.ofHours(2)); assertEquals(trans.getOffsetAfter(), ZoneOffset.ofHours(1)); assertEquals(trans.getInstant(), createInstant(2008, 10, 26, 1, 0, 0, 0, ZoneOffset.UTC)); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(0)), false); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(1)), true); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(2)), true); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(3)), false); assertEquals(trans.toString(), "Transition[Overlap at 2008-10-26T03:00+02:00 to +01:00]"); 
assertFalse(trans.equals(null)); assertFalse(trans.equals(ZoneOffset.ofHours(2))); assertTrue(trans.equals(trans)); final ZoneOffsetTransition otherDis = test.getRules().getTransition(dateTime); assertTrue(trans.equals(otherDis)); assertEquals(trans.hashCode(), otherDis.hashCode()); } //----------------------------------------------------------------------- // America/New_York //----------------------------------------------------------------------- @Test public void test_NewYork() { ZoneId test = ZoneId.of("America/New_York"); assertEquals(test.getId(), "America/New_York"); assertEquals(test.getRules().isFixedOffset(), false); } @Test public void test_NewYork_getOffset() { ZoneId test = ZoneId.of("America/New_York"); ZoneOffset offset = ZoneOffset.ofHours(-5); assertEquals(test.getRules().getOffset(createInstant(2008, 1, 1, offset)), ZoneOffset.ofHours(-5)); assertEquals(test.getRules().getOffset(createInstant(2008, 2, 1, offset)), ZoneOffset.ofHours(-5)); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 1, offset)), ZoneOffset.ofHours(-5)); assertEquals(test.getRules().getOffset(createInstant(2008, 4, 1, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 5, 1, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 6, 1, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 7, 1, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 8, 1, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 9, 1, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 10, 1, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 11, 1, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 12, 1, offset)), ZoneOffset.ofHours(-5)); 
assertEquals(test.getRules().getOffset(createInstant(2008, 1, 28, offset)), ZoneOffset.ofHours(-5)); assertEquals(test.getRules().getOffset(createInstant(2008, 2, 28, offset)), ZoneOffset.ofHours(-5)); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 28, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 4, 28, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 5, 28, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 6, 28, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 7, 28, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 8, 28, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 9, 28, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 10, 28, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 11, 28, offset)), ZoneOffset.ofHours(-5)); assertEquals(test.getRules().getOffset(createInstant(2008, 12, 28, offset)), ZoneOffset.ofHours(-5)); } @Test public void test_NewYork_getOffset_toDST() { ZoneId test = ZoneId.of("America/New_York"); ZoneOffset offset = ZoneOffset.ofHours(-5); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 8, offset)), ZoneOffset.ofHours(-5)); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 9, offset)), ZoneOffset.ofHours(-5)); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 10, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 11, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 12, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 13, offset)), ZoneOffset.ofHours(-4)); 
assertEquals(test.getRules().getOffset(createInstant(2008, 3, 14, offset)), ZoneOffset.ofHours(-4)); // cutover at 02:00 local assertEquals(test.getRules().getOffset(createInstant(2008, 3, 9, 1, 59, 59, 999999999, offset)), ZoneOffset.ofHours(-5)); assertEquals(test.getRules().getOffset(createInstant(2008, 3, 9, 2, 0, 0, 0, offset)), ZoneOffset.ofHours(-4)); } @Test public void test_NewYork_getOffset_fromDST() { ZoneId test = ZoneId.of("America/New_York"); ZoneOffset offset = ZoneOffset.ofHours(-4); assertEquals(test.getRules().getOffset(createInstant(2008, 11, 1, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 11, 2, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 11, 3, offset)), ZoneOffset.ofHours(-5)); assertEquals(test.getRules().getOffset(createInstant(2008, 11, 4, offset)), ZoneOffset.ofHours(-5)); assertEquals(test.getRules().getOffset(createInstant(2008, 11, 5, offset)), ZoneOffset.ofHours(-5)); assertEquals(test.getRules().getOffset(createInstant(2008, 11, 6, offset)), ZoneOffset.ofHours(-5)); assertEquals(test.getRules().getOffset(createInstant(2008, 11, 7, offset)), ZoneOffset.ofHours(-5)); // cutover at 02:00 local assertEquals(test.getRules().getOffset(createInstant(2008, 11, 2, 1, 59, 59, 999999999, offset)), ZoneOffset.ofHours(-4)); assertEquals(test.getRules().getOffset(createInstant(2008, 11, 2, 2, 0, 0, 0, offset)), ZoneOffset.ofHours(-5)); } @Test public void test_NewYork_getOffsetInfo() { ZoneId test = ZoneId.of("America/New_York"); checkOffset(test.getRules(), createLDT(2008, 1, 1), ZoneOffset.ofHours(-5), 1); checkOffset(test.getRules(), createLDT(2008, 2, 1), ZoneOffset.ofHours(-5), 1); checkOffset(test.getRules(), createLDT(2008, 3, 1), ZoneOffset.ofHours(-5), 1); checkOffset(test.getRules(), createLDT(2008, 4, 1), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 5, 1), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), 
createLDT(2008, 6, 1), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 7, 1), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 8, 1), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 9, 1), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 10, 1), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 11, 1), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 12, 1), ZoneOffset.ofHours(-5), 1); checkOffset(test.getRules(), createLDT(2008, 1, 28), ZoneOffset.ofHours(-5), 1); checkOffset(test.getRules(), createLDT(2008, 2, 28), ZoneOffset.ofHours(-5), 1); checkOffset(test.getRules(), createLDT(2008, 3, 28), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 4, 28), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 5, 28), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 6, 28), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 7, 28), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 8, 28), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 9, 28), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 10, 28), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 11, 28), ZoneOffset.ofHours(-5), 1); checkOffset(test.getRules(), createLDT(2008, 12, 28), ZoneOffset.ofHours(-5), 1); } @Test public void test_NewYork_getOffsetInfo_toDST() { ZoneId test = ZoneId.of("America/New_York"); checkOffset(test.getRules(), createLDT(2008, 3, 8), ZoneOffset.ofHours(-5), 1); checkOffset(test.getRules(), createLDT(2008, 3, 9), ZoneOffset.ofHours(-5), 1); checkOffset(test.getRules(), createLDT(2008, 3, 10), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 3, 11), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 3, 12), 
ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 3, 13), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 3, 14), ZoneOffset.ofHours(-4), 1); // cutover at 02:00 local checkOffset(test.getRules(), LocalDateTime.of(2008, 3, 9, 1, 59, 59, 999999999), ZoneOffset.ofHours(-5), 1); checkOffset(test.getRules(), LocalDateTime.of(2008, 3, 9, 2, 30, 0, 0), ZoneOffset.ofHours(-5), GAP); checkOffset(test.getRules(), LocalDateTime.of(2008, 3, 9, 3, 0, 0, 0), ZoneOffset.ofHours(-4), 1); } @Test public void test_NewYork_getOffsetInfo_fromDST() { ZoneId test = ZoneId.of("America/New_York"); checkOffset(test.getRules(), createLDT(2008, 11, 1), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 11, 2), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), createLDT(2008, 11, 3), ZoneOffset.ofHours(-5), 1); checkOffset(test.getRules(), createLDT(2008, 11, 4), ZoneOffset.ofHours(-5), 1); checkOffset(test.getRules(), createLDT(2008, 11, 5), ZoneOffset.ofHours(-5), 1); checkOffset(test.getRules(), createLDT(2008, 11, 6), ZoneOffset.ofHours(-5), 1); checkOffset(test.getRules(), createLDT(2008, 11, 7), ZoneOffset.ofHours(-5), 1); // cutover at 02:00 local checkOffset(test.getRules(), LocalDateTime.of(2008, 11, 2, 0, 59, 59, 999999999), ZoneOffset.ofHours(-4), 1); checkOffset(test.getRules(), LocalDateTime.of(2008, 11, 2, 1, 30, 0, 0), ZoneOffset.ofHours(-4), OVERLAP); checkOffset(test.getRules(), LocalDateTime.of(2008, 11, 2, 2, 0, 0, 0), ZoneOffset.ofHours(-5), 1); } @Test public void test_NewYork_getOffsetInfo_gap() { ZoneId test = ZoneId.of("America/New_York"); final LocalDateTime dateTime = LocalDateTime.of(2008, 3, 9, 2, 0, 0, 0); ZoneOffsetTransition trans = checkOffset(test.getRules(), dateTime, ZoneOffset.ofHours(-5), GAP); assertEquals(trans.getOffsetBefore(), ZoneOffset.ofHours(-5)); assertEquals(trans.getOffsetAfter(), ZoneOffset.ofHours(-4)); assertEquals(trans.getInstant(), createInstant(2008, 3, 9, 2, 
0, 0, 0, ZoneOffset.ofHours(-5))); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(-6)), false); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(-5)), false); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(-4)), false); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(-3)), false); assertEquals(trans.toString(), "Transition[Gap at 2008-03-09T02:00-05:00 to -04:00]"); assertFalse(trans.equals(null)); assertFalse(trans.equals(ZoneOffset.ofHours(-5))); assertTrue(trans.equals(trans)); final ZoneOffsetTransition otherTrans = test.getRules().getTransition(dateTime); assertTrue(trans.equals(otherTrans)); assertEquals(trans.hashCode(), otherTrans.hashCode()); } @Test public void test_NewYork_getOffsetInfo_overlap() { ZoneId test = ZoneId.of("America/New_York"); final LocalDateTime dateTime = LocalDateTime.of(2008, 11, 2, 1, 0, 0, 0); ZoneOffsetTransition trans = checkOffset(test.getRules(), dateTime, ZoneOffset.ofHours(-4), OVERLAP); assertEquals(trans.getOffsetBefore(), ZoneOffset.ofHours(-4)); assertEquals(trans.getOffsetAfter(), ZoneOffset.ofHours(-5)); assertEquals(trans.getInstant(), createInstant(2008, 11, 2, 2, 0, 0, 0, ZoneOffset.ofHours(-4))); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(-1)), false); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(-5)), true); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(-4)), true); assertEquals(trans.isValidOffset(ZoneOffset.ofHours(2)), false); assertEquals(trans.toString(), "Transition[Overlap at 2008-11-02T02:00-04:00 to -05:00]"); assertFalse(trans.equals(null)); assertFalse(trans.equals(ZoneOffset.ofHours(-4))); assertTrue(trans.equals(trans)); final ZoneOffsetTransition otherTrans = test.getRules().getTransition(dateTime); assertTrue(trans.equals(otherTrans)); assertEquals(trans.hashCode(), otherTrans.hashCode()); } //----------------------------------------------------------------------- // getXxx() isXxx() //----------------------------------------------------------------------- @Test 
public void test_get_Tzdb() {
    // A region-based zone must report its canonical id and non-fixed rules.
    ZoneId test = ZoneId.of("Europe/London");
    assertEquals(test.getId(), "Europe/London");
    assertEquals(test.getRules().isFixedOffset(), false);
}

@Test
public void test_get_TzdbFixed() {
    // An offset-style id yields fixed-offset rules.
    ZoneId test = ZoneId.of("+01:30");
    assertEquals(test.getId(), "+01:30");
    assertEquals(test.getRules().isFixedOffset(), true);
}

//-----------------------------------------------------------------------
// Test helpers.
//-----------------------------------------------------------------------
// Creates an Instant for midnight of the given date at the given offset.
private Instant createInstant(int year, int month, int day, ZoneOffset offset) {
    return LocalDateTime.of(year, month, day, 0, 0).toInstant(offset);
}

// Creates an Instant for the given date-time at the given offset.
private Instant createInstant(int year, int month, int day, int hour, int min, int sec, int nano, ZoneOffset offset) {
    return LocalDateTime.of(year, month, day, hour, min, sec, nano).toInstant(offset);
}

// Creates a ZonedDateTime for the given date-time in the given zone.
private ZonedDateTime createZDT(int year, int month, int day, int hour, int min, int sec, int nano, ZoneId zone) {
    return LocalDateTime.of(year, month, day, hour, min, sec, nano).atZone(zone);
}

// Creates a LocalDateTime for midnight of the given date.
private LocalDateTime createLDT(int year, int month, int day) {
    return LocalDateTime.of(year, month, day, 0, 0);
}

/**
 * Asserts the offset situation of {@code dateTime} under {@code rules}.
 * <p>
 * {@code type} encodes the expected number of valid offsets: 0 = gap,
 * 1 = normal (single valid offset), 2 = overlap. For gaps and overlaps the
 * corresponding {@link ZoneOffsetTransition} is returned for further checks;
 * for the normal case {@code null} is returned.
 */
private ZoneOffsetTransition checkOffset(ZoneRules rules, LocalDateTime dateTime, ZoneOffset offset, int type) {
    List<ZoneOffset> validOffsets = rules.getValidOffsets(dateTime);
    assertEquals(validOffsets.size(), type);
    assertEquals(rules.getOffset(dateTime), offset);
    if (type == 1) {
        assertEquals(validOffsets.get(0), offset);
        return null;
    } else {
        ZoneOffsetTransition zot = rules.getTransition(dateTime);
        assertNotNull(zot);
        assertEquals(zot.isOverlap(), type == 2);
        assertEquals(zot.isGap(), type == 0);
        // isValidOffset holds only in an overlap (type == 2), never in a gap.
        assertEquals(zot.isValidOffset(offset), type == 2);
        return zot;
    }
}

}
package org.apache.maven.model.merge;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.maven.model.BuildBase;
import org.apache.maven.model.CiManagement;
import org.apache.maven.model.Dependency;
import org.apache.maven.model.DeploymentRepository;
import org.apache.maven.model.DistributionManagement;
import org.apache.maven.model.Exclusion;
import org.apache.maven.model.Extension;
import org.apache.maven.model.InputLocation;
import org.apache.maven.model.IssueManagement;
import org.apache.maven.model.Model;
import org.apache.maven.model.ModelBase;
import org.apache.maven.model.Organization;
import org.apache.maven.model.Plugin;
import org.apache.maven.model.PluginExecution;
import org.apache.maven.model.ReportPlugin;
import org.apache.maven.model.ReportSet;
import org.apache.maven.model.Repository;
import org.apache.maven.model.RepositoryBase;
import org.apache.maven.model.Scm;
import org.apache.maven.model.Site;

/**
 * The domain-specific model merger for the Maven POM, overriding generic code from parent class when necessary with
 * more adapted algorithms.
 *
 * @author Benjamin Bentmann
 */
public class MavenModelMerger
    extends ModelMerger
{

    /**
     * The hint key for the child path adjustment used during inheritance for URL calculations.
     */
    public static final String CHILD_PATH_ADJUSTMENT = "child-path-adjustment";

    /**
     * The context key for the artifact id of the target model.
     */
    public static final String ARTIFACT_ID = "artifact-id";

    @Override
    protected void mergeModel( Model target, Model source, boolean sourceDominant, Map<Object, Object> context )
    {
        // Expose the target's artifactId to downstream merge steps (e.g. URL extrapolation).
        context.put( ARTIFACT_ID, target.getArtifactId() );

        super.mergeModel( target, source, sourceDominant, context );
    }

    /**
     * The name is injected only when the source dominates; it is never inherited.
     */
    @Override
    protected void mergeModel_Name( Model target, Model source, boolean sourceDominant, Map<Object, Object> context )
    {
        String src = source.getName();
        if ( src != null )
        {
            if ( sourceDominant )
            {
                target.setName( src );
                target.setLocation( "name", source.getLocation( "name" ) );
            }
        }
    }

    /**
     * On inheritance (source not dominant) a missing child URL is derived from the parent URL via
     * {@link #extrapolateChildUrl(String, Map)}.
     */
    @Override
    protected void mergeModel_Url( Model target, Model source, boolean sourceDominant, Map<Object, Object> context )
    {
        String src = source.getUrl();
        if ( src != null )
        {
            if ( sourceDominant )
            {
                target.setUrl( src );
                target.setLocation( "url", source.getLocation( "url" ) );
            }
            else if ( target.getUrl() == null )
            {
                target.setUrl( extrapolateChildUrl( src, context ) );
                target.setLocation( "url", source.getLocation( "url" ) );
            }
        }
    }

    /*
     * TODO Whether the merge continues recursively into an existing node or not could be an option for the generated
     * merger
     */

    @Override
    protected void mergeModel_Organization( Model target, Model source, boolean sourceDominant,
                                            Map<Object, Object> context )
    {
        Organization src = source.getOrganization();
        if ( src != null )
        {
            Organization tgt = target.getOrganization();
            // Only merged when the target has no organization of its own (see TODO above).
            if ( tgt == null )
            {
                tgt = new Organization();
                tgt.setLocation( "", src.getLocation( "" ) );
                target.setOrganization( tgt );
                mergeOrganization( tgt, src, sourceDominant, context );
            }
        }
    }

    @Override
    protected void mergeModel_IssueManagement( Model target, Model source, boolean sourceDominant,
                                               Map<Object, Object> context )
    {
        IssueManagement src = source.getIssueManagement();
        if ( src != null )
        {
            IssueManagement tgt = target.getIssueManagement();
            // Only merged when the target has no issue management of its own.
            if ( tgt == null )
            {
                tgt = new IssueManagement();
                tgt.setLocation( "", src.getLocation( "" ) );
                target.setIssueManagement( tgt );
                mergeIssueManagement( tgt, src, sourceDominant, context );
            }
        }
    }

    @Override
    protected void mergeModel_CiManagement( Model target, Model source, boolean sourceDominant,
                                            Map<Object, Object> context )
    {
        CiManagement src = source.getCiManagement();
        if ( src != null )
        {
            CiManagement tgt = target.getCiManagement();
            // Only merged when the target has no CI management of its own.
            if ( tgt == null )
            {
                tgt = new CiManagement();
                tgt.setLocation( "", src.getLocation( "" ) );
                target.setCiManagement( tgt );
                mergeCiManagement( tgt, src, sourceDominant, context );
            }
        }
    }

    @Override
    protected void mergeModel_ModelVersion( Model target, Model source, boolean sourceDominant,
                                            Map<Object, Object> context )
    {
        // neither inherited nor injected
    }

    @Override
    protected void mergeModel_ArtifactId( Model target, Model source, boolean sourceDominant,
                                          Map<Object, Object> context )
    {
        // neither inherited nor injected
    }

    @Override
    protected void mergeModel_Profiles( Model target, Model source, boolean sourceDominant,
                                        Map<Object, Object> context )
    {
        // neither inherited nor injected
    }

    @Override
    protected void mergeModel_Prerequisites( Model target, Model source, boolean sourceDominant,
                                             Map<Object, Object> context )
    {
        // neither inherited nor injected
    }

    @Override
    protected void mergeModel_Licenses( Model target, Model source, boolean sourceDominant,
                                        Map<Object, Object> context )
    {
        // Licenses are inherited wholesale, never merged element-wise.
        if ( target.getLicenses().isEmpty() )
        {
            target.setLicenses( new ArrayList<>( source.getLicenses() ) );
        }
    }

    @Override
    protected void mergeModel_Developers( Model target, Model source, boolean sourceDominant,
                                          Map<Object, Object> context )
    {
        // Developers are inherited wholesale, never merged element-wise.
        if ( target.getDevelopers().isEmpty() )
        {
            target.setDevelopers( new ArrayList<>( source.getDevelopers() ) );
        }
    }

    @Override
    protected void mergeModel_Contributors( Model target, Model source, boolean sourceDominant,
                                            Map<Object, Object> context )
    {
        // Contributors are inherited wholesale, never merged element-wise.
        if ( target.getContributors().isEmpty() )
        {
            target.setContributors( new ArrayList<>( source.getContributors() ) );
        }
    }

    @Override
    protected void mergeModel_MailingLists( Model target, Model source, boolean sourceDominant,
                                            Map<Object, Object> context )
    {
        // Mailing lists are inherited wholesale, never merged element-wise.
        if ( target.getMailingLists().isEmpty() )
        {
            target.setMailingLists( new ArrayList<>( source.getMailingLists() ) );
        }
    }

    /**
     * Modules are appended (target first, then non-duplicate source entries). The input-location merge tracks where
     * each merged entry came from: non-negative indices refer to target entries, {@code ~i} marks entry {@code i}
     * of the source list.
     */
    @Override
    protected void mergeModelBase_Modules( ModelBase target, ModelBase source, boolean sourceDominant,
                                           Map<Object, Object> context )
    {
        List<String> src = source.getModules();
        if ( !src.isEmpty() && sourceDominant )
        {
            List<Integer> indices = new ArrayList<>();
            List<String> tgt = target.getModules();
            Set<String> excludes = new LinkedHashSet<>( tgt );
            List<String> merged = new ArrayList<>( tgt.size() + src.size() );
            merged.addAll( tgt );
            for ( int i = 0, n = tgt.size(); i < n; i++ )
            {
                indices.add( i );
            }
            for ( int i = 0, n = src.size(); i < n; i++ )
            {
                String s = src.get( i );
                if ( !excludes.contains( s ) )
                {
                    merged.add( s );
                    indices.add( ~i );
                }
            }
            target.setModules( merged );
            target.setLocation( "modules", InputLocation.merge( target.getLocation( "modules" ),
                                                                source.getLocation( "modules" ), indices ) );
        }
    }

    /*
     * TODO The order of the merged list could be controlled by an attribute in the model association: target-first,
     * source-first, dominant-first, recessive-first
     */
    @Override
    protected void mergeModelBase_Repositories( ModelBase target, ModelBase source, boolean sourceDominant,
                                                Map<Object, Object> context )
    {
        List<Repository> src = source.getRepositories();
        if ( !src.isEmpty() )
        {
            List<Repository> tgt = target.getRepositories();
            target.setRepositories( sourceDominant ? mergeRepositoryLists( src, tgt )
                                                   : mergeRepositoryLists( tgt, src ) );
        }
    }

    @Override
    protected void mergeModelBase_PluginRepositories( ModelBase target, ModelBase source, boolean sourceDominant,
                                                      Map<Object, Object> context )
    {
        List<Repository> src = source.getPluginRepositories();
        if ( !src.isEmpty() )
        {
            List<Repository> tgt = target.getPluginRepositories();
            target.setPluginRepositories( sourceDominant ? mergeRepositoryLists( src, tgt )
                                                         : mergeRepositoryLists( tgt, src ) );
        }
    }

    /**
     * Merges two repository lists keyed by {@link #getRepositoryKey(Repository)}: all dominant entries first, then
     * any recessive entry whose key is not already present. Insertion order is preserved.
     */
    private List<Repository> mergeRepositoryLists( List<Repository> dominant, List<Repository> recessive )
    {
        Map<Object, Repository> merged = new LinkedHashMap<>( ( dominant.size() + recessive.size() ) * 2 );

        for ( Repository element : dominant )
        {
            merged.put( getRepositoryKey( element ), element );
        }

        for ( Repository element : recessive )
        {
            Object key = getRepositoryKey( element );
            if ( !merged.containsKey( key ) )
            {
                merged.put( key, element );
            }
        }

        return new ArrayList<>( merged.values() );
    }

    /**
     * Appends the source strings to the target strings, skipping duplicates, and returns the combined list.
     */
    private static List<String> mergeDistinct( List<String> tgt, List<String> src )
    {
        Set<String> excludes = new LinkedHashSet<>( tgt );
        List<String> merged = new ArrayList<>( tgt.size() + src.size() );
        merged.addAll( tgt );
        for ( String s : src )
        {
            if ( !excludes.contains( s ) )
            {
                merged.add( s );
            }
        }
        return merged;
    }

    /*
     * TODO Whether duplicates should be removed looks like an option for the generated merger.
     */
    @Override
    protected void mergeBuildBase_Filters( BuildBase target, BuildBase source, boolean sourceDominant,
                                           Map<Object, Object> context )
    {
        List<String> src = source.getFilters();
        if ( !src.isEmpty() )
        {
            target.setFilters( mergeDistinct( target.getFilters(), src ) );
        }
    }

    @Override
    protected void mergeBuildBase_Resources( BuildBase target, BuildBase source, boolean sourceDominant,
                                             Map<Object, Object> context )
    {
        if ( sourceDominant || target.getResources().isEmpty() )
        {
            super.mergeBuildBase_Resources( target, source, sourceDominant, context );
        }
    }

    @Override
    protected void mergeBuildBase_TestResources( BuildBase target, BuildBase source, boolean sourceDominant,
                                                 Map<Object, Object> context )
    {
        if ( sourceDominant || target.getTestResources().isEmpty() )
        {
            super.mergeBuildBase_TestResources( target, source, sourceDominant, context );
        }
    }

    @Override
    protected void mergeDistributionManagement_Repository( DistributionManagement target,
                                                           DistributionManagement source, boolean sourceDominant,
                                                           Map<Object, Object> context )
    {
        DeploymentRepository src = source.getRepository();
        if ( src != null )
        {
            DeploymentRepository tgt = target.getRepository();
            if ( sourceDominant || tgt == null )
            {
                tgt = new DeploymentRepository();
                tgt.setLocation( "", src.getLocation( "" ) );
                target.setRepository( tgt );
                mergeDeploymentRepository( tgt, src, sourceDominant, context );
            }
        }
    }

    @Override
    protected void mergeDistributionManagement_SnapshotRepository( DistributionManagement target,
                                                                   DistributionManagement source,
                                                                   boolean sourceDominant,
                                                                   Map<Object, Object> context )
    {
        DeploymentRepository src = source.getSnapshotRepository();
        if ( src != null )
        {
            DeploymentRepository tgt = target.getSnapshotRepository();
            if ( sourceDominant || tgt == null )
            {
                tgt = new DeploymentRepository();
                tgt.setLocation( "", src.getLocation( "" ) );
                target.setSnapshotRepository( tgt );
                mergeDeploymentRepository( tgt, src, sourceDominant, context );
            }
        }
    }

    @Override
    protected void mergeDistributionManagement_Site( DistributionManagement target, DistributionManagement source,
                                                     boolean sourceDominant, Map<Object, Object> context )
    {
        Site src = source.getSite();
        if ( src != null )
        {
            Site tgt = target.getSite();
            if ( sourceDominant || tgt == null )
            {
                tgt = new Site();
                tgt.setLocation( "", src.getLocation( "" ) );
                target.setSite( tgt );
                mergeSite( tgt, src, sourceDominant, context );
            }
        }
    }

    @Override
    protected void mergeSite_Url( Site target, Site source, boolean sourceDominant, Map<Object, Object> context )
    {
        String src = source.getUrl();
        if ( src != null )
        {
            if ( sourceDominant )
            {
                target.setUrl( src );
                target.setLocation( "url", source.getLocation( "url" ) );
            }
            else if ( target.getUrl() == null )
            {
                // Inheritance: derive the child site URL from the parent one.
                target.setUrl( extrapolateChildUrl( src, context ) );
                target.setLocation( "url", source.getLocation( "url" ) );
            }
        }
    }

    @Override
    protected void mergeScm_Url( Scm target, Scm source, boolean sourceDominant, Map<Object, Object> context )
    {
        String src = source.getUrl();
        if ( src != null )
        {
            if ( sourceDominant )
            {
                target.setUrl( src );
                target.setLocation( "url", source.getLocation( "url" ) );
            }
            else if ( target.getUrl() == null )
            {
                // Inheritance: derive the child SCM URL from the parent one.
                target.setUrl( extrapolateChildUrl( src, context ) );
                target.setLocation( "url", source.getLocation( "url" ) );
            }
        }
    }

    @Override
    protected void mergeScm_Connection( Scm target, Scm source, boolean sourceDominant, Map<Object, Object> context )
    {
        String src = source.getConnection();
        if ( src != null )
        {
            if ( sourceDominant )
            {
                target.setConnection( src );
                target.setLocation( "connection", source.getLocation( "connection" ) );
            }
            else if ( target.getConnection() == null )
            {
                // Inheritance: derive the child connection URL from the parent one.
                target.setConnection( extrapolateChildUrl( src, context ) );
                target.setLocation( "connection", source.getLocation( "connection" ) );
            }
        }
    }

    @Override
    protected void mergeScm_DeveloperConnection( Scm target, Scm source, boolean sourceDominant,
                                                 Map<Object, Object> context )
    {
        String src = source.getDeveloperConnection();
        if ( src != null )
        {
            if ( sourceDominant )
            {
                target.setDeveloperConnection( src );
                target.setLocation( "developerConnection", source.getLocation( "developerConnection" ) );
            }
            else if ( target.getDeveloperConnection() == null )
            {
                // Inheritance: derive the child developer connection URL from the parent one.
                target.setDeveloperConnection( extrapolateChildUrl( src, context ) );
                target.setLocation( "developerConnection", source.getLocation( "developerConnection" ) );
            }
        }
    }

    /**
     * Executions are merged by id: dominant/inherited source executions go in first, then every target execution
     * either absorbs the matching source execution (via {@code mergePluginExecution}) or is appended as-is.
     */
    @Override
    protected void mergePlugin_Executions( Plugin target, Plugin source, boolean sourceDominant,
                                           Map<Object, Object> context )
    {
        List<PluginExecution> src = source.getExecutions();
        if ( !src.isEmpty() )
        {
            List<PluginExecution> tgt = target.getExecutions();

            Map<Object, PluginExecution> merged = new LinkedHashMap<>( ( src.size() + tgt.size() ) * 2 );

            for ( PluginExecution element : src )
            {
                if ( sourceDominant
                    || ( element.getInherited() != null ? element.isInherited() : source.isInherited() ) )
                {
                    Object key = getPluginExecutionKey( element );
                    merged.put( key, element );
                }
            }

            for ( PluginExecution element : tgt )
            {
                Object key = getPluginExecutionKey( element );
                PluginExecution existing = merged.get( key );
                if ( existing != null )
                {
                    mergePluginExecution( element, existing, sourceDominant, context );
                }
                merged.put( key, element );
            }

            target.setExecutions( new ArrayList<>( merged.values() ) );
        }
    }

    @Override
    protected void mergePluginExecution_Goals( PluginExecution target, PluginExecution source, boolean sourceDominant,
                                               Map<Object, Object> context )
    {
        List<String> src = source.getGoals();
        if ( !src.isEmpty() )
        {
            target.setGoals( mergeDistinct( target.getGoals(), src ) );
        }
    }

    /**
     * Report sets are merged by id, analogous to {@link #mergePlugin_Executions}.
     */
    @Override
    protected void mergeReportPlugin_ReportSets( ReportPlugin target, ReportPlugin source, boolean sourceDominant,
                                                 Map<Object, Object> context )
    {
        List<ReportSet> src = source.getReportSets();
        if ( !src.isEmpty() )
        {
            List<ReportSet> tgt = target.getReportSets();

            Map<Object, ReportSet> merged = new LinkedHashMap<>( ( src.size() + tgt.size() ) * 2 );

            for ( ReportSet rset : src )
            {
                if ( sourceDominant || ( rset.getInherited() != null ? rset.isInherited() : source.isInherited() ) )
                {
                    Object key = getReportSetKey( rset );
                    merged.put( key, rset );
                }
            }

            for ( ReportSet element : tgt )
            {
                Object key = getReportSetKey( element );
                ReportSet existing = merged.get( key );
                if ( existing != null )
                {
                    mergeReportSet( element, existing, sourceDominant, context );
                }
                merged.put( key, element );
            }

            target.setReportSets( new ArrayList<>( merged.values() ) );
        }
    }

    @Override
    protected Object getDependencyKey( Dependency dependency )
    {
        return dependency.getManagementKey();
    }

    @Override
    protected Object getPluginKey( Plugin plugin )
    {
        return plugin.getKey();
    }

    @Override
    protected Object getPluginExecutionKey( PluginExecution pluginExecution )
    {
        return pluginExecution.getId();
    }

    @Override
    protected Object getReportPluginKey( ReportPlugin reportPlugin )
    {
        return reportPlugin.getKey();
    }

    @Override
    protected Object getReportSetKey( ReportSet reportSet )
    {
        return reportSet.getId();
    }

    @Override
    protected Object getRepositoryBaseKey( RepositoryBase repositoryBase )
    {
        return repositoryBase.getId();
    }

    @Override
    protected Object getExtensionKey( Extension extension )
    {
        return extension.getGroupId() + ':' + extension.getArtifactId();
    }

    @Override
    protected Object getExclusionKey( Exclusion exclusion )
    {
        return exclusion.getGroupId() + ':' + exclusion.getArtifactId();
    }

    /**
     * Hook for subclasses to adjust an inherited URL for the child; the base implementation returns the parent URL
     * unchanged.
     */
    protected String extrapolateChildUrl( String parentUrl, Map<Object, Object> context )
    {
        return parentUrl;
    }

}
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
 * Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */

package com.amazonaws.services.elasticbeanstalk.model;

import java.io.Serializable;
import java.util.Objects;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * <p>
 * Request to delete an application version.
 * </p>
 */
public class DeleteApplicationVersionRequest extends AmazonWebServiceRequest
        implements Serializable, Cloneable {

    /**
     * <p>
     * The name of the application to delete releases from.
     * </p>
     */
    private String applicationName;

    /**
     * <p>
     * The label of the version to delete.
     * </p>
     */
    private String versionLabel;

    /**
     * <p>
     * Indicates whether to delete the associated source bundle from Amazon S3:
     * </p>
     * <ul>
     * <li> <code>true</code>: An attempt is made to delete the associated Amazon
     * S3 source bundle specified at time of creation.</li>
     * <li> <code>false</code>: No action is taken on the Amazon S3 source bundle
     * specified at time of creation.</li>
     * </ul>
     * <p>
     * Valid Values: <code>true</code> | <code>false</code>
     * </p>
     */
    private Boolean deleteSourceBundle;

    /**
     * Default constructor for DeleteApplicationVersionRequest object. Callers
     * should use the setter or fluent setter (with...) methods to initialize
     * the object after creating it.
     */
    public DeleteApplicationVersionRequest() {
    }

    /**
     * Constructs a new DeleteApplicationVersionRequest object. Callers should
     * use the setter or fluent setter (with...) methods to initialize any
     * additional object members.
     *
     * @param applicationName
     *        The name of the application to delete releases from.
     * @param versionLabel
     *        The label of the version to delete.
     */
    public DeleteApplicationVersionRequest(String applicationName,
            String versionLabel) {
        setApplicationName(applicationName);
        setVersionLabel(versionLabel);
    }

    /**
     * Sets the name of the application to delete releases from.
     *
     * @param applicationName
     *        The name of the application to delete releases from.
     */
    public void setApplicationName(String applicationName) {
        this.applicationName = applicationName;
    }

    /**
     * Returns the name of the application to delete releases from.
     *
     * @return The name of the application to delete releases from.
     */
    public String getApplicationName() {
        return this.applicationName;
    }

    /**
     * Fluent variant of {@link #setApplicationName(String)}.
     *
     * @param applicationName
     *        The name of the application to delete releases from.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public DeleteApplicationVersionRequest withApplicationName(
            String applicationName) {
        setApplicationName(applicationName);
        return this;
    }

    /**
     * Sets the label of the version to delete.
     *
     * @param versionLabel
     *        The label of the version to delete.
     */
    public void setVersionLabel(String versionLabel) {
        this.versionLabel = versionLabel;
    }

    /**
     * Returns the label of the version to delete.
     *
     * @return The label of the version to delete.
     */
    public String getVersionLabel() {
        return this.versionLabel;
    }

    /**
     * Fluent variant of {@link #setVersionLabel(String)}.
     *
     * @param versionLabel
     *        The label of the version to delete.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public DeleteApplicationVersionRequest withVersionLabel(String versionLabel) {
        setVersionLabel(versionLabel);
        return this;
    }

    /**
     * Sets whether to delete the associated source bundle from Amazon S3.
     *
     * @param deleteSourceBundle
     *        <code>true</code>: An attempt is made to delete the associated
     *        Amazon S3 source bundle specified at time of creation.
     *        <code>false</code>: No action is taken on the Amazon S3 source
     *        bundle specified at time of creation.
     */
    public void setDeleteSourceBundle(Boolean deleteSourceBundle) {
        this.deleteSourceBundle = deleteSourceBundle;
    }

    /**
     * Returns whether to delete the associated source bundle from Amazon S3.
     *
     * @return <code>true</code> if an attempt is made to delete the associated
     *         Amazon S3 source bundle; <code>false</code> if no action is taken
     *         on it. May be <code>null</code> when unset.
     */
    public Boolean getDeleteSourceBundle() {
        return this.deleteSourceBundle;
    }

    /**
     * Fluent variant of {@link #setDeleteSourceBundle(Boolean)}.
     *
     * @param deleteSourceBundle
     *        Whether to delete the associated source bundle from Amazon S3.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public DeleteApplicationVersionRequest withDeleteSourceBundle(
            Boolean deleteSourceBundle) {
        setDeleteSourceBundle(deleteSourceBundle);
        return this;
    }

    /**
     * Convenience accessor equivalent to {@link #getDeleteSourceBundle()}.
     *
     * @return Whether to delete the associated source bundle from Amazon S3;
     *         may be <code>null</code> when unset.
     */
    public Boolean isDeleteSourceBundle() {
        return this.deleteSourceBundle;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Output format is kept identical to the previous implementation.
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getApplicationName() != null)
            sb.append("ApplicationName: " + getApplicationName() + ",");
        if (getVersionLabel() != null)
            sb.append("VersionLabel: " + getVersionLabel() + ",");
        if (getDeleteSourceBundle() != null)
            sb.append("DeleteSourceBundle: " + getDeleteSourceBundle());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof also rejects null, matching the previous null check.
        if (!(obj instanceof DeleteApplicationVersionRequest))
            return false;
        DeleteApplicationVersionRequest other = (DeleteApplicationVersionRequest) obj;
        // Objects.equals collapses the old "xor-null then equals" pattern per field.
        return Objects.equals(getApplicationName(), other.getApplicationName())
                && Objects.equals(getVersionLabel(), other.getVersionLabel())
                && Objects.equals(getDeleteSourceBundle(),
                        other.getDeleteSourceBundle());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation (seed 1, null -> 0)
        // as the previous hand-rolled loop, so hash values are unchanged.
        return Objects.hash(getApplicationName(), getVersionLabel(),
                getDeleteSourceBundle());
    }

    @Override
    public DeleteApplicationVersionRequest clone() {
        return (DeleteApplicationVersionRequest) super.clone();
    }
}
/* * Copyright 2000-2014 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.jetbrains.python.codeInsight.override; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.intellij.codeInsight.CodeInsightUtilCore; import com.intellij.featureStatistics.FeatureUsageTracker; import com.intellij.featureStatistics.ProductivityFeatureNames; import com.intellij.ide.util.MemberChooser; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.Result; import com.intellij.openapi.command.WriteCommandAction; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.editor.ScrollType; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.DialogWrapper; import com.intellij.openapi.util.TextRange; import com.intellij.openapi.util.text.StringUtil; import com.intellij.psi.PsiDocumentManager; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiFile; import com.intellij.psi.PsiWhiteSpace; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.ui.SpeedSearchComparator; import com.jetbrains.python.PyNames; import com.jetbrains.python.psi.*; import com.jetbrains.python.psi.impl.PyFunctionBuilder; import com.jetbrains.python.psi.impl.PyPsiUtils; import com.jetbrains.python.psi.types.PyClassLikeType; import com.jetbrains.python.psi.types.PyNoneType; import com.jetbrains.python.psi.types.PyTypeUtil; 
import com.jetbrains.python.psi.types.TypeEvalContext;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.*;

/**
 * Implements the "Override Methods" code-insight action for Python: collects the
 * methods available from a class's ancestors, lets the user pick some in a chooser
 * dialog, and inserts generated override stubs into the class body.
 *
 * @author Alexey.Ivanov
 */
public class PyOverrideImplementUtil {
  private static final Logger LOG = Logger.getInstance("#com.jetbrains.python.codeInsight.override.PyOverrideImplementUtil");

  // Utility class — never instantiated.
  private PyOverrideImplementUtil() {
  }

  /**
   * Returns the {@link PyClass} surrounding the caret in {@code file}, or null if
   * the caret is not inside (or immediately after) a class.
   */
  @Nullable
  public static PyClass getContextClass(@NotNull final Project project, @NotNull final Editor editor, @NotNull final PsiFile file) {
    // Commit pending document changes so PSI reflects what the editor shows.
    PsiDocumentManager.getInstance(project).commitAllDocuments();
    int offset = editor.getCaretModel().getOffset();
    PsiElement element = file.findElementAt(offset);
    if (element == null) {
      // are we in whitespace after last class? PY-440
      final PsiElement lastChild = file.getLastChild();
      if (lastChild != null && offset >= lastChild.getTextRange().getStartOffset() && offset <= lastChild.getTextRange().getEndOffset()) {
        element = lastChild;
      }
    }
    final PyClass pyClass = PsiTreeUtil.getParentOfType(element, PyClass.class, false);
    // Caret in the whitespace directly following a class counts as "inside" it.
    if (pyClass == null && element instanceof PsiWhiteSpace && element.getPrevSibling() instanceof PyClass) {
      return (PyClass)element.getPrevSibling();
    }
    return pyClass;
  }

  /**
   * Entry point for the Override Methods action: records feature usage, then shows
   * the chooser populated with all overridable super methods of {@code pyClass}.
   */
  public static void chooseAndOverrideMethods(final Project project, @NotNull final Editor editor, @NotNull final PyClass pyClass) {
    FeatureUsageTracker.getInstance().triggerFeatureUsed(ProductivityFeatureNames.CODEASSISTS_OVERRIDE_IMPLEMENT);
    chooseAndOverrideOrImplementMethods(project, editor, pyClass);
  }

  // Gathers every super function reachable through the MRO and forwards to the
  // chooser-driven overload with override (not implement) semantics.
  private static void chooseAndOverrideOrImplementMethods(final Project project, @NotNull final Editor editor, @NotNull final PyClass pyClass) {
    PyPsiUtils.assertValid(pyClass);
    ApplicationManager.getApplication().assertReadAccessAllowed();

    final Set<PyFunction> result = new HashSet<>();
    TypeEvalContext context = TypeEvalContext.codeCompletion(project, null);
    final Collection<PyFunction> superFunctions = getAllSuperFunctions(pyClass, context);
    result.addAll(superFunctions);
    chooseAndOverrideOrImplementMethods(project, editor, pyClass, result, "Select Methods to Override", false);
  }

  /**
   * Shows a {@link MemberChooser} over {@code superFunctions} (minus class-private
   * names and methods the class already defines) and generates overrides for the
   * user's selection.
   *
   * @param implement when true, generated bodies are {@code pass} stubs rather than
   *                  {@code super(...)} delegations (see buildOverriddenFunction)
   */
  public static void chooseAndOverrideOrImplementMethods(@NotNull final Project project,
                                                         @NotNull final Editor editor,
                                                         @NotNull final PyClass pyClass,
                                                         @NotNull final Collection<PyFunction> superFunctions,
                                                         @NotNull final String title,
                                                         final boolean implement) {
    List<PyMethodMember> elements = new ArrayList<>();
    for (PyFunction function : superFunctions) {
      final String name = function.getName();
      // Skip unnamed functions and name-mangled (__private) methods.
      if (name == null || PyUtil.isClassPrivateName(name)) {
        continue;
      }
      // Offer only methods not already defined directly on this class.
      if (pyClass.findMethodByName(name, false, null) == null) {
        final PyMethodMember member = new PyMethodMember(function);
        elements.add(member);
      }
    }
    if (elements.size() == 0) {
      return;
    }
    final MemberChooser<PyMethodMember> chooser =
      new MemberChooser<PyMethodMember>(elements.toArray(new PyMethodMember[elements.size()]), false, true, project) {
        @Override
        protected SpeedSearchComparator getSpeedSearchComparator() {
          // Speed search ignores leading underscores so typing "init" finds __init__.
          return new SpeedSearchComparator(false) {
            @Nullable
            @Override
            public Iterable<TextRange> matchingFragments(@NotNull String pattern, @NotNull String text) {
              return super.matchingFragments(PyMethodMember.trimUnderscores(pattern), text);
            }
          };
        }
      };
    chooser.setTitle(title);
    chooser.setCopyJavadocVisible(false);
    chooser.show();
    if (chooser.getExitCode() != DialogWrapper.OK_EXIT_CODE) {
      return;
    }
    List<PyMethodMember> membersToOverride = chooser.getSelectedElements();
    overrideMethods(editor, pyClass, membersToOverride, implement);
  }

  /**
   * Inserts override stubs for {@code membersToOverride} into {@code pyClass},
   * wrapped in a write command so the whole insertion is one undoable action.
   */
  public static void overrideMethods(final Editor editor, final PyClass pyClass, final List<PyMethodMember> membersToOverride,
                                     final boolean implement) {
    if (membersToOverride == null) {
      return;
    }
    new WriteCommandAction(pyClass.getProject(), pyClass.getContainingFile()) {
      protected void run(@NotNull final Result result) throws Throwable {
        write(pyClass, membersToOverride, editor, implement);
      }
    }.execute();
  }

  // Performs the actual PSI mutation: builds each override, inserts it after the
  // computed anchor, then positions the caret/selection on the last inserted body.
  private static void write(@NotNull final PyClass pyClass,
                            @NotNull final List<PyMethodMember> newMembers,
                            @NotNull final Editor editor,
                            boolean implement) {
    final PyStatementList statementList = pyClass.getStatementList();
    final int offset = editor.getCaretModel().getOffset();
    // Anchor = last statement before the caret, or the class docstring; new
    // methods are inserted after it.
    PsiElement anchor = null;
    for (PyStatement statement : statementList.getStatements()) {
      if (statement.getTextRange().getStartOffset() < offset ||
          (statement instanceof PyExpressionStatement &&
           ((PyExpressionStatement)statement).getExpression() instanceof PyStringLiteralExpression)) {
        anchor = statement;
      }
    }

    PyFunction element = null;
    for (PyMethodMember newMember : newMembers) {
      PyFunction baseFunction = (PyFunction)newMember.getPsiElement();
      final PyFunctionBuilder builder = buildOverriddenFunction(pyClass, baseFunction, implement);
      PyFunction function = builder.addFunctionAfter(statementList, anchor, LanguageLevel.forElement(statementList));
      element = CodeInsightUtilCore.forcePsiPostprocessAndRestoreElement(function);
    }

    PyPsiUtils.removeRedundantPass(statementList);
    if (element != null) {
      // Select the generated body of the last inserted method so the user can
      // immediately type over the stub.
      final PyStatementList targetStatementList = element.getStatementList();
      final int start = targetStatementList.getTextRange().getStartOffset();
      editor.getCaretModel().moveToOffset(start);
      editor.getScrollingModel().scrollToCaret(ScrollType.RELATIVE);
      editor.getSelectionModel().setSelection(start, element.getTextRange().getEndOffset());
    }
  }

  /**
   * Builds a {@link PyFunctionBuilder} mirroring {@code baseFunction}: same name,
   * parameters, annotation, and (classmethod/staticmethod/property) decorators,
   * with a generated body — either {@code pass} (implement mode, old-style fake
   * base, or a base that raises NotImplementedError) or a delegating
   * {@code super(...)} / {@code Base.method(...)} call.
   */
  private static PyFunctionBuilder buildOverriddenFunction(PyClass pyClass, PyFunction baseFunction, boolean implement) {
    final boolean overridingNew = PyNames.NEW.equals(baseFunction.getName());
    assert baseFunction.getName() != null;
    PyFunctionBuilder pyFunctionBuilder = new PyFunctionBuilder(baseFunction.getName(), baseFunction);
    final PyDecoratorList decorators = baseFunction.getDecoratorList();
    boolean baseMethodIsStatic = false;
    if (decorators != null) {
      // Propagate only the well-known decorators; @abstractproperty collapses
      // to @property on the override.
      if (decorators.findDecorator(PyNames.CLASSMETHOD) != null) {
        pyFunctionBuilder.decorate(PyNames.CLASSMETHOD);
      }
      else if (decorators.findDecorator(PyNames.STATICMETHOD) != null) {
        baseMethodIsStatic = true;
        pyFunctionBuilder.decorate(PyNames.STATICMETHOD);
      }
      else if (decorators.findDecorator(PyNames.PROPERTY) != null ||
               decorators.findDecorator(PyNames.ABSTRACTPROPERTY) != null) {
        pyFunctionBuilder.decorate(PyNames.PROPERTY);
      }
    }

    // Copy the return-type annotation verbatim, if any.
    PyAnnotation anno = baseFunction.getAnnotation();
    if (anno != null) {
      pyFunctionBuilder.annotation(anno.getText());
    }

    final TypeEvalContext context = TypeEvalContext.userInitiated(baseFunction.getProject(), baseFunction.getContainingFile());
    final List<PyParameter> baseParams = PyUtil.getParameters(baseFunction, context);
    for (PyParameter parameter : baseParams) {
      pyFunctionBuilder.parameter(parameter.getText());
    }

    PyClass baseClass = baseFunction.getContainingClass();
    assert baseClass != null;
    StringBuilder statementBody = new StringBuilder();

    // Build the argument list for the delegating call. After a *args / bare *
    // parameter, remaining named params must be passed as keyword arguments.
    boolean hadStar = false;
    List<String> parameters = new ArrayList<>();
    for (PyParameter parameter : baseParams) {
      final PyNamedParameter pyNamedParameter = parameter.getAsNamed();
      if (pyNamedParameter != null) {
        String repr = pyNamedParameter.getRepr(false);
        parameters.add(hadStar && !pyNamedParameter.isKeywordContainer() ? pyNamedParameter.getName() + "=" + repr : repr);
        if (pyNamedParameter.isPositionalContainer()) {
          hadStar = true;
        }
      }
      else if (parameter instanceof PySingleStarParameter) {
        hadStar = true;
      }
      else {
        parameters.add(parameter.getText());
      }
    }

    if (PyNames.FAKE_OLD_BASE.equals(baseClass.getName()) || raisesNotImplementedError(baseFunction) || implement) {
      statementBody.append(PyNames.PASS);
    }
    else {
      // Prefix "return " unless the base is __init__ or is known to return None
      // — except __new__, whose result must always be returned.
      if (!PyNames.INIT.equals(baseFunction.getName()) && context.getReturnType(baseFunction) != PyNoneType.INSTANCE || overridingNew) {
        statementBody.append("return ");
      }
      if (baseClass.isNewStyleClass(context)) {
        statementBody.append(PyNames.SUPER);
        statementBody.append("(");
        final LanguageLevel langLevel = ((PyFile)pyClass.getContainingFile()).getLanguageLevel();
        if (!langLevel.isPy3K()) {
          // Python 2 super() needs explicit arguments: the (possibly nested,
          // dotted) class name and the first parameter (usually self).
          final String baseFirstName = !baseParams.isEmpty() ? baseParams.get(0).getName() : null;
          final String firstName = baseFirstName != null ? baseFirstName : PyNames.CANONICAL_SELF;
          PsiElement outerClass = PsiTreeUtil.getParentOfType(pyClass, PyClass.class, true, PyFunction.class);
          String className = pyClass.getName();
          final List<String> nameResult = Lists.newArrayList(className);
          while (outerClass != null) {
            nameResult.add(0, ((PyClass)outerClass).getName());
            outerClass = PsiTreeUtil.getParentOfType(outerClass, PyClass.class, true, PyFunction.class);
          }
          StringUtil.join(nameResult, ".", statementBody);
          statementBody.append(", ").append(firstName);
        }
        statementBody.append(").").append(baseFunction.getName()).append("(");
        // type.__new__ is explicitly decorated as @staticmethod in our stubs, but not in real Python code
        if (parameters.size() > 0 && !(baseMethodIsStatic || overridingNew)) {
          parameters.remove(0);
        }
      }
      else {
        // Old-style class: delegate via an explicit Base.method(self, ...) call.
        statementBody.append(getReferenceText(pyClass, baseClass)).append(".").append(baseFunction.getName()).append("(");
      }

      StringUtil.join(parameters, ", ", statementBody);
      statementBody.append(")");
    }

    pyFunctionBuilder.statement(statementBody.toString());
    return pyFunctionBuilder;
  }

  /**
   * Returns true if {@code function}'s body raises NotImplementedError and
   * contains no return statement — i.e. it is an abstract-style stub.
   */
  public static boolean raisesNotImplementedError(@NotNull PyFunction function) {
    PyStatementList statementList = function.getStatementList();
    IfVisitor visitor = new IfVisitor();
    statementList.accept(visitor);
    return !visitor.hasReturnInside && visitor.raiseNotImplemented;
  }

  // TODO find a better place for this logic
  // Returns the text of the super-class expression of fromClass that resolves to
  // toClass (preserving aliases/qualified names), or toClass's plain name.
  private static String getReferenceText(PyClass fromClass, PyClass toClass) {
    final PyExpression[] superClassExpressions = fromClass.getSuperClassExpressions();
    for (PyExpression expression : superClassExpressions) {
      if (expression instanceof PyReferenceExpression) {
        PsiElement target = ((PyReferenceExpression)expression).getReference().resolve();
        if (target == toClass) {
          return expression.getText();
        }
      }
    }
    return toClass.getName();
  }

  /**
   * Returns all super functions available through MRO.
   * Only the first occurrence of each name (in MRO order) is kept.
   */
  @NotNull
  public static List<PyFunction> getAllSuperFunctions(@NotNull PyClass pyClass, @NotNull TypeEvalContext context) {
    final Map<String, PyFunction> functions = Maps.newLinkedHashMap();
    for (final PyClassLikeType type : pyClass.getAncestorTypes(context)) {
      if (type != null) {
        for (PyFunction function : PyTypeUtil.getMembersOfType(type, PyFunction.class, false, context)) {
          final String name = function.getName();
          if (name != null && !functions.containsKey(name)) {
            functions.put(name, function);
          }
        }
      }
    }
    return Lists.newArrayList(functions.values());
  }

  // Recursive visitor that records whether a function body contains any return
  // statement and whether it raises NotImplementedError (as a call or bare name).
  private static class IfVisitor extends PyRecursiveElementVisitor {
    private boolean hasReturnInside;
    private boolean raiseNotImplemented;

    @Override
    public void visitPyReturnStatement(PyReturnStatement node) {
      hasReturnInside = true;
    }

    @Override
    public void visitPyRaiseStatement(PyRaiseStatement node) {
      final PyExpression[] expressions = node.getExpressions();
      if (expressions.length > 0) {
        final PyExpression firstExpression = expressions[0];
        if (firstExpression instanceof PyCallExpression) {
          // raise NotImplementedError(...)
          final PyExpression callee = ((PyCallExpression)firstExpression).getCallee();
          if (callee != null && callee.getText().equals(PyNames.NOT_IMPLEMENTED_ERROR)) {
            raiseNotImplemented = true;
          }
        }
        else if (firstExpression.getText().equals(PyNames.NOT_IMPLEMENTED_ERROR)) {
          // raise NotImplementedError (no call)
          raiseNotImplemented = true;
        }
      }
    }
  }
}
/* * Copyright (c) 2005-2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.databridge.core.internal; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.carbon.context.PrivilegedCarbonContext; import org.wso2.carbon.databridge.commons.Attribute; import org.wso2.carbon.databridge.commons.StreamDefinition; import org.wso2.carbon.databridge.commons.exception.DifferentStreamDefinitionAlreadyDefinedException; import org.wso2.carbon.databridge.commons.exception.MalformedStreamDefinitionException; import org.wso2.carbon.databridge.commons.utils.DataBridgeCommonsUtils; import org.wso2.carbon.databridge.commons.utils.EventDefinitionConverterUtils; import org.wso2.carbon.databridge.core.*; import org.wso2.carbon.databridge.core.Utils.AgentSession; import org.wso2.carbon.databridge.core.Utils.EventComposite; import org.wso2.carbon.databridge.core.conf.DataBridgeConfiguration; import org.wso2.carbon.databridge.core.definitionstore.AbstractStreamDefinitionStore; import org.wso2.carbon.databridge.core.definitionstore.StreamAddRemoveListener; import org.wso2.carbon.databridge.core.exception.StreamDefinitionStoreException; import org.wso2.carbon.databridge.core.internal.authentication.AuthenticationHandler; import org.wso2.carbon.databridge.core.internal.queue.EventQueue; import java.util.*; import 
java.util.concurrent.ConcurrentHashMap;

/**
 * Dispatches events and their stream definitions to the registered subscribers.
 * Maintains a per-tenant cache of {@link StreamTypeHolder}s keyed by tenant id,
 * kept in sync with the backing {@link AbstractStreamDefinitionStore}.
 */
public class EventDispatcher {
    private List<AgentCallback> subscribers = new ArrayList<AgentCallback>();
    private List<RawDataAgentCallback> rawDataSubscribers = new ArrayList<RawDataAgentCallback>();
    private AbstractStreamDefinitionStore streamDefinitionStore;
    // tenantId -> cached stream-type holder for that tenant.
    private Map<Integer, StreamTypeHolder> domainNameStreamTypeHolderCache = new ConcurrentHashMap<Integer, StreamTypeHolder>();
    private EventQueue eventQueue;
    // NOTE(review): stored but not used in this class's visible code — presumably
    // consumed elsewhere; verify before removing.
    private AuthenticationHandler authenticationHandler;
    private static final Log log = LogFactory.getLog(EventDispatcher.class);

    /**
     * Creates the dispatcher, wires the event queue to both subscriber lists, and
     * registers a store listener so that removing a stream from the definition
     * store also evicts it from the per-tenant cache.
     */
    public EventDispatcher(AbstractStreamDefinitionStore streamDefinitionStore,
                           DataBridgeConfiguration dataBridgeConfiguration,
                           AuthenticationHandler authenticationHandler) {
        this.eventQueue = new EventQueue(subscribers, rawDataSubscribers, dataBridgeConfiguration);
        this.streamDefinitionStore = streamDefinitionStore;
        this.authenticationHandler = authenticationHandler;

        streamDefinitionStore.subscribe(new StreamAddRemoveListener() {
            @Override
            public void streamAdded(int tenantId, String streamId) {
                // Additions are picked up lazily via the cache-update paths.
            }

            @Override
            public void streamRemoved(int tenantId, String streamId) {
                removeStreamDefinitionFromStreamTypeHolder(tenantId, streamId);
            }
        });
    }

    /**
     * Registers a subscriber that receives decoded events and definitions.
     *
     * @param agentCallback the subscriber to add
     */
    public void addCallback(AgentCallback agentCallback) {
        subscribers.add(agentCallback);
    }

    /**
     * Add thrift subscribers.
     *
     * @param agentCallback the raw-data (undecoded) subscriber to add
     */
    public void addCallback(RawDataAgentCallback agentCallback) {
        rawDataSubscribers.add(agentCallback);
    }

    /**
     * Defines a stream for the session's tenant from its JSON definition. If a
     * definition with the same stream id already exists it must be equal,
     * otherwise a conflict is reported; new definitions are validated against
     * existing ones, cached, persisted, and announced to all subscribers.
     *
     * @return the stream id of the (existing or newly saved) definition
     * @throws MalformedStreamDefinitionException if the JSON cannot be parsed
     * @throws DifferentStreamDefinitionAlreadyDefinedException on a conflicting definition
     * @throws StreamDefinitionStoreException on store access failure
     */
    public synchronized String defineStream(String streamDefinition, AgentSession agentSession)
            throws MalformedStreamDefinitionException, DifferentStreamDefinitionAlreadyDefinedException,
            StreamDefinitionStoreException {
        int tenantId = agentSession.getCredentials().getTenantId();
        StreamDefinition newStreamDefinition = EventDefinitionConverterUtils.convertFromJson(streamDefinition);
        StreamTypeHolder streamTypeHolder = getStreamDefinitionHolder(tenantId);
        StreamAttributeComposite attributeComposite = streamTypeHolder.getAttributeComposite(newStreamDefinition.getStreamId());
        if (attributeComposite != null) {
            StreamDefinition existingStreamDefinition = attributeComposite.getStreamDefinition();
            if (!existingStreamDefinition.equals(newStreamDefinition)) {
                throw new DifferentStreamDefinitionAlreadyDefinedException("Similar event stream for " + newStreamDefinition +
                        " with the same name and version already exist: " +
                        streamDefinitionStore.getStreamDefinition(newStreamDefinition.getName(),
                                newStreamDefinition.getVersion(), tenantId));
            }
            // Reuse the cached instance so subscribers see the canonical object.
            newStreamDefinition = existingStreamDefinition;
        } else {
            // Check attribute-type compatibility against every known stream first.
            for (StreamAttributeComposite aAttributeComposite : streamTypeHolder.getAttributeCompositeMap().values()) {
                validateStreamDefinition(newStreamDefinition, aAttributeComposite.getStreamDefinition());
            }
            updateDomainNameStreamTypeHolderCache(newStreamDefinition, tenantId);
            streamDefinitionStore.saveStreamDefinition(newStreamDefinition, tenantId);
        }

        for (AgentCallback agentCallback : subscribers) {
            agentCallback.definedStream(newStreamDefinition, tenantId);
        }
        for (RawDataAgentCallback agentCallback : rawDataSubscribers) {
            agentCallback.definedStream(newStreamDefinition, tenantId);
        }
        return newStreamDefinition.getStreamId();
    }

    /**
     * Same as {@link #defineStream(String, AgentSession)} but additionally attaches
     * an index definition to the (existing or new) stream definition before
     * notifying subscribers.
     *
     * @param indexDefinition index definition text applied via createIndexDefinition
     */
    public synchronized String defineStream(String streamDefinition, AgentSession agentSession, String indexDefinition)
            throws MalformedStreamDefinitionException, DifferentStreamDefinitionAlreadyDefinedException,
            StreamDefinitionStoreException {
        int tenantId = agentSession.getCredentials().getTenantId();
        StreamDefinition newStreamDefinition = EventDefinitionConverterUtils.convertFromJson(streamDefinition);
        StreamTypeHolder streamTypeHolder = getStreamDefinitionHolder(tenantId);
        StreamAttributeComposite attributeComposite = streamTypeHolder.getAttributeComposite(newStreamDefinition.getStreamId());
        if (attributeComposite != null) {
            StreamDefinition existingStreamDefinition = attributeComposite.getStreamDefinition();
            if (!existingStreamDefinition.equals(newStreamDefinition)) {
                throw new DifferentStreamDefinitionAlreadyDefinedException("Similar event stream for " + newStreamDefinition +
                        " with the same name and version already exist: " +
                        streamDefinitionStore.getStreamDefinition(newStreamDefinition.getName(),
                                newStreamDefinition.getVersion(), tenantId));
            }
            newStreamDefinition = existingStreamDefinition;
        } else {
            for (StreamAttributeComposite aAttributeComposite : streamTypeHolder.getAttributeCompositeMap().values()) {
                validateStreamDefinition(newStreamDefinition, aAttributeComposite.getStreamDefinition());
            }
            updateDomainNameStreamTypeHolderCache(newStreamDefinition, tenantId);
            streamDefinitionStore.saveStreamDefinition(newStreamDefinition, tenantId);
        }
        // NOTE(review): when the definition already existed this mutates the cached
        // instance's index definition — confirm that is intended.
        newStreamDefinition.createIndexDefinition(indexDefinition);
        for (AgentCallback agentCallback : subscribers) {
            agentCallback.definedStream(newStreamDefinition, tenantId);
        }
        for (RawDataAgentCallback agentCallback : rawDataSubscribers) {
            agentCallback.definedStream(newStreamDefinition, tenantId);
        }
        return newStreamDefinition.getStreamId();
    }

    // For two streams sharing a name, each attribute section (meta/correlation/
    // payload) must agree on the type of any same-named attribute.
    private void validateStreamDefinition(StreamDefinition newStreamDefinition, StreamDefinition existingStreamDefinition)
            throws DifferentStreamDefinitionAlreadyDefinedException {
        if (newStreamDefinition.getName().equals(existingStreamDefinition.getName())) {
            validateAttributes(newStreamDefinition.getMetaData(), existingStreamDefinition.getMetaData(), "meta",
                    newStreamDefinition, existingStreamDefinition);
            validateAttributes(newStreamDefinition.getCorrelationData(), existingStreamDefinition.getCorrelationData(), "correlation",
                    newStreamDefinition, existingStreamDefinition);
            validateAttributes(newStreamDefinition.getPayloadData(), existingStreamDefinition.getPayloadData(), "payload",
                    newStreamDefinition, existingStreamDefinition);
        }
    }

    // Rejects a definition whose same-named attribute differs in type from the
    // existing definition's attribute of the same section.
    private void validateAttributes(List<Attribute> newAttributes, List<Attribute> existingAttributes,
                                    String type, StreamDefinition newStreamDefinition,
                                    StreamDefinition existingStreamDefinition)
            throws DifferentStreamDefinitionAlreadyDefinedException {
        if (newAttributes != null && existingAttributes != null) {
            for (Attribute attribute : newAttributes) {
                for (Attribute existingAttribute : existingAttributes) {
                    if (attribute.getName().equals(existingAttribute.getName())) {
                        if (attribute.getType() != existingAttribute.getType()) {
                            throw new DifferentStreamDefinitionAlreadyDefinedException("Attribute type mismatch " + type + " " +
                                    attribute.getName() + " type:" + attribute.getType() + " was already defined with type:" +
                                    existingAttribute.getType() + " in " + existingStreamDefinition + ", hence " +
                                    newStreamDefinition + " cannot be defined");
                        }
                    }
                }
            }
        }
    }

    /**
     * Enqueues an incoming event bundle for asynchronous dispatch, bundled with the
     * session's tenant stream-type holder and the converter to decode it.
     */
    public void publish(Object eventBundle, AgentSession agentSession, EventConverter eventConverter) {
        eventQueue.publish(new EventComposite(eventBundle,
                getStreamDefinitionHolder(agentSession.getCredentials().getTenantId()), agentSession, eventConverter));
    }

    // Returns the cached holder for the tenant, initializing it from the store on
    // first access; logs the full type map when debug logging is on.
    private StreamTypeHolder getStreamDefinitionHolder(int tenantId) {
        // this will occur only outside of carbon (ex: Siddhi)
        StreamTypeHolder streamTypeHolder = domainNameStreamTypeHolderCache.get(tenantId);
        if (streamTypeHolder != null) {
            if (log.isDebugEnabled()) {
                String logMsg = "Event stream holder for tenant : " + tenantId + " : \n ";
                logMsg += "Meta, Correlation & Payload Data Type Map : ";
                for (Map.Entry entry : streamTypeHolder.getAttributeCompositeMap().entrySet()) {
                    logMsg += "StreamID=" + entry.getKey() + " : ";
                    logMsg += "Meta= " + Arrays.deepToString(((StreamAttributeComposite) entry.getValue()).getAttributeTypes()[0]) + " : ";
                    logMsg += "Correlation= " + Arrays.deepToString(((StreamAttributeComposite) entry.getValue()).getAttributeTypes()[1]) + " : ";
                    logMsg += "Payload= " + Arrays.deepToString(((StreamAttributeComposite) entry.getValue()).getAttributeTypes()[2]) + "\n";
                }
                log.debug(logMsg);
            }
            return streamTypeHolder;
        } else {
            return initDomainNameStreamTypeHolderCache(tenantId);
        }
    }

    /**
     * Refreshes the session tenant's cached holder from the definition store
     * (no-op when the tenant has no cached holder yet).
     */
    public synchronized void updateStreamDefinitionHolder(AgentSession agentSession) {
        int tenantId = agentSession.getCredentials().getTenantId();
        StreamTypeHolder streamTypeHolder = domainNameStreamTypeHolderCache.get(tenantId);
        if (streamTypeHolder != null) {
            if (log.isDebugEnabled()) {
                String logMsg = "Event stream holder for tenant : " + tenantId + " : \n ";
                logMsg += "Meta, Correlation & Payload Data Type Map : ";
                for (Map.Entry entry : streamTypeHolder.getAttributeCompositeMap().entrySet()) {
                    logMsg += "StreamID=" + entry.getKey() + " : ";
                    logMsg += "Meta= " + Arrays.deepToString(((StreamAttributeComposite) entry.getValue()).getAttributeTypes()[0]) + " : ";
                    logMsg += "Correlation= " + Arrays.deepToString(((StreamAttributeComposite) entry.getValue()).getAttributeTypes()[1]) + " : ";
                    logMsg += "Payload= " + Arrays.deepToString(((StreamAttributeComposite) entry.getValue()).getAttributeTypes()[2]) + "\n";
                }
                log.debug(logMsg);
            }
            updateDomainNameStreamTypeHolderCache(tenantId);
        }
    }

    // Adds a single new definition to the tenant's holder (creating the holder on
    // demand via getStreamDefinitionHolder).
    private synchronized void updateDomainNameStreamTypeHolderCache(
            StreamDefinition streamDefinition, int tenantId) {
        StreamTypeHolder streamTypeHolder = getStreamDefinitionHolder(tenantId);
        streamTypeHolder.putStreamDefinition(streamDefinition);
    }

    /**
     * Pulls any store definitions missing from the tenant's cache into it and
     * announces each newly added definition to all subscribers.
     */
    public synchronized void reloadDomainNameStreamTypeHolderCache(int tenantId){
        StreamTypeHolder streamTypeHolder = getStreamDefinitionHolder(tenantId);
        Collection<StreamDefinition> allStreamDefinitions = streamDefinitionStore.getAllStreamDefinitions(tenantId);
        for (StreamDefinition streamDefinition: allStreamDefinitions){
            if (!streamTypeHolder.getAttributeCompositeMap().containsKey(streamDefinition.getStreamId())){
                streamTypeHolder.putStreamDefinition(streamDefinition);
                for (AgentCallback agentCallback : subscribers) {
                    agentCallback.definedStream(streamDefinition, tenantId);
                }
                for (RawDataAgentCallback agentCallback : rawDataSubscribers) {
                    agentCallback.definedStream(streamDefinition, tenantId);
                }
            }
        }
    }

    // Builds the tenant's holder from the store on first access, announcing every
    // loaded definition to subscribers, then caches the holder.
    private synchronized StreamTypeHolder initDomainNameStreamTypeHolderCache(int tenantId) {
        StreamTypeHolder streamTypeHolder = domainNameStreamTypeHolderCache.get(tenantId);
        if (null == streamTypeHolder) {
            streamTypeHolder = new StreamTypeHolder(tenantId);
            streamTypeHolder.setEventDispatcherCallback(this);
            Collection<StreamDefinition> allStreamDefinitions = streamDefinitionStore.getAllStreamDefinitions(tenantId);
            if (null != allStreamDefinitions) {
                for (StreamDefinition aStreamDefinition : allStreamDefinitions) {
                    streamTypeHolder.putStreamDefinition(aStreamDefinition);
                    for (AgentCallback agentCallback : subscribers) {
                        agentCallback.definedStream(aStreamDefinition, tenantId);
                    }
                    for (RawDataAgentCallback agentCallback : rawDataSubscribers) {
                        agentCallback.definedStream(aStreamDefinition, tenantId);
                    }
                }
            }
            domainNameStreamTypeHolderCache.put(tenantId, streamTypeHolder);
        }
        return streamTypeHolder;
    }

    // Two-way resync of an existing tenant holder with the store: adds definitions
    // the cache is missing (announcing them) and evicts cached stream ids the
    // store no longer has.
    private synchronized StreamTypeHolder updateDomainNameStreamTypeHolderCache(int tenantId) {
        StreamTypeHolder streamTypeHolder = domainNameStreamTypeHolderCache.get(tenantId);
        if (null != streamTypeHolder) {
            Collection<StreamDefinition> allStreamDefinitions = streamDefinitionStore.getAllStreamDefinitions(tenantId);
            if (null != allStreamDefinitions) {
                for (StreamDefinition aStreamDefinition : allStreamDefinitions) {
                    if (streamTypeHolder.getAttributeComposite(aStreamDefinition.getStreamId()) == null) {
                        streamTypeHolder.putStreamDefinition(aStreamDefinition);
                        for (AgentCallback agentCallback : subscribers) {
                            agentCallback.definedStream(aStreamDefinition, tenantId);
                        }
                        for (RawDataAgentCallback agentCallback : rawDataSubscribers) {
                            agentCallback.definedStream(aStreamDefinition, tenantId);
                        }
                    }
                }
                List<String> streamIdList = new ArrayList<String>();
                for (StreamDefinition streamDefinition : allStreamDefinitions) {
                    streamIdList.add(streamDefinition.getStreamId());
                }
                // Iterator.remove is required here: we are deleting from the map's
                // key set while iterating it.
                Iterator<String> streamIdIterator = streamTypeHolder.getAttributeCompositeMap().keySet().iterator();
                while (streamIdIterator.hasNext()) {
                    if (!streamIdList.contains(streamIdIterator.next())) {
                        streamIdIterator.remove();
                    }
                }
            }
            domainNameStreamTypeHolderCache.put(tenantId, streamTypeHolder);
        }
        return streamTypeHolder;
    }

    public List<AgentCallback> getSubscribers() {
        return subscribers;
    }

    public List<RawDataAgentCallback> getRawDataSubscribers() {
        return rawDataSubscribers;
    }

    /**
     * Resolves the stream id for the given name/version within the session's
     * tenant, refreshing the cache first; returns null when not found.
     */
    public String findStreamId(String streamName, String streamVersion, AgentSession agentSession)
            throws StreamDefinitionStoreException {
        int tenantId = agentSession.getCredentials().getTenantId();
        //Updating the cache when calling the findStreamId to keep the sync between the stream manager and register with data publisher
        //for CEP - need to review and fix
        updateDomainNameStreamTypeHolderCache(tenantId);
        StreamTypeHolder streamTypeHolder = getStreamDefinitionHolder(tenantId);
        StreamAttributeComposite attributeComposite =
                streamTypeHolder.getAttributeComposite(DataBridgeCommonsUtils.generateStreamId(streamName, streamVersion));
        if (attributeComposite != null) {
            return attributeComposite.getStreamDefinition().getStreamId();
        }
        return null;
    }

    /**
     * Removes the stream from the tenant cache (notifying subscribers if it was
     * cached) and deletes it from the definition store.
     *
     * @return the store's deletion result
     */
    public boolean deleteStream(String streamName, String streamVersion, AgentSession agentSession) {
        int tenantId = agentSession.getCredentials().getTenantId();
        String streamId = DataBridgeCommonsUtils.generateStreamId(streamName, streamVersion);
        StreamDefinition streamDefinition = removeStreamDefinitionFromStreamTypeHolder(tenantId, streamId);
        if (streamDefinition != null) {
            for (AgentCallback agentCallback : subscribers) {
                agentCallback.removeStream(streamDefinition, tenantId);
            }
            for (RawDataAgentCallback agentCallback : rawDataSubscribers) {
                agentCallback.removeStream(streamDefinition, tenantId);
            }
        }
        return streamDefinitionStore.deleteStreamDefinition(streamName, streamVersion, tenantId);
    }

    // Evicts a stream from the tenant's cached holder, returning the removed
    // definition (or null if the tenant/stream was not cached).
    private synchronized StreamDefinition removeStreamDefinitionFromStreamTypeHolder(int tenantId, String streamId) {
        StreamTypeHolder streamTypeHolder = domainNameStreamTypeHolderCache.get(tenantId);
        if (streamTypeHolder != null) {
            StreamAttributeComposite attributeComposite = streamTypeHolder.getAttributeCompositeMap().remove(streamId);
            if (attributeComposite != null) {
                return attributeComposite.getStreamDefinition();
            }
        }
        return null;
    }
}
/* * ==================================================================== * * The Apache Software License, Version 1.1 * * Copyright (c) 1999-2003 The Apache Software Foundation. All rights * reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the * distribution. * * 3. The end-user documentation included with the redistribution, if * any, must include the following acknowlegement: * "This product includes software developed by the * Apache Software Foundation (http://www.apache.org/)." * Alternately, this acknowlegement may appear in the software itself, * if and wherever such third-party acknowlegements normally appear. * * 4. The names "The Jakarta Project", "Jakarta Element Construction Set", * "Jakarta ECS" , and "Apache Software Foundation" must not be used * to endorse or promote products derived * from this software without prior written permission. For written * permission, please contact apache@apache.org. * * 5. Products derived from this software may not be called "Apache", * "Jakarta Element Construction Set" nor "Jakarta ECS" nor may "Apache" * appear in their names without prior written permission of the Apache Group. * * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. 
IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. * */ package org.apache.ecs.xhtml; import org.apache.ecs.*; /** This class creates an &lt;img&gt; tag. @version $Id: img.java,v 1.2 2003/04/27 09:39:23 rdonkin Exp $ @author <a href="mailto:snagy@servletapi.com">Stephan Nagy</a> @author <a href="mailto:jon@clearink.com">Jon S. Stevens</a> @author <a href="mailto:bojan@binarix.com">Bojan Smojver</a> */ public class img extends SinglePartElement implements Printable, MouseEvents, KeyEvents { /** Private initialization routine. */ { setElementType("img"); setCase(LOWERCASE); setAttributeQuote(true); setBeginEndModifier('/'); } /** Basic constructor. Use the set* methods to set the attibutes. 
    */
    public img()
    {
    }

    /**
        Creates an img tag.
        @param src the src="" attribute
    */
    public img(String src)
    {
        setSrc(src);
    }

    /**
        Creates an img tag.
        @param src the src="" attribute
        @param border the border="" attribute
    */
    public img(String src, int border)
    {
        setSrc(src);
        setBorder(border);
    }

    /**
        Creates an img tag.
        @param src the src="" attribute
        @param name the name="" attribute
    */
    public img(String src, String name)
    {
        setSrc(src);
        setName(name);
    }

    /**
        Creates an img tag.
        @param src the src="" attribute
        @param name the name="" attribute
        @param border the border="" attribute
    */
    public img(String src, String name, int border)
    {
        setSrc(src);
        setName(name);
        setBorder(border);
    }

    /**
        Sets the src="" attribute.
        @param src the src="" attribute
    */
    public img setSrc(String src)
    {
        addAttribute("src",src);
        return this;
    }

    /**
        Sets the border="" attribute.
        @param border the border="" attribute
    */
    public img setBorder(int border)
    {
        addAttribute("border",Integer.toString(border));
        return this;
    }

    /**
        Sets the name="" attribute.
        @param name the name="" attribute
    */
    public img setName(String name)
    {
        addAttribute("name",name);
        return this;
    }

    /**
        Sets the height="" attribute.
        @param height the height="" attribute
    */
    public img setHeight(String height)
    {
        addAttribute("height",height);
        return this;
    }

    /**
        Sets the height="" attribute.
        @param height the height="" attribute
    */
    public img setHeight(int height)
    {
        addAttribute("height",Integer.toString(height));
        return this;
    }

    /**
        Sets the width="" attribute.
        @param width the width="" attribute
    */
    public img setWidth(String width)
    {
        addAttribute("width",width);
        return this;
    }

    /**
        Sets the width="" attribute.
        @param width the width="" attribute
    */
    public img setWidth(int width)
    {
        addAttribute("width",Integer.toString(width));
        return this;
    }

    /**
        Sets the alt="" attribute.
        @param alt the alt="" attribute
    */
    public img setAlt(String alt)
    {
        addAttribute("alt",alt);
        return this;
    }

    /**
        Sets or clears the boolean ismap attribute.
        @param ismap true to emit ismap="ismap", false to remove the attribute
    */
    public img setIsMap(boolean ismap)
    {
        if(ismap == true)
            addAttribute("ismap","ismap");
        else
            removeAttribute("ismap");
        return this;
    }

    /**
        Sets the usemap="" attribute.
        @param usemap the usemap="" attribute
    */
    public img setUseMap(String usemap)
    {
        addAttribute("usemap",usemap);
        return this;
    }

    /**
        Sets the align="" attribute.
        @param align the align="" attribute
    */
    public img setAlign(String align)
    {
        addAttribute("align",align);
        return this;
    }

    /**
        Sets the hspace="" attribute.
        @param hspace the hspace="" attribute
    */
    public img setHspace(String hspace)
    {
        addAttribute("hspace",hspace);
        return this;
    }

    /**
        Sets the hspace="" attribute.
        @param hspace the hspace="" attribute
    */
    public img setHspace(int hspace)
    {
        addAttribute("hspace",Integer.toString(hspace));
        return this;
    }

    /**
        Sets the vspace="" attribute.
        @param vspace the vspace="" attribute
    */
    public img setVspace(String vspace)
    {
        addAttribute("vspace",vspace);
        return this;
    }

    /**
        Sets the vspace="" attribute.
        @param vspace the vspace="" attribute
    */
    public img setVspace(int vspace)
    {
        addAttribute("vspace",Integer.toString(vspace));
        return this;
    }

    /**
        Sets the lang="" and xml:lang="" attributes.
        @param lang the lang="" and xml:lang="" attributes
    */
    public Element setLang(String lang)
    {
        addAttribute("lang",lang);
        addAttribute("xml:lang",lang);
        return this;
    }

    /**
        Adds an Element to the element.
        @param hashcode name of element for hash table
        @param element Adds an Element to the element.
    */
    public img addElement(String hashcode,Element element)
    {
        addElementToRegistry(hashcode,element);
        return(this);
    }

    /**
        Adds an Element to the element.
        @param hashcode name of element for hash table
        @param element Adds an Element to the element.
    */
    public img addElement(String hashcode,String element)
    {
        addElementToRegistry(hashcode,element);
        return(this);
    }

    /**
        Adds an Element to the element.
        @param element Adds an Element to the element.
    */
    public img addElement(Element element)
    {
        addElementToRegistry(element);
        return(this);
    }

    /**
        Adds an Element to the element.
        @param element Adds an Element to the element.
    */
    public img addElement(String element)
    {
        addElementToRegistry(element);
        return(this);
    }

    /**
        Removes an Element from the element.
        @param hashcode the name of the element to be removed.
    */
    public img removeElement(String hashcode)
    {
        removeElementFromRegistry(hashcode);
        return(this);
    }

    /**
        The onclick event occurs when the pointing device button is
        clicked over an element. This attribute may be used with most elements.
        @param script the script
    */
    public void setOnClick(String script)
    {
        addAttribute ( "onclick", script );
    }

    /**
        The ondblclick event occurs when the pointing device button is
        double clicked over an element. This attribute may be used with most elements.
        @param script the script
    */
    public void setOnDblClick(String script)
    {
        addAttribute ( "ondblclick", script );
    }

    /**
        The onmousedown event occurs when the pointing device button is
        pressed over an element. This attribute may be used with most elements.
        @param script the script
    */
    public void setOnMouseDown(String script)
    {
        addAttribute ( "onmousedown", script );
    }

    /**
        The onmouseup event occurs when the pointing device button is
        released over an element. This attribute may be used with most elements.
        @param script the script
    */
    public void setOnMouseUp(String script)
    {
        addAttribute ( "onmouseup", script );
    }

    /**
        The onmouseover event occurs when the pointing device is moved
        onto an element. This attribute may be used with most elements.
        @param script the script
    */
    public void setOnMouseOver(String script)
    {
        addAttribute ( "onmouseover", script );
    }

    /**
        The onmousemove event occurs when the pointing device is moved
        while it is over an element. This attribute may be used with most elements.
@param The script */ public void setOnMouseMove(String script) { addAttribute ( "onmousemove", script ); } /** The onmouseout event occurs when the pointing device is moved away from an element. This attribute may be used with most elements. @param The script */ public void setOnMouseOut(String script) { addAttribute ( "onmouseout", script ); } /** The onkeypress event occurs when a key is pressed and released over an element. This attribute may be used with most elements. @param The script */ public void setOnKeyPress(String script) { addAttribute ( "onkeypress", script ); } /** The onkeydown event occurs when a key is pressed down over an element. This attribute may be used with most elements. @param The script */ public void setOnKeyDown(String script) { addAttribute ( "onkeydown", script ); } /** The onkeyup event occurs when a key is released over an element. This attribute may be used with most elements. @param The script */ public void setOnKeyUp(String script) { addAttribute ( "onkeyup", script ); } }
/*
 * Copyright 2012 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package io.netty.channel.socket.oio;

import io.netty.channel.ChannelException;
import io.netty.channel.ChannelMetadata;
import io.netty.channel.ChannelOutboundBuffer;
import io.netty.channel.oio.AbstractOioMessageChannel;
import io.netty.channel.socket.ServerSocketChannel;
import io.netty.util.internal.logging.InternalLogger;
import io.netty.util.internal.logging.InternalLoggerFactory;

import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketAddress;
import java.net.SocketTimeoutException;
import java.util.List;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;

/**
 * {@link ServerSocketChannel} which accepts new connections and creates an {@link OioSocketChannel}
 * for each of them.
 *
 * This implementation uses Old-Blocking-IO.
 */
public class OioServerSocketChannel extends AbstractOioMessageChannel
                                    implements ServerSocketChannel {

    private static final InternalLogger logger =
            InternalLoggerFactory.getInstance(OioServerSocketChannel.class);

    // Metadata for this channel type: first argument is hasDisconnect,
    // false here because a server socket cannot "disconnect".
    // NOTE(review): the second argument (16) looks like the default
    // maxMessagesPerRead — confirm against the ChannelMetadata constructor.
    private static final ChannelMetadata METADATA = new ChannelMetadata(false, 16);

    /**
     * Creates an unbound {@link ServerSocket}, wrapping any {@link IOException}
     * in a {@link ChannelException} so the no-arg constructor stays unchecked.
     */
    private static ServerSocket newServerSocket() {
        try {
            return new ServerSocket();
        } catch (IOException e) {
            throw new ChannelException("failed to create a server socket", e);
        }
    }

    // The underlying blocking server socket. Package-visible: accessed by
    // sibling classes in this package.
    final ServerSocket socket;
    // NOTE(review): not used within this class; presumably taken by the
    // accepted OioSocketChannel instances to coordinate shutdown — confirm.
    final Lock shutdownLock = new ReentrantLock();
    private final OioServerSocketChannelConfig config;

    /**
     * Create a new instance with an new {@link Socket}
     */
    public OioServerSocketChannel() {
        this(newServerSocket());
    }

    /**
     * Create a new instance from the given {@link ServerSocket}
     *
     * @param socket    the {@link ServerSocket} which is used by this instance
     */
    public OioServerSocketChannel(ServerSocket socket) {
        super(null);
        if (socket == null) {
            throw new NullPointerException("socket");
        }
        // Set the accept timeout so that doReadMessages() wakes up
        // periodically instead of blocking forever in accept(). If this
        // fails, close the socket before rethrowing so we do not leak it.
        boolean success = false;
        try {
            socket.setSoTimeout(SO_TIMEOUT);
            success = true;
        } catch (IOException e) {
            throw new ChannelException(
                    "Failed to set the server socket timeout.", e);
        } finally {
            if (!success) {
                try {
                    socket.close();
                } catch (IOException e) {
                    if (logger.isWarnEnabled()) {
                        logger.warn(
                                "Failed to close a partially initialized socket.", e);
                    }
                }
            }
        }
        this.socket = socket;
        config = new DefaultOioServerSocketChannelConfig(this, socket);
    }

    @Override
    public InetSocketAddress localAddress() {
        return (InetSocketAddress) super.localAddress();
    }

    @Override
    public ChannelMetadata metadata() {
        return METADATA;
    }

    @Override
    public OioServerSocketChannelConfig config() {
        return config;
    }

    // A listening socket has no single remote peer.
    @Override
    public InetSocketAddress remoteAddress() {
        return null;
    }

    @Override
    public boolean isOpen() {
        return !socket.isClosed();
    }

    // Active means the socket is both open and bound to a local address.
    @Override
    public boolean isActive() {
        return isOpen() && socket.isBound();
    }

    @Override
    protected SocketAddress localAddress0() {
        return socket.getLocalSocketAddress();
    }

    @Override
    protected void doBind(SocketAddress localAddress) throws Exception {
        socket.bind(localAddress, config.getBacklog());
    }

    @Override
    protected void doClose() throws Exception {
        socket.close();
    }

    /**
     * Accepts at most one pending connection per call.
     * Returns -1 when the socket is closed (end of stream for the OIO read
     * loop), 1 when a connection was accepted and added to {@code buf}, and
     * 0 when the accept timed out or the accepted socket could not be
     * wrapped in a channel.
     */
    @Override
    protected int doReadMessages(List<Object> buf) throws Exception {
        if (socket.isClosed()) {
            return -1;
        }

        try {
            Socket s = socket.accept();
            try {
                buf.add(new OioSocketChannel(this, s));
                return 1;
            } catch (Throwable t) {
                // Could not create a channel for the accepted socket:
                // log, close the raw socket, and report nothing read.
                logger.warn("Failed to create a new channel from an accepted socket.", t);
                try {
                    s.close();
                } catch (Throwable t2) {
                    logger.warn("Failed to close a socket.", t2);
                }
            }
        } catch (SocketTimeoutException e) {
            // Expected: SO_TIMEOUT elapsed with no incoming connection.
        }
        return 0;
    }

    // A server socket channel only accepts; it never writes, connects or
    // disconnects, hence the unsupported-operation overrides below.
    @Override
    protected void doWrite(ChannelOutboundBuffer in) throws Exception {
        throw new UnsupportedOperationException();
    }

    @Override
    protected Object filterOutboundMessage(Object msg) throws Exception {
        throw new UnsupportedOperationException();
    }

    @Override
    protected void doConnect(
            SocketAddress remoteAddress, SocketAddress localAddress) throws Exception {
        throw new UnsupportedOperationException();
    }

    @Override
    protected SocketAddress remoteAddress0() {
        return null;
    }

    @Override
    protected void doDisconnect() throws Exception {
        throw new UnsupportedOperationException();
    }

    // Re-declared only to make the protected super implementation reachable
    // from other classes in this package.
    @Override
    protected void setReadPending(boolean readPending) {
        super.setReadPending(readPending);
    }
}
/* * Copyright (c) 2016 simplity.org * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/
package org.simplity.kernel.util;

import java.io.StringWriter;
import java.io.Writer;
import java.lang.reflect.Array;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.Map;

import org.simplity.json.JSONArray;
import org.simplity.json.JSONObject;
import org.simplity.json.JSONWriter;
import org.simplity.json.Jsonable;
import org.simplity.kernel.ApplicationError;
import org.simplity.kernel.FilterCondition;
import org.simplity.kernel.FormattedMessage;
import org.simplity.kernel.Messages;
import org.simplity.kernel.Tracer;
import org.simplity.kernel.comp.ComponentManager;
import org.simplity.kernel.data.DataSheet;
import org.simplity.kernel.data.FieldsInterface;
import org.simplity.kernel.data.HierarchicalSheet;
import org.simplity.kernel.data.MultiRowsSheet;
import org.simplity.kernel.dm.Field;
import org.simplity.kernel.dt.DataType;
import org.simplity.kernel.value.Value;
import org.simplity.kernel.value.ValueType;
import org.simplity.service.ServiceContext;
import org.simplity.service.ServiceProtocol;

/**
 * utilities that help Simplity deal with JSON
 *
 * @author simplity.org
 *
 */
public class JsonUtil {
	/**
	 * create a data sheet out of a well-formed json array of simple jsonObject.
	 *
	 * @param arr
	 *            that has the json array
	 * @param inputFields
	 *            Fields to be input. null if we are to take whatever is offered
	 * @param errors
	 *            to which any validation errors are added
	 * @param allFieldsAreOptional
	 *            true if we are to consider all fields as optional, even if the
	 *            field specifies it as mandatory
	 * @param parentFieldName
	 *            if this is a child sheet, specify the column name in this
	 *            sheet that should be populated with the parent key value
	 * @param parentValue
	 *            if this is a child sheet, and you have specified
	 *            parentFieldName, value to be populated in each row for that
	 *            column
	 * @return data sheet. Null if no data found or the json array is not
	 *         well-formed
	 */
	public static DataSheet getSheet(JSONArray arr, Field[] inputFields,
			List<FormattedMessage> errors, boolean allFieldsAreOptional,
			String parentFieldName, Value parentValue) {
		if (arr == null || arr.length() == 0) {
			return null;
		}
		Field[] fields = inputFields;
		int parentIdx = -1;
		if (fields == null) {
			/*
			 * we guess the fields based on the attributes of first element in
			 * the array
			 */
			JSONObject exampleObject = arr.optJSONObject(0);
			if (exampleObject == null) {
				Tracer.trace(
						"Json array has its first object as null, and hence we abandoned parsing it.");
				return null;
			}
			fields = getFields(exampleObject, null, null);
			if (parentFieldName != null) {
				/*
				 * prefix the guessed fields with a column for the parent key
				 */
				Field[] newFields = new Field[fields.length + 1];
				newFields[0] = Field.getDefaultField(parentFieldName,
						parentValue.getValueType());
				int j = 1;
				for (Field field : fields) {
					newFields[j] = field;
					j++;
				}
				/*
				 * bug fix: the widened array was built but never assigned, so
				 * the parent column was silently dropped and parentIdx = 0
				 * clobbered the first guessed column
				 */
				fields = newFields;
				parentIdx = 0;
			}
		} else if (parentFieldName != null) {
			int j = 0;
			for (Field field : fields) {
				if (field.getName().equals(parentFieldName)) {
					parentIdx = j;
					break;
				}
				j++;
			}
			if (parentIdx == -1) {
				Tracer.trace("Parent field name " + parentFieldName
						+ " not found in the fields list for child. Filed will not be populated from parent sheet.");
			}
		}
		DataSheet ds = new MultiRowsSheet(fields);
		int nbrRows = arr.length();
		/*
		 * let us now extract each row into data sheet
		 */
		for (int i = 0; i < nbrRows; i++) {
			JSONObject obj = arr.optJSONObject(i);
			if (obj == null) {
				Tracer.trace("Row " + (i + 1) + " is null. Not extracted");
				continue;
			}
			int j = 0;
			Value[] row = new Value[fields.length];
			for (Field field : fields) {
				Object val = obj.opt(field.getName());
				if (j == parentIdx) {
					/*
					 * parent-key column is force-filled from the parent row
					 */
					row[j] = parentValue;
				} else {
					row[j] = field.parseObject(val, errors,
							allFieldsAreOptional, null);
				}
				j++;
			}
			ds.addRow(row);
		}
		return ds;
	}

	/**
	 * supplied jsonArray has the parent rows. Extract child rows from these
	 * array elements
	 *
	 * @param arr
	 *            parent rows
	 * @param attName
	 *            attribute name that holds the child JSONArray
	 * @param fields
	 *            expected fields. Input data is validated as per these field
	 *            specifications. null to guess fields from the first child row
	 * @param errors
	 *            to which any validation errors are added
	 * @param allFieldsAreOptional
	 *            true if mandatory-field validation is to be skipped
	 *
	 * @return data sheet with all child rows accumulated across parents. Null
	 *         if no data found
	 */
	public static DataSheet getChildSheet(JSONArray arr, String attName,
			Field[] fields, List<FormattedMessage> errors,
			boolean allFieldsAreOptional) {
		/*
		 * arr corresponds to following json. We are to accumulate child rows
		 * across all main rows
		 *
		 * [...,"attName"=[{},{}....],..],[....,"attName"=[{},{}.... ],..]....
		 */
		Field[] inputFields = fields;
		DataSheet ds = null;
		if (inputFields != null) {
			ds = new MultiRowsSheet(inputFields);
		}
		/*
		 * we are not sure of getting a valid child row in first element. So,
		 * let us have a flexible strategy
		 */
		int nbrParentRows = arr.length();
		/*
		 * for each parent row
		 */
		for (int i = 0; i < nbrParentRows; i++) {
			JSONObject pr = arr.optJSONObject(i);
			if (pr == null) {
				continue;
			}
			JSONArray rows = pr.optJSONArray(attName);
			if (rows == null) {
				continue;
			}
			int n = rows.length();
			/*
			 * extract this child row into ds
			 */
			for (int idx = 0; idx < n; idx++) {
				JSONObject obj = rows.optJSONObject(idx);
				if (obj == null) {
					continue;
				}
				if (ds == null || inputFields == null) {
					inputFields = getFields(obj, null, null);
					ds = new MultiRowsSheet(inputFields);
				}
				int j = 0;
				/*
				 * bug fix: was new Value[fields.length], which throws NPE when
				 * the fields parameter is null (guessed-fields path). The loop
				 * below iterates inputFields, so size the row to match.
				 */
				Value[] row = new Value[inputFields.length];
				for (Field field : inputFields) {
					Object val = obj.opt(field.getName());
					row[j] = field.parseObject(val, errors,
							allFieldsAreOptional, attName);
					j++;
				}
				ds.addRow(row);
			}
		}
		return ds;
	}

	/**
	 * write the data sheet to json
	 *
	 * @param writer
	 *            json writer positioned where the array is to be written
	 * @param ds
	 *            sheet to write. A json null is written if it has no data
	 * @param childSheets
	 *            optional hierarchical child sheets to nest under each row
	 */
	public static void sheetToJson(JSONWriter writer, DataSheet ds,
			HierarchicalSheet[] childSheets) {
		int nbrRows = 0;
		int nbrCols = 0;
		if (ds != null) {
			nbrRows = ds.length();
			nbrCols = ds.width();
		}
		if (ds == null || nbrRows == 0 || nbrCols == 0) {
			writer.value(null);
			Tracer.trace("Sheet has no data. json is not added");
			return;
		}
		writer.array();
		String[] names = ds.getColumnNames();
		for (int i = 0; i < nbrRows; i++) {
			writer.object();
			/*
			 * note that getRow() returns values in the same order as in
			 * getColumnNames()
			 */
			Value[] row = ds.getRow(i);
			int j = 0;
			for (String colName : names) {
				Value value = row[j];
				/*
				 * no need to write null attributes
				 */
				if (value != null) {
					writer.key(colName).value(value.toObject());
				}
				j++;
			}
			if (childSheets != null) {
				for (HierarchicalSheet childSheet : childSheets) {
					if (childSheet != null) {
						childSheet.toJson(writer, row);
					}
				}
			}
			writer.endObject();
		}
		writer.endArray();
	}

	/**
	 * create a data sheet for attributes in this object
	 *
	 * @param obj
	 *            object whose primitive attributes become fields
	 * @param additionalAtt
	 *            optional extra attribute name to prefix as the first field
	 * @param additionalVal
	 *            value whose type determines the extra field's value type
	 * @return array of fields in this object. additional att/val if supplied
	 *         are added as the first one.
	 */
	public static Field[] getFields(JSONObject obj, String additionalAtt,
			Object additionalVal) {
		String[] names = JSONObject.getNames(obj);
		if (names == null) {
			/*
			 * robustness fix: JSONObject.getNames() returns null for an object
			 * with no keys; treat that as zero attributes rather than NPE
			 */
			names = new String[0];
		}
		int nbrCols = names.length;
		int fieldIdx = 0;
		Field[] fields = new Field[nbrCols];
		if (additionalAtt != null) {
			/*
			 * rare case, and hence not-optimized for creation of fields
			 */
			nbrCols++;
			fields = new Field[nbrCols];
			Value val = Value.parseObject(additionalVal);
			fields[fieldIdx] = Field.getDefaultField(additionalAtt,
					val.getValueType());
			fieldIdx = 1;
		}
		int nonAtts = 0;
		for (String colName : names) {
			Object val = obj.opt(colName);
			if (val instanceof JSONArray || val instanceof JSONObject) {
				/*
				 * this is not a att-value.
				 */
				nonAtts++;
			} else {
				ValueType vt = Value.parseObject(val).getValueType();
				fields[fieldIdx] = Field.getDefaultField(colName, vt);
				fieldIdx++;
			}
		}
		if (nonAtts == 0) {
			return fields;
		}
		/*
		 * this is rare case, and hence we have not optimized the algorithm for
		 * this case. non-primitive attributes would have their valueType set to
		 * null. Copy primitive-ones to a new array.
		 */
		nbrCols = nbrCols - nonAtts;
		Field[] newFields = new Field[nbrCols];
		for (int i = 0; i < newFields.length; i++) {
			newFields[i] = fields[i];
		}
		return newFields;
	}

	/**
	 * extract and validate the given fields from a json object into a
	 * fields-collection
	 *
	 * @param json
	 *            source object
	 * @param fields
	 *            fields to extract
	 * @param ctx
	 *            destination for extracted values
	 * @param errors
	 *            to which any validation errors are added
	 * @param allFieldsAreOptional
	 *            true if mandatory-field validation is to be skipped
	 * @return number of fields extracted
	 */
	public static int extractFields(JSONObject json, Field[] fields,
			FieldsInterface ctx, List<FormattedMessage> errors,
			boolean allFieldsAreOptional) {
		int result = 0;
		for (Field field : fields) {
			Object val = json.opt(field.getName());
			Value value = null;
			if (val != null) {
				value = field.getValueType().parseObject(val);
				if (value == null) {
					errors.add(new FormattedMessage(Messages.INVALID_VALUE,
							null, field.getName(), null, 0,
							'\'' + val.toString() + "' is not a valid "
									+ field.getValueType()));
					continue;
				}
			}
			/*
			 * this is validation, and not exactly parse.
			 */
			value = field.parse(value, errors, allFieldsAreOptional, null);
			if (value != null) {
				ctx.setValue(field.getName(), value);
				result++;
			}
		}
		return result;
	}

	/**
	 * extract filter criteria (value + comparator, plus sort column/order) for
	 * the given fields from a json object
	 *
	 * @param json
	 *            source object
	 * @param fields
	 *            fields for which filter criteria may be present
	 * @param ctx
	 *            destination for extracted values
	 * @param errors
	 *            to which any validation errors are added
	 * @return number of fields extracted
	 */
	public static int extractFilterFields(JSONObject json, Field[] fields,
			FieldsInterface ctx, List<FormattedMessage> errors) {
		int result = 0;
		for (Field field : fields) {
			result += parseFilter(json, ctx, errors, field.getName(),
					field.getValueType());
		}
		/*
		 * some additional fields for filter, like sort
		 */
		/*
		 * what about sort ?
		 */
		String fieldName = ServiceProtocol.SORT_COLUMN_NAME;
		String textValue = json.optString(fieldName, null);
		if (textValue != null) {
			Value value = ComponentManager.getDataType(DataType.ENTITY_LIST)
					.parseValue(textValue);
			if (value == null) {
				errors.add(new FormattedMessage(Messages.INVALID_ENTITY_LIST,
						null, fieldName, null, 0));
			} else {
				ctx.setValue(fieldName, value);
			}
		}
		fieldName = ServiceProtocol.SORT_ORDER;
		textValue = json.optString(fieldName, null);
		if (textValue != null) {
			textValue = textValue.toLowerCase();
			if (textValue.equals(ServiceProtocol.SORT_ORDER_ASC)
					|| textValue.equals(ServiceProtocol.SORT_ORDER_DESC)) {
				ctx.setValue(fieldName, Value.newTextValue(textValue));
			} else {
				errors.add(new FormattedMessage(Messages.INVALID_SORT_ORDER,
						null, fieldName, null, 0));
			}
		}
		return result;
	}

	/**
	 * parse input object as a filter field
	 *
	 * @param json
	 *            source object
	 * @param extratedFields
	 *            to which extracted fields are to be added
	 * @param validationErrors
	 *            optional; errors are added here when non-null
	 * @param fieldName
	 *            field for which a filter criterion may be present
	 * @param valueType
	 *            expected value type of the field
	 * @return number of fields extracted (0 or 1)
	 */
	private static int parseFilter(JSONObject json,
			FieldsInterface extratedFields,
			List<FormattedMessage> validationErrors, String fieldName,
			ValueType valueType) {
		Object obj = json.opt(fieldName);
		if (obj == null) {
			return 0;
		}
		/*
		 * what is the comparator
		 */
		String otherName = fieldName + ServiceProtocol.COMPARATOR_SUFFIX;
		String otherValue = json.optString(otherName, null);
		FilterCondition f = FilterCondition.parse(otherValue);
		/*
		 * filter field need not conform to data-type but it should be of the
		 * same value type, except that IN_LIST is always text
		 */
		Value value = FilterCondition.In == f ? ValueType.TEXT.parseObject(obj)
				: valueType.parseObject(obj);
		if (value == null) {
			if (validationErrors != null) {
				validationErrors.add(new FormattedMessage(
						Messages.INVALID_VALUE, null, fieldName, null, 0));
			}
		} else {
			extratedFields.setValue(fieldName, value);
		}
		if (f == null) {
			/*
			 * no comparator supplied: default to equality
			 */
			extratedFields.setValue(otherName,
					Value.newTextValue(ServiceProtocol.EQUAL));
			return 1;
		}
		extratedFields.setValue(otherName, Value.newTextValue(otherValue));
		if (f != FilterCondition.Between) {
			return 1;
		}
		/*
		 * BETWEEN needs a companion "to" value
		 */
		otherName = fieldName + ServiceProtocol.TO_FIELD_SUFFIX;
		Object val = json.opt(otherName);
		value = null;
		if (val != null) {
			value = valueType.parseObject(val);
		}
		if (value == null) {
			if (validationErrors != null) {
				validationErrors.add(new FormattedMessage(
						Messages.INVALID_VALUE, null, otherName, null, 0));
			}
		} else {
			extratedFields.setValue(otherName, value);
		}
		return 1;
	}

	/**
	 * extract a simple json object (with fields and tables) into service
	 * context
	 *
	 * @param json
	 *            source object
	 * @param ctx
	 *            service context to receive values/sheets/objects
	 */
	public static void extractAll(JSONObject json, ServiceContext ctx) {
		for (String key : json.keySet()) {
			JSONArray arr = json.optJSONArray(key);
			if (arr != null) {
				DataSheet sheet = JsonUtil.getSheet(arr, null, null, true, null,
						null);
				if (sheet == null) {
					Tracer.trace("Table " + key + " could not be extracted");
				} else {
					ctx.putDataSheet(key, sheet);
					Tracer.trace("Table " + key + " extracted with "
							+ sheet.length() + " rows");
				}
				continue;
			}
			JSONObject obj = json.optJSONObject(key);
			if (obj != null) {
				/*
				 * we do not have a standard for converting data structure. As
				 * of now, we just copy this json
				 */
				ctx.setObject(key, obj);
				Tracer.trace(key + " retained as a JSON into ctx");
				continue;
			}
			Object val = json.opt(key);
			Value value = Value.parseObject(val);
			if (value != null) {
				ctx.setValue(key, value);
				Tracer.trace(key + " = " + value + " extracted");
			} else {
				Tracer.trace(key + " = " + val + " is not extracted");
			}
		}
	}

	/**
	 * extract a sheet (or a child sheet nested under a parent sheet) from the
	 * json into the service context, guessing fields from the data itself
	 *
	 * @param json
	 *            source object
	 * @param ctx
	 *            service context to receive the sheet
	 * @param sheetName
	 *            name of the sheet/attribute to extract
	 * @param parentSheetName
	 *            optional; if non-null, sheetName is looked up inside each row
	 *            of this parent array
	 */
	public static void extractWithNoValidation(JSONObject json,
			ServiceContext ctx, String sheetName, String parentSheetName) {
		DataSheet ds = null;
		String arrName = null;
		JSONArray arr = null;
		if (parentSheetName != null) {
			arr = json.optJSONArray(parentSheetName);
			if (arr == null) {
				arrName = parentSheetName;
			} else {
				ds = getChildSheet(arr, sheetName, null, null, true);
			}
		} else {
			arr = json.optJSONArray(sheetName);
			if (arr == null) {
				arrName = sheetName;
			} else {
				ds = JsonUtil.getSheet(arr, null, null, true, null, null);
			}
		}
		if (arr == null) {
			Tracer.trace("No data found for sheet " + arrName);
		} else if (ds == null) {
			Tracer.trace("Sheet " + arrName
					+ " has only null data. Data not extracted");
		} else {
			ctx.putDataSheet(sheetName, ds);
		}
	}

	/**
	 * write the named fields/objects from the context as json attributes
	 *
	 * @param writer
	 *            json writer positioned inside an object
	 * @param fieldNames
	 *            names to look up in the context
	 * @param ctx
	 *            service context to read from
	 */
	public static void addAttributes(JSONWriter writer, String[] fieldNames,
			ServiceContext ctx) {
		for (String fieldName : fieldNames) {
			Value value = ctx.getValue(fieldName);
			if (value != null) {
				if (value.isUnknown() == false) {
					writer.key(fieldName).value(value.toObject());
				}
				continue;
			}
			Object obj = ctx.getObject(fieldName);
			if (obj != null) {
				writer.key(fieldName);
				addObject(writer, obj);
			}
		}
	}

	/**
	 * write an arbitrary object to json
	 *
	 * @param writer
	 *            json writer positioned where the value is to be written
	 * @param obj
	 *            object to serialize; null is written as json null
	 */
	public static void addObject(JSONWriter writer, Object obj) {
		if (obj == null) {
			writer.value(null);
			return;
		}
		if (obj instanceof Jsonable) {
			((Jsonable) obj).writeJsonValue(writer);
			return;
		}
		if (obj instanceof String || obj instanceof Number
				|| obj instanceof Boolean || obj instanceof Date
				|| obj instanceof Enum) {
			writer.value(obj);
			return;
		}
		if (obj.getClass().isArray()) {
			writer.array();
			int n = Array.getLength(obj);
			for (int i = 0; i < n; i++) {
				addObject(writer, Array.get(obj, i));
			}
			writer.endArray();
			return;
		}
		if (obj instanceof Map) {
			writer.object();
			@SuppressWarnings("unchecked")
			Map<String, Object> childMap = (Map<String, Object>) obj;
			for (Map.Entry<String, Object> childEntry : childMap.entrySet()) {
				writer.key(childEntry.getKey());
				addObject(writer, childEntry.getValue());
			}
			writer.endObject();
			return;
		}
		if (obj instanceof Collection) {
			writer.array();
			@SuppressWarnings("unchecked")
			Collection<Object> children = (Collection<Object>) obj;
			for (Object child : children) {
				addObject(writer, child);
			}
			writer.endArray();
			return;
		}
		/*
		 * it is another object: serialize all its fields reflectively
		 */
		writer.object();
		for (Map.Entry<String, java.lang.reflect.Field> entry : ReflectUtil
				.getAllFields(obj).entrySet()) {
			writer.key(entry.getKey());
			try {
				addObject(writer, entry.getValue().get(obj));
			} catch (Exception e) {
				Tracer.trace("Unable to get value for object attribute "
						+ entry.getKey() + ". null assumed");
				writer.value(null);
			}
		}
		writer.endObject();
	}

	/**
	 * @param object
	 *            to be convert to json
	 * @return json string for the object
	 */
	public static String toJson(Object object) {
		Writer w = new StringWriter();
		JSONWriter writer = new JSONWriter(w);
		addObject(writer, object);
		return w.toString();
	}

	/**
	 * append the text to string builder duly quoted and escaped as per JSON
	 * standard.
	 *
	 * @param value
	 *            to be appended
	 * @param json
	 *            to be appended to
	 */
	public static void appendQoutedText(String value, StringBuilder json) {
		if (value == null || value.length() == 0) {
			json.append("\"\"");
			return;
		}
		char lastChar = 0;
		String hhhh;
		json.append('"');
		for (char c : value.toCharArray()) {
			switch (c) {
			case '\\':
			case '"':
				json.append('\\');
				json.append(c);
				break;
			case '/':
				/*
				 * escape "</" to avoid accidentally closing a script tag when
				 * the json is embedded in html
				 */
				if (lastChar == '<') {
					json.append('\\');
				}
				json.append(c);
				break;
			case '\b':
				json.append("\\b");
				break;
			case '\t':
				json.append("\\t");
				break;
			case '\n':
				json.append("\\n");
				break;
			case '\f':
				json.append("\\f");
				break;
			case '\r':
				json.append("\\r");
				break;
			default:
				if (c < ' ' || (c >= '\u0080' && c < '\u00a0')
						|| (c >= '\u2000' && c < '\u2100')) {
					/*
					 * \ u escape, left-padded with zeros to four hex digits
					 */
					json.append("\\u");
					hhhh = Integer.toHexString(c);
					json.append("0000", 0, 4 - hhhh.length());
					json.append(hhhh);
				} else {
					json.append(c);
				}
			}
			lastChar = c;
		}
		json.append('"');
	}

	/**
	 * convert a JSON array to array of primitive objects.
	 *
	 * @param array
	 *            json Array
	 * @return array of primitives, or null in case any of the array element is
	 *         not primitive
	 */
	public static Object[] toObjectArray(JSONArray array) {
		Object[] result = new Object[array.length()];
		for (int i = 0; i < result.length; i++) {
			Object obj = array.get(i);
			if (obj == null) {
				continue;
			}
			if (obj instanceof JSONObject || obj instanceof JSONArray) {
				Tracer.trace("Element no (zero based) " + i
						+ " is not a primitive, and hence unable to convert the JSONArray into an array of primitives");
				return null;
			}
			result[i] = obj;
		}
		return result;
	}

	/**
	 * get value of a qualified field name down the json object structure.
	 *
	 * @param fieldSelector
	 *            can be of the form a.b.c.. where each part can be int (for
	 *            array index) or name (for attribute).
	 * @param json
	 *            Should be either JSONObject or JSONArray
	 * @return attribute value as per the tree. null if not found.
	 * @throws ApplicationError
	 *             in case the fieldName pattern and the JSONObject structure
	 *             are not in synch.
	 *
	 */
	public static Object getValue(String fieldSelector, Object json) {
		return getValueWorker(fieldSelector, json, 0);
	}

	/**
	 * common worker method to go down the object as per selector
	 *
	 * @param fieldSelector
	 *            dotted path, parts being attribute names or array indexes
	 * @param json
	 *            JSONObject or JSONArray to navigate
	 * @param option
	 *
	 *            <pre>
	 * 0 means do not create/add anything. return null if anything is not found
	 * 1 means create, add and return a JSON object at the end if it is missing
	 * 2 means create, add and return a JSON array at the end if it is missing
	 *            </pre>
	 *
	 * @return value at the selector, possibly freshly created per option
	 */
	private static Object getValueWorker(String fieldSelector, Object json,
			int option) {
		/*
		 * be considerate for careless-callers..
		 */
		if (fieldSelector == null || fieldSelector.isEmpty()) {
			Tracer.trace("Null/empty selector for get/setValue");
			if (option == 0) {
				return null;
			}
			if (option == 1) {
				return new JSONObject();
			}
			return new JSONArray();
		}
		/*
		 * special case that indicates root object itself
		 */
		if (fieldSelector.charAt(0) == '.') {
			return json;
		}
		String[] parts = fieldSelector.split("\\.");
		Object result = json;
		int lastPartIdx = parts.length - 1;
		try {
			for (int i = 0; i < parts.length; i++) {
				String part = parts[i];
				part = part.trim();
				if (part.isEmpty()) {
					throw new ApplicationError(fieldSelector
							+ " is malformed for a qualified json field name.");
				}
				int idx = parseIdx(part);
				Object child = null;
				JSONObject resultObj = null;
				JSONArray resultArr = null;
				if (result instanceof JSONObject) {
					if (idx != -1) {
						throw new ApplicationError(fieldSelector
								+ " is not an appropriate selector. We encountered a non-object for attribute "
								+ part);
					}
					resultObj = (JSONObject) result;
					child = resultObj.opt(part);
				} else if (result instanceof JSONArray) {
					if (idx == -1) {
						throw new ApplicationError(fieldSelector
								+ " is not an appropriate selector. We encountered a object when we were expecting an array for index "
								+ idx);
					}
					resultArr = (JSONArray) result;
					child = resultArr.opt(idx);
				} else {
					throw new ApplicationError(fieldSelector
							+ " is not an appropriate selector as we encountered a non-object on the path.");
				}
				if (child != null) {
					result = child;
					continue;
				}
				if (option == 0) {
					/*
					 * no provisioning. get out of here.
					 */
					return null;
				}
				/*
				 * we create an array or an object and add it to the object.
				 */
				boolean goForObject = option == 1;
				if (i < lastPartIdx) {
					/*
					 * If next part is attribute, then we create an object, else
					 * an array
					 */
					goForObject = parseIdx(parts[i + 1]) == -1;
				}
				if (goForObject) {
					child = new JSONObject();
				} else {
					child = new JSONArray();
				}
				if (resultObj != null) {
					resultObj.put(part, child);
				} else if (resultArr != null) {
					// we have put else-if to calm down the lint!!
					resultArr.put(idx, child);
				}
				result = child;
			}
			return result;
		} catch (NumberFormatException e) {
			throw new ApplicationError(fieldSelector
					+ " is malformed for a qualified json field name.");
		} catch (ClassCastException e) {
			throw new ApplicationError(fieldSelector
					+ " is used as an attribute-selector for a test case, but the json does not have the right structure for this pattern.");
		} catch (ApplicationError e) {
			throw e;
		} catch (Exception e) {
			throw new ApplicationError(e,
					"Error while getting value for field " + fieldSelector);
		}
	}

	/**
	 * set value to json as per selector, creating object/array on the path if
	 * required. This is like creating a file with full path.
	 *
	 * @param fieldSelector
	 *            dotted path; "." means copy the value's attributes onto the
	 *            root object
	 * @param json
	 *            JSONObject/JSONArray to write into
	 * @param value
	 *            value to set
	 */
	public static void setValueWorker(String fieldSelector, Object json,
			Object value) {
		/*
		 * special case of root object itself
		 */
		if (fieldSelector.equals(".")) {
			if (value instanceof JSONObject == false
					|| json instanceof JSONObject == false) {
				Tracer.trace(
						"We expected a JSONObjects for source and destination, but got "
								+ json.getClass().getName()
								+ " as object, and "
								+ (value == null ? "null"
										: value.getClass().getName())
								+ " as value");
				return;
			}
			JSONObject objFrom = (JSONObject) value;
			JSONObject objTo = (JSONObject) json;
			for (String attName : objFrom.keySet()) {
				objTo.put(attName, objFrom.opt(attName));
			}
			return;
		}
		String attName = fieldSelector;
		Object leafObject = json;
		/*
		 * assume that the value is to be added as an attribute, not an element
		 * of array.
		 */
		int objIdx = -1;
		int idx = fieldSelector.lastIndexOf('.');
		if (idx != -1) {
			attName = fieldSelector.substring(idx + 1);
			String selector = fieldSelector.substring(0, idx);
			objIdx = parseIdx(attName);
			int option = objIdx == -1 ? 1 : 2;
			leafObject = getValueWorker(selector, json, option);
		}
		if (objIdx == -1) {
			((JSONObject) leafObject).put(attName, value);
		} else {
			((JSONArray) leafObject).put(objIdx, value);
		}
		return;
	}

	/**
	 * parse string into int, or return -1;
	 *
	 * @param str
	 *            candidate index text
	 * @return parsed non-negative index, or -1 if str is not numeric
	 */
	private static int parseIdx(String str) {
		if (str.isEmpty()) {
			/*
			 * robustness fix: a selector ending with '.' yields an empty part;
			 * treat it as "not an index" instead of throwing
			 * StringIndexOutOfBoundsException from charAt(0)
			 */
			return -1;
		}
		char c = str.charAt(0);
		if (c >= '0' && c <= '9') {
			return Integer.parseInt(str);
		}
		return -1;
	}

	/**
	 * @param itemSelector
	 *            dotted path to the desired object
	 * @param json
	 *            root object
	 * @return object as per selector. A new JSON Object is added and returned
	 *         if the json does not have a value as per selector, adding as many
	 *         object/array on the path if required
	 */
	public static Object getObjectValue(String itemSelector, JSONObject json) {
		return getValueWorker(itemSelector, json, 1);
	}

	/**
	 * @param itemSelector
	 *            dotted path to the desired array
	 * @param json
	 *            root object
	 * @return object as per selector. A new JSON array is added and returned if
	 *         the json does not have a value as per selector, adding as many
	 *         object/array on the path if required
	 */
	public static Object getArrayValue(String itemSelector, JSONObject json) {
		return getValueWorker(itemSelector, json, 2);
	}
}
/* * Copyright (C) Lenon, Bacister (BackUP) * * 2016 Watudu: An Event Sharing Android Mobile App * *This is a course requirement for CS 192 Software Engineering II *under the supervision of Asst. Prof. Ma. Rowena C. Solamo of the *Department of Computer Science, College of Engineering, University *of the Philippines, Diliman for the AY 2015-2016. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://github.com/alea-kim/event-sharing-app * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* * DEVELOPED BY: BACK UP * CLIENT: MAAM ANNETTE (ITDC) * Main activity is used as the homepage of the app. It is used to search for the events. 
 */
package com.eventsharing.watudu;

import android.content.Intent;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.view.Window;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.TextView;
import android.widget.Toast;

import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;

public class MainActivity extends AppCompatActivity implements View.OnClickListener{

    Button bsearch, bsignup, bsignin;
    // parsed event titles, descriptions and image references; re-populated on search
    String[] name = new String[1];
    String[] desc = new String[1];
    String[] images = new String[1];
    // scratch lists the file parser appends to:
    // where = titles, where2 = descriptions, where3 = image entries
    List<String> where = new ArrayList<String>();
    List<String> where2 = new ArrayList<String>();
    List<String> where3 = new ArrayList<String>();
    ImageButton imageButton;
    EditText etSpace;
    TextView tvloginlink; //tvprofile;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        // must be requested before super.onCreate()/setContentView() to take effect
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        etSpace = (EditText) findViewById(R.id.etSpace);
        bsearch = (Button) findViewById(R.id.bsearch);
        bsignin = (Button) findViewById(R.id.bsignin);
        bsignup = (Button) findViewById(R.id.bsignup);
//        imageButton = (ImageButton) findViewById(R.id.imageButton);
        bsearch.setOnClickListener(this);
        bsignin.setOnClickListener(this);
        bsignup.setOnClickListener(this);
//        imageButton.setOnClickListener(this);
    }

    @Override
    public void onBackPressed() {
        // Disable going back to the MainActivity
        moveTaskToBack(true);
    }

    @Override
    /* Handles every button click on this screen. The switch dispatches on the id of
     * the view that was clicked; each case starts the activity that is displayed
     * next (startActivity opens the next screen). */
    public void onClick(View v) {
        switch(v.getId()){ /* dispatch on the id of the clicked view */
            case R.id.bsearch:
                // empty query: show the full event list instead of searching
                if(etSpace.getText().toString().equals("")) {
                    Intent i = new Intent(this, EVENTS.class);
                    startActivity(i);
                    break;
                }
                try {
                    // Read the locally stored events one character at a time. Records
                    // appear to be serialized as {title}description` - i.e. '{' starts a
                    // record, '}' terminates the title and '`' terminates the description.
                    // TODO confirm against the code that writes event.txt.
                    FileInputStream fileIn=openFileInput("event.txt");
                    InputStreamReader InputRead= new InputStreamReader(fileIn);
                    char[] inputBuffer= new char[1];
                    String s = "";
                    int charRead;
                    while ((charRead = InputRead.read(inputBuffer)) > 0) {
                        // char to string conversion
                        String readstring = String.copyValueOf(inputBuffer, 0, charRead);
                        if(readstring.equals("{")){
                            s = "";
                        }
                        else if(readstring.equals("}")){
                            where.add(s);
                            s = "";
                        }
                        else if(readstring.equals("`")){
                            where2.add(s);
                            s = "";
                        }
                        else{
                            s += readstring;
                        }
                    }
                    name = new String[where.size()];
                    where.toArray(name);
                    desc = new String[where2.size()];
                    where2.toArray(desc);
                    // NOTE(review): the reader is not closed when an exception occurs
                    // (no finally), and the stream uses the platform-default charset -
                    // confirm both are acceptable.
                    InputRead.close();
                    //Toast.makeText(getBaseContext(), s, Toast.LENGTH_SHORT).show();
                } catch (Exception e) {
                    e.printStackTrace();
                }
                //GETTING IMAGES
                try {
                    // images.txt holds one image entry per line ('\n' separated)
                    FileInputStream fileIn=openFileInput("images.txt");
                    InputStreamReader InputRead= new InputStreamReader(fileIn);
                    char[] inputBuffer= new char[1];
                    String s="";
                    int charRead;
                    while ((charRead = InputRead.read(inputBuffer)) > 0) {
                        // char to string conversion
                        String readstring = String.copyValueOf(inputBuffer, 0, charRead);
                        if(readstring.equals("\n")){
                            where3.add(s);
                            s = "";
                        }
                        else{
                            s += readstring;
                        }
                        Log.d("readstring be like...", readstring);
                        Log.d("size...", Integer.toString(where.size()));
                    }
                    images = new String[where3.size()];
                    where3.toArray(images);
                    InputRead.close();
                } catch (Exception e) {
                    e.printStackTrace();
                }
                // linear search for an exact title match; index stays -1 when not found
                int index = -1;
                for(int m = 0; m < name.length; m++){
                    Log.d("read this...", etSpace.getText().toString());
                    Log.d("read this2...", name[m]);
                    if(etSpace.getText().toString().equals(name[m])){
                        index = m;
                        Log.d("read this3...", Integer.toString(index));
                        break;
                    }
                }
                if(index == -1){
                    Toast.makeText(getBaseContext(), "NO EVENT FOUND.", Toast.LENGTH_SHORT).show();
                }
                else{
                    // match found: open the detail screen with the matching record
                    String one = name[index];
                    String two = desc[index];
                    String three = images[index];
                    Intent i = new Intent(this, EVENTDESC.class);
                    i.putExtra("Title", one);
                    i.putExtra("Desc", two);
                    i.putExtra("Img", three);
                    startActivity(i);
                }
                break;
            case R.id.bsignin:
                startActivity(new Intent(this, ActivityLogin.class));
                break;
            case R.id.bsignup:
                startActivity(new Intent(this, ActivityRegister.class));
                break;
//            case R.id.imageButton:
//                startActivity(new Intent(this, ActivityLogin.class));
//                break;
        }
    }
}
//
// A Structured Logger for Fluent
//
// Copyright (C) 2011 - 2013 Muga Nishizawa
//
//    Licensed under the Apache License, Version 2.0 (the "License");
//    you may not use this file except in compliance with the License.
//    You may obtain a copy of the License at
//
//        http://www.apache.org/licenses/LICENSE-2.0
//
//    Unless required by applicable law or agreed to in writing, software
//    distributed under the License is distributed on an "AS IS" BASIS,
//    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//    See the License for the specific language governing permissions and
//    limitations under the License.
//
package org.fluentd.logger.sender;

import org.fluentd.logger.errorhandler.ErrorHandler;
import org.msgpack.MessagePack;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.BufferedOutputStream;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.nio.ByteBuffer;
import java.util.Map;

/**
 * {@link Sender} implementation that ships msgpack-serialized events to a
 * fluentd instance over a raw TCP socket. Events that cannot be sent are kept
 * in a fixed-capacity in-memory buffer and retried on later emits/flushes,
 * with reconnection attempts throttled by a {@link Reconnector}.
 */
public class RawSocketSender implements Sender {

    private static final Logger LOG = LoggerFactory.getLogger(RawSocketSender.class);

    /** No-op handler used until the client installs one via {@link #setErrorHandler(ErrorHandler)}. */
    private static final ErrorHandler DEFAULT_ERROR_HANDLER = new ErrorHandler() {};

    /** Serializer producing the msgpack wire format expected by fluentd. */
    private MessagePack msgpack;

    private Socket socket;

    /** Connect timeout in milliseconds. */
    private int timeout;

    private BufferedOutputStream out;

    /** In-memory buffer holding serialized events not yet written to the socket. */
    private ByteBuffer pendings;

    /** Throttles reconnection attempts to avoid reconnect bursts. */
    private Reconnector reconnector;

    /** Identifier derived from host, port, timeout and buffer capacity. */
    private String name;

    private final String host;

    private final int port;

    private ErrorHandler errorHandler = DEFAULT_ERROR_HANDLER;

    /** Creates a sender targeting fluentd's default endpoint localhost:24224. */
    public RawSocketSender() {
        this("localhost", 24224);
    }

    /** Creates a sender with a 3s connect timeout and an 8 MiB pending buffer. */
    public RawSocketSender(String host, int port) {
        this(host, port, 3 * 1000, 8 * 1024 * 1024);
    }

    public RawSocketSender(String host, int port, int timeout, int bufferCapacity) {
        this(host, port, timeout, bufferCapacity, new ExponentialDelayReconnector());
    }

    /**
     * @param host fluentd host
     * @param port fluentd port
     * @param timeout connect timeout in milliseconds
     * @param bufferCapacity capacity of the pending-event buffer in bytes
     * @param reconnector strategy deciding when reconnection is allowed
     */
    public RawSocketSender(String host, int port, int timeout, int bufferCapacity, Reconnector reconnector) {
        msgpack = new MessagePack();
        msgpack.register(Event.class, Event.EventTemplate.INSTANCE);
        pendings = ByteBuffer.allocate(bufferCapacity);
        this.host = host;
        this.port = port;
        this.reconnector = reconnector;
        name = String.format("%s_%d_%d_%d", host, port, timeout, bufferCapacity);
        this.timeout = timeout;
    }

    /** Opens the socket and output stream. The previous catch-and-rethrow added nothing. */
    private void connect() throws IOException {
        socket = new Socket();
        socket.connect(new InetSocketAddress(host, port), timeout);
        out = new BufferedOutputStream(socket.getOutputStream());
    }

    /** Connects if there is no usable connection yet; reuses a healthy one. */
    private void reconnect() throws IOException {
        if (socket == null) {
            connect();
        } else if (socket.isClosed() || (!socket.isConnected())) {
            close();
            connect();
        }
    }

    @Override
    public synchronized void close() {
        // close output stream
        if (out != null) {
            try {
                out.close();
            } catch (IOException e) {
                // ignore
            } finally {
                out = null;
            }
        }

        // close socket
        if (socket != null) {
            try {
                socket.close();
            } catch (IOException e) {
                // ignore
            } finally {
                socket = null;
            }
        }
    }

    @Override
    public boolean emit(String tag, Map<String, Object> data) {
        return emit(tag, System.currentTimeMillis() / 1000, data);
    }

    @Override
    public boolean emit(String tag, long timestamp, Map<String, Object> data) {
        return emit(new Event(tag, timestamp, data));
    }

    /**
     * Serializes the event and hands it to the send path.
     *
     * @return false when serialization fails or the event had to be dropped
     */
    protected boolean emit(Event event) {
        if (LOG.isTraceEnabled()) {
            LOG.trace(String.format("Created %s", new Object[] { event }));
        }

        byte[] bytes = null;
        try {
            // serialize tag, timestamp and data
            bytes = msgpack.write(event);
        } catch (IOException e) {
            LOG.error("Cannot serialize event: " + event, e);
            return false;
        }

        // send serialized data
        return send(bytes);
    }

    /**
     * Tries to drain the pending buffer.
     *
     * @return true when the buffer is empty afterwards
     */
    private boolean flushBuffer() {
        if (reconnector.enableReconnection(System.currentTimeMillis())) {
            flush();
            if (pendings.position() == 0) {
                return true;
            } else {
                // Use host/port instead of socket.getInetAddress(): socket may be null
                // here when flush() above failed and closed the connection, which would
                // have made the original logging statement throw an NPE.
                LOG.error("Cannot send logs to " + host + ":" + port);
            }
        }
        return false;
    }

    /**
     * Buffers the serialized event and flushes when reconnection is allowed.
     *
     * @return false only when the buffer is full and could not be drained, or
     *         the event is larger than the space that draining freed up
     */
    private synchronized boolean send(byte[] bytes) {
        // buffering
        if (pendings.position() + bytes.length > pendings.capacity()) {
            if (!flushBuffer()) {
                return false;
            }
            if (bytes.length > pendings.remaining()) {
                LOG.error("Log data {} larger than remaining buffer size {}", bytes.length, pendings.remaining());
                return false;
            }
        }
        pendings.put(bytes);

        // suppress reconnection burst
        if (!reconnector.enableReconnection(System.currentTimeMillis())) {
            return true;
        }

        // send pending data
        flush();
        return true;
    }

    @Override
    public synchronized void flush() {
        try {
            // check whether connection is established or not
            reconnect();
            // write data
            out.write(getBuffer());
            out.flush();
            clearBuffer();
            reconnector.clearErrorHistory();
        } catch (IOException e) {
            try {
                errorHandler.handleNetworkError(e);
            } catch (Exception handlerException) {
                LOG.warn("ErrorHandler.handleNetworkError failed", handlerException);
            }
            // SLF4J: message format first, throwable last. The previous call passed the
            // class name as the format string and "flush" as an unused argument.
            LOG.error("Failed to flush buffered events to {}:{}", host, port, e);
            reconnector.addErrorHistory(System.currentTimeMillis());
            close();
        }
    }

    /** Returns a copy of the buffered bytes and rewinds the buffer position to 0. */
    synchronized byte[] getBuffer() {
        int len = pendings.position();
        pendings.position(0);
        byte[] ret = new byte[len];
        pendings.get(ret, 0, len);
        return ret;
    }

    private void clearBuffer() {
        pendings.clear();
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public String toString() {
        return getName();
    }

    @Override
    public boolean isConnected() {
        return socket != null && !socket.isClosed() && socket.isConnected() && !socket.isOutputShutdown();
    }

    @Override
    public void setErrorHandler(ErrorHandler errorHandler) {
        if (errorHandler == null) {
            throw new IllegalArgumentException("errorHandler is null");
        }

        this.errorHandler = errorHandler;
    }

    @Override
    public void removeErrorHandler() {
        this.errorHandler = DEFAULT_ERROR_HANDLER;
    }
}
/* * #%L * OW2 Chameleon - Fuchsia Framework * %% * Copyright (C) 2009 - 2014 OW2 Chameleon * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /* Calimero - A library for KNX network access Copyright (C) 2006-2008 B. Malinowsky This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or at your option any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. Linking this library statically or dynamically with other modules is making a combined work based on this library. Thus, the terms and conditions of the GNU General Public License cover the whole combination. 
As a special exception, the copyright holders of this library give you permission to link this library with independent modules to produce an executable, regardless of the license terms of these independent modules, and to copy and distribute the resulting executable under terms of your choice, provided that you also meet, for each linked independent module, the terms and conditions of the license of that module. An independent module is a module which is not derived from or based on this library. If you modify this library, you may extend this exception to your version of the library, but you are not obligated to do so. If you do not wish to do so, delete this exception statement from your version. */ package tuwien.auto.calimero.link; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.UnknownHostException; import tuwien.auto.calimero.CloseEvent; import tuwien.auto.calimero.FrameEvent; import tuwien.auto.calimero.cemi.CEMIBusMon; import tuwien.auto.calimero.exception.KNXException; import tuwien.auto.calimero.exception.KNXFormatException; import tuwien.auto.calimero.exception.KNXIllegalArgumentException; import tuwien.auto.calimero.knxnetip.KNXnetIPConnection; import tuwien.auto.calimero.knxnetip.KNXnetIPTunnel; import tuwien.auto.calimero.link.event.LinkListener; import tuwien.auto.calimero.link.event.MonitorFrameEvent; import tuwien.auto.calimero.link.medium.KNXMediumSettings; import tuwien.auto.calimero.link.medium.RawFrame; import tuwien.auto.calimero.link.medium.RawFrameFactory; import tuwien.auto.calimero.log.LogManager; import tuwien.auto.calimero.log.LogService; /** * Implementation of the KNX network monitor link based on the KNXnet/IP protocol, using a * {@link KNXnetIPConnection}. * <p> * Once a monitor has been closed, it is not available for further link communication, * i.e. it can't be reopened. * <p> * Pay attention to the IP address consideration stated in the documentation comments of * class {@link KNXNetworkLinkIP}. 
 *
 * @author B. Malinowsky
 */
public class KNXNetworkMonitorIP implements KNXNetworkMonitor
{
	// Event notifier for this monitor link: forwards busmonitor indications to
	// registered LinkListeners, optionally decoding the raw frame payload first.
	private static final class MonitorNotifier extends EventNotifier
	{
		// when true, frameReceived() additionally decodes the payload into a RawFrame
		volatile boolean decode;

		MonitorNotifier(Object source, LogService logger)
		{
			super(source, logger);
		}

		public void frameReceived(FrameEvent e)
		{
			final int mc = e.getFrame().getMessageCode();
			if (mc == CEMIBusMon.MC_BUSMON_IND) {
				RawFrame raw = null;
				logger.info("received monitor indication");
				if (decode)
					try {
						final short m = ((KNXNetworkMonitorIP) source).medium.getMedium();
						raw = RawFrameFactory.create(m, e.getFrame().getPayload(), 0);
					}
					catch (final KNXFormatException ex) {
						// decoding is best effort: the event is still delivered, with raw == null
						logger.error("decoding raw frame", ex);
					}
				addEvent(new Indication(new MonitorFrameEvent(source, e.getFrame(), raw)));
			}
			else
				logger.warn("unspecified frame event - ignored, msg code = 0x"
					+ Integer.toHexString(mc));
		}

		public void connectionClosed(CloseEvent e)
		{
			// mark the enclosing monitor closed before notifying listeners
			((KNXNetworkMonitorIP) source).closed = true;
			super.connectionClosed(e);
			logger.info("monitor closed");
			LogManager.getManager().removeLogService(logger.getName());
		}
	};

	// set once the underlying connection is closed; a closed monitor cannot be reopened
	private volatile boolean closed;
	// KNXnet/IP tunnel in busmonitor layer used to access the KNX network
	private final KNXnetIPConnection conn;
	// medium settings needed for decoding raw frames; replaceable via setKNXMedium()
	private KNXMediumSettings medium;
	private final LogService logger;
	// our link connection event notifier
	private final MonitorNotifier notifier;

	/**
	 * Creates a new network monitor based on the KNXnet/IP protocol for accessing the KNX
	 * network.
	 * <p>
	 *
	 * @param localEP the local endpoint to use for the link, this is the client control
	 *        endpoint, use <code>null</code> for the default local host and an
	 *        ephemeral port number
	 * @param remoteEP the remote endpoint of the link; this is the server control
	 *        endpoint
	 * @param useNAT <code>true</code> to use network address translation in the
	 *        KNXnet/IP protocol, <code>false</code> to use the default (non aware) mode
	 * @param settings medium settings defining the specific KNX medium needed for
	 *        decoding raw frames received from the KNX network
	 * @throws KNXException on failure establishing the link
	 */
	public KNXNetworkMonitorIP(InetSocketAddress localEP, InetSocketAddress remoteEP,
		boolean useNAT, KNXMediumSettings settings) throws KNXException
	{
		InetSocketAddress ep = localEP;
		if (ep == null)
			try {
				// NOTE(review): InetAddress.getLocalHost() may resolve to loopback or an
				// unintended interface on multi-homed hosts - see the IP address
				// consideration referenced in the class comment
				ep = new InetSocketAddress(InetAddress.getLocalHost(), 0);
			}
			catch (final UnknownHostException e) {
				throw new KNXException("no local host available");
			}
		conn = new KNXnetIPTunnel(KNXnetIPTunnel.BUSMONITOR_LAYER, ep, remoteEP, useNAT);
		logger = LogManager.getManager().getLogService(getName());
		logger.info("in busmonitor mode - ready to receive");
		notifier = new MonitorNotifier(this, logger);
		conn.addConnectionListener(notifier);
		// configure KNX medium stuff
		setKNXMedium(settings);
	}

	/* (non-Javadoc)
	 * @see tuwien.auto.calimero.link.KNXNetworkMonitor#setKNXMedium
	 * (tuwien.auto.calimero.link.medium.KNXMediumSettings)
	 */
	public void setKNXMedium(KNXMediumSettings settings)
	{
		if (settings == null)
			throw new KNXIllegalArgumentException("medium settings are mandatory");
		// only allow replacing settings with a compatible (sub/super) type
		if (medium != null && !settings.getClass().isAssignableFrom(medium.getClass())
			&& !medium.getClass().isAssignableFrom(settings.getClass()))
			throw new KNXIllegalArgumentException("medium differs");
		medium = settings;
	}

	/* (non-Javadoc)
	 * @see tuwien.auto.calimero.link.KNXNetworkMonitor#getKNXMedium()
	 */
	public KNXMediumSettings getKNXMedium()
	{
		return medium;
	}

	/* (non-Javadoc)
	 * @see tuwien.auto.calimero.link.KNXNetworkMonitor#addMonitorListener
	 * (tuwien.auto.calimero.link.event.LinkListener)
	 */
	public void addMonitorListener(LinkListener l)
	{
		notifier.addListener(l);
	}

	/* (non-Javadoc)
	 * @see tuwien.auto.calimero.link.KNXNetworkMonitor#removeMonitorListener
	 * (tuwien.auto.calimero.link.event.LinkListener)
	 */
	public void removeMonitorListener(LinkListener l)
	{
		notifier.removeListener(l);
	}

	/* (non-Javadoc)
	 * @see tuwien.auto.calimero.link.KNXNetworkMonitor#setDecodeRawFrames(boolean)
	 */
	public void setDecodeRawFrames(boolean decode)
	{
		notifier.decode = decode;
		logger.info((decode ? "enable" : "disable") + " decoding of raw frames");
	}

	/**
	 * {@inheritDoc}<br>
	 * The returned name is "monitor " + remote IP address of the control endpoint + ":" +
	 * remote port used by the monitor.
	 */
	public String getName()
	{
		// do our own IP:port string, since InetAddress.toString() always prepends a '/'
		final InetSocketAddress a = conn.getRemoteAddress();
		return "monitor " + a.getAddress().getHostAddress() + ":" + a.getPort();
	}

	/* (non-Javadoc)
	 * @see tuwien.auto.calimero.link.KNXNetworkMonitor#isOpen()
	 */
	public boolean isOpen()
	{
		return !closed;
	}

	/* (non-Javadoc)
	 * @see tuwien.auto.calimero.link.KNXNetworkMonitor#close()
	 */
	public void close()
	{
		// double close is a no-op; the guard ensures only one caller proceeds
		synchronized (this) {
			if (closed)
				return;
			closed = true;
		}
		conn.close();
		notifier.quit();
	}

	/* (non-Javadoc)
	 * @see java.lang.Object#toString()
	 */
	public String toString()
	{
		return getName() + (closed ? " (closed), " : ", ") + medium.getMediumString()
			+ " medium" + (notifier.decode ? ", decode raw frames" : "");
	}
}
/*
 * Copyright 2019 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.spanner.connection;

import com.google.cloud.ByteArray;
import com.google.cloud.Date;
import com.google.cloud.Timestamp;
import com.google.cloud.spanner.ResultSet;
import com.google.cloud.spanner.SpannerException;
import com.google.cloud.spanner.Struct;
import com.google.cloud.spanner.Type;
import com.google.cloud.spanner.Value;
import com.google.common.base.Preconditions;
import com.google.spanner.v1.ResultSetStats;
import java.math.BigDecimal;
import java.util.List;

/**
 * {@link ResultSet} implementation used by the Spanner connection API to ensure that the query for
 * a {@link ResultSet} is executed directly when it is created. This is done by calling {@link
 * ResultSet#next()} directly after creation. This ensures that a statement timeout can be applied
 * to the actual query execution. It also ensures that any invalid query will throw an exception at
 * execution instead of the first next() call by a client.
 */
class DirectExecuteResultSet implements ResultSet {
  private static final String MISSING_NEXT_CALL = "Must be preceded by a next() call";
  private final ResultSet delegate;
  // true once the client (not the constructor) has called next() at least once
  private boolean nextCalledByClient = false;
  // result of the eager next() call performed in the constructor
  private final boolean initialNextResult;
  // true once next() has returned false, i.e. the result set is exhausted
  private boolean nextHasReturnedFalse = false;

  /**
   * Creates a new {@link DirectExecuteResultSet} from the given delegate {@link ResultSet}. This
   * automatically executes the query of the given delegate {@link ResultSet} by calling next() on
   * the delegate. The delegate must not have been used (i.e. next() must not have been called on
   * it).
   *
   * @param delegate The underlying {@link ResultSet} for this {@link DirectExecuteResultSet}.
   * @return a {@link DirectExecuteResultSet} that has already executed the query associated with
   *     the delegate {@link ResultSet}.
   */
  static DirectExecuteResultSet ofResultSet(ResultSet delegate) {
    return new DirectExecuteResultSet(delegate);
  }

  DirectExecuteResultSet(ResultSet delegate) {
    Preconditions.checkNotNull(delegate);
    this.delegate = delegate;
    // Eagerly execute the query; the result of this first next() call is replayed
    // to the client on its first next() call.
    initialNextResult = delegate.next();
  }

  /**
   * Verifies that the client has called {@link #next()} before accessing row data. Extracted into a
   * helper to remove the identical precondition that was previously duplicated in every accessor;
   * it throws the same {@link IllegalStateException} with the same message as before.
   */
  private void checkNextCalledByClient() {
    Preconditions.checkState(nextCalledByClient, MISSING_NEXT_CALL);
  }

  @Override
  public boolean next() throws SpannerException {
    if (nextCalledByClient) {
      boolean res = delegate.next();
      nextHasReturnedFalse = !res;
      return res;
    }
    // first client call: replay the result of the constructor's eager next()
    nextCalledByClient = true;
    nextHasReturnedFalse = !initialNextResult;
    return initialNextResult;
  }

  @Override
  public Struct getCurrentRowAsStruct() {
    checkNextCalledByClient();
    return delegate.getCurrentRowAsStruct();
  }

  @Override
  public void close() {
    delegate.close();
  }

  @Override
  public ResultSetStats getStats() {
    // stats are only available after the entire result set has been consumed
    if (nextHasReturnedFalse) {
      return delegate.getStats();
    }
    return null;
  }

  @Override
  public Type getType() {
    checkNextCalledByClient();
    return delegate.getType();
  }

  @Override
  public int getColumnCount() {
    checkNextCalledByClient();
    return delegate.getColumnCount();
  }

  @Override
  public int getColumnIndex(String columnName) {
    checkNextCalledByClient();
    return delegate.getColumnIndex(columnName);
  }

  @Override
  public Type getColumnType(int columnIndex) {
    checkNextCalledByClient();
    return delegate.getColumnType(columnIndex);
  }

  @Override
  public Type getColumnType(String columnName) {
    checkNextCalledByClient();
    return delegate.getColumnType(columnName);
  }

  @Override
  public boolean isNull(int columnIndex) {
    checkNextCalledByClient();
    return delegate.isNull(columnIndex);
  }

  @Override
  public boolean isNull(String columnName) {
    checkNextCalledByClient();
    return delegate.isNull(columnName);
  }

  @Override
  public boolean getBoolean(int columnIndex) {
    checkNextCalledByClient();
    return delegate.getBoolean(columnIndex);
  }

  @Override
  public boolean getBoolean(String columnName) {
    checkNextCalledByClient();
    return delegate.getBoolean(columnName);
  }

  @Override
  public long getLong(int columnIndex) {
    checkNextCalledByClient();
    return delegate.getLong(columnIndex);
  }

  @Override
  public long getLong(String columnName) {
    checkNextCalledByClient();
    return delegate.getLong(columnName);
  }

  @Override
  public double getDouble(int columnIndex) {
    checkNextCalledByClient();
    return delegate.getDouble(columnIndex);
  }

  @Override
  public BigDecimal getBigDecimal(String columnName) {
    checkNextCalledByClient();
    return delegate.getBigDecimal(columnName);
  }

  @Override
  public BigDecimal getBigDecimal(int columnIndex) {
    checkNextCalledByClient();
    return delegate.getBigDecimal(columnIndex);
  }

  @Override
  public double getDouble(String columnName) {
    checkNextCalledByClient();
    return delegate.getDouble(columnName);
  }

  @Override
  public String getString(int columnIndex) {
    checkNextCalledByClient();
    return delegate.getString(columnIndex);
  }

  @Override
  public String getString(String columnName) {
    checkNextCalledByClient();
    return delegate.getString(columnName);
  }

  @Override
  public String getJson(int columnIndex) {
    checkNextCalledByClient();
    return delegate.getJson(columnIndex);
  }

  @Override
  public String getJson(String columnName) {
    checkNextCalledByClient();
    return delegate.getJson(columnName);
  }

  @Override
  public ByteArray getBytes(int columnIndex) {
    checkNextCalledByClient();
    return delegate.getBytes(columnIndex);
  }

  @Override
  public ByteArray getBytes(String columnName) {
    checkNextCalledByClient();
    return delegate.getBytes(columnName);
  }

  @Override
  public Timestamp getTimestamp(int columnIndex) {
    checkNextCalledByClient();
    return delegate.getTimestamp(columnIndex);
  }

  @Override
  public Timestamp getTimestamp(String columnName) {
    checkNextCalledByClient();
    return delegate.getTimestamp(columnName);
  }

  @Override
  public Date getDate(int columnIndex) {
    checkNextCalledByClient();
    return delegate.getDate(columnIndex);
  }

  @Override
  public Date getDate(String columnName) {
    checkNextCalledByClient();
    return delegate.getDate(columnName);
  }

  @Override
  public Value getValue(int columnIndex) {
    checkNextCalledByClient();
    return delegate.getValue(columnIndex);
  }

  @Override
  public Value getValue(String columnName) {
    checkNextCalledByClient();
    return delegate.getValue(columnName);
  }

  @Override
  public boolean[] getBooleanArray(int columnIndex) {
    checkNextCalledByClient();
    return delegate.getBooleanArray(columnIndex);
  }

  @Override
  public boolean[] getBooleanArray(String columnName) {
    checkNextCalledByClient();
    return delegate.getBooleanArray(columnName);
  }

  @Override
  public List<Boolean> getBooleanList(int columnIndex) {
    checkNextCalledByClient();
    return delegate.getBooleanList(columnIndex);
  }

  @Override
  public List<Boolean> getBooleanList(String columnName) {
    checkNextCalledByClient();
    return delegate.getBooleanList(columnName);
  }

  @Override
  public long[] getLongArray(int columnIndex) {
    checkNextCalledByClient();
    return delegate.getLongArray(columnIndex);
  }

  @Override
  public long[] getLongArray(String columnName) {
    checkNextCalledByClient();
    return delegate.getLongArray(columnName);
  }

  @Override
  public List<Long> getLongList(int columnIndex) {
    checkNextCalledByClient();
    return delegate.getLongList(columnIndex);
  }

  @Override
  public List<Long> getLongList(String columnName) {
    checkNextCalledByClient();
    return delegate.getLongList(columnName);
  }

  @Override
  public double[] getDoubleArray(int columnIndex) {
    checkNextCalledByClient();
    return delegate.getDoubleArray(columnIndex);
  }

  @Override
  public double[] getDoubleArray(String columnName) {
    checkNextCalledByClient();
    return delegate.getDoubleArray(columnName);
  }

  @Override
  public List<Double> getDoubleList(int columnIndex) {
    checkNextCalledByClient();
    return delegate.getDoubleList(columnIndex);
  }

  @Override
  public List<Double> getDoubleList(String columnName) {
    checkNextCalledByClient();
    return delegate.getDoubleList(columnName);
  }

  @Override
  public List<BigDecimal> getBigDecimalList(int columnIndex) {
    checkNextCalledByClient();
    return delegate.getBigDecimalList(columnIndex);
  }

  @Override
  public List<BigDecimal> getBigDecimalList(String columnName) {
    checkNextCalledByClient();
    return delegate.getBigDecimalList(columnName);
  }

  @Override
  public List<String> getStringList(int columnIndex) {
    checkNextCalledByClient();
    return delegate.getStringList(columnIndex);
  }

  @Override
  public List<String> getStringList(String columnName) {
    checkNextCalledByClient();
    return delegate.getStringList(columnName);
  }

  @Override
  public List<String> getJsonList(int columnIndex) {
    checkNextCalledByClient();
    return delegate.getJsonList(columnIndex);
  }

  @Override
  public List<String> getJsonList(String columnName) {
    checkNextCalledByClient();
    return delegate.getJsonList(columnName);
  }

  @Override
  public List<ByteArray> getBytesList(int columnIndex) {
    checkNextCalledByClient();
    return delegate.getBytesList(columnIndex);
  }

  @Override
  public List<ByteArray> getBytesList(String columnName) {
    checkNextCalledByClient();
    return delegate.getBytesList(columnName);
  }

  @Override
  public List<Timestamp> getTimestampList(int columnIndex) {
    checkNextCalledByClient();
    return delegate.getTimestampList(columnIndex);
  }

  @Override
  public List<Timestamp> getTimestampList(String columnName) {
    checkNextCalledByClient();
    return delegate.getTimestampList(columnName);
  }

  @Override
  public List<Date> getDateList(int columnIndex) {
    checkNextCalledByClient();
    return delegate.getDateList(columnIndex);
  }

  @Override
  public List<Date> getDateList(String columnName) {
    checkNextCalledByClient();
    return delegate.getDateList(columnName);
  }

  @Override
  public List<Struct> getStructList(int columnIndex) {
    checkNextCalledByClient();
    return delegate.getStructList(columnIndex);
  }

  @Override
  public List<Struct> getStructList(String columnName) {
    checkNextCalledByClient();
    return delegate.getStructList(columnName);
  }

  @Override
  public boolean equals(Object o) {
    if (!(o instanceof DirectExecuteResultSet)) {
      return false;
    }
    return ((DirectExecuteResultSet) o).delegate.equals(delegate);
  }

  @Override
  public int hashCode() {
    return delegate.hashCode();
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.jackrabbit.oak.plugins.document; import java.io.IOException; import java.util.List; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import org.apache.jackrabbit.oak.api.CommitFailedException; import org.apache.jackrabbit.oak.plugins.document.DocumentStoreFixture.RDBFixture; import org.apache.jackrabbit.oak.spi.commit.CommitInfo; import org.apache.jackrabbit.oak.spi.commit.EmptyHook; import org.apache.jackrabbit.oak.spi.state.NodeBuilder; import org.apache.jackrabbit.oak.stats.Clock; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import static org.apache.jackrabbit.oak.plugins.document.Collection.NODES; import static org.apache.jackrabbit.oak.plugins.document.util.Utils.getIdFromPath; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @RunWith(Parameterized.class) public class LastRevRecoveryAgentTest { private final DocumentStoreFixture fixture; private DocumentNodeStore ds1; private DocumentNodeStore ds2; private int c1Id; 
private int c2Id; private DocumentStore sharedStore; private Clock clock; public LastRevRecoveryAgentTest(DocumentStoreFixture fixture) { this.fixture = fixture; } //----------------------------------------< Set Up > @Parameterized.Parameters public static java.util.Collection<Object[]> fixtures() throws IOException { List<Object[]> fixtures = Lists.newArrayList(); fixtures.add(new Object[] {new DocumentStoreFixture.MemoryFixture()}); DocumentStoreFixture rdb = new RDBFixture("RDB-H2(file)", "jdbc:h2:file:./target/ds-test", "sa", ""); if (rdb.isAvailable()) { fixtures.add(new Object[] { rdb }); } DocumentStoreFixture mongo = new DocumentStoreFixture.MongoFixture(); if (mongo.isAvailable()) { fixtures.add(new Object[] { mongo }); } return fixtures; } @Before public void setUp() throws InterruptedException { clock = new Clock.Virtual(); //Quite a bit of logic relies on timestamp converted // to 5 sec resolutions clock.waitUntil(System.currentTimeMillis()); ClusterNodeInfo.setClock(clock); Revision.setClock(clock); sharedStore = fixture.createDocumentStore(); DocumentStoreWrapper store = new DocumentStoreWrapper(sharedStore) { @Override public void dispose() { // do not dispose when called by DocumentNodeStore } }; ds1 = new DocumentMK.Builder() .setAsyncDelay(0) .clock(clock) .setDocumentStore(store) .setLeaseCheck(false) .setClusterId(1) .getNodeStore(); c1Id = ds1.getClusterId(); ds2 = new DocumentMK.Builder() .setAsyncDelay(0) .clock(clock) .setDocumentStore(store) .setLeaseCheck(false) .setClusterId(2) .getNodeStore(); c2Id = ds2.getClusterId(); } @After public void tearDown() throws Exception { ds1.dispose(); ds2.dispose(); sharedStore.dispose(); fixture.dispose(); ClusterNodeInfo.resetClockToDefault(); Revision.resetClockToDefault(); } //~------------------------------------------< Test Case > @Test public void testIsRecoveryRequired() throws Exception{ //1. 
Create base structure /x/y NodeBuilder b1 = ds1.getRoot().builder(); b1.child("x").child("y"); ds1.merge(b1, EmptyHook.INSTANCE, CommitInfo.EMPTY); ds1.runBackgroundOperations(); ds2.runBackgroundOperations(); //2. Add a new node /x/y/z in C2 NodeBuilder b2 = ds2.getRoot().builder(); b2.child("x").child("y").child("z").setProperty("foo", "bar"); ds2.merge(b2, EmptyHook.INSTANCE, CommitInfo.EMPTY); Revision zlastRev2 = ds2.getHeadRevision().getRevision(ds2.getClusterId()); long leaseTime = ds1.getClusterInfo().getLeaseTime(); ds1.runBackgroundOperations(); clock.waitUntil(clock.getTime() + leaseTime + 10); //Renew the lease for C1 ds1.getClusterInfo().renewLease(); assertTrue(ds1.getLastRevRecoveryAgent().isRecoveryNeeded()); Iterable<Integer> cids = ds1.getLastRevRecoveryAgent().getRecoveryCandidateNodes(); assertEquals(1, Iterables.size(cids)); assertEquals(c2Id, Iterables.get(cids, 0).intValue()); ds1.getLastRevRecoveryAgent().recover(Iterables.get(cids, 0)); assertEquals(zlastRev2, getDocument(ds1, "/x/y").getLastRev().get(c2Id)); assertEquals(zlastRev2, getDocument(ds1, "/x").getLastRev().get(c2Id)); assertEquals(zlastRev2, getDocument(ds1, "/").getLastRev().get(c2Id)); } //OAK-5337 @Test public void testSelfRecovery() throws Exception{ //1. Create base structure /x/y NodeBuilder b1 = ds1.getRoot().builder(); b1.child("x").child("y"); merge(ds1, b1); ds1.runBackgroundOperations(); //2. Add a new node /x/y/z in C1 b1 = ds1.getRoot().builder(); b1.child("x").child("y").child("z"); merge(ds1, b1); long leaseTime = ds1.getClusterInfo().getLeaseTime(); clock.waitUntil(clock.getTime() + leaseTime + 10); //Renew the lease for C2 ds2.getClusterInfo().renewLease(); //C1 needs recovery from lease timeout pov assertTrue(ds1.getLastRevRecoveryAgent().isRecoveryNeeded()); Iterable<Integer> cids = ds1.getLastRevRecoveryAgent().getRecoveryCandidateNodes(); //.. 
but, it won't be returned while we iterate candidate nodes from self assertEquals(0, Iterables.size(cids)); cids = ds2.getLastRevRecoveryAgent().getRecoveryCandidateNodes(); //... checking that from other node still reports assertEquals(1, Iterables.size(cids)); assertEquals(c1Id, Iterables.get(cids, 0).intValue()); ds2.runBackgroundOperations(); assertFalse(ds2.getRoot().getChildNode("x").getChildNode("y").hasChildNode("z")); // yet, calling recover with self-cluster-id still works (useful for startup LRRA) ds1.getLastRevRecoveryAgent().recover(Iterables.get(cids, 0)); ds2.runBackgroundOperations(); assertTrue(ds2.getRoot().getChildNode("x").getChildNode("y").hasChildNode("z")); } @Test public void testRepeatedRecovery() throws Exception { //1. Create base structure /x/y NodeBuilder b1 = ds1.getRoot().builder(); b1.child("x").child("y"); ds1.merge(b1, EmptyHook.INSTANCE, CommitInfo.EMPTY); ds1.runBackgroundOperations(); ds2.runBackgroundOperations(); //2. Add a new node /x/y/z in C2 NodeBuilder b2 = ds2.getRoot().builder(); b2.child("x").child("y").child("z").setProperty("foo", "bar"); ds2.merge(b2, EmptyHook.INSTANCE, CommitInfo.EMPTY); NodeDocument z1 = getDocument(ds1, "/x/y/z"); Revision zlastRev2 = z1.getLastRev().get(c2Id); long leaseTime = ds1.getClusterInfo().getLeaseTime(); ds1.runBackgroundOperations(); clock.waitUntil(clock.getTime() + leaseTime + 10); //Renew the lease for C1 ds1.getClusterInfo().renewLease(); assertTrue(ds1.getLastRevRecoveryAgent().isRecoveryNeeded()); ds1.getLastRevRecoveryAgent().performRecoveryIfNeeded(); assertFalse(ds1.getLastRevRecoveryAgent().isRecoveryNeeded()); } @Test public void recoveryOfModifiedDocument() throws Exception { // do not retry merges ds1.setMaxBackOffMillis(0); ds2.setMaxBackOffMillis(0); NodeBuilder b1 = ds1.getRoot().builder(); b1.child("x").child("y").setProperty("p", "v1"); merge(ds1, b1); ds1.runBackgroundOperations(); ds2.runBackgroundOperations(); NodeBuilder b2 = ds2.getRoot().builder(); 
b2.child("x").child("y").setProperty("p", "v2"); merge(ds2, b2); // simulate a crash of ds2 long leaseTime = ds2.getClusterInfo().getLeaseTime(); clock.waitUntil(clock.getTime() + leaseTime * 2); // this write will conflict because ds2 did not run // background ops after setting p=v2 b1 = ds1.getRoot().builder(); b1.child("x").child("y").setProperty("p", "v11"); try { merge(ds1, b1); fail("CommitFailedException expected"); } catch (CommitFailedException e) { // expected } ds1.getLastRevRecoveryAgent().recover(2); ds1.runBackgroundOperations(); // now the write must succeed b1 = ds1.getRoot().builder(); b1.child("x").child("y").setProperty("p", "v11"); merge(ds1, b1); } private static NodeDocument getDocument(DocumentNodeStore nodeStore, String path) { return nodeStore.getDocumentStore().find(NODES, getIdFromPath(path)); } private static void merge(DocumentNodeStore store, NodeBuilder builder) throws CommitFailedException { store.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY); } }
/* Copyright 2013 Adam Lara Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ import java.io.BufferedReader; import java.io.InputStreamReader; import java.util.Date; import java.util.Properties; import javax.mail.*; import javax.mail.internet.*; public class jPingpingThread extends Thread { jPingping gui; public jPingpingThread(jPingping in) { gui = in; } public void run() { runPing(); } public void runPing() { try { String cmd = ""; String host = "98.130.148.213"; //hostname or IP to ping String[] results = new String[60]; String[] secondaryResults = new String[120]; //***NEW String fullResults = ""; String eol = System.getProperty("line.separator"); fullResults = fullResults + "Name: " + gui.nameField.getText() + eol; fullResults = fullResults + "Email: " + gui.emailField.getText() + eol; fullResults = fullResults + "IP Address: " + gui.ip + eol; int loopCount = 0; int timedoutCount = 0; //***NEW if(System.getProperty("os.name").startsWith("Windows")) { // For Windows cmd = "ping -n 10 " + host; gui.out_console.append("Windows OS detected, beginning ping test to remote host." + eol); gui.out_console.append("''" + cmd + "''" + eol); } else { // For Linux and OSX cmd = "ping -c 10 " + host; gui.out_console.append("Mac OSX or Linux OS detected, beginning ping test to remote host." 
+ eol); gui.out_console.append(cmd + eol); } Process runPing = Runtime.getRuntime().exec(cmd); BufferedReader reader = new BufferedReader(new InputStreamReader(runPing.getInputStream())); String lineRead = null; while((lineRead = reader.readLine()) != null) { results[loopCount]=lineRead; if (lineRead.contains("timed out")) { timedoutCount++; } loopCount++; gui.out_console.append(lineRead + "\n"); } //***NEW if (timedoutCount >= 3) { loopCount = 0; gui.out_console.append("Abnormal time out errors detected, running secondary test suite...\n"); if(System.getProperty("os.name").startsWith("Windows")) { // For Windows cmd = "ipconfig /all "; gui.out_console.append(eol + "Checking network interfaces." + eol); gui.out_console.append("''" + cmd + "''" + eol); } else { // For Linux and OSX cmd = "ifconfig -a "; gui.out_console.append(eol + "Checking network interfaces." + eol); gui.out_console.append(cmd + eol); } runPing = Runtime.getRuntime().exec(cmd); BufferedReader reader2 = new BufferedReader(new InputStreamReader(runPing.getInputStream())); String lineRead2 = null; while((lineRead2 = reader2.readLine()) != null) { secondaryResults[loopCount]=lineRead2; loopCount++; gui.out_console.append(lineRead2 + "\n"); } if(System.getProperty("os.name").startsWith("Windows")) { // For Windows cmd = "tracert " + host; gui.out_console.append(eol + "Running trace route to remote host." + eol); gui.out_console.append("''" + cmd + "''" + eol); } else { // For Linux and OSX cmd = "traceroute " + host; gui.out_console.append(eol + "Running trace route to remote host." + eol); gui.out_console.append(cmd + eol); } runPing = Runtime.getRuntime().exec(cmd); BufferedReader reader3 = new BufferedReader(new InputStreamReader(runPing.getInputStream())); String lineRead3 = null; while((lineRead3 = reader3.readLine()) != null) { secondaryResults[loopCount]=lineRead3; loopCount++; gui.out_console.append(lineRead3 + "\n"); } }//****NEW end. 
runPing.waitFor(); if(runPing.exitValue() == 0) { gui.out_console.append("Test successful!" + "\n"); } else { gui.out_console.append("Test was not successful, problems encountered." + "\n"); } Properties props = System.getProperties(); props.put("mail.smtp.host","SMTP-MAIL-HOST"); props.put("mail.smtp.port","SMTP-PORT"); props.put("mail.smtp.auth","true-false"); //smtp authentication Authenticator auth = new Authenticator() { public PasswordAuthentication getPasswordAuthentication() { return new PasswordAuthentication("email@address.com","password"); //smtp login & password } }; int size = results.length; int size2 = secondaryResults.length; for (int i=0; i<size; i++) { if(results[i]!=null){ fullResults = fullResults + eol + results[i]; } } for (int u=0; u<size2; u++) { if(secondaryResults[u]!=null){ fullResults = fullResults + eol + secondaryResults[u]; } } sendMail(props,auth,new InternetAddress("test@nnovationlabs.com"),"recipient@email.com","Ping Results",fullResults,"text/plain"); }catch(Exception e) { e.printStackTrace(); gui.out_console.append(e + "\n"); } } public void sendMail(Properties props, Authenticator authenticator, InternetAddress fromAddress, String recipients, String subject, String content, String contentType) { gui.out_console.append("\nTest completed, results have been emailed. \n"); try { Session session = Session.getDefaultInstance(props, authenticator); Message message = new MimeMessage(session); message.setFrom(fromAddress); message.setRecipients(Message.RecipientType.TO, InternetAddress.parse(recipients, false)); message.setSubject(subject); message.setContent(content,contentType); message.setSentDate(new Date()); Transport.send(message); } catch (AddressException e) { e.printStackTrace(); } catch (MessagingException e) { e.printStackTrace(); } }//sendmail }
//
//  ========================================================================
//  Copyright (c) 1995-2014 Mort Bay Consulting Pty. Ltd.
//  ------------------------------------------------------------------------
//  All rights reserved. This program and the accompanying materials
//  are made available under the terms of the Eclipse Public License v1.0
//  and Apache License v2.0 which accompanies this distribution.
//
//      The Eclipse Public License is available at
//      http://www.eclipse.org/legal/epl-v10.html
//
//      The Apache License v2.0 is available at
//      http://www.opensource.org/licenses/apache2.0.php
//
//  You may elect to redistribute this code under either of these licenses.
//  ========================================================================
//

package org.eclipse.jetty.util.ajax;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import java.io.StringReader;
import java.lang.reflect.Array;
import java.math.BigDecimal;
import java.util.Date;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.TimeZone;

import org.eclipse.jetty.util.DateCache;
import org.eclipse.jetty.util.ajax.JSON.Output;
import org.junit.BeforeClass;
import org.junit.Test;

/**
 * Tests for the Jetty {@link JSON} generator/parser: stringification,
 * parsing (including comments, NaN/undefined), quoting, and the
 * convertor/convertible extension points.
 */
public class JSONTest
{
    // Shared fixture: JSON text with comments, numbers, nested objects,
    // arrays and a registered Convertible ($Woggle) instance.
    String test="\n\n\n\t\t "+
    "// ignore this ,a [ \" \n"+
    "/* and this \n" +
    "/* and * // this \n" +
    "*/" +
    "{ "+
    "\"onehundred\" : 100 ,"+
    "\"small\":-0.2,"+
    "\"name\" : \"fred\" ," +
    "\"empty\" : {} ," +
    "\"map\" : {\"a\":-1.0e2} ," +
    "\"array\" : [\"a\",-1.0e2,[],null,true,false] ," +
    "\"w0\":{\"class\":\"org.eclipse.jetty.util.ajax.JSONTest$Woggle\",\"name\":\"woggle0\",\"nested\":{\"class\":\"org.eclipse.jetty.util.ajax.JSONTest$Woggle\",\"name\":\"woggle1\",\"nested\":null,\"number\":-101},\"number\":100}," +
    "\"NaN\": NaN," +
    "\"undefined\": undefined," +
    "}";

    @BeforeClass
    public static void setUp() throws Exception
    {
        JSON.registerConvertor(Gadget.class,new JSONObjectConvertor(false));
    }

    /** Serializes maps, arrays, literals and convertible beans to JSON text. */
    @Test
    public void testToString()
    {
        HashMap map = new HashMap();
        HashMap obj6 = new HashMap();
        HashMap obj7 = new HashMap();

        Woggle w0 = new Woggle();
        Woggle w1 = new Woggle();

        w0.name="woggle0";
        w0.nested=w1;
        w0.number=100;
        w1.name="woggle1";
        w1.nested=null;
        w1.number=-101;

        map.put("n1",null);
        map.put("n2",Integer.valueOf(2));
        map.put("n3",Double.valueOf(-0.00000000003));
        map.put("n4","4\n\r\t\"4");
        map.put("n5",new Object[]{"a",Character.valueOf('b'),Integer.valueOf(3),new String[]{},null,Boolean.TRUE,Boolean.FALSE});
        map.put("n6",obj6);
        map.put("n7",obj7);
        map.put("n8",new int[]{1,2,3,4});
        map.put("n9",new JSON.Literal("[{}, [], {}]")); // literal passes through unescaped
        map.put("w0",w0);

        obj7.put("x","value");

        String s = JSON.toString(map);
        assertTrue(s.indexOf("\"n1\":null")>=0);
        assertTrue(s.indexOf("\"n2\":2")>=0);
        assertTrue(s.indexOf("\"n3\":-3.0E-11")>=0);
        assertTrue(s.indexOf("\"n4\":\"4\\n")>=0);
        assertTrue(s.indexOf("\"n5\":[\"a\",\"b\",")>=0);
        assertTrue(s.indexOf("\"n6\":{}")>=0);
        assertTrue(s.indexOf("\"n7\":{\"x\":\"value\"}")>=0);
        assertTrue(s.indexOf("\"n8\":[1,2,3,4]")>=0);
        assertTrue(s.indexOf("\"n9\":[{}, [], {}]")>=0);
        assertTrue(s.indexOf("\"w0\":{\"class\":\"org.eclipse.jetty.util.ajax.JSONTest$Woggle\",\"name\":\"woggle0\",\"nested\":{\"class\":\"org.eclipse.jetty.util.ajax.JSONTest$Woggle\",\"name\":\"woggle1\",\"nested\":null,\"number\":-101},\"number\":100}")>=0);

        Gadget gadget = new Gadget();
        gadget.setShields(42);
        gadget.setWoggles(new Woggle[]{w0,w1});

        s = JSON.toString(new Gadget[]{gadget});

        assertTrue(s.startsWith("["));
        assertTrue(s.indexOf("\"modulated\":false")>=0);
        assertTrue(s.indexOf("\"shields\":42")>=0);
        assertTrue(s.indexOf("\"name\":\"woggle0\"")>=0);
        assertTrue(s.indexOf("\"name\":\"woggle1\"")>=0);
    }

    /* ------------------------------------------------------------ */
    /** Parses the shared fixture; NaN/undefined map to null values. */
    @Test
    public void testParse()
    {
        Map map = (Map)JSON.parse(test);
        assertEquals(Long.valueOf(100),map.get("onehundred"));
        assertEquals("fred",map.get("name"));
        assertEquals(-0.2,map.get("small"));
        assertTrue(map.get("array").getClass().isArray());
        assertTrue(map.get("w0") instanceof Woggle);
        assertTrue(((Woggle)map.get("w0")).nested instanceof Woggle);
        assertEquals(-101,((Woggle)((Woggle)map.get("w0")).nested).number);
        assertTrue(map.containsKey("NaN"));
        assertNull(map.get("NaN"));
        assertTrue(map.containsKey("undefined"));
        assertNull(map.get("undefined"));

        test="{\"data\":{\"source\":\"15831407eqdaawf7\",\"widgetId\":\"Magnet_8\"},\"channel\":\"/magnets/moveStart\",\"connectionId\":null,\"clientId\":\"15831407eqdaawf7\"}";
        map = (Map)JSON.parse(test);
    }

    /* ------------------------------------------------------------ */
    /** Same as {@link #testParse()} but via the Reader entry point. */
    @Test
    public void testParseReader() throws Exception
    {
        Map map = (Map)JSON.parse(new StringReader(test));

        assertEquals(Long.valueOf(100),map.get("onehundred"));
        assertEquals("fred",map.get("name"));
        assertTrue(map.get("array").getClass().isArray());
        assertTrue(map.get("w0") instanceof Woggle);
        assertTrue(((Woggle)map.get("w0")).nested instanceof Woggle);

        test="{\"data\":{\"source\":\"15831407eqdaawf7\",\"widgetId\":\"Magnet_8\"},\"channel\":\"/magnets/moveStart\",\"connectionId\":null,\"clientId\":\"15831407eqdaawf7\"}";
        map = (Map)JSON.parse(test);
    }

    /* ------------------------------------------------------------ */
    /**
     * An unterminated block comment hides the whole object unless
     * stripOuterComment parsing is requested.
     */
    @Test
    public void testStripComment()
    {
        String test="\n\n\n\t\t "+
        "// ignore this ,a [ \" \n"+
        "/* "+
        "{ "+
        "\"onehundred\" : 100 ,"+
        "\"name\" : \"fred\" ," +
        "\"empty\" : {} ," +
        "\"map\" : {\"a\":-1.0e2} ," +
        "\"array\" : [\"a\",-1.0e2,[],null,true,false] ," +
        "} */";

        Object o = JSON.parse(test,false);
        assertNull(o);
        o = JSON.parse(test,true);
        assertTrue(o instanceof Map);
        assertEquals("fred",((Map)o).get("name"));
    }

    /* ------------------------------------------------------------ */
    /** All standard JSON escapes (plus \\uXXXX) round-trip on parse. */
    @Test
    public void testQuote()
    {
        String test="\"abc123|\\\"|\\\\|\\/|\\b|\\f|\\n|\\r|\\t|\\uaaaa|\"";

        String result = (String)JSON.parse(test,false);

        assertEquals("abc123|\"|\\|/|\b|\f|\n|\r|\t|\uaaaa|",result);
    }

    /* ------------------------------------------------------------ */
    /** Large decimals are parsed back as Double, not BigDecimal. */
    @Test
    public void testBigDecimal()
    {
        Object obj = JSON.parse("1.0E7");
        assertTrue(obj instanceof Double);

        BigDecimal bd = BigDecimal.valueOf(10000000d);
        String string = JSON.toString(new Object[]{bd});
        obj = Array.get(JSON.parse(string),0);
        assertTrue(obj instanceof Double);
    }

    /* ------------------------------------------------------------ */
    /** A NUL character must not break stringification. */
    @Test
    public void testZeroByte()
    {
        String withzero="\u0000";
        JSON.toString(withzero);
    }

    /* ------------------------------------------------------------ */
    /** Simple bean serialized via the registered JSONObjectConvertor. */
    public static class Gadget
    {
        private boolean modulated;
        private long shields;
        private Woggle[] woggles;

        /* ------------------------------------------------------------ */
        /**
         * @return the modulated
         */
        public boolean isModulated()
        {
            return modulated;
        }

        /* ------------------------------------------------------------ */
        /**
         * @param modulated the modulated to set
         */
        public void setModulated(boolean modulated)
        {
            this.modulated=modulated;
        }

        /* ------------------------------------------------------------ */
        /**
         * @return the shields
         */
        public long getShields()
        {
            return shields;
        }

        /* ------------------------------------------------------------ */
        /**
         * @param shields the shields to set
         */
        public void setShields(long shields)
        {
            this.shields=shields;
        }

        /* ------------------------------------------------------------ */
        /**
         * @return the woggles
         */
        public Woggle[] getWoggles()
        {
            return woggles;
        }

        /* ------------------------------------------------------------ */
        /**
         * @param woggles the woggles to set
         */
        public void setWoggles(Woggle[] woggles)
        {
            this.woggles=woggles;
        }
    }

    /* ------------------------------------------------------------ */
    /** Date and object convertors, in several formats/locales. */
    @Test
    public void testConvertor()
    {
        // test case#1 - force timezone to GMT
        JSON json = new JSON();
        json.addConvertor(Date.class, new JSONDateConvertor("MM/dd/yyyy HH:mm:ss zzz", TimeZone.getTimeZone("GMT"),false));
        json.addConvertor(Object.class,new JSONObjectConvertor());

        Woggle w0 = new Woggle();
        Gizmo g0 = new Gizmo();

        w0.name="woggle0";
        w0.nested=g0;
        w0.number=100;
        g0.name="woggle1";
        g0.nested=null;
        g0.number=-101;
        g0.tested=true;

        HashMap map = new HashMap();
        Date dummyDate = new Date(1);
        map.put("date", dummyDate);
        map.put("w0",w0);

        StringBuffer buf = new StringBuffer();
        json.append(buf,map);
        String js=buf.toString();

        assertTrue(js.indexOf("\"date\":\"01/01/1970 00:00:00 GMT\"")>=0);
        assertTrue(js.indexOf("org.eclipse.jetty.util.ajax.JSONTest$Woggle")>=0);
        assertTrue(js.indexOf("org.eclipse.jetty.util.ajax.JSONTest$Gizmo")<0);
        assertTrue(js.indexOf("\"tested\":true")>=0);

        // test case#3 - Japanese locale date format
        TimeZone tzone = TimeZone.getTimeZone("JST");
        String tzone3Letter = tzone.getDisplayName(false, TimeZone.SHORT);
        String format = "EEE MMMMM dd HH:mm:ss zzz yyyy";

        // (the previous null-check on this freshly constructed Locale was
        // dead code — a constructor result is never null)
        Locale l = new Locale("ja", "JP");
        json.addConvertor(Date.class, new JSONDateConvertor(format, tzone, false, l));
        buf = new StringBuffer();
        json.append(buf,map);
        js=buf.toString();
        //assertTrue(js.indexOf("\"date\":\"\u6728 1\u6708 01 09:00:00 JST 1970\"")>=0);
        assertTrue(js.indexOf(" 01 09:00:00 JST 1970\"")>=0);
        assertTrue(js.indexOf("org.eclipse.jetty.util.ajax.JSONTest$Woggle")>=0);
        assertTrue(js.indexOf("org.eclipse.jetty.util.ajax.JSONTest$Gizmo")<0);
        assertTrue(js.indexOf("\"tested\":true")>=0);

        // test case#4 - convertor that also reads dates back
        json.addConvertor(Date.class,new JSONDateConvertor(true));
        w0.nested=null;
        buf = new StringBuffer();
        json.append(buf,map);
        js=buf.toString();
        assertTrue(js.indexOf("\"date\":\"Thu Jan 01 00:00:00 GMT 1970\"")<0);
        assertTrue(js.indexOf("org.eclipse.jetty.util.ajax.JSONTest$Woggle")>=0);
        assertTrue(js.indexOf("org.eclipse.jetty.util.ajax.JSONTest$Gizmo")<0);

        map=(HashMap)json.parse(new JSON.StringSource(js));

        assertTrue(map.get("date") instanceof Date);
        assertTrue(map.get("w0") instanceof Woggle);
    }

    enum Color { Red, Green, Blue };

    /** Enum convertor: names vs. enum instances on parse. */
    @Test
    public void testEnumConvertor()
    {
        JSON json = new JSON();
        Locale l = new Locale("en", "US");
        json.addConvertor(Date.class,new JSONDateConvertor(DateCache.DEFAULT_FORMAT,TimeZone.getTimeZone("GMT"),false,l));
        json.addConvertor(Enum.class,new JSONEnumConvertor(false));
        json.addConvertor(Object.class,new JSONObjectConvertor());

        Woggle w0 = new Woggle();
        Gizmo g0 = new Gizmo();

        w0.name="woggle0";
        w0.nested=g0;
        w0.number=100;
        w0.other=Color.Blue;
        g0.name="woggle1";
        g0.nested=null;
        g0.number=-101;
        g0.tested=true;
        g0.other=Color.Green;

        HashMap map = new HashMap();
        map.put("date",new Date(1));
        map.put("w0",w0);
        map.put("g0",g0);

        StringBuffer buf = new StringBuffer();
        json.append((Appendable)buf,map);
        String js=buf.toString();
        assertTrue(js.indexOf("\"date\":\"Thu Jan 01 00:00:00 GMT 1970\"")>=0);
        assertTrue(js.indexOf("org.eclipse.jetty.util.ajax.JSONTest$Woggle")>=0);
        assertTrue(js.indexOf("org.eclipse.jetty.util.ajax.JSONTest$Gizmo")<0);
        assertTrue(js.indexOf("\"tested\":true")>=0);
        assertTrue(js.indexOf("\"Green\"")>=0);
        // Woggle is Convertible and does not emit "other", so Blue is absent
        assertTrue(js.indexOf("\"Blue\"")<0);

        json.addConvertor(Date.class,new JSONDateConvertor(DateCache.DEFAULT_FORMAT,TimeZone.getTimeZone("GMT"),true,l));
        json.addConvertor(Enum.class,new JSONEnumConvertor(false));
        w0.nested=null;
        buf = new StringBuffer();
        json.append((Appendable)buf,map);
        js=buf.toString();
        assertTrue(js.indexOf("\"date\":\"Thu Jan 01 00:00:00 GMT 1970\"")<0);
        assertTrue(js.indexOf("org.eclipse.jetty.util.ajax.JSONTest$Woggle")>=0);
        assertTrue(js.indexOf("org.eclipse.jetty.util.ajax.JSONTest$Gizmo")<0);

        // fromJSON=false: enums come back as their String name
        Map map2=(HashMap)json.parse(new JSON.StringSource(js));

        assertTrue(map2.get("date") instanceof Date);
        assertTrue(map2.get("w0") instanceof Woggle);
        assertEquals(null, ((Woggle)map2.get("w0")).getOther() );
        assertEquals(Color.Green.toString(), ((Map)map2.get("g0")).get("other"));

        json.addConvertor(Date.class,new JSONDateConvertor(DateCache.DEFAULT_FORMAT,TimeZone.getTimeZone("GMT"),true,l));
        json.addConvertor(Enum.class,new JSONEnumConvertor(true));
        buf = new StringBuffer();
        json.append((Appendable)buf,map);
        js=buf.toString();
        // fromJSON=true: enums come back as actual enum instances
        map2=(HashMap)json.parse(new JSON.StringSource(js));
        assertTrue(map2.get("date") instanceof Date);
        assertTrue(map2.get("w0") instanceof Woggle);
        assertEquals(null, ((Woggle)map2.get("w0")).getOther() );
        Object o=((Map)map2.get("g0")).get("other");
        assertEquals(Color.Green, o);
    }

    /* ------------------------------------------------------------ */
    /** Plain bean with getters only, serialized via JSONObjectConvertor. */
    public static class Gizmo
    {
        String name;
        Gizmo nested;
        long number;
        boolean tested;
        Object other;

        public String getName()
        {
            return name;
        }
        public Gizmo getNested()
        {
            return nested;
        }
        public long getNumber()
        {
            return number;
        }
        public boolean isTested()
        {
            return tested;
        }
        public Object getOther()
        {
            return other;
        }
    }

    /* ------------------------------------------------------------ */
    /**
     * Gizmo subclass that controls its own JSON form via
     * {@link JSON.Convertible}; note it deliberately omits "other".
     */
    public static class Woggle extends Gizmo implements JSON.Convertible
    {
        public Woggle()
        {
        }

        public void fromJSON(Map object)
        {
            name=(String)object.get("name");
            nested=(Gizmo)object.get("nested");
            number=((Number)object.get("number")).intValue();
        }

        public void toJSON(Output out)
        {
            out.addClass(Woggle.class);
            out.add("name",name);
            out.add("nested",nested);
            out.add("number",number);
        }

        public String toString()
        {
            return name+"<<"+nested+">>"+number;
        }
    }
}
package org.apache.lucene.index; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.IOException; import java.util.Comparator; import java.util.Map; import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; import org.apache.lucene.analysis.tokenattributes.PayloadAttribute; import org.apache.lucene.codecs.FieldsConsumer; import org.apache.lucene.codecs.PostingsConsumer; import org.apache.lucene.codecs.TermStats; import org.apache.lucene.codecs.TermsConsumer; import org.apache.lucene.index.FieldInfo.IndexOptions; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.FixedBitSet; import org.apache.lucene.util.RamUsageEstimator; // TODO: break into separate freq and prox writers as // codecs; make separate container (tii/tis/skip/*) that can // be configured as any number of files 1..N final class FreqProxTermsWriterPerField extends TermsHashPerField { private FreqProxPostingsArray freqProxPostingsArray; final boolean hasFreq; final boolean hasProx; final boolean hasOffsets; PayloadAttribute payloadAttribute; OffsetAttribute offsetAttribute; /** Set to true if any token had a payload in the current * segment. 
*/ boolean sawPayloads; public FreqProxTermsWriterPerField(FieldInvertState invertState, TermsHash termsHash, FieldInfo fieldInfo, TermsHashPerField nextPerField) { super(fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 ? 2 : 1, invertState, termsHash, nextPerField, fieldInfo); IndexOptions indexOptions = fieldInfo.getIndexOptions(); assert indexOptions != null; hasFreq = indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS) >= 0; hasProx = indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0; hasOffsets = indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0; } @Override void finish() throws IOException { super.finish(); if (sawPayloads) { fieldInfo.setStorePayloads(); } } @Override boolean start(IndexableField f, boolean first) { super.start(f, first); payloadAttribute = fieldState.payloadAttribute; offsetAttribute = fieldState.offsetAttribute; return true; } void writeProx(int termID, int proxCode) { if (payloadAttribute == null) { writeVInt(1, proxCode<<1); } else { BytesRef payload = payloadAttribute.getPayload(); if (payload != null && payload.length > 0) { writeVInt(1, (proxCode<<1)|1); writeVInt(1, payload.length); writeBytes(1, payload.bytes, payload.offset, payload.length); sawPayloads = true; } else { writeVInt(1, proxCode<<1); } } assert postingsArray == freqProxPostingsArray; freqProxPostingsArray.lastPositions[termID] = fieldState.position; } void writeOffsets(int termID, int offsetAccum) { final int startOffset = offsetAccum + offsetAttribute.startOffset(); final int endOffset = offsetAccum + offsetAttribute.endOffset(); assert startOffset - freqProxPostingsArray.lastOffsets[termID] >= 0; writeVInt(1, startOffset - freqProxPostingsArray.lastOffsets[termID]); writeVInt(1, endOffset - startOffset); freqProxPostingsArray.lastOffsets[termID] = startOffset; } @Override void newTerm(final int termID) { // First time we're seeing this term since the last // flush assert 
docState.testPoint("FreqProxTermsWriterPerField.newTerm start"); final FreqProxPostingsArray postings = freqProxPostingsArray; postings.lastDocIDs[termID] = docState.docID; if (!hasFreq) { assert postings.termFreqs == null; postings.lastDocCodes[termID] = docState.docID; } else { postings.lastDocCodes[termID] = docState.docID << 1; postings.termFreqs[termID] = 1; if (hasProx) { writeProx(termID, fieldState.position); if (hasOffsets) { writeOffsets(termID, fieldState.offset); } } else { assert !hasOffsets; } } fieldState.maxTermFrequency = Math.max(1, fieldState.maxTermFrequency); fieldState.uniqueTermCount++; } @Override void addTerm(final int termID) { assert docState.testPoint("FreqProxTermsWriterPerField.addTerm start"); final FreqProxPostingsArray postings = freqProxPostingsArray; assert !hasFreq || postings.termFreqs[termID] > 0; if (!hasFreq) { assert postings.termFreqs == null; if (docState.docID != postings.lastDocIDs[termID]) { // New document; now encode docCode for previous doc: assert docState.docID > postings.lastDocIDs[termID]; writeVInt(0, postings.lastDocCodes[termID]); postings.lastDocCodes[termID] = docState.docID - postings.lastDocIDs[termID]; postings.lastDocIDs[termID] = docState.docID; fieldState.uniqueTermCount++; } } else if (docState.docID != postings.lastDocIDs[termID]) { assert docState.docID > postings.lastDocIDs[termID]:"id: "+docState.docID + " postings ID: "+ postings.lastDocIDs[termID] + " termID: "+termID; // Term not yet seen in the current doc but previously // seen in other doc(s) since the last flush // Now that we know doc freq for previous doc, // write it & lastDocCode if (1 == postings.termFreqs[termID]) { writeVInt(0, postings.lastDocCodes[termID]|1); } else { writeVInt(0, postings.lastDocCodes[termID]); writeVInt(0, postings.termFreqs[termID]); } // Init freq for the current document postings.termFreqs[termID] = 1; fieldState.maxTermFrequency = Math.max(1, fieldState.maxTermFrequency); postings.lastDocCodes[termID] = 
(docState.docID - postings.lastDocIDs[termID]) << 1; postings.lastDocIDs[termID] = docState.docID; if (hasProx) { writeProx(termID, fieldState.position); if (hasOffsets) { postings.lastOffsets[termID] = 0; writeOffsets(termID, fieldState.offset); } } else { assert !hasOffsets; } fieldState.uniqueTermCount++; } else { fieldState.maxTermFrequency = Math.max(fieldState.maxTermFrequency, ++postings.termFreqs[termID]); if (hasProx) { writeProx(termID, fieldState.position-postings.lastPositions[termID]); if (hasOffsets) { writeOffsets(termID, fieldState.offset); } } } } @Override public void newPostingsArray() { freqProxPostingsArray = (FreqProxPostingsArray) postingsArray; } @Override ParallelPostingsArray createPostingsArray(int size) { IndexOptions indexOptions = fieldInfo.getIndexOptions(); assert indexOptions != null; boolean hasFreq = indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS) >= 0; boolean hasProx = indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0; boolean hasOffsets = indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0; return new FreqProxPostingsArray(size, hasFreq, hasProx, hasOffsets); } static final class FreqProxPostingsArray extends ParallelPostingsArray { public FreqProxPostingsArray(int size, boolean writeFreqs, boolean writeProx, boolean writeOffsets) { super(size); if (writeFreqs) { termFreqs = new int[size]; } lastDocIDs = new int[size]; lastDocCodes = new int[size]; if (writeProx) { lastPositions = new int[size]; if (writeOffsets) { lastOffsets = new int[size]; } } else { assert !writeOffsets; } //System.out.println("PA init freqs=" + writeFreqs + " pos=" + writeProx + " offs=" + writeOffsets); } int termFreqs[]; // # times this term occurs in the current doc int lastDocIDs[]; // Last docID where this term occurred int lastDocCodes[]; // Code for prior doc int lastPositions[]; // Last position where this term occurred int lastOffsets[]; // Last endOffset where this term occurred @Override 
    ParallelPostingsArray newInstance(int size) {
        // Preserve which optional sub-arrays were enabled by probing for null.
        return new FreqProxPostingsArray(size, termFreqs != null, lastPositions != null, lastOffsets != null);
    }

    @Override
    void copyTo(ParallelPostingsArray toArray, int numToCopy) {
        assert toArray instanceof FreqProxPostingsArray;
        FreqProxPostingsArray to = (FreqProxPostingsArray) toArray;

        super.copyTo(toArray, numToCopy);

        System.arraycopy(lastDocIDs, 0, to.lastDocIDs, 0, numToCopy);
        System.arraycopy(lastDocCodes, 0, to.lastDocCodes, 0, numToCopy);
        if (lastPositions != null) {
            assert to.lastPositions != null;
            System.arraycopy(lastPositions, 0, to.lastPositions, 0, numToCopy);
        }
        if (lastOffsets != null) {
            assert to.lastOffsets != null;
            System.arraycopy(lastOffsets, 0, to.lastOffsets, 0, numToCopy);
        }
        if (termFreqs != null) {
            assert to.termFreqs != null;
            System.arraycopy(termFreqs, 0, to.termFreqs, 0, numToCopy);
        }
    }

    /** Bytes of per-term RAM used: base + 2 mandatory ints + one int per enabled sub-array. */
    @Override
    int bytesPerPosting() {
        int bytes = ParallelPostingsArray.BYTES_PER_POSTING + 2 * RamUsageEstimator.NUM_BYTES_INT;
        if (lastPositions != null) {
            bytes += RamUsageEstimator.NUM_BYTES_INT;
        }
        if (lastOffsets != null) {
            bytes += RamUsageEstimator.NUM_BYTES_INT;
        }
        if (termFreqs != null) {
            bytes += RamUsageEstimator.NUM_BYTES_INT;
        }
        return bytes;
    }
}

/* Walk through all unique text tokens (Posting
 * instances) found in this field and serialize them
 * into a single RAM segment. */
void flush(String fieldName, FieldsConsumer consumer, final SegmentWriteState state) throws IOException {
    BytesRefBuilder payload = null;

    if (!fieldInfo.isIndexed()) {
        return; // nothing to flush, don't bother the codec with the unindexed field
    }

    final TermsConsumer termsConsumer = consumer.addField(fieldInfo);
    final Comparator<BytesRef> termComp = termsConsumer.getComparator();

    // CONFUSING: this.indexOptions holds the index options
    // that were current when we first saw this field. But
    // it's possible this has changed, eg when other
    // documents are indexed that cause a "downgrade" of the
    // IndexOptions. So we must decode the in-RAM buffer
    // according to this.indexOptions, but then write the
    // new segment to the directory according to
    // currentFieldIndexOptions:
    final IndexOptions currentFieldIndexOptions = fieldInfo.getIndexOptions();
    assert currentFieldIndexOptions != null;

    final boolean writeTermFreq = currentFieldIndexOptions.compareTo(IndexOptions.DOCS_AND_FREQS) >= 0;
    final boolean writePositions = currentFieldIndexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0;
    final boolean writeOffsets = currentFieldIndexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0;

    final boolean readTermFreq = this.hasFreq;
    final boolean readPositions = this.hasProx;
    final boolean readOffsets = this.hasOffsets;

    //System.out.println("flush readTF=" + readTermFreq + " readPos=" + readPositions + " readOffs=" + readOffsets);

    // Make sure FieldInfo.update is working correctly!:
    assert !writeTermFreq || readTermFreq;
    assert !writePositions || readPositions;
    assert !writeOffsets || readOffsets;
    assert !writeOffsets || writePositions;

    final Map<Term,Integer> segDeletes;
    if (state.segUpdates != null && state.segUpdates.terms.size() > 0) {
        segDeletes = state.segUpdates.terms;
    } else {
        segDeletes = null;
    }

    final int[] termIDs = sortPostings(termComp);
    final int numTerms = bytesHash.size();
    final BytesRef text = new BytesRef();
    final FreqProxPostingsArray postings = freqProxPostingsArray;
    final ByteSliceReader freq = new ByteSliceReader();
    final ByteSliceReader prox = new ByteSliceReader();

    FixedBitSet visitedDocs = new FixedBitSet(state.segmentInfo.getDocCount());
    long sumTotalTermFreq = 0;
    long sumDocFreq = 0;

    Term protoTerm = new Term(fieldName);
    for (int i = 0; i < numTerms; i++) {
        final int termID = termIDs[i];
        //System.out.println("term=" + termID);
        // Get BytesRef
        final int textStart = postings.textStarts[termID];
        bytePool.setBytesRef(text, textStart);

        // Stream 0 holds doc/freq codes; stream 1 (when present) holds prox/offset codes.
        initReader(freq, termID, 0);
        if (readPositions || readOffsets) {
            initReader(prox, termID, 1);
        }

        // TODO: really TermsHashPerField should take over most
        // of this loop, including merge sort of terms from
        // multiple threads and interacting with the
        // TermsConsumer, only calling out to us (passing us the
        // DocsConsumer) to handle delivery of docs/positions

        final PostingsConsumer postingsConsumer = termsConsumer.startTerm(text);

        final int delDocLimit;
        if (segDeletes != null) {
            protoTerm.bytes = text;
            final Integer docIDUpto = segDeletes.get(protoTerm);
            if (docIDUpto != null) {
                delDocLimit = docIDUpto;
            } else {
                delDocLimit = 0;
            }
        } else {
            delDocLimit = 0;
        }

        // Now termStates has numToMerge FieldMergeStates
        // which all share the same term. Now we must
        // interleave the docID streams.
        int docFreq = 0;
        long totalTermFreq = 0;
        int docID = 0;

        while(true) {
            //System.out.println("  cycle");
            final int termFreq;
            if (freq.eof()) {
                if (postings.lastDocCodes[termID] != -1) {
                    // Return last doc
                    docID = postings.lastDocIDs[termID];
                    if (readTermFreq) {
                        termFreq = postings.termFreqs[termID];
                    } else {
                        termFreq = -1;
                    }
                    // -1 marks "final pending doc already delivered".
                    postings.lastDocCodes[termID] = -1;
                } else {
                    // EOF
                    break;
                }
            } else {
                final int code = freq.readVInt();
                if (!readTermFreq) {
                    docID += code;
                    termFreq = -1;
                } else {
                    // Low bit of the doc delta flags freq==1; otherwise freq follows as a VInt.
                    docID += code >>> 1;
                    if ((code & 1) != 0) {
                        termFreq = 1;
                    } else {
                        termFreq = freq.readVInt();
                    }
                }

                assert docID != postings.lastDocIDs[termID];
            }

            docFreq++;
            assert docID < state.segmentInfo.getDocCount(): "doc=" + docID + " maxDoc=" + state.segmentInfo.getDocCount();

            // NOTE: we could check here if the docID was
            // deleted, and skip it.  However, this is somewhat
            // dangerous because it can yield non-deterministic
            // behavior since we may see the docID before we see
            // the term that caused it to be deleted.  This
            // would mean some (but not all) of its postings may
            // make it into the index, which'd alter the docFreq
            // for those terms.  We could fix this by doing two
            // passes, ie first sweep marks all del docs, and
            // 2nd sweep does the real flush, but I suspect
            // that'd add too much time to flush.
            visitedDocs.set(docID);
            postingsConsumer.startDoc(docID, writeTermFreq ? termFreq : -1);
            if (docID < delDocLimit) {
                // Mark it deleted.  TODO: we could also skip
                // writing its postings; this would be
                // deterministic (just for this Term's docs).

                // TODO: can we do this reach-around in a cleaner way????
                if (state.liveDocs == null) {
                    state.liveDocs = docState.docWriter.codec.liveDocsFormat().newLiveDocs(state.segmentInfo.getDocCount());
                }
                if (state.liveDocs.get(docID)) {
                    state.delCountOnFlush++;
                    state.liveDocs.clear(docID);
                }
            }

            totalTermFreq += termFreq;

            // Carefully copy over the prox + payload info,
            // changing the format to match Lucene's segment
            // format.

            if (readPositions || readOffsets) {
                // we did record positions (& maybe payload) and/or offsets
                int position = 0;
                int offset = 0;
                for (int j = 0; j < termFreq; j++) {
                    final BytesRef thisPayload;

                    if (readPositions) {
                        // Low bit of the position delta flags a trailing payload.
                        final int code = prox.readVInt();
                        position += code >>> 1;

                        if ((code & 1) != 0) {
                            // This position has a payload
                            final int payloadLength = prox.readVInt();

                            if (payload == null) {
                                payload = new BytesRefBuilder();
                            }
                            payload.grow(payloadLength);

                            prox.readBytes(payload.bytes(), 0, payloadLength);
                            payload.setLength(payloadLength);
                            thisPayload = payload.get();
                        } else {
                            thisPayload = null;
                        }

                        if (readOffsets) {
                            // Offsets are stored as (startDelta, length) deltas.
                            final int startOffset = offset + prox.readVInt();
                            final int endOffset = startOffset + prox.readVInt();
                            if (writePositions) {
                                if (writeOffsets) {
                                    assert startOffset >=0 && endOffset >= startOffset : "startOffset=" + startOffset + ",endOffset=" + endOffset + ",offset=" + offset;
                                    postingsConsumer.addPosition(position, thisPayload, startOffset, endOffset);
                                } else {
                                    postingsConsumer.addPosition(position, thisPayload, -1, -1);
                                }
                            }
                            offset = startOffset;
                        } else if (writePositions) {
                            postingsConsumer.addPosition(position, thisPayload, -1, -1);
                        }
                    }
                }
            }
            postingsConsumer.finishDoc();
        }
        termsConsumer.finishTerm(text, new TermStats(docFreq, writeTermFreq ? totalTermFreq : -1));
        sumTotalTermFreq += totalTermFreq;
        sumDocFreq += docFreq;
    }

    termsConsumer.finish(writeTermFreq ? sumTotalTermFreq : -1, sumDocFreq, visitedDocs.cardinality());
}
}
/* * Copyright (c) 2017 Strapdata (http://www.strapdata.com) * Contains some code from Elasticsearch (http://www.elastic.co) * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.elassandra.discovery; import com.fasterxml.jackson.core.JsonGenerationException; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.JsonMappingException; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.MapDifference; import com.google.common.collect.Maps; import com.google.common.net.InetAddresses; import org.apache.cassandra.config.DatabaseDescriptor; import org.apache.cassandra.cql3.UntypedResultSet; import org.apache.cassandra.db.Mutation; import org.apache.cassandra.db.SystemKeyspace; import org.apache.cassandra.exceptions.ConfigurationException; import org.apache.cassandra.exceptions.UnavailableException; import org.apache.cassandra.exceptions.WriteTimeoutException; import org.apache.cassandra.gms.*; import org.apache.cassandra.service.MigrationManager; import org.apache.cassandra.service.StorageService; import org.apache.cassandra.transport.Event; import org.apache.cassandra.utils.FBUtilities; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elassandra.ConcurrentMetaDataUpdateException; import org.elassandra.PaxosMetaDataUpdateException; import 
org.elassandra.gateway.CassandraGatewayService; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.transport.NoNodeAvailableException; import org.elasticsearch.cluster.*; import org.elasticsearch.cluster.ClusterStateTaskConfig.SchemaUpdate; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNode.DiscoveryNodeStatus; import org.elasticsearch.cluster.node.DiscoveryNode.Role; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.service.ClusterApplier; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.cluster.service.MasterService; import org.elasticsearch.common.Priority; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.AckClusterStatePublishResponseHandler; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.DiscoverySettings; import 
org.elasticsearch.discovery.DiscoveryStats; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardNotFoundException; import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.net.InetAddress; import java.util.*; import java.util.Map.Entry; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.Future; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; import static org.apache.cassandra.cql3.QueryProcessor.executeInternal; import static org.apache.cassandra.service.GZipStringCompressor.compress; import static org.apache.cassandra.service.GZipStringCompressor.uncompressIfGZipped; import static org.elasticsearch.gateway.GatewayService.STATE_NOT_RECOVERED_BLOCK; /** * https://www.elastic.co/guide/en/elasticsearch/reference/6.3/modules-discovery-zen.html * * Discover the cluster topology from cassandra snitch and settings, mappings, blocks from the elastic_admin keyspace. * Publishing is just a notification to refresh in memory configuration from the cassandra table. 
* @author vroyer * */ public class CassandraDiscovery extends AbstractLifecycleComponent implements Discovery, IEndpointStateChangeSubscriber, AppliedClusterStateAction.AppliedClusterStateListener { final Logger logger = LogManager.getLogger(CassandraDiscovery.class); private static final ImmutableSet CASSANDRA_ROLES = ImmutableSet.of(Role.MASTER,Role.DATA); private final TransportService transportService; private final Settings settings; private final MasterService masterService; private final ClusterService clusterService; private final ClusterSettings clusterSettings; private final ClusterApplier clusterApplier; private final AtomicReference<ClusterState> committedState; // last committed cluster state private final ClusterName clusterName; private final DiscoverySettings discoverySettings; private final NamedWriteableRegistry namedWriteableRegistry; private final PendingClusterStatesQueue pendingStatesQueue; private final AppliedClusterStateAction appliedClusterStateAction; private final AtomicReference<AckClusterStatePublishResponseHandler> handlerRef = new AtomicReference<>(); private final Object stateMutex = new Object(); private final GossipCluster gossipCluster; private final InetAddress localAddress; private final String localDc; private final RoutingTableUpdateTaskExecutor routingTableUpdateTaskExecutor; /** * When searchEnabled=true, local shards are visible for routing, otherwise, local shards are seen as UNASSIGNED. * This allows to gracefully shutdown or start the node for maintenance like an offline repair or rebuild_index. */ private final AtomicBoolean searchEnabled = new AtomicBoolean(false); /** * Compress the gossip application state X1 */ private final boolean gzip = Boolean.parseBoolean(System.getProperty(ClusterService.SETTING_SYSTEM_COMPRESS_INDEXES_IN_GOSSIP, "false")); /** * If autoEnableSearch=true, search is automatically enabled when the node becomes ready to operate, otherwise, searchEnabled should be manually set to true. 
*/ private final AtomicBoolean autoEnableSearch = new AtomicBoolean(System.getProperty("es.auto_enable_search") == null || Boolean.getBoolean("es.auto_enable_search")); public static final Setting<Integer> MAX_PENDING_CLUSTER_STATES_SETTING = Setting.intSetting("discovery.cassandra.publish.max_pending_cluster_states", 1024, 1, Property.NodeScope); public CassandraDiscovery(final Settings settings, final TransportService transportService, final MasterService masterService, final ClusterService clusterService, final ClusterApplier clusterApplier, final ClusterSettings clusterSettings, final NamedWriteableRegistry namedWriteableRegistry) { super(settings); this.settings = settings; this.masterService = masterService; this.clusterApplier = clusterApplier; this.clusterService = clusterService; this.clusterSettings = clusterSettings; this.discoverySettings = new DiscoverySettings(settings, clusterSettings); this.namedWriteableRegistry = namedWriteableRegistry; this.transportService = transportService; this.clusterName = clusterService.getClusterName(); this.committedState = new AtomicReference<>(); this.clusterService.setDiscovery(this); this.masterService.setClusterStateSupplier(() -> committedState.get()); this.masterService.setClusterStatePublisher(this::publish); this.localAddress = FBUtilities.getBroadcastAddress(); this.localDc = DatabaseDescriptor.getEndpointSnitch().getDatacenter(FBUtilities.getBroadcastAddress()); this.gossipCluster = new GossipCluster(); this.pendingStatesQueue = new PendingClusterStatesQueue(logger, MAX_PENDING_CLUSTER_STATES_SETTING.get(settings)); this.appliedClusterStateAction = new AppliedClusterStateAction(settings, transportService, this, discoverySettings); this.routingTableUpdateTaskExecutor = new RoutingTableUpdateTaskExecutor(); } public class GossipNode { boolean removed = false; DiscoveryNode discoveryNode; Map<String,ShardRoutingState> shardRoutingStateMap; public GossipNode(DiscoveryNode discoveryNode, 
Map<String,ShardRoutingState> shardRoutingStateMap) { this.discoveryNode = discoveryNode; this.shardRoutingStateMap = shardRoutingStateMap; } public GossipNode(DiscoveryNode discoveryNode) { this(discoveryNode, new HashMap<>()); } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null || getClass() != obj.getClass()) { return false; } GossipNode other = (GossipNode) obj; return Objects.equals(removed, other.removed) && Objects.equals(discoveryNode, other.discoveryNode) && Objects.equals(this.shardRoutingStateMap, other.shardRoutingStateMap); } @Override public int hashCode() { return Objects.hash(removed, discoveryNode, shardRoutingStateMap); } } public class GossipCluster { private final ConcurrentMap<UUID, GossipNode> remoteMembers = new ConcurrentHashMap<>(); public DiscoveryNodes nodes() { DiscoveryNodes.Builder nodesBuilder = new DiscoveryNodes.Builder() .localNodeId(SystemKeyspace.getLocalHostId().toString()) .masterNodeId(SystemKeyspace.getLocalHostId().toString()) .add(localNode()); for (GossipNode node : remoteMembers.values()) { // filter removed nodes, but keep it to avoid detecting them as new nodes. if (!node.removed) { nodesBuilder.add(node.discoveryNode); } } return nodesBuilder.build(); } public DiscoveryNode getDiscoveryNode(UUID id) { return remoteMembers.containsKey(id) ? remoteMembers.get(id).discoveryNode : null; } public Map<String,ShardRoutingState> getShardRoutingState(UUID id) { return remoteMembers.containsKey(id) ? 
                   remoteMembers.get(id).shardRoutingStateMap : null;
        }

        public boolean contains(UUID id) {
            return remoteMembers.containsKey(id);
        }

        public Collection<GossipNode> remoteMembers() {
            return remoteMembers.values();
        }

        /**
         * Shard 0 routing state of {@code index} on node {@code nodeUuid}.
         * For the local node the state is derived from the live IndexShard;
         * for remote nodes it comes from the gossiped X1 map.
         */
        public ShardRoutingState getShardRoutingState(UUID nodeUuid, org.elasticsearch.index.Index index) {
            if (localNode().uuid().equals(nodeUuid)) {
                if (isSearchEnabled()) {
                    try {
                        IndexShard localIndexShard = clusterService.indexServiceSafe(index).getShardOrNull(0);
                        if (localIndexShard != null && localIndexShard.routingEntry() != null)
                            return localIndexShard.routingEntry().state();

                        // shardRouting not yet created.
                        return ShardRoutingState.INITIALIZING;
                    } catch (IndexNotFoundException e) {
                        // NOTE(review): deliberately swallowed — an unknown index simply
                        // reports UNASSIGNED below.
                    }
                }
                return ShardRoutingState.UNASSIGNED;
            }

            GossipNode gossipNode = remoteMembers.get(nodeUuid);
            if (gossipNode == null)
                return ShardRoutingState.UNASSIGNED;

            ShardRoutingState shardRoutingState = gossipNode.shardRoutingStateMap.get(index.getName());
            return (shardRoutingState == null) ? ShardRoutingState.UNASSIGNED : shardRoutingState;
        }

        /**
         * Marks a member as removed (entry is kept to avoid re-adding it as new) and,
         * if it existed, submits a routing table update.
         */
        public GossipNode remove(final UUID hostId, final String source) {
            GossipNode oldGossipNode = remoteMembers.computeIfPresent(hostId, (k,v) -> { v.removed = true; return v; });
            if (oldGossipNode != null) {
                clusterService.submitStateUpdateTask(source, new RoutingTableUpdateTask(true), routingTableUpdateTaskExecutor, routingTableUpdateTaskExecutor, routingTableUpdateTaskExecutor);
            }
            return oldGossipNode;
        }

        /**
         * Entry point for gossip callbacks: extracts host id, status and the (possibly
         * gzip-compressed) X1 shard-state payload from the endpoint state, then delegates
         * to the private update below. Nodes gossiping a removed/LEFT status are dropped.
         */
        public void update(final InetAddress endpoint, EndpointState epState, String source, boolean allowClusterStateUpdate) {
            if (epState.getApplicationState(ApplicationState.HOST_ID) == null || epState.getApplicationState(ApplicationState.HOST_ID).value == null)
                return;

            UUID hostId = UUID.fromString(epState.getApplicationState(ApplicationState.HOST_ID).value);
            VersionedValue vv = epState.getApplicationState(ApplicationState.STATUS);
            if (vv != null && ("removed".startsWith(vv.value) || "LEFT".startsWith(vv.value))) {
                remove(hostId, "remove-" + endpoint);
                return;
            }

            String x1 = null;
            try {
                x1 = epState.getApplicationState(ApplicationState.X1) == null ? null : uncompressIfGZipped(epState.getApplicationState(ApplicationState.X1).value);
            } catch (IOException e) {
                // Fall back to the raw value when decompression fails.
                logger.warn("Decompression of gossip application state X1 failed, use the value as it : {}", e.getMessage(), e);
                x1 = epState.getApplicationState(ApplicationState.X1).value;
            }
            update(hostId, source, endpoint, getInternalIp(epState), getRpcAddress(epState), discoveryNodeStatus(epState), x1, allowClusterStateUpdate);
        }

        /**
         * Trigger routing table update if node status or x1 changed, or a new ALIVE node appear.
         * Trigger nodes update if node IP or name changed
         */
        private void update(final UUID hostId,
                            final String source,
                            final InetAddress endpoint,
                            final InetAddress internalIp,
                            final InetAddress rpcAddress,
                            final DiscoveryNodeStatus status,
                            final String x1,
                            boolean allowClusterStateUpdate ) {
            if (localNode().getId().equals(hostId)) {
                // ignore GOSSIP update related to our self node.
                logger.debug("Ignoring GOSSIP update for node id={} ip={} because it's mine", hostId, endpoint);
                return;
            } else {
                logger.debug("updating id={} endpoint={} source=[{}] status=[{}] x1=[{}]", hostId, endpoint, source, status, x1);
            }

            final TransportAddress addr = new TransportAddress(Boolean.getBoolean("es.use_internal_address") ? internalIp : rpcAddress, publishPort());

            // Atomically create-or-update the member entry; the lambda decides whether a
            // nodes update and/or a routing table update must be submitted.
            remoteMembers.compute(hostId, (k,gn) -> {
                boolean nodeUpdate = false;
                boolean routingUpdate = false;
                Map<String, ShardRoutingState> x1Map = new HashMap<>();
                // NOTE(review): raw Collections.EMPTY_SET — Collections.emptySet() would be type-safe.
                Set<String> updatedIndices = Collections.EMPTY_SET;

                if (x1 != null && status.isAlive()) {
                    try {
                        x1Map = jsonMapper.readValue(x1, indexShardStateTypeReference);
                    } catch (IOException e) {
                        logger.error("Failed to parse X1 for node [{}] x1={}", hostId, x1);
                    }
                }

                if (gn == null) {
                    // new node
                    ImmutableMap.Builder<String, String> attrs = ImmutableMap.builder();
                    attrs.put("dc", localDc);
                    attrs.put("rack", DatabaseDescriptor.getEndpointSnitch().getRack(endpoint));

                    logger.debug("Add node NEW host_id={} endpoint={} internal_ip={}, rpc_address={}, status={}",
                            hostId, NetworkAddress.format(endpoint),
                            internalIp == null ? null : NetworkAddress.format(internalIp),
                            rpcAddress == null ? null : NetworkAddress.format(rpcAddress), status);
                    gn = new GossipNode(new DiscoveryNode(buildNodeName(endpoint), hostId.toString(), addr, attrs.build(), CASSANDRA_ROLES, Version.CURRENT, status), x1Map);
                    nodeUpdate = true;
                    // Only trigger routing recomputation when the new node is actually alive.
                    routingUpdate = status.isAlive();
                } else {
                    DiscoveryNode dn = gn.discoveryNode;

                    // status changed
                    if (!dn.getStatus().equals(status)) {
                        logger.debug("Update node STATUS host_id={} endpoint={} internal_ip={} rpc_address={}, status={}",
                                hostId, NetworkAddress.format(endpoint),
                                internalIp == null ? null : NetworkAddress.format(internalIp),
                                rpcAddress == null ? null : NetworkAddress.format(rpcAddress), status);
                        dn.status(status);
                        nodeUpdate = true;
                        routingUpdate = true;
                        if (!status.isAlive()) {
                            // node probably down, notify metaDataVersionAckListener..
                            notifyHandler(Gossiper.instance.getEndpointStateForEndpoint(endpoint));
                        }
                    }

                    // Node name or IP changed
                    if (!dn.getName().equals(buildNodeName(endpoint)) || !dn.getInetAddress().equals(addr)) {
                        // update DiscoveryNode IP if endpoint is ALIVE
                        if (status.equals(DiscoveryNodeStatus.ALIVE)) {
                            logger.debug("Update node IP host_id={} endpoint={} internal_ip={}, rpc_address={}, status={}",
                                    hostId, NetworkAddress.format(endpoint),
                                    internalIp == null ? null : NetworkAddress.format(internalIp),
                                    rpcAddress == null ? null : NetworkAddress.format(rpcAddress), status);
                            // DiscoveryNode is immutable wrt address: rebuild it, keep the X1 map.
                            gn = new GossipNode(new DiscoveryNode(buildNodeName(endpoint), hostId.toString(), addr, dn.getAttributes(), CASSANDRA_ROLES, Version.CURRENT, status), gn.shardRoutingStateMap);
                            nodeUpdate = true;
                        } else {
                            logger.debug("Ignoring node DEAD host_id={} endpoint={} internal_ip={}, rpc_address={}, status={}",
                                    hostId, NetworkAddress.format(endpoint),
                                    internalIp == null ? null : NetworkAddress.format(internalIp),
                                    rpcAddress == null ? null : NetworkAddress.format(rpcAddress), status);
                        }
                    }

                    // X1 changed
                    if (!gn.shardRoutingStateMap.equals(x1Map)) {
                        routingUpdate = true;
                        if (nodeUpdate == false && !x1Map.isEmpty()) {
                            // Narrow the routing rebuild to the indices whose state actually changed.
                            MapDifference<String, ShardRoutingState> mapDifference = Maps.difference(x1Map, gn.shardRoutingStateMap);
                            if (!mapDifference.entriesDiffering().isEmpty() || !mapDifference.entriesOnlyOnRight().isEmpty()) {
                                updatedIndices = mapDifference.entriesDiffering().keySet();
                                logger.trace("Updating routing table source=[{}] for indices={}", source, updatedIndices);
                            }
                        }
                    }
                    gn.shardRoutingStateMap = x1Map;

                    if (allowClusterStateUpdate && (nodeUpdate || routingUpdate)) {
                        logger.debug("Updating routing table node source=[{}] nodeUpdate={} routingUpdate={}", source, nodeUpdate, routingUpdate);
                        RoutingTableUpdateTask routingTableUpdateTask = (routingUpdate && !nodeUpdate) ?
                                new RoutingTableUpdateTask(true, updatedIndices) :
                                new RoutingTableUpdateTask(routingUpdate);
                        clusterService.submitStateUpdateTask(source, routingTableUpdateTask, routingTableUpdateTaskExecutor, routingTableUpdateTaskExecutor, routingTableUpdateTaskExecutor);
                    }
                }
                return gn;
            });
        }
    }

    public PendingClusterStatesQueue pendingStatesQueue() {
        return this.pendingStatesQueue;
    }

    /**
     * Node name = formatted endpoint address; falls back to a "nodeAAABBBCCCDDD"
     * name derived from the IPv4 bytes when formatting yields null.
     */
    public static String buildNodeName(InetAddress addr) {
        String hostname = NetworkAddress.format(addr);
        if (hostname != null)
            return hostname;
        return String.format(Locale.getDefault(), "node%03d%03d%03d%03d",
                (int) (addr.getAddress()[0] & 0xFF),
                (int) (addr.getAddress()[1] & 0xFF),
                (int) (addr.getAddress()[2] & 0xFF),
                (int) (addr.getAddress()[3] & 0xFF));
    }

    /**
     * Registers for gossip callbacks and seeds the member map from the current token
     * metadata and gossip endpoint states (without triggering cluster state updates),
     * then submits one initial routing table update.
     */
    @Override
    protected void doStart() {
        Gossiper.instance.register(this);
        synchronized (gossipCluster) {
            logger.debug("Connected to cluster [{}]", clusterName.value());
            logger.info("localNode name={} id={} localAddress={} publish_host={}", localNode().getName(), localNode().getId(), localAddress, localNode().getAddress());

            // initialize cluster from cassandra local token map
            for(InetAddress endpoint : StorageService.instance.getTokenMetadata().getAllEndpoints()) {
                if (!this.localAddress.equals(endpoint) && this.localDc.equals(DatabaseDescriptor.getEndpointSnitch().getDatacenter(endpoint))) {
                    String hostId = StorageService.instance.getHostId(endpoint).toString();
                    UntypedResultSet rs = executeInternal("SELECT preferred_ip, rpc_address from system."
+ SystemKeyspace.PEERS +" WHERE peer = ?", endpoint); if (!rs.isEmpty()) { UntypedResultSet.Row row = rs.one(); EndpointState epState = Gossiper.instance.getEndpointStateForEndpoint(endpoint); gossipCluster.update(endpoint, epState, "discovery-init", false); } } } // walk the gossip states for (Entry<InetAddress, EndpointState> entry : Gossiper.instance.getEndpointStates()) { EndpointState epState = entry.getValue(); InetAddress endpoint = entry.getKey(); if (!epState.getStatus().equals(VersionedValue.STATUS_NORMAL) && !epState.getStatus().equals(VersionedValue.SHUTDOWN)) { logger.info("Ignoring node state={}", epState); continue; } if (isLocal(endpoint)) { VersionedValue vv = epState.getApplicationState(ApplicationState.HOST_ID); if (vv != null) { String hostId = vv.value; if (!this.localNode().getId().equals(hostId)) { gossipCluster.update(endpoint, epState, "discovery-init-gossip", false); } } } } } // Cassandra is usually in the NORMAL state when discovery start. if (isNormal(Gossiper.instance.getEndpointStateForEndpoint(this.localAddress)) && isAutoEnableSearch()) { try { this.setSearchEnabled(true); } catch (IOException e) { logger.error("Failed to set searchEnabled",e); } } clusterService.submitStateUpdateTask("starting-cassandra-discovery", new RoutingTableUpdateTask(true), routingTableUpdateTaskExecutor, routingTableUpdateTaskExecutor, routingTableUpdateTaskExecutor); } public ClusterState initClusterState(DiscoveryNode localNode) { ClusterState.Builder builder = clusterApplier.newClusterStateBuilder(); ClusterState clusterState = builder.nodes(DiscoveryNodes.builder().add(localNode) .localNodeId(localNode.getId()) .masterNodeId(localNode.getId()) .build()) .blocks(ClusterBlocks.builder() .addGlobalBlock(STATE_NOT_RECOVERED_BLOCK) .addGlobalBlock(CassandraGatewayService.NO_CASSANDRA_RING_BLOCK)) .build(); setCommittedState(clusterState); this.clusterApplier.setInitialState(clusterState); return clusterState; } class RoutingTableUpdateTask { final 
Set<String> indices; // update routing for these indices final boolean updateRouting;// update routinTable (X1 or status change) RoutingTableUpdateTask(String index) { this(true, Collections.singleton(index)); } RoutingTableUpdateTask(boolean updateRouting) { this(updateRouting, Collections.EMPTY_SET); } RoutingTableUpdateTask(boolean updateRouting, Set<String> indices) { this.indices = indices; this.updateRouting = updateRouting; } public Set<String> indices() { return this.indices; } public boolean updateRouting() { return this.updateRouting; } } /** * Computation of the routing table for several indices (or for all indices if nodes changed) for batched cluster state updates. */ class RoutingTableUpdateTaskExecutor implements ClusterStateTaskExecutor<RoutingTableUpdateTask>, ClusterStateTaskConfig, ClusterStateTaskListener { @Override public ClusterTasksResult<RoutingTableUpdateTask> execute(ClusterState currentState, List<RoutingTableUpdateTask> tasks) throws Exception { boolean updateRouting = tasks.stream().filter(RoutingTableUpdateTask::updateRouting).count() > 0; Set<Index> indices = tasks.stream().map(RoutingTableUpdateTask::indices) .flatMap(Set::stream) .map(i -> Optional.ofNullable(currentState.metaData().hasIndex(i) ? currentState.metaData().index(i).getIndex() : null)) .filter(Optional::isPresent) .map(Optional::get) .collect(Collectors.toSet()); DiscoveryNodes discoverNodes = nodes(); ClusterState.Builder clusterStateBuilder = ClusterState.builder(currentState); clusterStateBuilder.nodes(discoverNodes); if (currentState.nodes().getSize() != discoverNodes.getSize() || updateRouting) { // update numberOfShards/numberOfReplicas for all indices. 
MetaData.Builder metaDataBuilder = MetaData.builder(currentState.metaData()); for(Iterator<IndexMetaData> it = currentState.metaData().iterator(); it.hasNext(); ) { IndexMetaData indexMetaData = it.next(); IndexMetaData.Builder indexMetaDataBuilder = IndexMetaData.builder(indexMetaData); indexMetaDataBuilder.numberOfShards(discoverNodes.getSize()); int rf = ClusterService.replicationFactor(indexMetaData.keyspace()); indexMetaDataBuilder.numberOfReplicas( Math.max(0, rf - 1) ); metaDataBuilder.put(indexMetaDataBuilder.build(), false); } clusterStateBuilder.metaData(metaDataBuilder.build()); ClusterState workingClusterState = clusterStateBuilder.build(); RoutingTable routingTable = RoutingTable.build(clusterService, workingClusterState); ClusterState resultingState = ClusterState.builder(workingClusterState).routingTable(routingTable).build(); return ClusterTasksResult.builder().successes((List)tasks).build(resultingState); } // only update routing table for some indices RoutingTable routingTable = indices.isEmpty() ? 
RoutingTable.build(clusterService, clusterStateBuilder.build()) : RoutingTable.build(clusterService, clusterStateBuilder.build(), indices); ClusterState resultingState = ClusterState.builder(currentState).routingTable(routingTable).build(); return ClusterTasksResult.builder().successes((List)tasks).build(resultingState); } @Override public TimeValue timeout() { return null; } @Override public Priority priority() { return Priority.NORMAL; } @Override public void onFailure(String source, Exception e) { logger.error("unexpected failure during [{}]", e, source); } } private long getMetadataVersion(VersionedValue versionValue) { int i = versionValue.value.indexOf('/'); if (i > 0) { try { return Long.valueOf(versionValue.value.substring(i+1)); } catch (NumberFormatException e) { logger.error("Unexpected gossip.X2 value "+versionValue.value, e); } } return -1; } private int publishPort() { try { return settings.getAsInt("transport.netty.publish_port", settings.getAsInt("transport.publish_port",settings.getAsInt("transport.tcp.port", 9300))); } catch (SettingsException | NumberFormatException e) { String publishPort = settings.get("transport.netty.publish_port", settings.get("transport.publish_port",settings.get("transport.tcp.port", "9300"))); if (publishPort.indexOf("-") > 0) { return Integer.parseInt(publishPort.split("-")[0]); } else { throw e; } } } private boolean isLocal(InetAddress endpoint) { return DatabaseDescriptor.getEndpointSnitch().getDatacenter(endpoint).equals(localDc); } private boolean isMember(InetAddress endpoint) { return !this.localAddress.equals(endpoint) && DatabaseDescriptor.getEndpointSnitch().getDatacenter(endpoint).equals(localDc); } /** * #183 lookup EndpointState with the node name = cassandra broadcast address. * ES RPC adress can be different from the cassandra broadcast address. 
*/
public boolean isNormal(DiscoveryNode node) {
    // endpoint address = C* broadcast address = Elasticsearch node name (transport may be bound to C* internal or C* RPC broadcast)
    EndpointState state = Gossiper.instance.getEndpointStateForEndpoint(InetAddresses.forString(node.getName()));
    if (state == null) {
        logger.warn("Node endpoint address=[{}] name=[{}] state not found", node.getInetAddress(), node.getName());
        return false;
    }
    return state.isAlive() && state.getStatus().equals(VersionedValue.STATUS_NORMAL);
}

// True when the endpoint state exists, is alive, and has gossip status NORMAL.
private boolean isNormal(EndpointState state) {
    return state != null && state.isAlive() && state.getStatus().equals(VersionedValue.STATUS_NORMAL);
}

/** Returns the gossiped INTERNAL_IP of the endpoint, or null when not published. */
public static InetAddress getInternalIp(EndpointState epState) {
    return epState.getApplicationState(ApplicationState.INTERNAL_IP) == null ? null :
        InetAddresses.forString(epState.getApplicationState(ApplicationState.INTERNAL_IP).value);
}

/** Returns the gossiped RPC_ADDRESS of the endpoint, or null when not published. */
public static InetAddress getRpcAddress(EndpointState epState) {
    return epState.getApplicationState(ApplicationState.RPC_ADDRESS) == null ? null :
        InetAddresses.forString(epState.getApplicationState(ApplicationState.RPC_ADDRESS).value);
}

// Intentionally a no-op; required by the IEndpointStateChangeSubscriber contract.
@Override
public void beforeChange(InetAddress endpoint, EndpointState state, ApplicationState appState, VersionedValue value) {
    //logger.debug("beforeChange Endpoint={} EndpointState={} ApplicationState={} value={}", endpoint, state, appState, value);
}

/**
 * Gossip application-state change: refreshes the gossipCluster view for remote members of
 * the local datacenter, and (below) reacts to the local node's own STATUS changes.
 */
@Override
public void onChange(InetAddress endpoint, ApplicationState state, VersionedValue versionValue) {
    EndpointState epState = Gossiper.instance.getEndpointStateForEndpoint(endpoint);
    traceEpState(endpoint, epState);
    // NOTE(review): getApplicationState(HOST_ID) is dereferenced unchecked — assumed always present for gossiping members, confirm.
    String hostId = epState.getApplicationState(ApplicationState.HOST_ID).value;
    if (hostId != null && isMember(endpoint)) {
        if (logger.isTraceEnabled())
            logger.trace("Endpoint={} ApplicationState={} value={}", endpoint, state, versionValue);
        gossipCluster.update(endpoint, epState, "onChange-" + endpoint, true);
    }
    // self status update.
if (this.localAddress.equals(endpoint)) {
    switch (state) {
        case STATUS:
            if (logger.isTraceEnabled())
                logger.trace("Endpoint={} STATUS={} => may update searchEnabled", endpoint, versionValue);
            // update searchEnabled according to the node status and autoEnableSearch.
            if (isNormal(Gossiper.instance.getEndpointStateForEndpoint(endpoint))) {
                if (!this.searchEnabled.get() && this.autoEnableSearch.get()) {
                    try {
                        setSearchEnabled(true, true);
                    } catch (IOException e) {
                        logger.error("Failed to enable search",e);
                    }
                }
                publishX2(this.committedState.get(), true);
            } else {
                // node is leaving or whatever, disabling search.
                if (this.searchEnabled.get()) {
                    try {
                        setSearchEnabled(false, true);
                    } catch (IOException e) {
                        logger.error("Failed to disable search",e);
                    }
                }
            }
            break;
    }
}
}

/**
 * Warning: IEndpointStateChangeSubscriber.onXXXX should not block (on connection timeout or clusterState update) to avoid gossip issues.
 */
private void traceEpState(InetAddress endpoint, EndpointState epState) {
    if (logger.isTraceEnabled())
        logger.trace("Endpoint={} isAlive={} STATUS={} HOST_ID={} INTERNAL_IP={} RPC_ADDRESS={} SCHEMA={} X1={} X2={}", endpoint, epState.isAlive(), epState.getStatus(),
            epState.getApplicationState(ApplicationState.HOST_ID),
            epState.getApplicationState(ApplicationState.INTERNAL_IP),
            epState.getApplicationState(ApplicationState.RPC_ADDRESS),
            epState.getApplicationState(ApplicationState.SCHEMA),
            epState.getApplicationState(ApplicationState.X1),
            epState.getApplicationState(ApplicationState.X2));
}

// A local-DC member came back alive: refresh its entry and reconnect.
@Override
public void onAlive(InetAddress endpoint, EndpointState epState) {
    if (isMember(endpoint)) {
        traceEpState(endpoint, epState);
        logger.debug("Endpoint={} isAlive={} => update node + connecting", endpoint, epState.isAlive());
        gossipCluster.update(endpoint, epState, "onAlive-" + endpoint, true);
    }
}

// A local-DC member was marked dead: refresh its entry and disconnect.
@Override
public void onDead(InetAddress endpoint, EndpointState epState) {
    if (isMember(endpoint)) {
        traceEpState(endpoint, epState);
        logger.warn("Endpoint={} isAlive={} => update node + disconnecting", endpoint, epState.isAlive());
        gossipCluster.update(endpoint, epState, "onDead-" + endpoint, true);
    }
}

@Override
public void onRestart(InetAddress endpoint, EndpointState epState) {
    if (isMember(endpoint)) {
        traceEpState(endpoint, epState);
        gossipCluster.update(endpoint, epState, "onAlive-" + endpoint, true);
    }
}

// Note: onJoin uses isLocal (same DC, local node included) rather than isMember.
@Override
public void onJoin(InetAddress endpoint, EndpointState epState) {
    if (isLocal(endpoint)) {
        traceEpState(endpoint, epState);
        gossipCluster.update(endpoint, epState, "onAlive-" + endpoint, true);
    }
}

/**
 * Endpoint removed from gossip. For the local node, search is disabled; for a local-DC
 * member, any pending publish handler is released and the node is dropped from gossipCluster.
 */
@Override
public void onRemove(InetAddress endpoint) {
    if (this.localAddress.equals(endpoint)) {
        try {
            setSearchEnabled(false);
        } catch (IOException e) {
            // best effort on self-removal; nothing meaningful to do here
        }
    } else if (isMember(endpoint)) {
        EndpointState ep = Gossiper.instance.getEndpointStateForEndpoint(endpoint);
        if (ep != null) {
            VersionedValue vv = ep.getApplicationState(ApplicationState.HOST_ID);
            if (vv != null && vv.value != null) {
                String hostId = vv.value;
                UUID hostUuid = UUID.fromString(vv.value);
                if (!localNode().getId().equals(hostId) && gossipCluster.contains(hostUuid)) {
                    logger.warn("Removing node ip={} node={} => disconnecting", endpoint, hostId);
                    // release any publish handler waiting on this node before removing it
                    notifyHandler(Gossiper.instance.getEndpointStateForEndpoint(endpoint));
                    gossipCluster.remove(hostUuid, "onRemove-" + endpoint.getHostAddress());
                }
            }
        }
    }
}

/**
 * Release the listener when all attendees have reached the expected version or become down.
 * Called by the cassandra gossiper thread from onChange() or onDead() or onRemove().
*/
public void notifyHandler(EndpointState endPointState) {
    VersionedValue hostIdValue = endPointState.getApplicationState(ApplicationState.HOST_ID);
    if (hostIdValue == null)
        return; // happen when we are removing a node while updating the mapping

    String hostId = hostIdValue.value;
    if (hostId == null || localNode().getId().equals(hostId))
        return;

    // NOTE(review): compares against the literal "NORMAL" while the rest of the file uses
    // VersionedValue.STATUS_NORMAL — assumed equivalent, confirm against the constant's value.
    if (!endPointState.isAlive() || !endPointState.getStatus().equals("NORMAL")) {
        // node was removed from the gossiper, down or leaving, acknowledge to avoid locking.
        AckClusterStatePublishResponseHandler handler = handlerRef.get();
        if (handler != null) {
            DiscoveryNode node = nodes().get(hostId);
            if (node != null) {
                logger.debug("nack node={}", node.getId());
                handler.onFailure(node, new NoNodeAvailableException("Node "+hostId+" unavailable"));
            }
        }
    }
}

// Unregister from gossip and forget all remote members on shutdown.
@Override
protected void doStop() throws ElasticsearchException {
    Gossiper.instance.unregister(this);
    synchronized (gossipCluster) {
        gossipCluster.remoteMembers.clear();
    }
}

// Gossip application state X1 carries the local shard routing states.
private static final ApplicationState ELASTIC_SHARDS_STATES = ApplicationState.X1;
// Gossip application state X2 carries the elasticsearch metadata uuid/version.
private static final ApplicationState ELASTIC_META_DATA = ApplicationState.X2;
private static final com.fasterxml.jackson.databind.ObjectMapper jsonMapper = new com.fasterxml.jackson.databind.ObjectMapper();
private static final TypeReference<Map<String, ShardRoutingState>> indexShardStateTypeReference = new TypeReference<Map<String, ShardRoutingState>>() {};

/** Returns the gossiped shard routing states (per index) of the given node. */
public Map<String,ShardRoutingState> getShardRoutingState(UUID nodeUuid) {
    return gossipCluster.getShardRoutingState(nodeUuid);
}

public boolean isSearchEnabled() {
    return this.searchEnabled.get();
}

public boolean isAutoEnableSearch() {
    return this.autoEnableSearch.get();
}

public void setAutoEnableSearch(boolean newValue) {
    this.autoEnableSearch.set(newValue);
}

// warning: called from the gossiper.
public void setSearchEnabled(boolean ready) throws IOException {
    setSearchEnabled(ready, false);
}

/**
 * Enables or disables search on this node. Enabling requires the local Cassandra node to be
 * alive with gossip status NORMAL. On an effective change (or when forced), the X1 shard
 * state is re-published and a routing-table update task is submitted.
 *
 * @param ready          the new searchEnabled value
 * @param forcePublishX1 publish X1 and resubmit the routing update even if the flag is unchanged
 * @throws IOException when Cassandra is not ready for search
 */
public void setSearchEnabled(boolean ready, boolean forcePublishX1) throws IOException {
    if (ready && !isNormal(Gossiper.instance.getEndpointStateForEndpoint(this.localAddress))) {
        throw new IOException("Cassandra not ready for search");
    }
    if (searchEnabled.getAndSet(ready) != ready || forcePublishX1) {
        logger.info("searchEnabled set to [{}]", ready);
        publishX1(forcePublishX1);
        clusterService.submitStateUpdateTask("searchEnabled-changed-to-"+ready, new RoutingTableUpdateTask(true),
                routingTableUpdateTaskExecutor, routingTableUpdateTaskExecutor, routingTableUpdateTaskExecutor);
    }
}

/** Submits a routing-table update restricted to the given indices, notifying the listener. */
public void updateRoutingTable(Set<String> indices, ClusterStateTaskListener listener) {
    clusterService.submitStateUpdateTask("update-routing-table", new RoutingTableUpdateTask(true, indices),
            routingTableUpdateTaskExecutor, routingTableUpdateTaskExecutor, listener);
}

public void publishX1() throws JsonGenerationException, JsonMappingException, IOException {
    publishX1(false);
}

// Warning: on nodetool enablegossip, Gossiper.instance.isEnable() may be false while receiving a onChange event !
/**
 * Publishes the local per-index shard states as a JSON map in gossip application state X1.
 * When search is disabled or the node is read-blocked, an empty map is published instead so
 * other nodes see the local shards as UNASSIGNED.
 */
private void publishX1(boolean force) throws JsonGenerationException, JsonMappingException, IOException {
    if (Gossiper.instance.isEnabled() || force) {
        ClusterBlockException blockException = clusterState().blocks().globalBlockedException(ClusterBlockLevel.READ);
        if (blockException != null)
            logger.debug("Node not ready for READ block={}", clusterState().blocks());
        if (searchEnabled.get() && blockException == null) {
            Map<String, ShardRoutingState> localShardStateMap = new HashMap<>();
            if (clusterService.getIndicesService() != null) {
                for(IndexService indexService : clusterService.getIndicesService()) {
                    try {
                        IndexShard localIndexShard = indexService.getShardOrNull(0);
                        // shard not yet routed counts as INITIALIZING
                        localShardStateMap.put(indexService.index().getName(),
                                (localIndexShard != null && localIndexShard.routingEntry() != null) ?
                                localIndexShard.routingEntry().state() : ShardRoutingState.INITIALIZING);
                    } catch (ShardNotFoundException | IndexNotFoundException e) {
                        // shard/index disappeared concurrently: skip it
                    }
                }
            }
            String newValue = jsonMapper.writerWithType(indexShardStateTypeReference).writeValueAsString(localShardStateMap);
            Gossiper.instance.addLocalApplicationState(ELASTIC_SHARDS_STATES,
                    StorageService.instance.valueFactory.datacenter(gzip ? compress(newValue) : newValue));
        } else {
            // publish an empty map, so other nodes will see local shards UNASSIGNED.
            // empty doesn't have to be GZipped
            Gossiper.instance.addLocalApplicationState(ELASTIC_SHARDS_STATES,
                    StorageService.instance.valueFactory.datacenter("{}"));
        }
    } else {
        logger.warn("Gossiper not yet enabled to publish X1");
    }
}

public void publishX2(ClusterState clusterState) {
    publishX2(clusterState, false);
}

/** Publishes the cluster state's metadata fingerprint (x2) in gossip application state X2. */
public void publishX2(ClusterState clusterState, boolean force) {
    if (Gossiper.instance.isEnabled() || force) {
        Gossiper.instance.addLocalApplicationState(ELASTIC_META_DATA,
                StorageService.instance.valueFactory.datacenter(clusterState.metaData().x2()));
        if (logger.isTraceEnabled())
            logger.trace("X2={} published in gossip state", clusterState.metaData().x2());
    } else {
        logger.warn("Gossiper not yet enabled to publish X2");
    }
}

@Override
protected void doClose() {
    Gossiper.instance.unregister(this);
}

/** Returns the last committed cluster state; must not be called before it is first set. */
public ClusterState clusterState() {
    ClusterState clusterState = committedState.get();
    assert clusterState != null : "accessing cluster state before it is set";
    return clusterState;
}

// visible for testing
void setCommittedState(ClusterState clusterState) {
    synchronized (stateMutex) {
        committedState.set(clusterState);
        publishX2(clusterState);
    }
}

public DiscoveryNode localNode() {
    return this.transportService.getLocalNode();
}

public String nodeDescription() {
    return clusterName.value() + "/" + localNode().getId();
}

public DiscoveryNodes nodes() {
    return this.gossipCluster.nodes();
}

/**
 * Maps a gossip EndpointState to a discovery node status: DEAD when absent or not alive,
 * DISABLED when no X2 (elasticsearch not running), ALIVE for NORMAL/LEAVING/MOVING.
 */
public DiscoveryNodeStatus discoveryNodeStatus(final EndpointState epState) {
    if (epState ==
null || !epState.isAlive()) {
        return DiscoveryNodeStatus.DEAD;
    }
    // no gossiped X2 => elasticsearch metadata never published by that node
    if (epState.getApplicationState(ApplicationState.X2) == null) {
        return DiscoveryNodeStatus.DISABLED;
    }
    if (VersionedValue.STATUS_NORMAL.equals(epState.getStatus())
            || VersionedValue.STATUS_LEAVING.equals(epState.getStatus())
            || VersionedValue.STATUS_MOVING.equals(epState.getStatus())
    ) {
        return DiscoveryNodeStatus.ALIVE;
    }
    return DiscoveryNodeStatus.DEAD;
}

// Discovery bootstrap: announce our current metadata fingerprint in gossip.
@Override
public void startInitialJoin() {
    publishX2(this.committedState.get());
}

/**
 * Publish all the changes to the cluster from the master (can be called just by the master). The publish
 * process should apply this state to the master as well!
 *
 * The {@link AckListener} allows to keep track of the acks received from nodes, and to verify whether
 * they updated their own cluster state or not.
 *
 * The method is guaranteed to throw a {@link FailedToCommitClusterStateException} if the change is not committed and should be rejected.
 * Any other exception signals that something wrong happened but the change is committed.
 *
 * Strapdata NOTES:
 * Publish is blocking until the change is applied locally, but not while waiting for remote nodes.
 * When the last remote node acknowledges a metadata version, this finally acknowledges the calling task.
 * According to the Metadata.clusterUuid in the new clusterState, the node acts as the coordinator or participant.
*/
@Override
public void publish(final ClusterChangedEvent clusterChangedEvent, final AckListener ackListener) {
    ClusterState previousClusterState = clusterChangedEvent.previousState();
    ClusterState newClusterState = clusterChangedEvent.state();
    long startTimeNS = System.nanoTime();
    try {
        if (clusterChangedEvent.schemaUpdate().updated()) {
            // update and broadcast the metadata through a CQL schema update + ack from participant nodes
            // coordinator when the new state's metadata clusterUUID is the local node id, participant otherwise
            if (localNode().getId().equals(newClusterState.metaData().clusterUUID())) {
                publishAsCoordinator(clusterChangedEvent, ackListener);
            } else {
                publishAsParticipator(clusterChangedEvent, ackListener);
            }
        } else {
            // publish local cluster state update (for blocks, nodes or routing update)
            publishLocalUpdate(clusterChangedEvent, ackListener);
        }
    } catch (Exception e) {
        TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, TimeValue.nsecToMSec(System.nanoTime() - startTimeNS)));
        StringBuilder sb = new StringBuilder("failed to execute cluster state update in ").append(executionTime)
                .append(", state:\nversion [")
                .append(previousClusterState.version()).
                append("], source [").append(clusterChangedEvent.source()).append("]\n");
        logger.warn(sb.toString(), e);
        throw new ElasticsearchException(e);
    }
}

/**
 * Publish the new metadata through a CQL schema update (a blocking schema update unless we update a CQL map as a dynamic nested object),
 * and wait acks (AckClusterStatePublishResponseHandler) from participant nodes with state alive+NORMAL.
*/ void publishAsCoordinator(final ClusterChangedEvent clusterChangedEvent, final AckListener ackListener) throws InterruptedException, IOException { logger.debug("Coordinator update source={} metadata={}", clusterChangedEvent.source(), clusterChangedEvent.state().metaData().x2()); ClusterState previousClusterState = clusterChangedEvent.previousState(); ClusterState newClusterState = clusterChangedEvent.state(); DiscoveryNodes nodes = clusterChangedEvent.state().nodes(); DiscoveryNode localNode = nodes.getLocalNode(); // increment metadata.version newClusterState = ClusterState.builder(newClusterState) .metaData(MetaData.builder(newClusterState.metaData()).incrementVersion().build()) .build(); Collection<Mutation> mutations = clusterChangedEvent.mutations() == null ? new ArrayList<>() : clusterChangedEvent.mutations(); Collection<Event.SchemaChange> events = clusterChangedEvent.events() == null ? new ArrayList<>() : clusterChangedEvent.events(); try { // TODO: track change to update CQL schema when really needed clusterService.writeMetadataToSchemaMutations(newClusterState.metaData(), mutations, events); } catch (ConfigurationException | IOException e1) { throw new ElasticsearchException(e1); } try { // PAXOS schema update commit clusterService.commitMetaData(previousClusterState.metaData(), newClusterState.metaData(), clusterChangedEvent.source()); // compute alive node for awaiting applied acknowledgment long publishingStartInNanos = System.nanoTime(); Set<DiscoveryNode> nodesToPublishTo = new HashSet<>(nodes.getSize()); for (final DiscoveryNode node : nodes) { if (node.status() == DiscoveryNodeStatus.ALIVE && isNormal(node)) nodesToPublishTo.add(node); } logger.trace("New coordinator handler for nodes={}", nodesToPublishTo); final AckClusterStatePublishResponseHandler handler = new AckClusterStatePublishResponseHandler(nodesToPublishTo, ackListener); handlerRef.set(handler); // apply new CQL schema if (mutations != null && mutations.size() > 0) { 
logger.debug("Applying CQL schema source={} update={} mutations={} ", clusterChangedEvent.source(), clusterChangedEvent.schemaUpdate(), mutations); // unless update is UPDATE_ASYNCHRONOUS, block until schema is applied. Future<?> future = MigrationManager.announce(mutations, this.clusterService.getSchemaManager().getInhibitedSchemaListeners()); if (!SchemaUpdate.UPDATE_ASYNCHRONOUS.equals(clusterChangedEvent.schemaUpdate())) FBUtilities.waitOnFuture(future); // build routing table when keyspaces are created locally newClusterState = ClusterState.builder(newClusterState) .routingTable(RoutingTable.build(this.clusterService, newClusterState)) .build(); logger.debug("CQL source={} SchemaChanges={}", clusterChangedEvent.source(), events); } // add new cluster state into the pending-to-apply cluster states queue, listening ack from remote nodes. final AtomicBoolean processedOrFailed = new AtomicBoolean(); pendingStatesQueue.addPending(newClusterState, new PendingClusterStatesQueue.StateProcessedListener() { @Override public void onNewClusterStateProcessed() { processedOrFailed.set(true); handler.onResponse(localNode); } @Override public void onNewClusterStateFailed(Exception e) { processedOrFailed.set(true); handler.onFailure(localNode, e); logger.warn((org.apache.logging.log4j.util.Supplier<?>) () -> new ParameterizedMessage( "failed while applying cluster state locally [{}]", clusterChangedEvent.source()), e); } }); // apply the next-to-process cluster state. 
synchronized (stateMutex) { if (clusterChangedEvent.previousState() != this.committedState.get()) { throw new FailedToCommitClusterStateException("local state was mutated while CS update was published to other nodes"); } boolean sentToApplier = processNextCommittedClusterState("committed source=" + clusterChangedEvent.source() + " metadata=" + newClusterState.metaData().x2()); if (sentToApplier == false && processedOrFailed.get() == false) { logger.warn("metadata={} has neither been processed nor failed", newClusterState.metaData().x2()); assert false : "cluster state published locally neither processed nor failed: " + newClusterState; return; } } // wait all nodes are applied. final TimeValue publishTimeout = discoverySettings.getPublishTimeout(); long timeLeftInNanos = Math.max(0, publishTimeout.nanos() - (System.nanoTime() - publishingStartInNanos)); if (!handler.awaitAllNodes(TimeValue.timeValueNanos(timeLeftInNanos))) { logger.info("commit source={} metadata={} timeout with pending nodes={}", clusterChangedEvent.source(), newClusterState.metaData().x2(), Arrays.toString(handler.pendingNodes())); } else { logger.debug("commit source={} metadata={} applied succefully on nodes={}", clusterChangedEvent.source(), newClusterState.metaData().x2(), nodesToPublishTo); } } catch (ConcurrentMetaDataUpdateException e) { // should replay the task later when current cluster state will match the expected metadata uuid and version logger.warn("PAXOS concurrent update, source={} metadata={}, resubmit task on next metadata change", clusterChangedEvent.source(), newClusterState.metaData().x2()); resubmitTaskOnNextChange(clusterChangedEvent); return; } catch(UnavailableException e) { logger.error("PAXOS not enough available nodes, source={} metadata={}", clusterChangedEvent.source(), newClusterState.metaData().x2()); ackListener.onNodeAck(localNode, e); throw e; } catch(WriteTimeoutException e) { // see https://www.datastax.com/dev/blog/cassandra-error-handling-done-right 
logger.warn("PAXOS write timeout, source={} metadata={} writeType={}, reading the owner of version={}", clusterChangedEvent.source(), newClusterState.metaData().x2(), e.writeType, newClusterState.metaData().version()); // read the owner for the expected version to know if PAXOS transaction succeed or not. UUID owner = clusterService.readMetaDataOwner(newClusterState.metaData().version()); if (owner == null || !owner.equals(newClusterState.metaData().clusterUUID())) { logger.warn("PAXOS timeout and failed to write version={}, owner={}", newClusterState.metaData().version(), owner); throw new PaxosMetaDataUpdateException(e); } logger.warn("PAXOS timeout but succesfully write x2={}", newClusterState.metaData().x2()); ackListener.onNodeAck(localNode, e); } finally { handlerRef.set(null); } } /** * Publish the new metadata and notify the coordinator through an appliedClusterStateAction. */ void publishAsParticipator(final ClusterChangedEvent clusterChangedEvent, final AckListener ackListener) { ClusterState newClusterState = clusterChangedEvent.state(); String reason = clusterChangedEvent.source(); final DiscoveryNode coordinatorNode = newClusterState.nodes().get(newClusterState.metaData().clusterUUID()); logger.debug("Participator update reason={} metadata={} coordinator={}", reason, newClusterState.metaData().x2(), coordinatorNode); if (newClusterState.metaData().version() <= clusterState().metaData().version()) { logger.warn("Ignore and acknowlegde obsolete update metadata={}", newClusterState.metaData().x2()); if (coordinatorNode != null) { // coordinator from a remote DC maybe null. 
CassandraDiscovery.this.appliedClusterStateAction.sendAppliedToNode(coordinatorNode, newClusterState, null); } return; } final AtomicBoolean processedOrFailed = new AtomicBoolean(); this.pendingStatesQueue.addPending(newClusterState, new PendingClusterStatesQueue.StateProcessedListener() { @Override public void onNewClusterStateProcessed() { if (coordinatorNode != null) { logger.trace("sending applied state=[{}] to coordinator={} reason={}", newClusterState.metaData().x2(), coordinatorNode, reason); CassandraDiscovery.this.appliedClusterStateAction.sendAppliedToNode(coordinatorNode, newClusterState, null); } } @Override public void onNewClusterStateFailed(Exception e) { if (coordinatorNode != null) { logger.trace("sending failed state=[{}] to coordinator={} reason={} exception={}", newClusterState.metaData().x2(), coordinatorNode, reason, e.toString()); CassandraDiscovery.this.appliedClusterStateAction.sendAppliedToNode(coordinatorNode, newClusterState, e); } } }); // apply the next-to-process cluster state. synchronized (stateMutex) { boolean sentToApplier = processNextCommittedClusterState( "committed version [" + newClusterState.metaData().x2() + "] source [" + reason + "]"); if (sentToApplier == false && processedOrFailed.get() == false) { logger.warn("metadata={} has neither been processed nor failed", newClusterState.metaData().x2()); assert false : "cluster state published locally neither processed nor failed: " + newClusterState; return; } } } /** * Publish a local cluster state update (no coordination) coming from a CQL schema update. 
*/
/**
 * Applies a node-local cluster state change (blocks, nodes or routing table) with no
 * inter-node coordination; acks are simulated for every node once the state is applied.
 */
void publishLocalUpdate(final ClusterChangedEvent clusterChangedEvent, final AckListener ackListener) {
    ClusterState newClusterState = clusterChangedEvent.state();
    logger.debug("Local update source={} metadata={}", clusterChangedEvent.source(), newClusterState.metaData().x2());
    final AtomicBoolean processedOrFailed = new AtomicBoolean();
    pendingStatesQueue.addPending(newClusterState,
        new PendingClusterStatesQueue.StateProcessedListener() {
            @Override
            public void onNewClusterStateProcessed() {
                processedOrFailed.set(true);
                // simulate ack from all nodes, elassandra only update the local clusterState here.
                clusterChangedEvent.state().nodes().forEach(node -> ackListener.onNodeAck(node, null));
            }

            @Override
            public void onNewClusterStateFailed(Exception e) {
                processedOrFailed.set(true);
                // simulate nack from all nodes, elassandra only update the local clusterState here.
                clusterChangedEvent.state().nodes().forEach(node -> ackListener.onNodeAck(node, e));
                logger.warn((org.apache.logging.log4j.util.Supplier<?>) () -> new ParameterizedMessage(
                        "failed while applying cluster state locally source={}", clusterChangedEvent.source()), e);
            }
        });

    // apply the next-to-process cluster state.
    synchronized (stateMutex) {
        if (clusterChangedEvent.previousState() != this.committedState.get()) {
            throw new FailedToCommitClusterStateException("local state was mutated while CS update was published to other nodes");
        }
        boolean sentToApplier = processNextCommittedClusterState(
                "committed version [" + newClusterState.metaData().x2() + "] source [" + clusterChangedEvent.source() + "]");
        if (sentToApplier == false && processedOrFailed.get() == false) {
            logger.warn("metadata={} source=[{}] has neither been processed nor failed", newClusterState.metaData().x2(), clusterChangedEvent.source());
            assert false : "cluster state published locally neither processed nor failed: " + newClusterState;
            return;
        }
    }
}

/**
 * Re-submits the tasks of a concurrently-rejected update once the metadata changes again,
 * with URGENT priority and the remaining share of a 30s budget. The listener removes itself
 * after the first replay.
 */
protected void resubmitTaskOnNextChange(final ClusterChangedEvent clusterChangedEvent) {
    final long resubmitTimeMillis = System.currentTimeMillis();
    clusterService.addListener(new ClusterStateListener() {
        @Override
        public void clusterChanged(ClusterChangedEvent event) {
            if (event.metaDataChanged()) {
                final long lostTimeMillis = System.currentTimeMillis() - resubmitTimeMillis;
                Priority priority = Priority.URGENT;
                TimeValue timeout = TimeValue.timeValueMillis(30*1000 - lostTimeMillis);
                Map<Object, ClusterStateTaskListener> map = clusterChangedEvent.taskInputs().updateTasksToMap(priority, lostTimeMillis);
                // NOTE(review): "remaing" typo in the log message below — left untouched here (runtime string).
                logger.warn("metadata={} => resubmit delayed update source={} tasks={} priority={} remaing timeout={}",
                        event.state().metaData().x2(), clusterChangedEvent.source(), clusterChangedEvent.taskInputs().updateTasks, priority, timeout);
                clusterService.submitStateUpdateTasks(clusterChangedEvent.source(), map, ClusterStateTaskConfig.build(priority, timeout), clusterChangedEvent.taskInputs().executor);
                clusterService.removeListener(this); // replay only once.
            }
        }
    });
}

// receive ack from remote nodes when cluster state applied.
@Override public void onClusterStateApplied(String nodeId, String x2, Exception e, ActionListener<Void> processedListener) { logger.trace("received state=[{}] applied from={}", x2, nodeId); try { AckClusterStatePublishResponseHandler handler = this.handlerRef.get(); DiscoveryNode node = this.committedState.get().nodes().get(nodeId); if (handler != null && node != null) { if (e != null) { logger.trace("state=[{}] apply failed from node={}", x2, nodeId); handler.onFailure(node, e); } else { logger.trace("state=[{}] apply from node={}", x2, nodeId); handler.onResponse(node); } } processedListener.onResponse(null); } catch(Exception ex) { processedListener.onFailure(ex); } } // return true if state has been sent to applier boolean processNextCommittedClusterState(String reason) { assert Thread.holdsLock(stateMutex); final ClusterState newClusterState = pendingStatesQueue.getNextClusterStateToProcess(); final ClusterState currentState = committedState.get(); // all pending states have been processed if (newClusterState == null) { return false; } assert newClusterState.nodes().getMasterNode() != null : "received a cluster state without a master"; assert !newClusterState.blocks().hasGlobalBlock(discoverySettings.getNoMasterBlock()) : "received a cluster state with a master block"; try { if (shouldIgnoreOrRejectNewClusterState(logger, currentState, newClusterState)) { String message = String.format( Locale.ROOT, "rejecting cluster state version [%d] uuid [%s] received from [%s]", newClusterState.version(), newClusterState.stateUUID(), newClusterState.nodes().getMasterNodeId() ); throw new IllegalStateException(message); } } catch (Exception e) { try { pendingStatesQueue.markAsFailed(newClusterState, e); } catch (Exception inner) { inner.addSuppressed(e); logger.error((java.util.function.Supplier<?>) () -> new ParameterizedMessage("unexpected exception while failing [{}]", reason), inner); } return false; } if 
(currentState.blocks().hasGlobalBlock(discoverySettings.getNoMasterBlock())) { // its a fresh update from the master as we transition from a start of not having a master to having one logger.debug("got first state from fresh master [{}]", newClusterState.nodes().getMasterNodeId()); } if (currentState == newClusterState) { return false; } committedState.set(newClusterState); clusterApplier.onNewClusterState("apply cluster state (from " + newClusterState.metaData().clusterUUID() + "[" + reason + "])", this::clusterState, new ClusterApplier.ClusterApplyListener() { @Override public void onSuccess(String source) { try { pendingStatesQueue.markAsProcessed(newClusterState); } catch (Exception e) { onFailure(source, e); } } @Override public void onFailure(String source, Exception e) { logger.error((java.util.function.Supplier<?>) () -> new ParameterizedMessage("unexpected failure applying [{}]", reason), e); try { // TODO: use cluster state uuid instead of full cluster state so that we don't keep reference to CS around // for too long. 
pendingStatesQueue.markAsFailed(newClusterState, e); } catch (Exception inner) { inner.addSuppressed(e); logger.error((java.util.function.Supplier<?>) () -> new ParameterizedMessage("unexpected exception while failing [{}]", reason), inner); } } }); return true; } public static boolean shouldIgnoreOrRejectNewClusterState(Logger logger, ClusterState currentState, ClusterState newClusterState) { if (newClusterState.version() < currentState.version()) { logger.debug("received a cluster state that is not newer than the current one, ignoring (received {}, current {})", newClusterState.version(), currentState.version()); return true; } if (newClusterState.metaData().version() < currentState.metaData().version()) { logger.debug("received a cluster state metadata.verson that is not newer than the current one, ignoring (received {}, current {})", newClusterState.metaData().version(), currentState.metaData().version()); return true; } if (!newClusterState.metaData().clusterUUID().equals(currentState.metaData().clusterUUID()) && newClusterState.metaData().version() == currentState.metaData().version() && currentState.metaData().version() > 0) { logger.debug("received a remote cluster state with same metadata.version, ignoring (received {}, current {})", newClusterState.metaData().version(), currentState.metaData().version()); return true; } return false; } /** * does simple sanity check of the incoming cluster state. Throws an exception on rejections. 
*/
static void validateIncomingState(Logger logger, ClusterState incomingState, ClusterState lastState) {
    final ClusterName incomingClusterName = incomingState.getClusterName();
    if (!incomingClusterName.equals(lastState.getClusterName())) {
        logger.warn("received cluster state from [{}] which is also master but with a different cluster name [{}]",
                incomingState.nodes().getMasterNode(), incomingClusterName);
        throw new IllegalStateException("received state from a node that is not part of the cluster");
    }
    if (lastState.nodes().getLocalNode().equals(incomingState.nodes().getLocalNode()) == false) {
        logger.warn("received a cluster state from [{}] and not part of the cluster, should not happen",
                incomingState.nodes().getMasterNode());
        throw new IllegalStateException("received state with a local node that does not match the current local node");
    }
    if (shouldIgnoreOrRejectNewClusterState(logger, lastState, incomingState)) {
        // BUGFIX: x2() is used as a string everywhere else in this class (logged via {},
        // parsed at '/' in getMetadataVersion); formatting it with %d threw
        // IllegalFormatConversionException right when a rejection had to be reported.
        // %s is safe for any argument type.
        String message = String.format(
            Locale.ROOT,
            "rejecting cluster state version [%s] received from [%s]",
            incomingState.metaData().x2(),
            incomingState.nodes().getMasterNodeId()
        );
        logger.warn(message);
        throw new IllegalStateException(message);
    }
}

// Discovery statistics are not tracked by this implementation.
@Override
public DiscoveryStats stats() {
    return null;
}

@Override
public DiscoverySettings getDiscoverySettings() {
    return this.discoverySettings;
}
}
package com.goodformentertainment.canary.zown.listener;

import com.goodformentertainment.canary.zown.Flag;
import com.goodformentertainment.canary.zown.ZownPlugin;
import com.goodformentertainment.canary.zown.api.IZown;
import com.goodformentertainment.canary.zown.api.IZownManager;
import com.goodformentertainment.canary.zown.api.impl.Tree;
import net.canarymod.api.entity.ArmorStand;
import net.canarymod.api.entity.Entity;
import net.canarymod.api.entity.EntityType;
import net.canarymod.api.entity.hanging.HangingEntity;
import net.canarymod.api.entity.living.humanoid.Player;
import net.canarymod.api.inventory.Item;
import net.canarymod.api.inventory.ItemType;
import net.canarymod.api.world.blocks.Block;
import net.canarymod.api.world.blocks.BlockType;
import net.canarymod.api.world.blocks.TileEntity;
import net.canarymod.api.world.blocks.properties.helpers.DoorProperties;
import net.canarymod.api.world.position.Location;
import net.canarymod.hook.HookHandler;
import net.canarymod.hook.entity.EndermanDropBlockHook;
import net.canarymod.hook.entity.EndermanPickupBlockHook;
import net.canarymod.hook.entity.HangingEntityDestroyHook;
import net.canarymod.hook.player.ArmorStandModifyHook;
import net.canarymod.hook.player.BlockDestroyHook;
import net.canarymod.hook.player.BlockPlaceHook;
import net.canarymod.hook.player.BlockRightClickHook;
import net.canarymod.hook.player.EntityRightClickHook;
import net.canarymod.hook.player.ItemFrameRotateHook;
import net.canarymod.hook.player.ItemFrameSetItemHook;
import net.canarymod.hook.player.ItemUseHook;
import net.canarymod.hook.world.ExplosionHook;
import net.canarymod.hook.world.FlowHook;
import net.canarymod.hook.world.IgnitionHook;
import net.canarymod.hook.world.LiquidDestroyHook;
import net.canarymod.plugin.PluginListener;
import net.canarymod.plugin.Priority;

import java.util.List;

/**
 * Hook listener that enforces zown flags (build, interact, flow, firespread, mobgrief)
 * against world-modifying events. Operators bypass all checks; zown owners and members
 * bypass the build/interact checks inside their own zown.
 */
public class ModifyWorldListener implements PluginListener {
    private final IZownManager zownManager;

    /**
     * Creates the listener.
     *
     * @param zownManager manager used to resolve the zown covering a given location
     */
    public ModifyWorldListener(final IZownManager zownManager) {
        this.zownManager = zownManager;
    }

    /**
     * A flag + exclusion pair denies an action when the flag allows the action but the
     * target is excluded, or the flag denies the action and the target is not excluded.
     * That collapses to a simple equality check (flag == excluded).
     */
    private static boolean deniesAction(final boolean flag, final boolean excluded) {
        return flag == excluded;
    }

    /** Cancels block placement when the build flag and exclusions deny it. */
    @HookHandler(priority = Priority.CRITICAL)
    public void onBlockPlace(final BlockPlaceHook hook) {
        final Player player = hook.getPlayer();
        final Block block = hook.getBlockPlaced();
        if (!player.isOperator()) {
            final Tree<? extends IZown> zownTree = zownManager.getZown(block.getLocation());
            if (!zownTree.getData().isOwnerOrMember(player)) {
                final Boolean flag = zownTree.getData().getConfiguration().getFlag(Flag.build.name());
                if (flag != null) {
                    final boolean excluded = zownTree.getData().getConfiguration()
                            .hasBlockBuildExclusion(block.getType());
                    if (deniesAction(flag, excluded)) {
                        hook.setCanceled();
                        ZownPlugin.LOG.info("Cancelled block place: " + flag + ":" + excluded);
                    }
                }
            }
        }
    }

    /** Cancels block destruction when the build flag and exclusions deny it. */
    @HookHandler(priority = Priority.CRITICAL)
    public void onBlockDestroy(final BlockDestroyHook hook) {
        final Player player = hook.getPlayer();
        final Block block = hook.getBlock();
        if (!player.isOperator()) {
            final Tree<? extends IZown> zownTree = zownManager.getZown(block.getLocation());
            if (!zownTree.getData().isOwnerOrMember(player)) {
                final Boolean flag = zownTree.getData().getConfiguration().getFlag(Flag.build.name());
                if (flag != null) {
                    final boolean excluded = zownTree.getData().getConfiguration()
                            .hasBlockBuildExclusion(block.getType());
                    if (deniesAction(flag, excluded)) {
                        hook.setCanceled();
                    }
                }
            }
        }
    }

    /**
     * Cancels destruction of hanging entities (paintings, item frames). The destroying
     * player may be null (e.g. environmental damage), in which case the check applies
     * as if the actor were a non-member.
     */
    @HookHandler(priority = Priority.CRITICAL)
    public void onHangingEntityDestroy(final HangingEntityDestroyHook hook) {
        final Player player = hook.getPlayer();
        final HangingEntity entity = hook.getPainting();
        if (player == null || !player.isOperator()) {
            final Tree<? extends IZown> zownTree = zownManager.getZown(entity.getLocation());
            if (player == null || !zownTree.getData().isOwnerOrMember(player)) {
                final Boolean flag = zownTree.getData().getConfiguration().getFlag(Flag.build.name());
                if (flag != null) {
                    final boolean excluded = zownTree.getData().getConfiguration()
                            .hasEntityCreateExclusion(entity.getClass());
                    if (deniesAction(flag, excluded)) {
                        hook.setCanceled();
                    }
                }
            }
        }
    }

    /**
     * Blocks use of world-altering items (fire, liquids, bonemeal) by non-members when
     * the build flag denies building.
     */
    @HookHandler(priority = Priority.CRITICAL)
    public void onItemUse(final ItemUseHook hook) {
        final Player player = hook.getPlayer();
        final Block block = hook.getBlockClicked();
        if (!player.isOperator()) {
            final Tree<? extends IZown> zownTree = zownManager.getZown(block.getLocation());
            if (!zownTree.getData().isOwnerOrMember(player)) {
                final Boolean flag = zownTree.getData().getConfiguration().getFlag(Flag.build.name());
                if (flag != null && !flag) {
                    final ItemType type = hook.getItem().getType();
                    if (type == ItemType.FlintAndSteel || type == ItemType.WaterBucket
                            || type == ItemType.LavaBucket || type == ItemType.Bonemeal) {
                        hook.setCanceled();
                    }
                }
            }
        }
    }

    /** Stops liquids from destroying blocks where the flow flag is off. */
    @HookHandler(priority = Priority.CRITICAL)
    public void onLiquidDestroy(final LiquidDestroyHook hook) {
        final Block block = hook.getBlock();
        final Tree<? extends IZown> zownTree = zownManager.getZown(block.getLocation());
        final Boolean flag = zownTree.getData().getConfiguration().getFlag(Flag.flow.name());
        if (flag != null && !flag) {
            hook.setCanceled();
        }
    }

    /** Stops liquids from flowing into zowns where the flow flag is off. */
    @HookHandler(priority = Priority.CRITICAL)
    public void onFlow(final FlowHook hook) {
        final Block block = hook.getBlockTo();
        final Tree<? extends IZown> zownTree = zownManager.getZown(block.getLocation());
        final Boolean flag = zownTree.getData().getConfiguration().getFlag(Flag.flow.name());
        if (flag != null && !flag) {
            hook.setCanceled();
        }
    }

    /**
     * Handles ignition: natural fire spread (no player) is governed by the firespread
     * flag, player-caused ignition by the build flag.
     */
    @HookHandler(priority = Priority.CRITICAL)
    public void onIgnition(final IgnitionHook hook) {
        final Player player = hook.getPlayer();
        final Block block = hook.getBlock();
        if (player == null) {
            final Tree<? extends IZown> zownTree = zownManager.getZown(block.getLocation());
            final Boolean flag = zownTree.getData().getConfiguration().getFlag(Flag.firespread.name());
            if (flag != null && !flag) {
                hook.setCanceled();
            }
        } else if (!player.isOperator()) {
            final Tree<? extends IZown> zownTree = zownManager.getZown(block.getLocation());
            if (!zownTree.getData().isOwnerOrMember(player)) {
                final Boolean flag = zownTree.getData().getConfiguration().getFlag(Flag.build.name());
                if (flag != null && !flag) {
                    hook.setCanceled();
                }
            }
        }
    }

    /**
     * Cancels explosions in protected zowns. TNT-caused explosions are governed by the
     * build flag, everything else (creepers, etc.) by the mobgrief flag.
     */
    @HookHandler(priority = Priority.CRITICAL)
    public void onExplosion(final ExplosionHook hook) {
        final List<Block> blocks = hook.getAffectedBlocks();
        final Entity cause = hook.getEntity();
        // FIX: the original used && between the two type comparisons, so isTnt could
        // never be true (one entity cannot be both a TNT minecart and primed TNT).
        boolean isTnt = false;
        if (cause != null
                && (cause.getEntityType() == EntityType.TNTMINECART
                        || cause.getEntityType() == EntityType.TNTPRIMED)) {
            isTnt = true;
        }
        for (final Block block : blocks) {
            final Tree<? extends IZown> zownTree = zownManager.getZown(block.getLocation());
            final Boolean flag;
            if (isTnt) {
                flag = zownTree.getData().getConfiguration().getFlag(Flag.build.name());
            } else {
                flag = zownTree.getData().getConfiguration().getFlag(Flag.mobgrief.name());
            }
            if (flag != null && !flag) {
                // One protected block is enough to cancel the whole explosion.
                hook.setCanceled();
                break;
            }
        }
    }

    /** Stops endermen from picking up blocks where mobgrief is off. */
    @HookHandler(priority = Priority.CRITICAL)
    public void onEnderPickupBlock(final EndermanPickupBlockHook hook) {
        final Block block = hook.getBlock();
        final Tree<? extends IZown> zownTree = zownManager.getZown(block.getLocation());
        final Boolean flag = zownTree.getData().getConfiguration().getFlag(Flag.mobgrief.name());
        if (flag != null && !flag) {
            hook.setCanceled();
        }
    }

    /** Stops endermen from placing carried blocks where mobgrief is off. */
    @HookHandler(priority = Priority.CRITICAL)
    public void onEnderDropBlock(final EndermanDropBlockHook hook) {
        final Location location = hook.getEnderman().getLocation();
        final Tree<? extends IZown> zownTree = zownManager.getZown(location);
        final Boolean flag = zownTree.getData().getConfiguration().getFlag(Flag.mobgrief.name());
        if (flag != null && !flag) {
            hook.setCanceled();
        }
    }

    /** Cancels item-frame rotation when the interact flag and exclusions deny it. */
    @HookHandler(priority = Priority.CRITICAL)
    public void onItemFrameRotate(final ItemFrameRotateHook hook) {
        final Player player = hook.getPlayer();
        final Entity entity = hook.getItemFrame();
        if (!player.isOperator()) {
            final Tree<? extends IZown> zownTree = zownManager.getZown(entity.getLocation());
            if (!zownTree.getData().isOwnerOrMember(player)) {
                final Boolean flag = zownTree.getData().getConfiguration().getFlag(Flag.interact.name());
                if (flag != null) {
                    final boolean excluded = zownTree.getData().getConfiguration()
                            .hasEntityInteractExclusion(entity.getClass());
                    if (deniesAction(flag, excluded)) {
                        hook.setCanceled();
                    }
                }
            }
        }
    }

    /** Cancels placing an item into an item frame when interact rules deny it. */
    @HookHandler(priority = Priority.CRITICAL)
    public void onItemFrameSetItem(final ItemFrameSetItemHook hook) {
        final Player player = hook.getPlayer();
        final Entity entity = hook.getItemFrame();
        if (!player.isOperator()) {
            final Tree<? extends IZown> zownTree = zownManager.getZown(entity.getLocation());
            if (!zownTree.getData().isOwnerOrMember(player)) {
                final Boolean flag = zownTree.getData().getConfiguration().getFlag(Flag.interact.name());
                if (flag != null) {
                    final boolean excluded = zownTree.getData().getConfiguration()
                            .hasEntityInteractExclusion(entity.getClass());
                    if (deniesAction(flag, excluded)) {
                        hook.setCanceled();
                    }
                }
            }
        }
    }

    /** Cancels armor-stand modification when interact rules deny it. */
    @HookHandler(priority = Priority.CRITICAL)
    public void onArmorStandModify(final ArmorStandModifyHook hook) {
        final Player player = hook.getPlayer();
        // TODO need access to the armor stand being modified
        // final Entity entity = hook.getArmorStand();
        // Workaround: use the player's own location as a stand-in for the armor stand.
        final Entity armorStand = player;
        if (!player.isOperator()) {
            final Tree<? extends IZown> zownTree = zownManager.getZown(armorStand.getLocation());
            if (!zownTree.getData().isOwnerOrMember(player)) {
                final Boolean flag = zownTree.getData().getConfiguration().getFlag(Flag.interact.name());
                if (flag != null) {
                    final boolean excluded = zownTree.getData().getConfiguration()
                            .hasEntityInteractExclusion(ArmorStand.class);
                    if (deniesAction(flag, excluded)) {
                        hook.setCanceled();
                    }
                }
            }
        }
    }

    /**
     * Cancels right-clicks on interactive blocks (doors, buttons, containers, etc.)
     * when the interact flag and exclusions deny it; also re-closes any door the
     * client may have visually opened.
     */
    @HookHandler(priority = Priority.CRITICAL)
    public void onBlockRightClick(final BlockRightClickHook hook) {
        final Player player = hook.getPlayer();
        final Block block = hook.getBlockClicked();
        final Item item = player.getItemHeld();
        final TileEntity tileEntity = block.getTileEntity();
        final BlockType type = block.getType();
        // Empty hand, any tile entity (chests, furnaces, ...), or one of the known
        // interactive block types triggers the interact check.
        if (item == null || tileEntity != null || type == BlockType.AcaciaDoor
                || type == BlockType.AcaciaFenceGate || type == BlockType.Bed
                || type == BlockType.BirchDoor || type == BlockType.BirchFenceGate
                || type == BlockType.Cake || type == BlockType.Cauldron
                || type == BlockType.DarkOakDoor || type == BlockType.DarkOakFenceGate
                || type == BlockType.EndPortalFrame || type == BlockType.Farmland
                || type == BlockType.FenceGate || type == BlockType.Flowerpot
                || type == BlockType.JungleDoor || type == BlockType.JungleFenceGate
                || type == BlockType.Lever || type == BlockType.MobSpawner
                || type == BlockType.OakDoor || type == BlockType.RedstoneRepeaterOff
                || type == BlockType.RedstoneRepeaterOn || type == BlockType.SpruceDoor
                || type == BlockType.SpruceFenceGate || type == BlockType.StoneButton
                || type == BlockType.Trapdoor || type == BlockType.WoodenButton
                || type == BlockType.WoodenDoor) {
            if (!player.isOperator() && !player.safeHasPermission("zown.admin.mod")) {
                final Tree<? extends IZown> zownTree = zownManager.getZown(block.getLocation());
                if (!zownTree.getData().isOwnerOrMember(player)) {
                    final Boolean flag = zownTree.getData().getConfiguration().getFlag(Flag.interact.name());
                    if (flag != null) {
                        final boolean excluded = zownTree.getData().getConfiguration()
                                .hasBlockInteractExclusion(block.getType());
                        if (deniesAction(flag, excluded)) {
                            hook.setCanceled();
                            closeDoor(block);
                        }
                    }
                }
            }
        }
    }

    /** Cancels right-clicks on entities when interact rules deny it. */
    @HookHandler(priority = Priority.CRITICAL)
    public void onEntityRightClick(final EntityRightClickHook hook) {
        final Player player = hook.getPlayer();
        final Entity entity = hook.getEntity();
        if (!player.isOperator()) {
            final Tree<? extends IZown> zownTree = zownManager.getZown(entity.getLocation());
            if (!zownTree.getData().isOwnerOrMember(player)) {
                final Boolean flag = zownTree.getData().getConfiguration().getFlag(Flag.interact.name());
                if (flag != null) {
                    final boolean excluded = zownTree.getData().getConfiguration()
                            .hasEntityInteractExclusion(entity.getClass());
                    if (deniesAction(flag, excluded)) {
                        hook.setCanceled();
                    }
                }
            }
        }
    }

    /** Forces a cancelled door back to its closed state. */
    private void closeDoor(final Block block) {
        final BlockType type = block.getType();
        if (type == BlockType.OakDoor || type == BlockType.SpruceDoor
                || type == BlockType.BirchDoor || type == BlockType.AcaciaDoor
                || type == BlockType.DarkOakDoor || type == BlockType.JungleDoor) {
            block.setPropertyValue(DoorProperties.open, false);
        }
    }
}
/* * Copyright (C) 2011 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jasonjson.core.internal.bind; import org.jasonjson.core.JsonArray; import org.jasonjson.core.JsonElement; import org.jasonjson.core.JsonNull; import org.jasonjson.core.JsonObject; import org.jasonjson.core.JsonPrimitive; import org.jasonjson.core.stream.JsonReader; import org.jasonjson.core.stream.JsonToken; import java.io.IOException; import java.io.Reader; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; /** * This reader walks the elements of a JsonElement as if it was coming from a * character stream. 
*
 * @author Jesse Wilson
 */
public final class JsonTreeReader extends JsonReader {
  /**
   * Placeholder passed to the superclass constructor; every method of this reader is
   * overridden, so the underlying character reader must never actually be read.
   */
  private static final Reader UNREADABLE_READER = new Reader() {
    @Override public int read(char[] buffer, int offset, int count) throws IOException {
      throw new AssertionError();
    }
    @Override public void close() throws IOException {
      throw new AssertionError();
    }
  };
  // Pushed onto the stack by close() so later calls fail with a clear error.
  private static final Object SENTINEL_CLOSED = new Object();

  // Walk state: JsonElements being visited, plus Iterators over arrays/object entries.
  private final List<Object> stack = new ArrayList<Object>();

  public JsonTreeReader(JsonElement element) {
    super(UNREADABLE_READER);
    stack.add(element);
  }

  @Override public void beginArray() throws IOException {
    expect(JsonToken.BEGIN_ARRAY);
    JsonArray array = (JsonArray) peekStack();
    // Replace nothing; push the element iterator above the array itself.
    stack.add(array.iterator());
  }

  @Override public void endArray() throws IOException {
    expect(JsonToken.END_ARRAY);
    popStack(); // empty iterator
    popStack(); // array
  }

  @Override public void beginObject() throws IOException {
    expect(JsonToken.BEGIN_OBJECT);
    JsonObject object = (JsonObject) peekStack();
    stack.add(object.entrySet().iterator());
  }

  @Override public void endObject() throws IOException {
    expect(JsonToken.END_OBJECT);
    popStack(); // empty iterator
    popStack(); // object
  }

  @Override public boolean hasNext() throws IOException {
    JsonToken token = peek();
    return token != JsonToken.END_OBJECT && token != JsonToken.END_ARRAY;
  }

  /**
   * Determines the next token by inspecting the top of the stack. When the top is an
   * iterator, the element two below it tells us whether we are inside an object (next
   * token is a NAME) or an array (push the next element and recurse).
   */
  @Override public JsonToken peek() throws IOException {
    if (stack.isEmpty()) {
      return JsonToken.END_DOCUMENT;
    }

    Object o = peekStack();
    if (o instanceof Iterator) {
      // The stack holds [container, iterator, ...]; -2 is the container itself.
      boolean isObject = stack.get(stack.size() - 2) instanceof JsonObject;
      Iterator<?> iterator = (Iterator<?>) o;
      if (iterator.hasNext()) {
        if (isObject) {
          return JsonToken.NAME;
        } else {
          // Array: advance the iterator now and peek at the pushed element.
          stack.add(iterator.next());
          return peek();
        }
      } else {
        return isObject ? JsonToken.END_OBJECT : JsonToken.END_ARRAY;
      }
    } else if (o instanceof JsonObject) {
      return JsonToken.BEGIN_OBJECT;
    } else if (o instanceof JsonArray) {
      return JsonToken.BEGIN_ARRAY;
    } else if (o instanceof JsonPrimitive) {
      JsonPrimitive primitive = (JsonPrimitive) o;
      if (primitive.isString()) {
        return JsonToken.STRING;
      } else if (primitive.isBoolean()) {
        return JsonToken.BOOLEAN;
      } else if (primitive.isNumber()) {
        return JsonToken.NUMBER;
      } else {
        throw new AssertionError();
      }
    } else if (o instanceof JsonNull) {
      return JsonToken.NULL;
    } else if (o == SENTINEL_CLOSED) {
      throw new IllegalStateException("JsonReader is closed");
    } else {
      throw new AssertionError();
    }
  }

  private Object peekStack() {
    return stack.get(stack.size() - 1);
  }

  private Object popStack() {
    return stack.remove(stack.size() - 1);
  }

  private void expect(JsonToken expected) throws IOException {
    if (peek() != expected) {
      throw new IllegalStateException("Expected " + expected + " but was " + peek());
    }
  }

  @Override public String nextName() throws IOException {
    expect(JsonToken.NAME);
    Iterator<?> i = (Iterator<?>) peekStack();
    Map.Entry<?, ?> entry = (Map.Entry<?, ?>) i.next();
    // Push the entry's value so the next read consumes it; return its key as the name.
    stack.add(entry.getValue());
    return (String) entry.getKey();
  }

  @Override public String nextString() throws IOException {
    JsonToken token = peek();
    // Numbers are accepted too and converted via getAsString (lenient string reads).
    if (token != JsonToken.STRING && token != JsonToken.NUMBER) {
      throw new IllegalStateException("Expected " + JsonToken.STRING + " but was " + token);
    }
    return ((JsonPrimitive) popStack()).getAsString();
  }

  @Override public boolean nextBoolean() throws IOException {
    expect(JsonToken.BOOLEAN);
    return ((JsonPrimitive) popStack()).getAsBoolean();
  }

  @Override public void nextNull() throws IOException {
    expect(JsonToken.NULL);
    popStack();
  }

  @Override public double nextDouble() throws IOException {
    JsonToken token = peek();
    if (token != JsonToken.NUMBER && token != JsonToken.STRING) {
      throw new IllegalStateException("Expected " + JsonToken.NUMBER + " but was " + token);
    }
    double result = ((JsonPrimitive) peekStack()).getAsDouble();
    // Strict mode forbids non-finite values; only pop after the check passes.
    if (!isLenient() && (Double.isNaN(result) || Double.isInfinite(result))) {
      throw new NumberFormatException("JSON forbids NaN and infinities: " + result);
    }
    popStack();
    return result;
  }

  @Override public long nextLong() throws IOException {
    JsonToken token = peek();
    if (token != JsonToken.NUMBER && token != JsonToken.STRING) {
      throw new IllegalStateException("Expected " + JsonToken.NUMBER + " but was " + token);
    }
    long result = ((JsonPrimitive) peekStack()).getAsLong();
    popStack();
    return result;
  }

  @Override public int nextInt() throws IOException {
    JsonToken token = peek();
    if (token != JsonToken.NUMBER && token != JsonToken.STRING) {
      throw new IllegalStateException("Expected " + JsonToken.NUMBER + " but was " + token);
    }
    int result = ((JsonPrimitive) peekStack()).getAsInt();
    popStack();
    return result;
  }

  @Override public void close() throws IOException {
    // Drop all walk state and leave the sentinel so further reads fail loudly.
    stack.clear();
    stack.add(SENTINEL_CLOSED);
  }

  @Override public void skipValue() throws IOException {
    if (peek() == JsonToken.NAME) {
      // Consuming the name also pushes its value, which a later read/skip consumes.
      nextName();
    } else {
      popStack();
    }
  }

  @Override public String toString() {
    return getClass().getSimpleName();
  }

  /**
   * Promotes the current property name so that it is returned as a string value by the
   * next read: pushes the entry's value, then the name (as a primitive) on top of it.
   */
  public void promoteNameToValue() throws IOException {
    expect(JsonToken.NAME);
    Iterator<?> i = (Iterator<?>) peekStack();
    Map.Entry<?, ?> entry = (Map.Entry<?, ?>) i.next();
    stack.add(entry.getValue());
    stack.add(new JsonPrimitive((String)entry.getKey()));
  }
}
package nl.tudelft.watchdog.eclipse.ui.preferences;

import java.io.IOException;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import nl.tudelft.watchdog.core.ui.preferences.PreferencesBase;
import nl.tudelft.watchdog.core.ui.preferences.ProjectPreferenceSetting;
import nl.tudelft.watchdog.core.util.WatchDogGlobals;
import nl.tudelft.watchdog.core.util.WatchDogLogger;
import nl.tudelft.watchdog.eclipse.Activator;
import nl.tudelft.watchdog.eclipse.util.WatchDogUtils;

import org.eclipse.jface.preference.IPreferenceStore;
import org.eclipse.ui.preferences.ScopedPreferenceStore;

import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;

/**
 * Utilities for accessing WatchDog's Eclipse preferences.
 */
public class Preferences implements PreferencesBase {

	/** The user's id on the WatchDog server. */
	public final static String USERID_KEY = "USERID";

	/** The URL of the WatchDog server. */
	public final static String SERVER_KEY = "SERVERURL";

	/** The number of successfully transfered intervals. */
	public final static String TRANSFERED_INTERVALS_KEY = "TRANSFERED_INTERVALS";

	/** The last date of successfully transfered intervals. */
	public final static String LAST_TRANSFERED_INTERVALS_KEY = "LAST_TRANSFERED_INTERVALS";

	/** Flag denoting whether WatchDog plugin should do logging or not. */
	public final static String LOGGING_ENABLED_KEY = "ENABLE_LOGGING";

	/** Flag denoting whether the WatchDog plugin is outdated. */
	public final static String IS_OLD_VERSION = "OLD_VERSION";

	/** Flag denoting whether there's a big update for WatchDog. */
	public final static String IS_BIG_UPDATE_AVAILABLE = "BIG_UPDATE";

	/** Flag denoting whether the user already answer the update question. */
	public final static String IS_BIG_UPDATE_ANSWERED = "BIG_UPDATE_ANSWERED";

	/** Flag denoting whether WatchDog plugin should do authentication or not. */
	public final static String AUTHENTICATION_ENABLED_KEY = "ENABLE_AUTH";

	/** A serialized List of {@link ProjectPreferenceSetting}s. */
	public final static String WORKSPACES_KEY = "WORKSPACE_SETTINGS";

	/** The type of a list of {@link ProjectPreferenceSetting}s for Gson. */
	private final static Type TYPE_WORKSPACE_SETTINGS = new TypeToken<List<ProjectPreferenceSetting>>() {
		// intentionally empty class
	}.getType();

	/** The Gson object. */
	private final static Gson GSON = new Gson();

	/** The preference store. */
	private final ScopedPreferenceStore store;

	/** The map of registered workspaces. */
	private List<ProjectPreferenceSetting> workspaceSettings = new ArrayList<ProjectPreferenceSetting>();

	/** The WatchDog preference instance. */
	private static volatile Preferences singletonInstance;

	/**
	 * Constructor internally implements a singleton, not visible to class
	 * users. The preferences are stored on a per eclipse installation basis.
	 */
	private Preferences() {
		store = (ScopedPreferenceStore) Activator.getDefault()
				.getPreferenceStore();
		store.setDefault(LOGGING_ENABLED_KEY, false);
		store.setDefault(AUTHENTICATION_ENABLED_KEY, true);
		store.setDefault(USERID_KEY, "");
		store.setDefault(SERVER_KEY, WatchDogGlobals.DEFAULT_SERVER_URI);
		store.setDefault(WORKSPACES_KEY, "");
		store.setDefault(TRANSFERED_INTERVALS_KEY, 0);
		store.setDefault(LAST_TRANSFERED_INTERVALS_KEY, "never");
		store.setDefault(IS_OLD_VERSION, false);
		store.setDefault(IS_BIG_UPDATE_ANSWERED, false);
		store.setDefault(IS_BIG_UPDATE_AVAILABLE, false);
		workspaceSettings = readSerializedWorkspaceSettings(WORKSPACES_KEY);
	}

	/**
	 * Reads and constructs the workspace-settings list from a serialized String
	 * preference key. Returns an empty list when nothing is stored yet.
	 */
	private List<ProjectPreferenceSetting> readSerializedWorkspaceSettings(
			String key) {
		// FIX: renamed misspelled local "serializedWorksapceSettings".
		String serializedWorkspaceSettings = store.getString(key);
		if (WatchDogUtils.isEmpty(serializedWorkspaceSettings)) {
			return new ArrayList<ProjectPreferenceSetting>();
		}
		return GSON.fromJson(serializedWorkspaceSettings,
				TYPE_WORKSPACE_SETTINGS);
	}

	/** Returns the singleton instance from WatchdogPreferences. */
	public static Preferences getInstance() {
		// FIX: double-checked locking; the field is already volatile, but the
		// original unsynchronized check could create two instances under
		// concurrent first access.
		if (singletonInstance == null) {
			synchronized (Preferences.class) {
				if (singletonInstance == null) {
					singletonInstance = new Preferences();
				}
			}
		}
		return singletonInstance;
	}

	/**
	 * Returns whether logging is enabled (<code>true</code>) or not (
	 * <code>false</code>).
	 */
	public boolean isLoggingEnabled() {
		return store.getBoolean(LOGGING_ENABLED_KEY);
	}

	/**
	 * Returns whether authentication on the url is enabled (<code>true</code>)
	 * or not ( <code>false</code>).
	 */
	public boolean isAuthenticationEnabled() {
		return store.getBoolean(AUTHENTICATION_ENABLED_KEY);
	}

	/** @return The userid. */
	public String getUserid() {
		return store.getString(USERID_KEY);
	}

	/** Sets the userid for the store. */
	public void setUserid(String userid) {
		store.setValue(USERID_KEY, userid);
	}

	/** @return Whether this client version is outdated. */
	public Boolean isOldVersion() {
		return store.getBoolean(IS_OLD_VERSION);
	}

	/** Sets whether this client version is outdated. */
	public void setIsOldVersion(Boolean outdated) {
		store.setValue(IS_OLD_VERSION, outdated);
	}

	/** @return Whether a big update is available for this client. */
	public Boolean isBigUpdateAvailable() {
		return store.getBoolean(IS_BIG_UPDATE_AVAILABLE);
	}

	/** Sets whether this client version has a big update available. */
	public void setBigUpdateAvailable(Boolean available) {
		store.setValue(IS_BIG_UPDATE_AVAILABLE, available);
	}

	/** @return Whether the user answered to the big update question. */
	public Boolean isBigUpdateAnswered() {
		return store.getBoolean(IS_BIG_UPDATE_ANSWERED);
	}

	/** Sets whether the user answered the big update question. */
	public void setBigUpdateAnswered(Boolean answered) {
		store.setValue(IS_BIG_UPDATE_ANSWERED, answered);
	}

	/** @return The number of successfully transfered intervals. */
	public long getIntervals() {
		return store.getLong(TRANSFERED_INTERVALS_KEY);
	}

	/** Adds the number to the transfered intervals for the store. */
	public void addTransferedIntervals(long number) {
		store.setValue(TRANSFERED_INTERVALS_KEY, getIntervals() + number);
	}

	/** @return The date of the last successful interval transfer. */
	public String getLastIntervalTransferDate() {
		return store.getString(LAST_TRANSFERED_INTERVALS_KEY);
	}

	/** Records the current date as the last successful interval transfer. */
	public void setLastTransferedInterval() {
		store.setValue(LAST_TRANSFERED_INTERVALS_KEY, new Date().toString());
	}

	/** @return The serverURL. */
	public String getServerURI() {
		return store.getString(SERVER_KEY);
	}

	/**
	 * @return <code>true</code> if this workspace has already been registered
	 *         with WatchDog, <code>false</code> otherwise. Note: This does not
	 *         say whether WatchDog should be activated, which is returned by
	 *         {@link #shouldWatchDogBeActive(String)}.
	 */
	public boolean isProjectRegistered(String workspace) {
		ProjectPreferenceSetting workspaceSetting = getProjectSetting(workspace);
		// FIX: removed the redundant "? true : false" ternary.
		return workspaceSetting != null
				&& workspaceSetting.startupQuestionAsked;
	}

	/**
	 * @return The matching {@link ProjectPreferenceSetting}, or a completely
	 *         new one in case there was no match.
	 */
	public ProjectPreferenceSetting getOrCreateProjectSetting(String workspace) {
		ProjectPreferenceSetting setting = getProjectSetting(workspace);
		if (setting == null) {
			setting = new ProjectPreferenceSetting();
			setting.project = workspace;
			workspaceSettings.add(setting);
		}
		return setting;
	}

	/**
	 * @return The matching {@link ProjectPreferenceSetting}, or
	 *         <code>null</code> in case there was no match.
	 */
	private ProjectPreferenceSetting getProjectSetting(String workspace) {
		for (ProjectPreferenceSetting setting : workspaceSettings) {
			if (setting.project.equals(workspace)) {
				return setting;
			}
		}
		return null;
	}

	/**
	 * Registers the given workspace with WatchDog. If use is <code>true</code>,
	 * WatchDog will be used.
	 */
	public void registerProjectUse(String workspace, boolean use) {
		ProjectPreferenceSetting setting = getOrCreateProjectSetting(workspace);
		setting.enableWatchdog = use;
		setting.startupQuestionAsked = true;
		storeProjectSettings();
	}

	/** Registers the given projectId with the given workspace. */
	public void registerProjectId(String workspace, String projectId) {
		ProjectPreferenceSetting setting = getOrCreateProjectSetting(workspace);
		setting.projectId = projectId;
		storeProjectSettings();
	}

	/** Updates the serialized workspace settings in the preference store. */
	private void storeProjectSettings() {
		store.setValue(WORKSPACES_KEY,
				GSON.toJson(workspaceSettings, TYPE_WORKSPACE_SETTINGS));
		try {
			store.save();
		} catch (IOException exception) {
			// If this happens, our plugin is basically not functional in this
			// client setup!
			WatchDogLogger.getInstance().logSevere(exception);
		}
	}

	/** @return The {@link IPreferenceStore} for WatchDog. */
	public IPreferenceStore getStore() {
		return store;
	}

	/** @return a list of workspace settings. */
	public List<ProjectPreferenceSetting> getProjectSettings() {
		return workspaceSettings;
	}

	/**
	 * Resets certain WatchDog values to the default which are only used
	 * internally.
	 */
	public void setDefaults() {
		store.setValue(WORKSPACES_KEY, "");
		store.setValue(TRANSFERED_INTERVALS_KEY, 0);
		store.setValue(LAST_TRANSFERED_INTERVALS_KEY, "never");
		store.setValue(IS_OLD_VERSION, false);
		store.setValue(IS_BIG_UPDATE_ANSWERED, false);
		store.setValue(IS_BIG_UPDATE_AVAILABLE, false);
	}
}
/** * The MIT License (MIT) * * Copyright (c) 2011-2016 Incapture Technologies LLC * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package rapture.repo; import java.util.List; import java.util.Map; import rapture.common.CallingContext; import rapture.common.ForeignKey; import rapture.common.StoredProcedureParams; import rapture.common.StoredProcedureResponse; import rapture.common.TableIndex; import rapture.common.TableMeta; import rapture.structured.StructuredStore; /** * Created by seanchen on 7/1/15. 
*/
public class StructuredRepo {

    // Backing structured store; every operation on this repository delegates to it.
    private StructuredStore store;

    /**
     * Creates a repository facade over the given structured store.
     *
     * @param store the store that all calls are delegated to
     */
    public StructuredRepo (StructuredStore store) {
        this.store = store;
    }

    /** Drops the entire underlying store. */
    public void drop(){
        store.drop();
    }

    /** Commits the transaction with the given id; returns true on success. */
    public boolean commit(String txId){
        return store.commit(txId);
    }

    /** Rolls back the transaction with the given id; returns true on success. */
    public boolean rollback(String txId){
        return store.rollback(txId);
    }

    /** Creates a table from a raw SQL DDL statement. */
    public Boolean createTableUsingSql(CallingContext context, String rawSql){
        return store.createTableUsingSql(context, rawSql);
    }

    /** Creates a table with the given column-name to column-type mapping. */
    public Boolean createTable(String tableName, Map<String, String> columns){
        return store.createTable(tableName, columns);
    }

    /** Drops the named table. */
    public Boolean dropTable(String tableName){
        return store.dropTable(tableName);
    }

    /** Returns true if the named table exists in the store. */
    public Boolean tableExists(String tableName){
        return store.tableExists(tableName);
    }

    /** Lists the names of all tables in the store. */
    public List<String> getTables() {
        return store.getTables();
    }

    /** Returns the metadata (columns, types, ...) of the named table. */
    public TableMeta describeTable(String tableName){
        return store.describeTable(tableName);
    }

    /** Adds the given columns (name to type) to an existing table. */
    public Boolean addTableColumns(String tableName, Map<String, String> columns){
        // TODO RAP-3141: Does this count as a write???
        return store.addTableColumns(tableName, columns);
    }

    /** Removes the named columns from an existing table. */
    public Boolean deleteTableColumns(String tableName, List<String> columnNames){
        return store.deleteTableColumns(tableName, columnNames);
    }

    /** Changes the types of existing columns (name to new type). */
    public Boolean updateTableColumns(String tableName, Map<String, String> columns){
        // TODO RAP-3141: Does this count as a write???
        return store.updateTableColumns(tableName, columns);
    }

    /** Renames columns; the map is old-name to new-name. */
    public Boolean renameTableColumns(String tableName, Map<String, String> columnNames){
        return store.renameTableColumns(tableName, columnNames);
    }

    /** Creates an index over the given columns of a table. */
    public Boolean createIndex(String tableName, String indexName, List<String> columnNames){
        return store.createIndex(tableName, indexName, columnNames);
    }

    /** Drops the named index. */
    public Boolean dropIndex(String indexName){
        return store.dropIndex(indexName);
    }

    /** Lists the indexes defined on the given table. */
    public List<TableIndex> getIndexes(String tablename) {
        return store.getIndexes(tablename);
    }

    /** Returns the primary key of the named table. */
    public String getPrimaryKey(String tableName) {
        return store.getPrimaryKey(tableName);
    }

    /** Lists the foreign keys defined on the named table. */
    public List<ForeignKey> getForeignKeys(String tableName) {
        return store.getForeignKeys(tableName);
    }

    /** Runs a join-select across multiple tables; each row is a column-name to value map. */
    public List<Map<String, Object>> selectJoinedRows(List<String> tables, List<String> columnNames,
            String from, String where, List<String> order, Boolean ascending, int limit){
        return store.selectJoinedRows(tables, columnNames, from, where, order, ascending, limit);
    }

    /** Runs a raw SQL select; each row is a column-name to value map. */
    public List<Map<String, Object>> selectUsingSql(CallingContext context, String rawSql){
        return store.selectUsingSql(context, rawSql);
    }

    /** Selects rows from one table; each row is a column-name to value map. */
    public List<Map<String, Object>> selectRows(String tableName, List<String> columnNames,
            String where, List<String> order, Boolean ascending, int limit){
        return store.selectRows(tableName, columnNames, where, order, ascending, limit);
    }

    /** Inserts data using a raw SQL statement. */
    public Boolean insertUsingSql(CallingContext context, String rawSql){
        // TODO RAP-3141: Figure out how to measure data size and report to graphite
        return store.insertUsingSql(context, rawSql);
    }

    /** Inserts a single row (column-name to value). */
    public Boolean insertRow(String tableName, Map<String, Object> values){
        // TODO RAP-3141: Figure out how to measure data size and report to graphite
        return store.insertRow(tableName, values);
    }

    /** Inserts multiple rows in one call. */
    public Boolean insertRows(String tableName, List<Map<String, Object>> values){
        // TODO RAP-3141: Figure out how to measure data size and report to graphite
        return store.insertRows(tableName, values);
    }

    /** Updates data using a raw SQL statement. */
    public Boolean updateUsingSql(CallingContext context, String rawSql){
        // TODO RAP-3141: Does this count as a write???
        return store.updateUsingSql(context, rawSql);
    }

    /** Deletes data using a raw SQL statement. */
    public Boolean deleteUsingSql(CallingContext context, String rawSql){
        return store.deleteUsingSql(context, rawSql);
    }

    /** Updates rows matching {@code where} with the given column values. */
    public Boolean updateRows(String tableName, Map<String, Object> values, String where){
        // TODO RAP-3141: Does this count as a write???
        return store.updateRows(tableName, values, where);
    }

    /** Deletes rows matching {@code where}. */
    public Boolean deleteRows(String tableName, String where){
        return store.deleteRows(tableName, where);
    }

    /** Returns the DDL for the named table, optionally including its data. */
    public String getDdl(String tableName, Boolean includeTableData){
        return store.getDdl(tableName, includeTableData);
    }

    /** Executes a DDL statement; {@code alter} flags an ALTER (vs CREATE) statement. */
    public void executeDdl(String ddl, boolean alter) {
        store.executeDdl(ddl, alter);
    }

    /** Opens a cursor over a raw SQL query; returns the cursor id. */
    public String getCursorUsingSql(CallingContext context, String rawSql){
        return store.getCursorUsingSql(context, rawSql);
    }

    /** Opens a cursor over a single-table select; returns the cursor id. */
    public String getCursor(String tableName, List<String> columnNames, String where,
            List<String> order, Boolean ascending, int limit){
        return store.getCursor(tableName, columnNames, where, order, ascending, limit);
    }

    /** Opens a cursor over a join-select; returns the cursor id. */
    public String getCursorForJoin(List<String> tables, List<String> columnNames, String from,
            String where, List<String> order, Boolean ascending, int limit){
        return store.getCursorForJoin(tables, columnNames, from, where, order, ascending, limit);
    }

    /** Fetches the next {@code count} rows from an open cursor. */
    public List<Map<String, Object>> next(String tableName, String cursorId, int count){
        return store.next(tableName, cursorId, count);
    }

    /** Fetches the previous {@code count} rows from an open cursor. */
    public List<Map<String, Object>> previous(String tableName, String cursorId, int count){
        return store.previous(tableName, cursorId, count);
    }

    /** Closes an open cursor. */
    public Boolean closeCursor(String tableName, String cursorId){
        return store.closeCursor(tableName, cursorId);
    }

    /** Creates a stored procedure from raw SQL. */
    public Boolean createProcedureCallUsingSql(CallingContext context, String rawSql){
        return store.createProcedureCallUsingSql(context, rawSql);
    }

    /** Calls a stored procedure with the given parameters. */
    public StoredProcedureResponse callProcedure(CallingContext context, String procName, StoredProcedureParams params){
        return store.callProcedure(context, procName, params);
    }

    /** Drops a stored procedure using raw SQL. */
    public Boolean dropProcedureUsingSql(CallingContext context, String rawSql){
        return store.dropProcedureUsingSql(context, rawSql);
    }
}
/*
 * Copyright 2000-2014 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.jetbrains.python.psi.impl.references;

import com.intellij.codeInsight.lookup.LookupElement;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.util.Comparing;
import com.intellij.psi.PsiElement;
import com.intellij.psi.util.PsiTreeUtil;
import com.jetbrains.python.PyNames;
import com.jetbrains.python.psi.*;
import com.jetbrains.python.psi.impl.PyKeywordArgumentProvider;
import com.jetbrains.python.psi.resolve.PyResolveContext;
import com.jetbrains.python.psi.resolve.QualifiedResolveResult;
import com.jetbrains.python.psi.search.PySuperMethodsSearch;
import com.jetbrains.python.psi.types.TypeEvalContext;
import org.jetbrains.annotations.NotNull;

import java.util.Collection;
import java.util.HashSet;
import java.util.List;

/**
 * Collects keyword-argument completion variants for a Python call expression by
 * inspecting the resolved callee's parameter list (and, for classes, its __init__).
 */
public class KeywordArgumentCompletionUtil {

  /**
   * Resolves the callee of the call expression enclosing {@code element} and adds
   * lookup items for its keyword arguments to {@code ret}. For a class callee, the
   * parameters of its __init__ (searched in superclasses too) are used.
   */
  public static void collectFunctionArgNames(PyElement element, List<LookupElement> ret, final @NotNull TypeEvalContext context) {
    PyCallExpression callExpr = PsiTreeUtil.getParentOfType(element, PyCallExpression.class);
    if (callExpr != null) {
      PyExpression callee = callExpr.getCallee();
      // Only complete when the caret element actually sits in the call's argument list.
      if (callee instanceof PyReferenceExpression && element.getParent() == callExpr.getArgumentList()) {
        final PyResolveContext resolveContext = PyResolveContext.defaultContext().withTypeEvalContext(context);
        // Follow assignment chains so aliased callables still resolve to the real function.
        final QualifiedResolveResult result = ((PyReferenceExpression)callee).followAssignmentsChain(resolveContext);
        PsiElement def = result.getElement();
        if (def instanceof PyFunction) {
          addKeywordArgumentVariants((PyFunction)def, callExpr, ret);
        }
        else if (def instanceof PyClass) {
          PyFunction init = ((PyClass)def).findMethodByName(PyNames.INIT, true);  // search in superclasses
          if (init != null) {
            addKeywordArgumentVariants(init, callExpr, ret);
          }
        }
      }
    }
  }

  /** Entry point that starts with an empty visited set (recursion guard). */
  public static void addKeywordArgumentVariants(PyFunction def, PyCallExpression callExpr, final List<LookupElement> ret) {
    addKeywordArgumentVariants(def, callExpr, ret, new HashSet<PyFunction>());
  }

  /**
   * Adds keyword-argument lookup items for {@code def}'s parameters. When the function
   * declares **kwargs, also consults {@link PyKeywordArgumentProvider} extensions,
   * scans the body for kwargs accesses, and may walk up to the super method.
   * {@code visited} prevents infinite recursion through the super-method chain.
   */
  public static void addKeywordArgumentVariants(PyFunction def, PyCallExpression callExpr, List<LookupElement> ret,
                                                Collection<PyFunction> visited) {
    if (visited.contains(def)) {
      return;
    }
    visited.add(def);
    // Skip 'self' only for instance/class methods, not for static methods.
    boolean needSelf = def.getContainingClass() != null && def.getModifier() != PyFunction.Modifier.STATICMETHOD;
    final KwArgParameterCollector collector = new KwArgParameterCollector(needSelf, ret);
    final TypeEvalContext context = TypeEvalContext.codeCompletion(def.getProject(), def.getContainingFile());
    final List<PyParameter> parameters = PyUtil.getParameters(def, context);
    for (PyParameter parameter : parameters) {
      parameter.accept(collector);
    }
    if (collector.hasKwArgs()) {
      // The function accepts **kwargs: extension points may know extra valid keywords.
      for (PyKeywordArgumentProvider provider : Extensions.getExtensions(PyKeywordArgumentProvider.EP_NAME)) {
        final List<String> arguments = provider.getKeywordArguments(def, callExpr);
        for (String argument : arguments) {
          ret.add(PyUtil.createNamedParameterLookup(argument, callExpr.getProject()));
        }
      }
      // Also mine the body for kwargs.get(...)/kwargs['key'] style accesses.
      KwArgFromStatementCallCollector fromStatementCallCollector = new KwArgFromStatementCallCollector(ret, collector.getKwArgs());
      final PyStatementList statementList = def.getStatementList();
      if (statementList != null) statementList.acceptChildren(fromStatementCallCollector);

      //if (collector.hasOnlySelfAndKwArgs()) {
      // nothing interesting besides self and **kwargs, let's look at superclass (PY-778)
      if (fromStatementCallCollector.isKwArgsTransit()) {
        // kwargs is passed through untouched — the super method defines the real keywords.
        final PsiElement superMethod = PySuperMethodsSearch.search(def).findFirst();
        if (superMethod instanceof PyFunction) {
          addKeywordArgumentVariants((PyFunction)superMethod, callExpr, ret, visited);
        }
      }
    }
    //}
  }

  /**
   * Visitor over a function's parameters: collects named parameters as lookup items,
   * records whether a **kwargs container exists, and optionally skips the first
   * (self) parameter.
   */
  public static class KwArgParameterCollector extends PyElementVisitor {
    private int myCount;                       // parameters seen so far (1-based after increment)
    private final boolean myNeedSelf;          // whether parameter #1 is an implicit self/cls
    private final List<LookupElement> myRet;   // sink for generated lookup items
    private boolean myHasSelf = false;
    private boolean myHasKwArgs = false;
    private PyParameter kwArgsParam = null;    // the **kwargs parameter, if any

    public KwArgParameterCollector(boolean needSelf, List<LookupElement> ret) {
      myNeedSelf = needSelf;
      myRet = ret;
    }

    @Override
    public void visitPyParameter(PyParameter par) {
      myCount++;
      if (myCount == 1 && myNeedSelf) {
        // Don't offer 'self' as a keyword argument.
        myHasSelf = true;
        return;
      }
      PyNamedParameter namedParam = par.getAsNamed();
      if (namedParam != null) {
        if (!namedParam.isKeywordContainer() && !namedParam.isPositionalContainer()) {
          // Plain named parameter -> direct keyword-argument candidate.
          final LookupElement item = PyUtil.createNamedParameterLookup(namedParam.getName(), par.getProject());
          myRet.add(item);
        }
        else if (namedParam.isKeywordContainer()) {
          myHasKwArgs = true;
          kwArgsParam = namedParam;
        }
      }
      else {
        PyTupleParameter nestedTParam = par.getAsTuple();
        if (nestedTParam != null) {
          // Py2 tuple parameters: recurse into the nested names.
          for (PyParameter inner_par : nestedTParam.getContents()) inner_par.accept(this);
        }
        // else it's a lone star that can't contribute
      }
    }

    /** @return the **kwargs parameter, or null if the function has none */
    public PyParameter getKwArgs() {
      return kwArgsParam;
    }

    public boolean hasKwArgs() {
      return myHasKwArgs;
    }

    /** @return true when the signature is exactly (self, **kwargs) */
    public boolean hasOnlySelfAndKwArgs() {
      return myCount == 2 && myHasSelf && myHasKwArgs;
    }
  }

  /**
   * Visitor over a function body that finds keyword names read out of the **kwargs
   * parameter (kwargs['x'], kwargs.get('x'), kwargs.pop('x'), getattr(...)) and
   * tracks whether kwargs is forwarded verbatim to a super __init__ call.
   */
  public static class KwArgFromStatementCallCollector extends PyElementVisitor {
    private final List<LookupElement> myRet;
    private final PyParameter myKwArgs;
    // True while kwargs appears to be passed straight through; set false on an
    // __init__ call and back to true only if **kwargs itself is forwarded.
    private boolean kwArgsTransit = true;

    public KwArgFromStatementCallCollector(List<LookupElement> ret, @NotNull PyParameter kwArgs) {
      myRet = ret;
      this.myKwArgs = kwArgs;
    }

    @Override
    public void visitPyElement(PyElement node) {
      node.acceptChildren(this);
    }

    @Override
    public void visitPySubscriptionExpression(PySubscriptionExpression node) {
      // kwargs['key'] style access.
      String operandName = node.getOperand().getName();
      processGet(operandName, node.getIndexExpression());
    }

    @Override
    public void visitPyCallExpression(PyCallExpression node) {
      if (node.isCalleeText("pop", "get", "getattr")) {
        // kwargs.get('key', ...) / kwargs.pop('key') / getattr-style access.
        PyReferenceExpression child = PsiTreeUtil.getChildOfType(node.getCallee(), PyReferenceExpression.class);
        if (child != null) {
          String operandName = child.getName();
          if (node.getArguments().length > 0) {
            PyExpression argument = node.getArguments()[0];
            processGet(operandName, argument);
          }
        }
      }
      else if (node.isCalleeText("__init__")) {
        kwArgsTransit = false;
        for (PyExpression e : node.getArguments()) {
          if (e instanceof PyStarArgument) {
            PyStarArgument kw = (PyStarArgument)e;
            // NOTE(review): extracts the argument name via raw PSI children
            // (first child's next sibling) — fragile against PSI shape changes.
            if (Comparing.equal(myKwArgs.getName(), kw.getFirstChild().getNextSibling().getText())) {
              kwArgsTransit = true;
              break;
            }
          }
        }
      }
      super.visitPyCallExpression(node);
    }

    /** Registers a keyword candidate when {@code operandName} is the kwargs param and the key is a string literal. */
    private void processGet(String operandName, PyExpression argument) {
      if (Comparing.equal(myKwArgs.getName(), operandName) &&
          argument instanceof PyStringLiteralExpression) {
        String name = ((PyStringLiteralExpression)argument).getStringValue();
        if (PyUtil.isPythonIdentifier(name)) {
          myRet.add(PyUtil.createNamedParameterLookup(name, argument.getProject()));
        }
      }
    }

    /**
     * Whether the **kwargs parameter is forwarded unchanged to a super __init__ call.
     *
     * @return true if kwargs is transit-only (so the super method's keywords apply)
     */
    public boolean isKwArgsTransit() {
      return kwArgsTransit;
    }
  }
}
// The MIT License(MIT) // // Copyright(c) 2016 Kevin Krol // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in all // copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE // SOFTWARE. 
package com.snakybo.torch.graphics.shader;

import com.snakybo.torch.asset.AssetData;
import com.snakybo.torch.util.FileUtils;
import com.snakybo.torch.util.debug.Logger;
import com.snakybo.torch.util.debug.LoggerInternal;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.lwjgl.opengl.GL20.GL_COMPILE_STATUS;
import static org.lwjgl.opengl.GL20.GL_FRAGMENT_SHADER;
import static org.lwjgl.opengl.GL20.GL_LINK_STATUS;
import static org.lwjgl.opengl.GL20.GL_VALIDATE_STATUS;
import static org.lwjgl.opengl.GL20.GL_VERTEX_SHADER;
import static org.lwjgl.opengl.GL20.glAttachShader;
import static org.lwjgl.opengl.GL20.glCompileShader;
import static org.lwjgl.opengl.GL20.glCreateProgram;
import static org.lwjgl.opengl.GL20.glCreateShader;
import static org.lwjgl.opengl.GL20.glDeleteProgram;
import static org.lwjgl.opengl.GL20.glDeleteShader;
import static org.lwjgl.opengl.GL20.glDetachShader;
import static org.lwjgl.opengl.GL20.glGetProgramInfoLog;
import static org.lwjgl.opengl.GL20.glGetProgrami;
import static org.lwjgl.opengl.GL20.glGetShaderInfoLog;
import static org.lwjgl.opengl.GL20.glGetShaderi;
import static org.lwjgl.opengl.GL20.glGetUniformLocation;
import static org.lwjgl.opengl.GL20.glLinkProgram;
import static org.lwjgl.opengl.GL20.glShaderSource;
import static org.lwjgl.opengl.GL20.glValidateProgram;
import static org.lwjgl.opengl.GL32.GL_GEOMETRY_SHADER;
import static org.lwjgl.opengl.GL40.GL_TESS_CONTROL_SHADER;
import static org.lwjgl.opengl.GL40.GL_TESS_EVALUATION_SHADER;
import static org.lwjgl.opengl.GL43.GL_COMPUTE_SHADER;
import static org.lwjgl.system.MemoryUtil.NULL;

/**
 * Backing data for a shader asset: owns the GL program object, compiles every
 * stage found in a single source file (selected via {@code #ifdef <STAGE>_PASS}
 * markers), links it, and discovers the program's uniforms by scanning the source.
 *
 * @author Snakybo
 * @since 1.0
 */
final class ShaderAsset extends AssetData
{
	/** A (GLSL type, uniform name) pair parsed from a source line. */
	private static class Uniform
	{
		public final String type;
		public final String name;

		public Uniform(String type, String name)
		{
			this.type = type;
			this.name = name;
		}
	}

	/** Registry of every live shader asset, keyed by asset name. */
	static Map<String, ShaderAsset> all = new HashMap<>();

	/** Uniform name to GL uniform location. */
	Map<String, Integer> uniforms;

	/** Uniform name to GLSL type string. */
	Map<String, String> uniformTypes;

	/** Shader object ids currently attached to the program. */
	List<Integer> attachedShaders;

	/** The GL program object id; 0 means creation failed. */
	int programId;

	ShaderAsset(String name)
	{
		super(name);

		uniforms = new HashMap<>();
		uniformTypes = new HashMap<>();
		attachedShaders = new ArrayList<>();

		programId = glCreateProgram();

		// glCreateProgram() returns the int 0 on failure; comparing against
		// MemoryUtil.NULL (a long pointer constant) only worked by accident.
		if(programId == 0)
		{
			Logger.logError("Unable to create shader program");
		}

		if(name != null && !name.isEmpty())
		{
			all.put(name, this);
		}
	}

	@Override
	public final void destroy()
	{
		if(name != null && !name.isEmpty())
		{
			all.remove(name);
		}

		// Detach and delete every attached stage before deleting the program.
		for(Integer shader : attachedShaders)
		{
			glDetachShader(programId, shader);
			glDeleteShader(shader);
		}

		glDeleteProgram(programId);
	}

	/**
	 * Loads the shader source file (falling back to the {@code torch_internal/}
	 * directory), compiles every stage present in it, links the program, and
	 * registers the uniforms declared in the source.
	 */
	final void init()
	{
		String source = null;

		try
		{
			String target = name;

			try
			{
				FileUtils.toURI(target);
			}
			catch(NoSuchFileException ex)
			{
				// Not found as-is: try the engine-internal asset directory.
				target = "torch_internal/" + target;
			}

			Path p = Paths.get(FileUtils.toURI(target));
			List<String> sourceLines = Files.readAllLines(p);
			StringBuilder stringBuilder = new StringBuilder();

			for(String line : sourceLines)
			{
				stringBuilder.append(line).append("\n");
			}

			source = stringBuilder.toString();
		}
		catch(IOException e)
		{
			Logger.logError(e.toString(), e);
		}

		// Fix: the original fell through with source == null after an I/O error
		// and crashed with an NPE inside parseShader().
		if(source == null)
		{
			return;
		}

		createShader(GL_VERTEX_SHADER, name, parseShader(source, "VERTEX_PASS"));
		createShader(GL_FRAGMENT_SHADER, name, parseShader(source, "FRAGMENT_PASS"));
		createShader(GL_GEOMETRY_SHADER, name, parseShader(source, "GEOMETRY_PASS"));
		createShader(GL_COMPUTE_SHADER, name, parseShader(source, "COMPUTE_PASS"));
		createShader(GL_TESS_CONTROL_SHADER, name, parseShader(source, "TESS_CONTROL_PASS"));
		createShader(GL_TESS_EVALUATION_SHADER, name, parseShader(source, "TESS_EVAL_PASS"));

		link();
		addUniforms(name, source);
	}

	/**
	 * Compiles one shader stage and attaches it to the program.
	 * A stage with empty source (its pass marker was absent) is skipped.
	 *
	 * @param type the GL shader type (GL_VERTEX_SHADER, ...)
	 * @param name the asset name, used for error reporting only
	 * @param source the preprocessed stage source, or "" to skip
	 */
	private void createShader(int type, String name, String source)
	{
		if(source.length() > 0)
		{
			int id = glCreateShader(type);

			// glCreateShader() returns 0 on failure (int object id, not a pointer).
			if(id == 0)
			{
				Logger.logError("Unable to create shader");
				return;
			}

			glShaderSource(id, source);
			glCompileShader(id);

			// 0 == GL_FALSE: compilation failed.
			if(glGetShaderi(id, GL_COMPILE_STATUS) == 0)
			{
				Logger.logError("Unable to compile shader source: " + name + ": " + glGetShaderInfoLog(id, 1024));
				Logger.logError("Source dump: \n" + dumpShaderSource(source));

				// Fix: the failed shader object was previously leaked.
				glDeleteShader(id);
				return;
			}

			glAttachShader(programId, id);
			attachedShaders.add(id);
		}
	}

	/** Links and validates the program, logging the info log on failure. */
	private void link()
	{
		glLinkProgram(programId);

		// 0 == GL_FALSE for link/validate status.
		if(glGetProgrami(programId, GL_LINK_STATUS) == 0)
		{
			Logger.logError("Unable to link shader program: " + glGetProgramInfoLog(programId, 1024));
			return;
		}

		glValidateProgram(programId);

		if(glGetProgrami(programId, GL_VALIDATE_STATUS) == 0)
		{
			Logger.logError("Unable to validate shader program: " + glGetProgramInfoLog(programId, 1024));
		}
	}

	/**
	 * Scans the source for {@code uniform ...} declarations and registers each
	 * one (expanding structs and arrays into their individual members/elements).
	 */
	private void addUniforms(String uri, String source)
	{
		String[] lines = source.split("\n");

		for(String line : lines)
		{
			if(line.startsWith("uniform "))
			{
				Uniform uniform = getUniformFromLine(line);

				// Renamed from "uniforms": the old local shadowed the field of the same name.
				Iterable<Uniform> expanded = getSubUniform(uniform.type, uniform.name, source);
				for(Uniform u : expanded)
				{
					addUniform(uri, u.type, u.name);
				}
			}
		}
	}

	/**
	 * Looks up the GL location of one uniform and records it.
	 *
	 * @throws RuntimeException if GL reports no such uniform. NOTE(review): GL
	 *         also returns -1 for declared-but-unused uniforms the compiler
	 *         optimized out, so this can fire on valid shaders — confirm intended.
	 */
	private void addUniform(String uri, String type, String name)
	{
		int location = glGetUniformLocation(programId, name);

		if(location < 0)
		{
			throw new RuntimeException("Unable to find a uniform with name: " + name);
		}

		uniforms.put(name, location);
		uniformTypes.put(name, type);

		LoggerInternal.log("Added uniform: (" + type + ") " + name + " to shader: " + FileUtils.getSimpleName(uri));
	}

	/**
	 * Expands a uniform declaration into concrete uniforms: a struct-typed
	 * uniform becomes one entry per member ("name.member"), and an array
	 * becomes one entry per element ("name[i]").
	 */
	private Iterable<Uniform> getSubUniform(String type, String name, String source)
	{
		List<Uniform> result = new ArrayList<>();

		if(source.contains("struct " + type))
		{
			// Extract the struct body between its braces and parse each member line.
			int startIndex = source.indexOf("struct " + type);
			startIndex = source.indexOf('{', startIndex);
			int endIndex = source.indexOf('}', startIndex);

			String struct = source.substring(startIndex + 1, endIndex);
			String[] lines = struct.split("\n");

			for(String line : lines)
			{
				if(line.isEmpty())
				{
					continue;
				}

				Uniform uniform = getUniformFromLine(line);
				Iterable<String> uniformNames = getArrayUniformNames(name);

				for(String n : uniformNames)
				{
					result.add(new Uniform(uniform.type, n + "." + uniform.name));
				}
			}
		}
		else
		{
			Iterable<String> uniformNames = getArrayUniformNames(name);

			for(String n : uniformNames)
			{
				result.add(new Uniform(type, n));
			}
		}

		return result;
	}

	/**
	 * Expands "name[N]" into "name[0]" .. "name[N-1]"; a non-array name is
	 * returned unchanged as a single-element list.
	 */
	private Iterable<String> getArrayUniformNames(String name)
	{
		List<String> result = new ArrayList<>();

		if(name.endsWith("]"))
		{
			int arrayStart = name.indexOf('[');
			int arrayEnd = name.length();

			String size = name.substring(arrayStart + 1, arrayEnd - 1);
			int sizeInt = Integer.parseInt(size);

			name = name.substring(0, arrayStart);

			for(int i = 0; i < sizeInt; i++)
			{
				result.add(name + "[" + i + "]");
			}
		}
		else
		{
			result.add(name);
		}

		return result;
	}

	/**
	 * Parses "uniform <type> <name>;" or "<type> <name>;" (struct member) into a
	 * {@link Uniform}. Assumes exactly one space between type and name and a
	 * trailing semicolon.
	 */
	private Uniform getUniformFromLine(String line)
	{
		String[] segments;
		line = line.trim();

		if(line.startsWith("uniform"))
		{
			segments = line.substring("uniform ".length(), line.length() - 1).split(" ");
		}
		else
		{
			segments = line.substring(0, line.length() - 1).split(" ");
		}

		return new Uniform(segments[0], segments[1]);
	}

	/**
	 * Produces the stage-specific source: if the file contains an
	 * {@code #ifdef keyword} section, prepends the GLSL version directive and a
	 * {@code #define} enabling that section; otherwise returns "" (stage absent).
	 */
	private String parseShader(String source, String keyword)
	{
		if(source.contains("#ifdef " + keyword))
		{
			return "#version " + GLSLVersion.GLSL_VERSION + "\n\n#define " + keyword + "\n\n" + source;
		}

		return "";
	}

	/** Returns the source with 1-based line numbers prefixed, for error logs. */
	private String dumpShaderSource(String source)
	{
		String[] lines = source.split("\n");
		StringBuilder prefixedSource = new StringBuilder();

		for(int i = 0; i < lines.length; i++)
		{
			prefixedSource.append(i + 1).append(". ").append(lines[i]).append("\n");
		}

		return prefixedSource.toString();
	}
}
/*
 * Copyright (c) 2019, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * WSO2 Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package io.siddhi.core.query.table.cache;

import io.siddhi.core.SiddhiAppRuntime;
import io.siddhi.core.SiddhiManager;
import io.siddhi.core.event.Event;
import io.siddhi.core.exception.StoreQueryCreationException;
import io.siddhi.core.stream.input.InputHandler;
import io.siddhi.core.util.EventPrinter;
import io.siddhi.query.api.definition.Attribute;
import io.siddhi.query.compiler.SiddhiCompiler;
import io.siddhi.query.compiler.exception.SiddhiParserException;
import org.apache.log4j.Logger;
import org.testng.Assert;
import org.testng.AssertJUnit;
import org.testng.annotations.Test;

/**
 * Store-query (on-demand query) tests against a table backed by the
 * "testStoreDummyForCache" store with a cache of size 10. Each test defines a
 * stream feeding the table, sends a few events, then runs queries directly on
 * the table via {@code SiddhiAppRuntime.query(...)}.
 */
public class QueryAPITestCaseForTableWithCache {
    private static final Logger log = Logger.getLogger(QueryAPITestCaseForTableWithCache.class);

    // Plain select plus numeric filter conditions ("on price > ...").
    @Test
    public void test0() throws InterruptedException {
        log.info("Test0 table with cache");
        SiddhiManager siddhiManager = new SiddhiManager();
        String streams = "" +
                "define stream StockStream (symbol string, price float, volume long); " +
                "@Store(type=\"testStoreDummyForCache\", @Cache(size=\"10\"))\n" +
                "define table StockTable (symbol string, price float, volume long); ";
        String query = "" +
                "@info(name = 'query1') " +
                "from StockStream " +
                "insert into StockTable ;";
        SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams + query);
        InputHandler stockStream = siddhiAppRuntime.getInputHandler("StockStream");
        siddhiAppRuntime.start();
        stockStream.send(new Object[]{"WSO2", 55.6f, 100L});
        stockStream.send(new Object[]{"IBM", 75.6f, 100L});
        stockStream.send(new Object[]{"WSO2", 57.6f, 200L});
        Thread.sleep(500);  // let async inserts reach the table before querying

        Event[] events = siddhiAppRuntime.query("" +
                "from StockTable ");
        EventPrinter.print(events);
        AssertJUnit.assertEquals(3, events.length);

        events = siddhiAppRuntime.query("" +
                "from StockTable " +
                "on price > 75 ");
        EventPrinter.print(events);
        AssertJUnit.assertEquals(1, events.length);

        // Condition referencing two attributes of the same row.
        events = siddhiAppRuntime.query("" +
                "from StockTable " +
                "on price > volume*3/4 ");
        EventPrinter.print(events);
        AssertJUnit.assertEquals(1, events.length);
        siddhiAppRuntime.shutdown();
    }

    // Same queries as test0 but with three distinct symbols in the table.
    @Test
    public void test1() throws InterruptedException {
        log.info("Test1 table with cache");
        SiddhiManager siddhiManager = new SiddhiManager();
        String streams = "" +
                "define stream StockStream (symbol string, price float, volume long); " +
                "@Store(type=\"testStoreDummyForCache\", @Cache(size=\"10\"))\n" +
                "define table StockTable (symbol string, price float, volume long); ";
        String query = "" +
                "@info(name = 'query1') " +
                "from StockStream " +
                "insert into StockTable ;";
        SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams + query);
        InputHandler stockStream = siddhiAppRuntime.getInputHandler("StockStream");
        siddhiAppRuntime.start();
        stockStream.send(new Object[]{"WSO2", 55.6f, 100L});
        stockStream.send(new Object[]{"IBM", 75.6f, 100L});
        stockStream.send(new Object[]{"WSO3", 57.6f, 100L});
        Thread.sleep(500);

        Event[] events = siddhiAppRuntime.query("" +
                "from StockTable ");
        EventPrinter.print(events);
        AssertJUnit.assertEquals(3, events.length);

        events = siddhiAppRuntime.query("" +
                "from StockTable " +
                "on price > 75 ");
        EventPrinter.print(events);
        AssertJUnit.assertEquals(1, events.length);

        events = siddhiAppRuntime.query("" +
                "from StockTable " +
                "on price > volume*3/4 ");
        EventPrinter.print(events);
        AssertJUnit.assertEquals(1, events.length);
        siddhiAppRuntime.shutdown();
    }

    // Projection ("select symbol, volume") and group-by/having on a store query.
    @Test
    public void test2() throws InterruptedException {
        log.info("Test2 table with cache");
        SiddhiManager siddhiManager = new SiddhiManager();
        String streams = "" +
                "define stream StockStream (symbol string, price float, volume long); " +
                "@Store(type=\"testStoreDummyForCache\", @Cache(size=\"10\"))\n" +
                "define table StockTable (symbol string, price float, volume long); ";
        String query = "" +
                "@info(name = 'query1') " +
                "from StockStream " +
                "insert into StockTable ;";
        SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams + query);
        InputHandler stockStream = siddhiAppRuntime.getInputHandler("StockStream");
        siddhiAppRuntime.start();
        stockStream.send(new Object[]{"WSO2", 55.6f, 100L});
        stockStream.send(new Object[]{"IBM", 75.6f, 100L});
        stockStream.send(new Object[]{"WSO2", 57.6f, 100L});
        Thread.sleep(500);

        Event[] events = siddhiAppRuntime.query("" +
                "from StockTable " +
                "on price > 75 " +
                "select symbol, volume ");
        EventPrinter.print(events);
        AssertJUnit.assertEquals(1, events.length);
        Object[] eventData = events[0].getData();
        // Projection keeps only the two selected attributes.
        AssertJUnit.assertEquals(2, events[0].getData().length);

        events = siddhiAppRuntime.query("" +
                "from StockTable " +
                "select symbol, volume ");
        EventPrinter.print(events);
        AssertJUnit.assertEquals(3, events.length);
        AssertJUnit.assertEquals(2, events[0].getData().length);

        events = siddhiAppRuntime.query("" +
                "from StockTable " +
                "on price > 5 " +
                "select symbol " +
                "group by symbol " +
                "having symbol == 'WSO2' ");
        EventPrinter.print(events);
        AssertJUnit.assertEquals(1, events.length);
        siddhiAppRuntime.shutdown();
    }

    // Aggregation (sum) with different group-by keys.
    @Test
    public void test3() throws InterruptedException {
        log.info("Test3 table with cache");
        SiddhiManager siddhiManager = new SiddhiManager();
        String streams = "" +
                "define stream StockStream (symbol string, price float, volume long); " +
                "@Store(type=\"testStoreDummyForCache\", @Cache(size=\"10\"))\n" +
                "define table StockTable (symbol string, price float, volume long); ";
        String query = "" +
                "@info(name = 'query1') " +
                "from StockStream " +
                "insert into StockTable ;";
        SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams + query);
        InputHandler stockStream = siddhiAppRuntime.getInputHandler("StockStream");
        siddhiAppRuntime.start();
        stockStream.send(new Object[]{"WSO2", 55.6f, 100L});
        stockStream.send(new Object[]{"IBM", 75.6f, 100L});
        stockStream.send(new Object[]{"WSO2", 57.6f, 100L});
        Thread.sleep(500);

        Event[] events = siddhiAppRuntime.query("" +
                "from StockTable " +
                "on price > 5 " +
                "select symbol, sum(volume) as totalVolume " +
                "group by symbol " +
                "having symbol == 'WSO2' ");
        EventPrinter.print(events);
        AssertJUnit.assertEquals(1, events.length);
        // Two WSO2 rows of 100L each.
        AssertJUnit.assertEquals(200L, events[0].getData(1));

        events = siddhiAppRuntime.query("" +
                "from StockTable " +
                "on price > 5 " +
                "select symbol, sum(volume) as totalVolume " +
                "group by symbol ");
        EventPrinter.print(events);
        AssertJUnit.assertEquals(2, events.length);

        events = siddhiAppRuntime.query("" +
                "from StockTable " +
                "on price > 5 " +
                "select symbol, sum(volume) as totalVolume " +
                "group by symbol,price ");
        EventPrinter.print(events);
        AssertJUnit.assertEquals(3, events.length);
        siddhiAppRuntime.shutdown();
    }

    // Querying an undefined table / attribute must fail with StoreQueryCreationException.
    @Test(expectedExceptions = StoreQueryCreationException.class)
    public void test4() throws InterruptedException {
        log.info("Test4 table with cache");
        SiddhiManager siddhiManager = new SiddhiManager();
        String streams = "" +
                "define stream StockStream (symbol string, price float, volume long); " +
                "@Store(type=\"testStoreDummyForCache\", @Cache(size=\"10\"))\n" +
                "define table StockTable (symbol string, price float, volume long); ";
        String query = "" +
                "@info(name = 'query1') " +
                "from StockStream " +
                "insert into StockTable ;";
        SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams + query);
        try {
            InputHandler stockStream = siddhiAppRuntime.getInputHandler("StockStream");
            siddhiAppRuntime.start();
            stockStream.send(new Object[]{"WSO2", 55.6f, 100L});
            stockStream.send(new Object[]{"IBM", 75.6f, 100L});
            stockStream.send(new Object[]{"WSO2", 57.6f, 100L});
            Thread.sleep(500);

            // "StockTabled" / "symbol1" are intentionally wrong names.
            Event[] events = siddhiAppRuntime.query("" +
                    "from StockTabled " +
                    "on price > 5 " +
                    "select symbol1, sum(volume) as totalVolume " +
                    "group by symbol " +
                    "having totalVolume >150 ");
            EventPrinter.print(events);
            AssertJUnit.assertEquals(1, events.length);
            AssertJUnit.assertEquals(400L, events[0].getData(1));
        } finally {
            siddhiAppRuntime.shutdown();
        }
    }

    // Unknown select attribute ("symbol1") must fail with StoreQueryCreationException.
    @Test(expectedExceptions = StoreQueryCreationException.class)
    public void test5() {
        log.info("Test5 table with cache");
        SiddhiManager siddhiManager = new SiddhiManager();
        String streams = "" +
                "define stream StockStream (symbol string, price float, volume long); " +
                "@Store(type=\"testStoreDummyForCache\", @Cache(size=\"10\"))\n" +
                "define table StockTable (symbol string, price float, volume long); ";
        SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams);
        try {
            siddhiAppRuntime.start();
            Event[] events = siddhiAppRuntime.query("" +
                    "from StockTable " +
                    "on price > 5 " +
                    "select symbol1, sum(volume) as totalVolume " +
                    "group by symbol " +
                    "having totalVolume >150 ");
            EventPrinter.print(events);
            AssertJUnit.assertEquals(1, events.length);
            AssertJUnit.assertEquals(200L, events[0].getData(1));
        } finally {
            siddhiAppRuntime.shutdown();
        }
    }

    // Malformed query (missing 'as' before the alias) must fail at parse time.
    @Test(expectedExceptions = SiddhiParserException.class)
    public void test6() {
        log.info("Test6 table with cache");
        SiddhiManager siddhiManager = new SiddhiManager();
        String streams = "" +
                "define stream StockStream (symbol string, price float, volume long); " +
                "@Store(type=\"testStoreDummyForCache\", @Cache(size=\"10\"))\n" +
                "define table StockTable (symbol string, price float, volume long); ";
        SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams);
        try {
            siddhiAppRuntime.start();
            Event[] events = siddhiAppRuntime.query("" +
                    "from StockTable " +
                    "on price > 5 " +
                    "select symbol1, sum(volume) totalVolume " +
                    "group by symbol " +
                    "having totalVolume >150 ");
            EventPrinter.print(events);
            AssertJUnit.assertEquals(1, events.length);
            AssertJUnit.assertEquals(200L, events[0].getData(1));
        } finally {
            siddhiAppRuntime.shutdown();
        }
    }

    // String-equality filter on the table.
    // NOTE(review): no shutdown() call here — runtime is left running; confirm intended.
    @Test
    public void test7() throws InterruptedException {
        log.info("Test7 table with cache");
        SiddhiManager siddhiManager = new SiddhiManager();
        String streams = "" +
                "define stream StockStream (symbol string, price float, volume long); " +
                "@Store(type=\"testStoreDummyForCache\", @Cache(size=\"10\"))\n" +
                "define table StockTable (symbol string, price float, volume long); ";
        String query = "" +
                "@info(name = 'query1') " +
                "from StockStream " +
                "insert into StockTable ;";
        SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams + query);
        InputHandler stockStream = siddhiAppRuntime.getInputHandler("StockStream");
        siddhiAppRuntime.start();
        stockStream.send(new Object[]{"WSO2", 55.6f, 100L});
        stockStream.send(new Object[]{"IBM", 75.6f, 100L});
        stockStream.send(new Object[]{"WSO2", 57.6f, 100L});
        Thread.sleep(500);

        Event[] events = siddhiAppRuntime.query("" +
                "from StockTable " +
                "on symbol == 'IBM' " +
                "select symbol, volume ");
        EventPrinter.print(events);
        AssertJUnit.assertEquals(1, events.length);
        AssertJUnit.assertEquals("IBM", events[0].getData()[0]);
    }

    // order by + limit on a store query.
    // NOTE(review): no shutdown() call here either — confirm intended.
    @Test
    public void test9() throws InterruptedException {
        log.info("Test9 table with cache");
        SiddhiManager siddhiManager = new SiddhiManager();
        String streams = "" +
                "define stream StockStream (symbol string, price float, volume long); " +
                "@Store(type=\"testStoreDummyForCache\", @Cache(size=\"10\"))\n" +
                "define table StockTable (symbol string, price float, volume long); ";
        String query = "" +
                "@info(name = 'query1') " +
                "from StockStream " +
                "insert into StockTable ;";
        SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams + query);
        InputHandler stockStream = siddhiAppRuntime.getInputHandler("StockStream");
        siddhiAppRuntime.start();
        stockStream.send(new Object[]{"WSO2", 55.6f, 100L});
        stockStream.send(new Object[]{"IBM", 75.6f, 100L});
        stockStream.send(new Object[]{"WSO2", 57.6f, 100L});
        Thread.sleep(500);

        Event[] events = siddhiAppRuntime.query("" +
                "from StockTable " +
                "on volume > 10 " +
                "select symbol, price, volume " +
                "order by price " +
                "limit 2 ");
        EventPrinter.print(events);
        AssertJUnit.assertEquals(2, events.length);
        // Ascending order: the two cheapest rows come back.
        AssertJUnit.assertEquals(55.6F, events[0].getData()[1]);
        AssertJUnit.assertEquals(57.6f, events[1].getData()[1]);
    }

    // Running the same aggregating store query twice must give the same result
    // (i.e. the cache does not accumulate aggregation state between queries).
    @Test
    public void test10() throws InterruptedException {
        log.info("Test10 table with cache");
        SiddhiManager siddhiManager = new SiddhiManager();
        String streams = "" +
                "define stream StockStream (symbol string, price float, volume long); " +
                "@Store(type=\"testStoreDummyForCache\", @Cache(size=\"10\"))\n" +
                "define table StockTable (symbol string, price float, volume long); ";
        String query = "" +
                "@info(name = 'query1') " +
                "from StockStream " +
                "insert into StockTable ;";
        SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams + query);
        InputHandler stockStream = siddhiAppRuntime.getInputHandler("StockStream");
        siddhiAppRuntime.start();
        stockStream.send(new Object[]{"WSO2", 55.6f, 100L});
        stockStream.send(new Object[]{"IBM", 75.6f, 100L});
        stockStream.send(new Object[]{"WSO2", 57.6f, 100L});
        Thread.sleep(500);

        String storeQuery = "" +
                "from StockTable " +
                "on volume > 10 " +
                "select symbol, sum(volume) as totalVolume " +
                "group by symbol " +
                "having symbol == 'WSO2'";
        Event[] events = siddhiAppRuntime.query(storeQuery);
        EventPrinter.print(events);
        AssertJUnit.assertEquals(1, events.length);
        AssertJUnit.assertEquals(200L, events[0].getData()[1]);

        events = siddhiAppRuntime.query(storeQuery);
        EventPrinter.print(events);
        AssertJUnit.assertEquals(1, events.length);
        AssertJUnit.assertEquals(200L, events[0].getData()[1]);
    }

    // Repeated aggregating query with a composite group-by key.
    @Test
    public void test11() throws InterruptedException {
        log.info("Test10 table with cache");
        SiddhiManager siddhiManager = new SiddhiManager();
        String streams = "" +
                "define stream StockStream (symbol string, price float, volume long); " +
                "@Store(type=\"testStoreDummyForCache\", @Cache(size=\"10\"))\n" +
                "define table StockTable (symbol string, price float, volume long); ";
        String query = "" +
                "@info(name = 'query1') " +
                "from StockStream " +
                "insert into StockTable ;";
        SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams + query);
        InputHandler stockStream = siddhiAppRuntime.getInputHandler("StockStream");
        siddhiAppRuntime.start();
        stockStream.send(new Object[]{"WSO2", 55.6f, 100L});
        stockStream.send(new Object[]{"IBM", 75.6f, 100L});
        stockStream.send(new Object[]{"WSO2", 57.6f, 100L});
        Thread.sleep(500);

        String storeQuery = "" +
                "from StockTable " +
                "on price > 56 " +
                "select symbol, price, sum(volume) as totalVolume " +
                "group by symbol, price ";
        Event[] events = siddhiAppRuntime.query(storeQuery);
        EventPrinter.print(events);
        AssertJUnit.assertEquals(2, events.length);
        AssertJUnit.assertEquals(100L, events[0].getData()[2]);
        AssertJUnit.assertEquals(100L, events[1].getData()[2]);

        events = siddhiAppRuntime.query(storeQuery);
        EventPrinter.print(events);
        AssertJUnit.assertEquals(2, events.length);
        AssertJUnit.assertEquals(100L, events[0].getData()[2]);
        AssertJUnit.assertEquals(100L, events[1].getData()[2]);
    }

    // Compile-time output attribute metadata for store queries.
    @Test
    public void test12() {
        log.info("Test12 - Test output attributes and its types for table");
        SiddhiManager siddhiManager = new SiddhiManager();
        String streams = "" +
                "define stream StockStream (symbol string, price float, volume long); " +
                "@Store(type=\"testStoreDummyForCache\", @Cache(size=\"10\"))\n" +
                "define table StockTable (symbol string, price float, volume long); ";
        String storeQuery = "" +
                "from StockTable " +
                "select * ;";
        SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams);
        siddhiAppRuntime.start();

        Attribute[] actualAttributeArray = siddhiAppRuntime.getStoreQueryOutputAttributes(SiddhiCompiler.parseStoreQuery(storeQuery));
        Attribute symbolAttribute = new Attribute("symbol", Attribute.Type.STRING);
        Attribute priceAttribute = new Attribute("price", Attribute.Type.FLOAT);
        Attribute volumeAttribute = new Attribute("volume", Attribute.Type.LONG);
        Attribute[] expectedAttributeArray = new Attribute[]{symbolAttribute, priceAttribute, volumeAttribute};
        AssertJUnit.assertArrayEquals(expectedAttributeArray, actualAttributeArray);

        // sum(volume) as totalVolume is reported as a LONG attribute.
        storeQuery = "" +
                "from StockTable " +
                "select symbol, sum(volume) as totalVolume ;";
        actualAttributeArray = siddhiAppRuntime.getStoreQueryOutputAttributes(SiddhiCompiler.parseStoreQuery(storeQuery));
        Attribute totalVolumeAttribute = new Attribute("totalVolume", Attribute.Type.LONG);
        expectedAttributeArray = new Attribute[]{symbolAttribute, totalVolumeAttribute};
        siddhiAppRuntime.shutdown();
        AssertJUnit.assertArrayEquals(expectedAttributeArray, actualAttributeArray);
    }

    // Store query with LIMIT.
    @Test
    public void test21() throws InterruptedException {
        log.info("Testing store query with limit");
        SiddhiManager siddhiManager = new SiddhiManager();
        String streams = "" +
                "define stream StockStream (id int, symbol string, volume int); " +
                "@Store(type=\"testStoreDummyForCache\", @Cache(size=\"10\"))\n" +
                "define table StockTable3 (id int, symbol string, volume int); ";
        String query = "" +
                "@info(name = 'query1') " +
                "from StockStream " +
                "insert into StockTable3 ;";
        SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams + query);
        InputHandler stockStream = siddhiAppRuntime.getInputHandler("StockStream");
        siddhiAppRuntime.start();
        stockStream.send(new Object[]{1, "WSO2", 100});
        stockStream.send(new Object[]{2, "IBM", 200});
        stockStream.send(new Object[]{3, "GOOGLE", 300});
        Thread.sleep(500);

        Event[] allEvents = siddhiAppRuntime.query("from StockTable3 select * LIMIT 2;");
        Assert.assertEquals(allEvents.length, 2);
        Assert.assertEquals(allEvents[0].getData()[0],
1); Assert.assertEquals(allEvents[1].getData()[0], 2); allEvents = siddhiAppRuntime.query("from StockTable3 select * LIMIT 1 OFFSET 0;"); Assert.assertEquals(allEvents.length, 1); Assert.assertEquals(allEvents[0].getData()[0], 1); allEvents = siddhiAppRuntime.query("from StockTable3 select * LIMIT 1 OFFSET 1;"); Assert.assertEquals(allEvents.length, 1); Assert.assertEquals(allEvents[0].getData()[0], 2); siddhiAppRuntime.shutdown(); } }
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: Docflow/RecipientSignatureDocflow.proto package Diadoc.Api.Proto.Docflow; public final class RecipientSignatureDocflowProtos { private RecipientSignatureDocflowProtos() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } public interface RecipientSignatureDocflowOrBuilder extends // @@protoc_insertion_point(interface_extends:Diadoc.Api.Proto.Docflow.RecipientSignatureDocflow) com.google.protobuf.MessageOrBuilder { /** * <code>optional bool IsFinished = 1;</code> */ boolean hasIsFinished(); /** * <code>optional bool IsFinished = 1;</code> */ boolean getIsFinished(); /** * <code>optional .Diadoc.Api.Proto.Docflow.Signature RecipientSignature = 2;</code> */ boolean hasRecipientSignature(); /** * <code>optional .Diadoc.Api.Proto.Docflow.Signature RecipientSignature = 2;</code> */ Diadoc.Api.Proto.Docflow.AttachmentProtos.Signature getRecipientSignature(); /** * <code>optional .Diadoc.Api.Proto.Docflow.Signature RecipientSignature = 2;</code> */ Diadoc.Api.Proto.Docflow.AttachmentProtos.SignatureOrBuilder getRecipientSignatureOrBuilder(); /** * <code>optional .Diadoc.Api.Proto.Timestamp DeliveryTimestamp = 3;</code> */ boolean hasDeliveryTimestamp(); /** * <code>optional .Diadoc.Api.Proto.Timestamp DeliveryTimestamp = 3;</code> */ Diadoc.Api.Proto.TimestampProtos.Timestamp getDeliveryTimestamp(); /** * <code>optional .Diadoc.Api.Proto.Timestamp DeliveryTimestamp = 3;</code> */ Diadoc.Api.Proto.TimestampProtos.TimestampOrBuilder getDeliveryTimestampOrBuilder(); } /** * Protobuf type {@code Diadoc.Api.Proto.Docflow.RecipientSignatureDocflow} */ public static final class RecipientSignatureDocflow extends com.google.protobuf.GeneratedMessage implements // @@protoc_insertion_point(message_implements:Diadoc.Api.Proto.Docflow.RecipientSignatureDocflow) RecipientSignatureDocflowOrBuilder { // Use RecipientSignatureDocflow.newBuilder() to construct. 
private RecipientSignatureDocflow(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private RecipientSignatureDocflow(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final RecipientSignatureDocflow defaultInstance; public static RecipientSignatureDocflow getDefaultInstance() { return defaultInstance; } public RecipientSignatureDocflow getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private RecipientSignatureDocflow( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { bitField0_ |= 0x00000001; isFinished_ = input.readBool(); break; } case 18: { Diadoc.Api.Proto.Docflow.AttachmentProtos.Signature.Builder subBuilder = null; if (((bitField0_ & 0x00000002) == 0x00000002)) { subBuilder = recipientSignature_.toBuilder(); } recipientSignature_ = input.readMessage(Diadoc.Api.Proto.Docflow.AttachmentProtos.Signature.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(recipientSignature_); recipientSignature_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } case 26: { Diadoc.Api.Proto.TimestampProtos.Timestamp.Builder subBuilder = null; if (((bitField0_ & 0x00000004) == 0x00000004)) { subBuilder = 
deliveryTimestamp_.toBuilder(); } deliveryTimestamp_ = input.readMessage(Diadoc.Api.Proto.TimestampProtos.Timestamp.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(deliveryTimestamp_); deliveryTimestamp_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000004; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return Diadoc.Api.Proto.Docflow.RecipientSignatureDocflowProtos.internal_static_Diadoc_Api_Proto_Docflow_RecipientSignatureDocflow_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return Diadoc.Api.Proto.Docflow.RecipientSignatureDocflowProtos.internal_static_Diadoc_Api_Proto_Docflow_RecipientSignatureDocflow_fieldAccessorTable .ensureFieldAccessorsInitialized( Diadoc.Api.Proto.Docflow.RecipientSignatureDocflowProtos.RecipientSignatureDocflow.class, Diadoc.Api.Proto.Docflow.RecipientSignatureDocflowProtos.RecipientSignatureDocflow.Builder.class); } public static com.google.protobuf.Parser<RecipientSignatureDocflow> PARSER = new com.google.protobuf.AbstractParser<RecipientSignatureDocflow>() { public RecipientSignatureDocflow parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new RecipientSignatureDocflow(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<RecipientSignatureDocflow> getParserForType() { return PARSER; } private int bitField0_; public static final int ISFINISHED_FIELD_NUMBER = 1; private boolean isFinished_; /** * 
<code>optional bool IsFinished = 1;</code> */ public boolean hasIsFinished() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional bool IsFinished = 1;</code> */ public boolean getIsFinished() { return isFinished_; } public static final int RECIPIENTSIGNATURE_FIELD_NUMBER = 2; private Diadoc.Api.Proto.Docflow.AttachmentProtos.Signature recipientSignature_; /** * <code>optional .Diadoc.Api.Proto.Docflow.Signature RecipientSignature = 2;</code> */ public boolean hasRecipientSignature() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .Diadoc.Api.Proto.Docflow.Signature RecipientSignature = 2;</code> */ public Diadoc.Api.Proto.Docflow.AttachmentProtos.Signature getRecipientSignature() { return recipientSignature_; } /** * <code>optional .Diadoc.Api.Proto.Docflow.Signature RecipientSignature = 2;</code> */ public Diadoc.Api.Proto.Docflow.AttachmentProtos.SignatureOrBuilder getRecipientSignatureOrBuilder() { return recipientSignature_; } public static final int DELIVERYTIMESTAMP_FIELD_NUMBER = 3; private Diadoc.Api.Proto.TimestampProtos.Timestamp deliveryTimestamp_; /** * <code>optional .Diadoc.Api.Proto.Timestamp DeliveryTimestamp = 3;</code> */ public boolean hasDeliveryTimestamp() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional .Diadoc.Api.Proto.Timestamp DeliveryTimestamp = 3;</code> */ public Diadoc.Api.Proto.TimestampProtos.Timestamp getDeliveryTimestamp() { return deliveryTimestamp_; } /** * <code>optional .Diadoc.Api.Proto.Timestamp DeliveryTimestamp = 3;</code> */ public Diadoc.Api.Proto.TimestampProtos.TimestampOrBuilder getDeliveryTimestampOrBuilder() { return deliveryTimestamp_; } private void initFields() { isFinished_ = false; recipientSignature_ = Diadoc.Api.Proto.Docflow.AttachmentProtos.Signature.getDefaultInstance(); deliveryTimestamp_ = Diadoc.Api.Proto.TimestampProtos.Timestamp.getDefaultInstance(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() 
{ byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (hasRecipientSignature()) { if (!getRecipientSignature().isInitialized()) { memoizedIsInitialized = 0; return false; } } if (hasDeliveryTimestamp()) { if (!getDeliveryTimestamp().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBool(1, isFinished_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeMessage(2, recipientSignature_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeMessage(3, deliveryTimestamp_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, isFinished_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, recipientSignature_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(3, deliveryTimestamp_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } public static Diadoc.Api.Proto.Docflow.RecipientSignatureDocflowProtos.RecipientSignatureDocflow parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
Diadoc.Api.Proto.Docflow.RecipientSignatureDocflowProtos.RecipientSignatureDocflow parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static Diadoc.Api.Proto.Docflow.RecipientSignatureDocflowProtos.RecipientSignatureDocflow parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static Diadoc.Api.Proto.Docflow.RecipientSignatureDocflowProtos.RecipientSignatureDocflow parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static Diadoc.Api.Proto.Docflow.RecipientSignatureDocflowProtos.RecipientSignatureDocflow parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static Diadoc.Api.Proto.Docflow.RecipientSignatureDocflowProtos.RecipientSignatureDocflow parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Diadoc.Api.Proto.Docflow.RecipientSignatureDocflowProtos.RecipientSignatureDocflow parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static Diadoc.Api.Proto.Docflow.RecipientSignatureDocflowProtos.RecipientSignatureDocflow parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static Diadoc.Api.Proto.Docflow.RecipientSignatureDocflowProtos.RecipientSignatureDocflow parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return 
PARSER.parseFrom(input); } public static Diadoc.Api.Proto.Docflow.RecipientSignatureDocflowProtos.RecipientSignatureDocflow parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(Diadoc.Api.Proto.Docflow.RecipientSignatureDocflowProtos.RecipientSignatureDocflow prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code Diadoc.Api.Proto.Docflow.RecipientSignatureDocflow} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:Diadoc.Api.Proto.Docflow.RecipientSignatureDocflow) Diadoc.Api.Proto.Docflow.RecipientSignatureDocflowProtos.RecipientSignatureDocflowOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return Diadoc.Api.Proto.Docflow.RecipientSignatureDocflowProtos.internal_static_Diadoc_Api_Proto_Docflow_RecipientSignatureDocflow_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return Diadoc.Api.Proto.Docflow.RecipientSignatureDocflowProtos.internal_static_Diadoc_Api_Proto_Docflow_RecipientSignatureDocflow_fieldAccessorTable .ensureFieldAccessorsInitialized( Diadoc.Api.Proto.Docflow.RecipientSignatureDocflowProtos.RecipientSignatureDocflow.class, Diadoc.Api.Proto.Docflow.RecipientSignatureDocflowProtos.RecipientSignatureDocflow.Builder.class); } // Construct using 
Diadoc.Api.Proto.Docflow.RecipientSignatureDocflowProtos.RecipientSignatureDocflow.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getRecipientSignatureFieldBuilder(); getDeliveryTimestampFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); isFinished_ = false; bitField0_ = (bitField0_ & ~0x00000001); if (recipientSignatureBuilder_ == null) { recipientSignature_ = Diadoc.Api.Proto.Docflow.AttachmentProtos.Signature.getDefaultInstance(); } else { recipientSignatureBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); if (deliveryTimestampBuilder_ == null) { deliveryTimestamp_ = Diadoc.Api.Proto.TimestampProtos.Timestamp.getDefaultInstance(); } else { deliveryTimestampBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000004); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return Diadoc.Api.Proto.Docflow.RecipientSignatureDocflowProtos.internal_static_Diadoc_Api_Proto_Docflow_RecipientSignatureDocflow_descriptor; } public Diadoc.Api.Proto.Docflow.RecipientSignatureDocflowProtos.RecipientSignatureDocflow getDefaultInstanceForType() { return Diadoc.Api.Proto.Docflow.RecipientSignatureDocflowProtos.RecipientSignatureDocflow.getDefaultInstance(); } public Diadoc.Api.Proto.Docflow.RecipientSignatureDocflowProtos.RecipientSignatureDocflow build() { Diadoc.Api.Proto.Docflow.RecipientSignatureDocflowProtos.RecipientSignatureDocflow result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public 
Diadoc.Api.Proto.Docflow.RecipientSignatureDocflowProtos.RecipientSignatureDocflow buildPartial() { Diadoc.Api.Proto.Docflow.RecipientSignatureDocflowProtos.RecipientSignatureDocflow result = new Diadoc.Api.Proto.Docflow.RecipientSignatureDocflowProtos.RecipientSignatureDocflow(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.isFinished_ = isFinished_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } if (recipientSignatureBuilder_ == null) { result.recipientSignature_ = recipientSignature_; } else { result.recipientSignature_ = recipientSignatureBuilder_.build(); } if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } if (deliveryTimestampBuilder_ == null) { result.deliveryTimestamp_ = deliveryTimestamp_; } else { result.deliveryTimestamp_ = deliveryTimestampBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof Diadoc.Api.Proto.Docflow.RecipientSignatureDocflowProtos.RecipientSignatureDocflow) { return mergeFrom((Diadoc.Api.Proto.Docflow.RecipientSignatureDocflowProtos.RecipientSignatureDocflow)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(Diadoc.Api.Proto.Docflow.RecipientSignatureDocflowProtos.RecipientSignatureDocflow other) { if (other == Diadoc.Api.Proto.Docflow.RecipientSignatureDocflowProtos.RecipientSignatureDocflow.getDefaultInstance()) return this; if (other.hasIsFinished()) { setIsFinished(other.getIsFinished()); } if (other.hasRecipientSignature()) { mergeRecipientSignature(other.getRecipientSignature()); } if (other.hasDeliveryTimestamp()) { mergeDeliveryTimestamp(other.getDeliveryTimestamp()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (hasRecipientSignature()) { if 
(!getRecipientSignature().isInitialized()) { return false; } } if (hasDeliveryTimestamp()) { if (!getDeliveryTimestamp().isInitialized()) { return false; } } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Diadoc.Api.Proto.Docflow.RecipientSignatureDocflowProtos.RecipientSignatureDocflow parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (Diadoc.Api.Proto.Docflow.RecipientSignatureDocflowProtos.RecipientSignatureDocflow) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private boolean isFinished_ ; /** * <code>optional bool IsFinished = 1;</code> */ public boolean hasIsFinished() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>optional bool IsFinished = 1;</code> */ public boolean getIsFinished() { return isFinished_; } /** * <code>optional bool IsFinished = 1;</code> */ public Builder setIsFinished(boolean value) { bitField0_ |= 0x00000001; isFinished_ = value; onChanged(); return this; } /** * <code>optional bool IsFinished = 1;</code> */ public Builder clearIsFinished() { bitField0_ = (bitField0_ & ~0x00000001); isFinished_ = false; onChanged(); return this; } private Diadoc.Api.Proto.Docflow.AttachmentProtos.Signature recipientSignature_ = Diadoc.Api.Proto.Docflow.AttachmentProtos.Signature.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< Diadoc.Api.Proto.Docflow.AttachmentProtos.Signature, Diadoc.Api.Proto.Docflow.AttachmentProtos.Signature.Builder, Diadoc.Api.Proto.Docflow.AttachmentProtos.SignatureOrBuilder> recipientSignatureBuilder_; /** * <code>optional .Diadoc.Api.Proto.Docflow.Signature RecipientSignature = 2;</code> */ public boolean hasRecipientSignature() { return 
((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>optional .Diadoc.Api.Proto.Docflow.Signature RecipientSignature = 2;</code> */ public Diadoc.Api.Proto.Docflow.AttachmentProtos.Signature getRecipientSignature() { if (recipientSignatureBuilder_ == null) { return recipientSignature_; } else { return recipientSignatureBuilder_.getMessage(); } } /** * <code>optional .Diadoc.Api.Proto.Docflow.Signature RecipientSignature = 2;</code> */ public Builder setRecipientSignature(Diadoc.Api.Proto.Docflow.AttachmentProtos.Signature value) { if (recipientSignatureBuilder_ == null) { if (value == null) { throw new NullPointerException(); } recipientSignature_ = value; onChanged(); } else { recipientSignatureBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .Diadoc.Api.Proto.Docflow.Signature RecipientSignature = 2;</code> */ public Builder setRecipientSignature( Diadoc.Api.Proto.Docflow.AttachmentProtos.Signature.Builder builderForValue) { if (recipientSignatureBuilder_ == null) { recipientSignature_ = builderForValue.build(); onChanged(); } else { recipientSignatureBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .Diadoc.Api.Proto.Docflow.Signature RecipientSignature = 2;</code> */ public Builder mergeRecipientSignature(Diadoc.Api.Proto.Docflow.AttachmentProtos.Signature value) { if (recipientSignatureBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && recipientSignature_ != Diadoc.Api.Proto.Docflow.AttachmentProtos.Signature.getDefaultInstance()) { recipientSignature_ = Diadoc.Api.Proto.Docflow.AttachmentProtos.Signature.newBuilder(recipientSignature_).mergeFrom(value).buildPartial(); } else { recipientSignature_ = value; } onChanged(); } else { recipientSignatureBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * <code>optional .Diadoc.Api.Proto.Docflow.Signature RecipientSignature = 2;</code> */ public Builder 
clearRecipientSignature() { if (recipientSignatureBuilder_ == null) { recipientSignature_ = Diadoc.Api.Proto.Docflow.AttachmentProtos.Signature.getDefaultInstance(); onChanged(); } else { recipientSignatureBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * <code>optional .Diadoc.Api.Proto.Docflow.Signature RecipientSignature = 2;</code> */ public Diadoc.Api.Proto.Docflow.AttachmentProtos.Signature.Builder getRecipientSignatureBuilder() { bitField0_ |= 0x00000002; onChanged(); return getRecipientSignatureFieldBuilder().getBuilder(); } /** * <code>optional .Diadoc.Api.Proto.Docflow.Signature RecipientSignature = 2;</code> */ public Diadoc.Api.Proto.Docflow.AttachmentProtos.SignatureOrBuilder getRecipientSignatureOrBuilder() { if (recipientSignatureBuilder_ != null) { return recipientSignatureBuilder_.getMessageOrBuilder(); } else { return recipientSignature_; } } /** * <code>optional .Diadoc.Api.Proto.Docflow.Signature RecipientSignature = 2;</code> */ private com.google.protobuf.SingleFieldBuilder< Diadoc.Api.Proto.Docflow.AttachmentProtos.Signature, Diadoc.Api.Proto.Docflow.AttachmentProtos.Signature.Builder, Diadoc.Api.Proto.Docflow.AttachmentProtos.SignatureOrBuilder> getRecipientSignatureFieldBuilder() { if (recipientSignatureBuilder_ == null) { recipientSignatureBuilder_ = new com.google.protobuf.SingleFieldBuilder< Diadoc.Api.Proto.Docflow.AttachmentProtos.Signature, Diadoc.Api.Proto.Docflow.AttachmentProtos.Signature.Builder, Diadoc.Api.Proto.Docflow.AttachmentProtos.SignatureOrBuilder>( getRecipientSignature(), getParentForChildren(), isClean()); recipientSignature_ = null; } return recipientSignatureBuilder_; } private Diadoc.Api.Proto.TimestampProtos.Timestamp deliveryTimestamp_ = Diadoc.Api.Proto.TimestampProtos.Timestamp.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< Diadoc.Api.Proto.TimestampProtos.Timestamp, Diadoc.Api.Proto.TimestampProtos.Timestamp.Builder, 
Diadoc.Api.Proto.TimestampProtos.TimestampOrBuilder> deliveryTimestampBuilder_; /** * <code>optional .Diadoc.Api.Proto.Timestamp DeliveryTimestamp = 3;</code> */ public boolean hasDeliveryTimestamp() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional .Diadoc.Api.Proto.Timestamp DeliveryTimestamp = 3;</code> */ public Diadoc.Api.Proto.TimestampProtos.Timestamp getDeliveryTimestamp() { if (deliveryTimestampBuilder_ == null) { return deliveryTimestamp_; } else { return deliveryTimestampBuilder_.getMessage(); } } /** * <code>optional .Diadoc.Api.Proto.Timestamp DeliveryTimestamp = 3;</code> */ public Builder setDeliveryTimestamp(Diadoc.Api.Proto.TimestampProtos.Timestamp value) { if (deliveryTimestampBuilder_ == null) { if (value == null) { throw new NullPointerException(); } deliveryTimestamp_ = value; onChanged(); } else { deliveryTimestampBuilder_.setMessage(value); } bitField0_ |= 0x00000004; return this; } /** * <code>optional .Diadoc.Api.Proto.Timestamp DeliveryTimestamp = 3;</code> */ public Builder setDeliveryTimestamp( Diadoc.Api.Proto.TimestampProtos.Timestamp.Builder builderForValue) { if (deliveryTimestampBuilder_ == null) { deliveryTimestamp_ = builderForValue.build(); onChanged(); } else { deliveryTimestampBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; return this; } /** * <code>optional .Diadoc.Api.Proto.Timestamp DeliveryTimestamp = 3;</code> */ public Builder mergeDeliveryTimestamp(Diadoc.Api.Proto.TimestampProtos.Timestamp value) { if (deliveryTimestampBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004) && deliveryTimestamp_ != Diadoc.Api.Proto.TimestampProtos.Timestamp.getDefaultInstance()) { deliveryTimestamp_ = Diadoc.Api.Proto.TimestampProtos.Timestamp.newBuilder(deliveryTimestamp_).mergeFrom(value).buildPartial(); } else { deliveryTimestamp_ = value; } onChanged(); } else { deliveryTimestampBuilder_.mergeFrom(value); } bitField0_ |= 0x00000004; return this; } /** * <code>optional 
.Diadoc.Api.Proto.Timestamp DeliveryTimestamp = 3;</code> */ public Builder clearDeliveryTimestamp() { if (deliveryTimestampBuilder_ == null) { deliveryTimestamp_ = Diadoc.Api.Proto.TimestampProtos.Timestamp.getDefaultInstance(); onChanged(); } else { deliveryTimestampBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000004); return this; } /** * <code>optional .Diadoc.Api.Proto.Timestamp DeliveryTimestamp = 3;</code> */ public Diadoc.Api.Proto.TimestampProtos.Timestamp.Builder getDeliveryTimestampBuilder() { bitField0_ |= 0x00000004; onChanged(); return getDeliveryTimestampFieldBuilder().getBuilder(); } /** * <code>optional .Diadoc.Api.Proto.Timestamp DeliveryTimestamp = 3;</code> */ public Diadoc.Api.Proto.TimestampProtos.TimestampOrBuilder getDeliveryTimestampOrBuilder() { if (deliveryTimestampBuilder_ != null) { return deliveryTimestampBuilder_.getMessageOrBuilder(); } else { return deliveryTimestamp_; } } /** * <code>optional .Diadoc.Api.Proto.Timestamp DeliveryTimestamp = 3;</code> */ private com.google.protobuf.SingleFieldBuilder< Diadoc.Api.Proto.TimestampProtos.Timestamp, Diadoc.Api.Proto.TimestampProtos.Timestamp.Builder, Diadoc.Api.Proto.TimestampProtos.TimestampOrBuilder> getDeliveryTimestampFieldBuilder() { if (deliveryTimestampBuilder_ == null) { deliveryTimestampBuilder_ = new com.google.protobuf.SingleFieldBuilder< Diadoc.Api.Proto.TimestampProtos.Timestamp, Diadoc.Api.Proto.TimestampProtos.Timestamp.Builder, Diadoc.Api.Proto.TimestampProtos.TimestampOrBuilder>( getDeliveryTimestamp(), getParentForChildren(), isClean()); deliveryTimestamp_ = null; } return deliveryTimestampBuilder_; } // @@protoc_insertion_point(builder_scope:Diadoc.Api.Proto.Docflow.RecipientSignatureDocflow) } static { defaultInstance = new RecipientSignatureDocflow(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:Diadoc.Api.Proto.Docflow.RecipientSignatureDocflow) } private static final com.google.protobuf.Descriptors.Descriptor 
internal_static_Diadoc_Api_Proto_Docflow_RecipientSignatureDocflow_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_Diadoc_Api_Proto_Docflow_RecipientSignatureDocflow_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n\'Docflow/RecipientSignatureDocflow.prot" + "o\022\030Diadoc.Api.Proto.Docflow\032\017Timestamp.p" + "roto\032\030Docflow/Attachment.proto\"\250\001\n\031Recip" + "ientSignatureDocflow\022\022\n\nIsFinished\030\001 \001(\010" + "\022?\n\022RecipientSignature\030\002 \001(\0132#.Diadoc.Ap" + "i.Proto.Docflow.Signature\0226\n\021DeliveryTim" + "estamp\030\003 \001(\0132\033.Diadoc.Api.Proto.Timestam" + "pB!B\037RecipientSignatureDocflowProtos" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { public com.google.protobuf.ExtensionRegistry assignDescriptors( com.google.protobuf.Descriptors.FileDescriptor root) { descriptor = root; return null; } }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { Diadoc.Api.Proto.TimestampProtos.getDescriptor(), Diadoc.Api.Proto.Docflow.AttachmentProtos.getDescriptor(), }, assigner); internal_static_Diadoc_Api_Proto_Docflow_RecipientSignatureDocflow_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_Diadoc_Api_Proto_Docflow_RecipientSignatureDocflow_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_Diadoc_Api_Proto_Docflow_RecipientSignatureDocflow_descriptor, new java.lang.String[] { "IsFinished", "RecipientSignature", "DeliveryTimestamp", }); Diadoc.Api.Proto.TimestampProtos.getDescriptor(); Diadoc.Api.Proto.Docflow.AttachmentProtos.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) }
package com.cyberlightning.realvirtualsensorsimulator;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Observable;
import java.util.Set;

import com.cyberlightning.realvirtualsensorsimulator.interfaces.IMainActivity;
import com.cyberlightning.realvirtualsensorsimulator.interfaces.ISensorListener;
import com.cyberlightning.realvirtualsensorsimulator.staticresources.JsonParser;
import com.cyberlightning.realvirtualsensorsimulator.views.SettingsViewFragment;

import android.content.Context;
import android.content.SharedPreferences;
import android.graphics.Point;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.os.Bundle;
import android.os.Message;
import android.view.Display;
import android.view.WindowManager;

/**
 * Background worker that samples the device's hardware sensors and periodically
 * flushes the collected readings, serialized via {@link JsonParser}, to the
 * server (through {@link Observable} observers) and to the UI (through the
 * activity's {@link Message} target).
 *
 * Threading model (as implemented here):
 * - The constructor starts a dedicated worker thread running {@link #run()}.
 * - The worker parks on wait() while {@code suspendFlag} is set; each cycle it
 *   unregisters sensors, sends any buffered events, re-registers sensors, and
 *   suspends itself again. An {@link IntervalTimer} thread wakes it after
 *   {@code sensorEventInterval} milliseconds.
 * - {@link #onSensorChanged(SensorEvent)} runs on the Android sensor callback
 *   thread and appends to {@code events}, gated only by the volatile
 *   {@code isBusy} flag.
 *
 * NOTE(review): {@code events} is written by the sensor callback thread and
 * read/cleared by the worker thread with no common lock; only the registration
 * windows and {@code isBusy} keep them apart. Confirm this is safe on the
 * intended devices before relying on it.
 */
public class SensorListener extends Observable implements SensorEventListener,ISensorListener,Runnable {

	/** Actuator descriptors advertised alongside sensor data (built once). */
	private ArrayList<ActuatorComponent> actuators = new ArrayList<ActuatorComponent>();
	/** Buffered sensor readings for the current interval; recreated on each (re)registration. */
	private ArrayList<SensorEventObject> events;
	/** Host activity, used for Context access and UI messaging. */
	private IMainActivity application;
	/** All sensors reported by the device's SensorManager. */
	private List<Sensor> deviceSensors;
	/** Last GPS fix, if GPS updates are enabled; may be null. */
	private Location location;
	/** GPS callback registered with LocationManager; null when GPS is off. */
	private LocationListener locationListener;
	/** User-entered contextual location string from settings; may be null. */
	private String contextualLocation;
	/** Flush interval in milliseconds, loaded from shared preferences. */
	private long sensorEventInterval;
	/** When true, the worker thread parks in wait() (guarded by this monitor). */
	private boolean suspendFlag = true;
	/** When true, the worker thread exits its loop (guarded by this monitor). */
	private boolean destroyFlag = false;
	/** Gates event collection; volatile so the sensor callback thread sees pause()/resume(). */
	private volatile boolean isBusy = false;

	/** Default flush interval (ms) when none is stored in preferences. */
	public static final long SENSOR_EVENT_INTERVAL = 4000;

	/**
	 * Creates the listener and immediately starts its worker thread.
	 *
	 * @param _activity host activity providing Context and the UI message target
	 */
	public SensorListener(MainActivity _activity) {
		this.application = _activity;
		Thread thread= new Thread(this);
		thread.start();
	}

	/**
	 * Worker loop: park until woken, then flush buffered events, restart the
	 * interval timer, re-register sensor listeners, and suspend again.
	 * Exits when {@link #destroy()} sets {@code destroyFlag} or the wait is
	 * interrupted.
	 */
	@Override
	public void run() {
		this.initializeActuators(); //this needs to be done only once
		while(true) {
			synchronized(this) {
				// Park until wakeThread()/destroy() notifies; spurious wakeups
				// are handled by re-checking the flags in the loop condition.
				while(suspendFlag && !destroyFlag) {
					try {
						wait();
					} catch (InterruptedException e) {
						e.printStackTrace();
						return; // treat interruption as shutdown
					}
				}
				if (destroyFlag) break;
			}
			// Stop deliveries while the buffer is drained and sent.
			this.unregisterAllSensors();
			if (!events.isEmpty()) {
				sendMessageToServer(JsonParser.createFromSensorEvent(events, location, contextualLocation,actuators));
				for (SensorEventObject o : events) {
					sendMessageToUI( JsonParser.getTimeStamp() + ": " + o.type);
				}
			}
			events.clear();
			// Timer thread will call wakeThread() after sensorEventInterval ms.
			new Thread((Runnable)new IntervalTimer()).start();
			this.registerSensorListeners();
			this.suspendThread();
		}
	}

	/** Requests the worker to park on its next loop iteration. */
	public synchronized void suspendThread() {
		suspendFlag = true;
	}

	/** Clears the suspend flag and wakes the parked worker thread. */
	private synchronized void wakeThread() {
		suspendFlag = false;
		notify();
	}

	/** Signals the worker loop to terminate and wakes it so it can exit. */
	private synchronized void destroy() {
		this.destroyFlag = true;
		notify();
	}

	/**
	 * Loads the enabled-sensor set, flush interval, and contextual location
	 * from shared preferences. Also refreshes {@code sensorEventInterval} and
	 * {@code contextualLocation} as a side effect.
	 *
	 * NOTE(review): the default set stores numeric type ids
	 * (Integer.toString(sensor.getType())) while registerSensorListeners()
	 * matches against JsonParser.resolveSensorTypeById(...) names — verify the
	 * two representations actually agree, otherwise first-run defaults may
	 * match nothing.
	 *
	 * @return the persisted (or default) set of selected sensor identifiers
	 */
	private Set<String> loadSettings() {
		Set<String> defaultValues = new HashSet<String>( this.deviceSensors.size());
		for (Sensor sensor: this.deviceSensors) {
			defaultValues.add(Integer.toString(sensor.getType()));
		}
		SharedPreferences settings = this.application.getContext().getSharedPreferences(SettingsViewFragment.PREFS_NAME, 0);
		Set<String> sensors = settings.getStringSet(SettingsViewFragment.SHARED_SENSORS, defaultValues);
		this.sensorEventInterval = settings.getLong(SettingsViewFragment.SHARED_INTERVAL,SENSOR_EVENT_INTERVAL);
		this.contextualLocation = settings.getString(SettingsViewFragment.SHARED_LOCATION, null);
		return sensors;
	}

	/**
	 * Builds the single ActuatorComponent describing the device display and its
	 * "viewstate" action/configuration/callback maps. Called once from run().
	 *
	 * NOTE(review): the marker list skips "marker5" and contains "marker15"
	 * twice — looks like a typo; confirm against the consumer of this list.
	 */
	private void initializeActuators() {
		WindowManager wm = (WindowManager) application.getContext().getSystemService(Context.WINDOW_SERVICE);
		Display display = wm.getDefaultDisplay();
		Point size = new Point();
		display.getSize(size);

		ActuatorComponent actuator = new ActuatorComponent();
		actuator.attributes.put("dimensions", "[" + size.x+","+size.y+"]");

		String[] values = {"marker1","marker2","marker3","marker4","marker6","marker7","marker8","marker9","marker10","marker11","marker12","marker13","marker14","marker15","marker15","marker16","marker17","marker18","marker19"};
		StringBuilder builder = new StringBuilder();
		builder.append("[");
		for(String s : values) {
			builder.append(s + ",");
		}
		// Drop the trailing comma, then close the bracket.
		builder.replace(builder.length()-1, builder.length(), "");
		builder.append("]");

		HashMap<String,Object> action = new HashMap<String,Object>();
		action.put("parameter", "viewstate");
		action.put("primitive", "array");
		action.put("unit", "string");
		action.put("value", builder.toString());
		action.put("state", null);
		actuator.actions.add(action);

		HashMap<String,Object> param = new HashMap<String,Object>();
		param.put("name", "viewsize");
		param.put("unit", "percent");
		param.put("value","100");
		actuator.configuration.add(param);

		HashMap<String,Object> callback = new HashMap<String,Object>();
		callback.put("target", "viewstate");
		callback.put("return_type", "boolean");
		actuator.callbacks.add(callback);

		this.actuators.add(actuator);
	}

	/**
	 * Registers for GPS updates (3 s / 0 m granularity) if the user enabled GPS
	 * in settings; shows an alert instead when the provider is disabled.
	 */
	private void registerGpsListener() {
		SharedPreferences settings = this.application.getContext().getSharedPreferences(SettingsViewFragment.PREFS_NAME, 0);
		boolean useGPS = settings.getBoolean(SettingsViewFragment.SHARED_GPS, false);
		if (useGPS) {
			LocationManager locationManager = (LocationManager)this.application.getContext().getSystemService(Context.LOCATION_SERVICE);
			if (!locationManager.isProviderEnabled(LocationManager.GPS_PROVIDER)){
				this.application.showNoGpsAlert();
			}else{
				this.locationListener = new GpsListener();
				locationManager.requestLocationUpdates(LocationManager.GPS_PROVIDER, 3000, 0, this.locationListener);
			}
		}
	}

	/** Removes the GPS listener, if one was registered. */
	private void unregisterGpsListener() {
		LocationManager locationManager = (LocationManager)this.application.getContext().getSystemService(Context.LOCATION_SERVICE);
		if (this.locationListener != null)locationManager.removeUpdates(this.locationListener);
	}

	/**
	 * Registers this object as listener for every sensor selected in settings
	 * and resets the event buffer.
	 *
	 * @return the number of selected sensors (0 means nothing was registered)
	 */
	private Integer registerSensorListeners(){
		this.deviceSensors = ((SensorManager) this.application.getContext().getApplicationContext().getSystemService(Context.SENSOR_SERVICE)).getSensorList(Sensor.TYPE_ALL);
		Set<String> selectedSensors = this.loadSettings();
		for (Sensor sensor : this.deviceSensors) {
			if (selectedSensors.contains(JsonParser.resolveSensorTypeById(sensor.getType()))) {
				((SensorManager) this.application.getContext().getApplicationContext().getSystemService(Context.SENSOR_SERVICE)).registerListener(this, sensor, SensorManager.SENSOR_DELAY_NORMAL);
			}
		}
		this.events = new ArrayList<SensorEventObject>(selectedSensors.size());
		return selectedSensors.size();
	}

	/**
	 * Unregisters this listener from all sensors matching the given type.
	 *
	 * @param _type Android sensor type constant (see {@link Sensor})
	 */
	private void unregisterSpecificSensor(int _type) {
		for (Sensor sensor : this.deviceSensors) {
			if (sensor.getType() ==_type) ((SensorManager) this.application.getContext().getApplicationContext().getSystemService(Context.SENSOR_SERVICE)).unregisterListener(this, sensor);
		}
	}

	/** Unregisters this listener from every sensor it is attached to. */
	private void unregisterAllSensors() {
		((SensorManager) this.application.getContext().getApplicationContext().getSystemService(Context.SENSOR_SERVICE)).unregisterListener(this);
	}

	/**
	 * Publishes a serialized payload to registered observers (the server link).
	 *
	 * @param _payload JSON payload built by JsonParser
	 */
	private void sendMessageToServer(String _payload) {
		setChanged();
		notifyObservers(Message.obtain(null, MainActivity.MESSAGE_FROM_SENSOR_LISTENER, _payload));
	}

	/**
	 * Posts a status line to the UI via the activity's message handler.
	 *
	 * @param _payload human-readable text shown in the UI
	 */
	private void sendMessageToUI(String _payload) {
		Message msg = Message.obtain(null, MainActivity.MESSAGE_FROM_SENSOR_LISTENER, _payload);
		msg.setTarget(this.application.getTarget());
		msg.sendToTarget();
	}

	@Override
	public void onAccuracyChanged(Sensor sensor, int accuracy) {
		// TODO Auto-generated method stub
	}

	/**
	 * Sensor callback (runs on the sensor thread). Buffers at most one event
	 * per sensor type per interval; ignored entirely while paused.
	 */
	@Override
	public void onSensorChanged(SensorEvent _event) {
		if(!this.isBusy) {
			String type = JsonParser.resolveSensorTypeById(_event.sensor.getType());
			boolean contains = false;
			// Linear de-duplication: keep only the first reading of each type.
			for (int i = 0 ; i < this.events.size(); i++) {
				if (this.events.get(i).type.contentEquals(type)){
					contains = true;
				}
			}
			if (!contains) this.events.add(new SensorEventObject(_event,type));
		}
	}

	/** Suspends collection: parks the worker and detaches sensor/GPS listeners. */
	@Override
	public void pause() {
		this.isBusy = true;
		this.suspendThread();
		this.unregisterAllSensors();
		this.unregisterGpsListener();
	}

	/**
	 * Resumes collection; only wakes the worker when at least one sensor is
	 * selected.
	 *
	 * @return number of selected sensors
	 */
	@Override
	public Integer resume() {
		this.isBusy = false;
		int numOfSensors = this.registerSensorListeners();
		if ( numOfSensors > 0) {
			this.wakeThread();
			this.registerGpsListener();
		}
		return numOfSensors;
	}

	/** Permanently stops the worker thread and detaches all listeners. */
	@Override
	public void end() {
		this.isBusy = true;
		this.unregisterAllSensors();
		this.unregisterGpsListener();
		this.destroy();
	}

	/**
	 * Disables a single sensor by type.
	 * NOTE(review): only unregisters — there is no code path here to re-enable
	 * a sensor despite the "toggle" name; re-enabling happens via resume().
	 */
	@Override
	public void toggleSensor(int _type) {
		this.unregisterSpecificSensor(_type);
	}

	@Override
	public void changeEventInterval(int _duration) {
		//TODO Auto-generated text block
	}

	/**
	 * Records GPS fixes into the enclosing listener's {@code location} field.
	 *
	 * @author Cyberlightning Ltd. <tomi.sarni@cyberlightning.com>
	 */
	private class GpsListener implements LocationListener {

		@Override
		public void onProviderDisabled(String provider) {
			//TODO Auto-generated text block
		}

		@Override
		public void onProviderEnabled(String provider) {
			//TODO Auto-generated text block
		}

		@Override
		public void onLocationChanged(Location _location) {
			location = _location;
		}

		@Override
		public void onStatusChanged(String provider, int status, Bundle extras) {
			//TODO Auto-generated text block
		}
	}

	/**
	 * One-shot timer: sleeps for the configured interval, then wakes the worker
	 * unless collection has been paused in the meantime.
	 */
	private class IntervalTimer implements Runnable {

		@Override
		public void run() {
			try {
				Thread.sleep(sensorEventInterval);
			} catch (InterruptedException e) {
				// TODO Auto-generated catch block
				e.printStackTrace();
			}
			if (!isBusy)wakeThread();
			return;
		}
	}

	/** Pairs a raw SensorEvent with its resolved type name. */
	public class SensorEventObject {
		public SensorEvent event;
		public String type;

		public SensorEventObject(SensorEvent _event, String _type) {
			this.event = _event;
			this.type = _type;
		}
	}

	/** Mutable bag describing one actuator: attributes, config, actions, callbacks. */
	public class ActuatorComponent {
		public HashMap<String, Object> attributes = new HashMap<String,Object>();
		public ArrayList<HashMap<String, Object>> configuration = new ArrayList<HashMap<String, Object>>();
		public ArrayList<HashMap<String, Object>> actions = new ArrayList<HashMap<String, Object>>();
		public ArrayList<HashMap<String, Object>> callbacks = new ArrayList<HashMap<String, Object>>();
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.river.outrigger; import java.io.IOException; import java.rmi.RemoteException; import java.security.AccessControlContext; import java.security.AccessController; import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; import java.util.logging.Level; import java.util.logging.Logger; import net.jini.core.event.UnknownEventException; import net.jini.config.Configuration; import net.jini.config.ConfigurationException; import net.jini.security.ProxyPreparer; import net.jini.space.JavaSpace; import org.apache.river.constants.ThrowableConstants; import org.apache.river.config.Config; import org.apache.river.logging.Levels; import org.apache.river.thread.wakeup.RetryTask; import org.apache.river.thread.wakeup.WakeupManager; import java.util.concurrent.ExecutorService; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import org.apache.river.thread.NamedThreadFactory; /** * The notifier thread. This thread is responsible for notifying * objects for which interest has been registered. It operates in * transient space as much as possible. 
Pending notifications will be * lost when the server goes down, but registrations of interest * survive across server crashes for persistent servers. * * @author Sun Microsystems, Inc. * * @see JavaSpace#notify * @see OutriggerServerImpl#notify */ // @see NotifyChit class Notifier implements org.apache.river.constants.TimeConstants { /** * The object to use for the <code>source</code> when creating * events. */ private final JavaSpace source; /** Proxy preparer to use on recovered listeners */ private final ProxyPreparer recoveredListenerPreparer; /** wakeup manager for <code>NotifyTask</code> */ private final WakeupManager wakeupMgr = new WakeupManager(new WakeupManager.ThreadDesc(null, true)); /** pending notifications tasks */ private final ExecutorService pending; private final static int MAX_ATTEMPTS = 10; // max times to retry /** Logger for logging event related information */ private static final Logger logger = Logger.getLogger(OutriggerServerImpl.eventLoggerName); /** * Create a notifier connected to the given <code>space</code>. * @param source the value to use for the <code>source</code> in * remote event objects. * @param recoveredListenerPreparer <code>ProxyPreparer</code> to * apply to recovered listeners. * @param config a source of configuration data.a * @throws ConfigurationException if there is a problem * with the passed configuration. * @throws NullPointerException if <code>source</code> or * <code>config</code> arguments are <code>null</code>. 
*/ Notifier(JavaSpace source, ProxyPreparer recoveredListenerPreparer, Configuration config) throws ConfigurationException { if (source == null) throw new NullPointerException("source must be non-null"); this.source = source; this.recoveredListenerPreparer = recoveredListenerPreparer; pending = Config.getNonNullEntry(config, OutriggerServerImpl.COMPONENT_NAME, "notificationsExecutorService", ExecutorService.class, new ThreadPoolExecutor( 10, 10, /* Ignored */ 15, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>(), /* Unbounded queue */ new NamedThreadFactory("OutriggerServerImpl Notifier", false) ) ); } /** * Terminate the notifier, shutting down any threads * it has running. This method can assume that * the constructor completed. */ void terminate() { pending.shutdown(); wakeupMgr.stop(); wakeupMgr.cancelAll(); } /** * Queue up an event for delivery. * @param sender An object that on request will * attempt to deliver its event * to the associated listener. * @throws NullPointerException if <code>sender</code> is * <code>null</code> */ void enqueueDelivery(EventSender sender, AccessControlContext context) { pending.execute(new NotifyTask(sender, context)); } /* * Static stuff for Pending (can't put it in the class, unfortunately). */ // 1 day =hrs mins secs milliseconds private static final long MAX_TIME = 1 * DAYS; private static final long delays[] = { 1 * SECONDS, 5 * SECONDS, 10 * SECONDS, 60 * SECONDS, 60 * SECONDS }; static { /* * Make the delays the amount of time since the start -- it * is easier to declare the intervals, but the elapsed time is * more <i>useful</i>. */ for (int i = 1; i < delays.length; i++) delays[i] += delays[i - 1]; } /** * A task that represent a notification of matching a particular * template under a given transaction. */ private class NotifyTask extends RetryTask { /** Who and what to send a event to. 
*/ private final EventSender sender; private final AccessControlContext context; /** * Create an object to represent this list of chits needing * notification. * @param sender An object that on request will * attempt to deliver its event * to the associated listener. * @throws NullPointerException if <code>sender</code> is * <code>null</code> */ NotifyTask(EventSender sender, AccessControlContext context) { super(Notifier.this.pending, Notifier.this.wakeupMgr); if (sender == null) throw new NullPointerException("sender must be non-null"); this.sender = sender; this.context = context; } /** * Try to notify the target. Return <code>true</code> if the * notification was successful. * <p> * We know that we are the only one dealing with the given chit * because <code>runAfter</code> makes sure of it. */ @Override public boolean tryOnce() { long curTime = System.currentTimeMillis(); if (curTime - startTime() > MAX_TIME) { if (logger.isLoggable(Levels.FAILED)) { logger.log(Levels.FAILED, "giving up on delivering event, keeping registration"); } return true; // just stop here, we are declaring "success" } boolean successful = true; // notification successful? try { try { AccessController.doPrivileged( new PrivilegedExceptionAction(){ @Override public Object run() throws Exception { sender.sendEvent(source, curTime, recoveredListenerPreparer); return null; } }, context ); } catch (PrivilegedActionException ex) { Exception e = ex.getException(); if (e instanceof IOException) throw (IOException) e; if (e instanceof UnknownEventException) throw (UnknownEventException)e; if (e instanceof ClassNotFoundException) throw (ClassNotFoundException)e; if (e instanceof RuntimeException) throw (RuntimeException)e; throw new IOException("Unexpected Exception", ex); } } catch (UnknownEventException e) { // they didn't want to know about this, so stop them getting // future notifications, too. 
logFailure("UnknownEventException", Level.FINER, true, e); sender.cancelRegistration(); // this is still "successful" -- we know to stop sending this } catch (RemoteException e) { final int cat = ThrowableConstants.retryable(e); if (cat == ThrowableConstants.BAD_INVOCATION || cat == ThrowableConstants.BAD_OBJECT) { // Listener probably bad, retry likely to fail. logFailure("definite exception", Level.INFO, true, e); sender.cancelRegistration(); } else if (cat == ThrowableConstants.INDEFINITE) { // try, try, again logFailure("indefinite exception", Levels.FAILED, false, e); successful = false; } else if (cat == ThrowableConstants.UNCATEGORIZED) { // Same as above but log differently. logFailure("uncategorized exception", Level.INFO, false, e); successful = false; } else { logger.log(Level.WARNING, "ThrowableConstants.retryable " + "returned out of range value, " + cat, new AssertionError(e)); successful = false; } } catch (IOException e) { // corrupted listener? unlikely to get better, cancel logFailure("IOException", Level.INFO, true, e); sender.cancelRegistration(); } catch (ClassNotFoundException e) { // probably a codebase problem, retry logFailure("ClassNotFoundException", Levels.FAILED, false, e); successful = false; } catch (RuntimeException e) { /* bad listener, or preparer, either way unlikely to * get better */ logFailure("RuntimeException", Level.INFO, true, e); sender.cancelRegistration(); } if (!successful && attempt() > MAX_ATTEMPTS) { if (logger.isLoggable(Levels.FAILED)) { logger.log(Levels.FAILED, "giving up on delivering event, keeping registration"); } return true; // as successful as we're going to be } return successful; } /** Log a failed delivery attempt */ private void logFailure(String exceptionDescription, Level level, boolean terminal, Throwable t) { if (logger.isLoggable(level)) { logger.log(level, "Encountered " + exceptionDescription + "while preparing to send/sending event, " + (terminal?"dropping":"keeping") + " registration", t); } } 
} }
/*
Copyright (c) 2010, Siemens Corporate Research a Division of Siemens Corporation 
All rights reserved.

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
*/
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vhudson-jaxb-ri-2.1-833 
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> 
// Any modifications to this file will be lost upon recompilation of the source schema. 
// Generated on: 2009.08.17 at 06:25:05 AM BST 
//
// NOTE(review): JAXB-generated binding class — change the source schema and
// regenerate rather than hand-editing the logic here.

package gme.cacore_cacore._3_2.edu_northwestern_radiology;

import java.math.BigInteger;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;


/**
 * <p>Java class for ImagingObservation complex type.
 * 
 * <p>The following schema fragment specifies the expected content contained within this class.
 * 
 * <pre>
 * &lt;complexType name="ImagingObservation">
 *   &lt;complexContent>
 *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *       &lt;sequence>
 *         &lt;element name="imagingObservationCharacteristicCollection" minOccurs="0">
 *           &lt;complexType>
 *             &lt;complexContent>
 *               &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *                 &lt;sequence>
 *                   &lt;element name="ImagingObservationCharacteristic" type="{gme://caCORE.caCORE/3.2/edu.northwestern.radiology.AIM}ImagingObservationCharacteristic" maxOccurs="unbounded" minOccurs="0"/>
 *                 &lt;/sequence>
 *               &lt;/restriction>
 *             &lt;/complexContent>
 *           &lt;/complexType>
 *         &lt;/element>
 *         &lt;element name="segmentation" minOccurs="0">
 *           &lt;complexType>
 *             &lt;complexContent>
 *               &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *                 &lt;sequence>
 *                   &lt;element name="Segmentation" type="{gme://caCORE.caCORE/3.2/edu.northwestern.radiology.AIM}Segmentation" minOccurs="0"/>
 *                 &lt;/sequence>
 *               &lt;/restriction>
 *             &lt;/complexContent>
 *           &lt;/complexType>
 *         &lt;/element>
 *       &lt;/sequence>
 *       &lt;attribute name="id" use="required" type="{http://www.w3.org/2001/XMLSchema}integer" />
 *       &lt;attribute name="codeValue" use="required" type="{http://www.w3.org/2001/XMLSchema}string" />
 *       &lt;attribute name="codeMeaning" use="required" type="{http://www.w3.org/2001/XMLSchema}string" />
 *       &lt;attribute name="codingSchemeDesignator" use="required" type="{http://www.w3.org/2001/XMLSchema}string" />
 *       &lt;attribute name="codingSchemeVersion" type="{http://www.w3.org/2001/XMLSchema}string" />
 *       &lt;attribute name="comment" type="{http://www.w3.org/2001/XMLSchema}string" />
 *       &lt;attribute name="confidence" type="{http://www.w3.org/2001/XMLSchema}string" />
 *     &lt;/restriction>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 * 
 * 
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "ImagingObservation", propOrder = {
    "imagingObservationCharacteristicCollection",
    "segmentation"
})
public class ImagingObservation {

    // Optional nested element wrappers (may be null when absent in the XML).
    protected ImagingObservation.ImagingObservationCharacteristicCollection imagingObservationCharacteristicCollection;
    protected ImagingObservation.Segmentation segmentation;
    // Required XML attributes.
    @XmlAttribute(required = true)
    protected BigInteger id;
    @XmlAttribute(required = true)
    protected String codeValue;
    @XmlAttribute(required = true)
    protected String codeMeaning;
    @XmlAttribute(required = true)
    protected String codingSchemeDesignator;
    // Optional XML attributes (null when not present).
    @XmlAttribute
    protected String codingSchemeVersion;
    @XmlAttribute
    protected String comment;
    @XmlAttribute
    protected String confidence;

    /**
     * Gets the value of the imagingObservationCharacteristicCollection property.
     * 
     * @return
     *     possible object is
     *     {@link ImagingObservation.ImagingObservationCharacteristicCollection }
     *     
     */
    public ImagingObservation.ImagingObservationCharacteristicCollection getImagingObservationCharacteristicCollection() {
        return imagingObservationCharacteristicCollection;
    }

    /**
     * Sets the value of the imagingObservationCharacteristicCollection property.
     * 
     * @param value
     *     allowed object is
     *     {@link ImagingObservation.ImagingObservationCharacteristicCollection }
     *     
     */
    public void setImagingObservationCharacteristicCollection(ImagingObservation.ImagingObservationCharacteristicCollection value) {
        this.imagingObservationCharacteristicCollection = value;
    }

    /**
     * Gets the value of the segmentation property.
     * 
     * @return
     *     possible object is
     *     {@link ImagingObservation.Segmentation }
     *     
     */
    public ImagingObservation.Segmentation getSegmentation() {
        return segmentation;
    }

    /**
     * Sets the value of the segmentation property.
     * 
     * @param value
     *     allowed object is
     *     {@link ImagingObservation.Segmentation }
     *     
     */
    public void setSegmentation(ImagingObservation.Segmentation value) {
        this.segmentation = value;
    }

    /**
     * Gets the value of the id property.
     * 
     * @return
     *     possible object is
     *     {@link BigInteger }
     *     
     */
    public BigInteger getId() {
        return id;
    }

    /**
     * Sets the value of the id property.
     * 
     * @param value
     *     allowed object is
     *     {@link BigInteger }
     *     
     */
    public void setId(BigInteger value) {
        this.id = value;
    }

    /**
     * Gets the value of the codeValue property.
     * 
     * @return
     *     possible object is
     *     {@link String }
     *     
     */
    public String getCodeValue() {
        return codeValue;
    }

    /**
     * Sets the value of the codeValue property.
     * 
     * @param value
     *     allowed object is
     *     {@link String }
     *     
     */
    public void setCodeValue(String value) {
        this.codeValue = value;
    }

    /**
     * Gets the value of the codeMeaning property.
     * 
     * @return
     *     possible object is
     *     {@link String }
     *     
     */
    public String getCodeMeaning() {
        return codeMeaning;
    }

    /**
     * Sets the value of the codeMeaning property.
     * 
     * @param value
     *     allowed object is
     *     {@link String }
     *     
     */
    public void setCodeMeaning(String value) {
        this.codeMeaning = value;
    }

    /**
     * Gets the value of the codingSchemeDesignator property.
     * 
     * @return
     *     possible object is
     *     {@link String }
     *     
     */
    public String getCodingSchemeDesignator() {
        return codingSchemeDesignator;
    }

    /**
     * Sets the value of the codingSchemeDesignator property.
     * 
     * @param value
     *     allowed object is
     *     {@link String }
     *     
     */
    public void setCodingSchemeDesignator(String value) {
        this.codingSchemeDesignator = value;
    }

    /**
     * Gets the value of the codingSchemeVersion property.
     * 
     * @return
     *     possible object is
     *     {@link String }
     *     
     */
    public String getCodingSchemeVersion() {
        return codingSchemeVersion;
    }

    /**
     * Sets the value of the codingSchemeVersion property.
     * 
     * @param value
     *     allowed object is
     *     {@link String }
     *     
     */
    public void setCodingSchemeVersion(String value) {
        this.codingSchemeVersion = value;
    }

    /**
     * Gets the value of the comment property.
     * 
     * @return
     *     possible object is
     *     {@link String }
     *     
     */
    public String getComment() {
        return comment;
    }

    /**
     * Sets the value of the comment property.
     * 
     * @param value
     *     allowed object is
     *     {@link String }
     *     
     */
    public void setComment(String value) {
        this.comment = value;
    }

    /**
     * Gets the value of the confidence property.
     * 
     * @return
     *     possible object is
     *     {@link String }
     *     
     */
    public String getConfidence() {
        return confidence;
    }

    /**
     * Sets the value of the confidence property.
     * 
     * @param value
     *     allowed object is
     *     {@link String }
     *     
     */
    public void setConfidence(String value) {
        this.confidence = value;
    }


    /**
     * <p>Java class for anonymous complex type.
     * 
     * <p>The following schema fragment specifies the expected content contained within this class.
     * 
     * <pre>
     * &lt;complexType>
     *   &lt;complexContent>
     *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
     *       &lt;sequence>
     *         &lt;element name="ImagingObservationCharacteristic" type="{gme://caCORE.caCORE/3.2/edu.northwestern.radiology.AIM}ImagingObservationCharacteristic" maxOccurs="unbounded" minOccurs="0"/>
     *       &lt;/sequence>
     *     &lt;/restriction>
     *   &lt;/complexContent>
     * &lt;/complexType>
     * </pre>
     * 
     * 
     */
    @XmlAccessorType(XmlAccessType.FIELD)
    @XmlType(name = "", propOrder = {
        "imagingObservationCharacteristic"
    })
    public static class ImagingObservationCharacteristicCollection {

        @XmlElement(name = "ImagingObservationCharacteristic")
        protected List<ImagingObservationCharacteristic> imagingObservationCharacteristic;

        /**
         * Gets the value of the imagingObservationCharacteristic property.
         * 
         * <p>
         * This accessor method returns a reference to the live list,
         * not a snapshot. Therefore any modification you make to the
         * returned list will be present inside the JAXB object.
         * This is why there is not a <CODE>set</CODE> method for the imagingObservationCharacteristic property.
         * 
         * <p>
         * For example, to add a new item, do as follows:
         * <pre>
         *    getImagingObservationCharacteristic().add(newItem);
         * </pre>
         * 
         * 
         * <p>
         * Objects of the following type(s) are allowed in the list
         * {@link ImagingObservationCharacteristic }
         * 
         * 
         */
        public List<ImagingObservationCharacteristic> getImagingObservationCharacteristic() {
            // Lazy initialization: standard JAXB pattern so the list is never null.
            if (imagingObservationCharacteristic == null) {
                imagingObservationCharacteristic = new ArrayList<ImagingObservationCharacteristic>();
            }
            return this.imagingObservationCharacteristic;
        }

    }


    /**
     * <p>Java class for anonymous complex type.
     * 
     * <p>The following schema fragment specifies the expected content contained within this class.
     * 
     * <pre>
     * &lt;complexType>
     *   &lt;complexContent>
     *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
     *       &lt;sequence>
     *         &lt;element name="Segmentation" type="{gme://caCORE.caCORE/3.2/edu.northwestern.radiology.AIM}Segmentation" minOccurs="0"/>
     *       &lt;/sequence>
     *     &lt;/restriction>
     *   &lt;/complexContent>
     * &lt;/complexType>
     * </pre>
     * 
     * 
     */
    @XmlAccessorType(XmlAccessType.FIELD)
    @XmlType(name = "", propOrder = {
        "segmentation"
    })
    public static class Segmentation {

        @XmlElement(name = "Segmentation")
        protected gme.cacore_cacore._3_2.edu_northwestern_radiology.Segmentation segmentation;

        /**
         * Gets the value of the segmentation property.
         * 
         * @return
         *     possible object is
         *     {@link gme.cacore_cacore._3_2.edu_northwestern_radiology.Segmentation }
         *     
         */
        public gme.cacore_cacore._3_2.edu_northwestern_radiology.Segmentation getSegmentation() {
            return segmentation;
        }

        /**
         * Sets the value of the segmentation property.
         * 
         * @param value
         *     allowed object is
         *     {@link gme.cacore_cacore._3_2.edu_northwestern_radiology.Segmentation }
         *     
         */
        public void setSegmentation(gme.cacore_cacore._3_2.edu_northwestern_radiology.Segmentation value) {
            this.segmentation = value;
        }

    }

}
/* * Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. 
 */

package shapegen;

import java.util.ArrayList;
import java.util.List;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import static shapegen.ClassCase.Kind.*;

/**
 * A hierarchy of test classes/interfaces rooted at a single {@link ClassCase}.
 * Used by the shapegen test generator to emit Java source for a case and to
 * produce a compact textual description of its shape.
 *
 * NOTE(review): the semantics of ClassCase (init, collectClasses, kinds) are
 * defined elsewhere in the project — descriptions below reflect only how this
 * class uses them.
 *
 * @author Robert Field
 */
public class Hierarchy {

    /** The top of the hierarchy; identity of this Hierarchy is root.getID(). */
    public final ClassCase root;

    /** Every ClassCase reachable from root (collected transitively). */
    public final Set<ClassCase> all;

    /**
     * Builds the hierarchy: initializes the root (fresh name-index map) and
     * collects the transitive closure of classes into {@link #all}.
     *
     * @param root the top-most class case of this hierarchy
     */
    public Hierarchy(ClassCase root) {
        this.root = root;
        root.init(new HashMap<String,Integer>());
        Set<ClassCase> allClasses = new HashSet<>();
        root.collectClasses(allClasses);
        this.all = allClasses;
    }

    /**
     * Returns true if any class case in the hierarchy is an interface with a
     * default method (kind IDEFAULT).
     */
    public boolean anyDefaults() {
        for (ClassCase cc : all) {
            if (cc.kind == IDEFAULT) {
                return true;
            }
        }
        return false;
    }

    /**
     * Returns whether the root case is expected to compile/run OK
     * (delegates to ClassCase.get_OK()).
     */
    public boolean get_OK() {
        return root.get_OK();
    }

    /** Returns the generated test name: the root's string form + "Test". */
    public String testName() {
        return root + "Test";
    }

    /**
     * Appends " <prefix> I1, I2, ..." to buf for a non-empty interface list;
     * appends nothing when the list is empty. Used for both "extends" and
     * "implements" clauses.
     *
     * @param buf        destination buffer
     * @param prefix     clause keyword, e.g. "extends" or "implements"
     * @param interfaces interfaces to list (may be empty)
     */
    private static void genInterfaceList(StringBuilder buf, String prefix, List<ClassCase> interfaces) {
        if (!interfaces.isEmpty()) {
            buf.append(" ");
            buf.append(prefix);
            buf.append(" ");
            buf.append(interfaces.get(0));
            for (int i = 1; i < interfaces.size(); ++i) {
                buf.append(", " + interfaces.get(i));
            }
        }
    }

    /**
     * Appends the Java source for one class case (interface or class) to buf.
     * For an IDEFAULT interface the case is also recorded in defaultRef so the
     * caller knows which cases supplied the default implementation of m().
     *
     * @param buf        destination buffer for the generated source
     * @param cc         the case to render
     * @param implClass  currently unused by this method — TODO confirm whether
     *                   callers still need this parameter
     * @param defaultRef out-parameter: collects cases that define a default m()
     * @throws AssertionError if cc.kind is not one handled for its
     *                        interface/class-ness
     */
    public static void genClassDef(StringBuilder buf, ClassCase cc,
                                   String implClass, List<ClassCase> defaultRef) {
        if (cc.isInterface()) {
            buf.append("interface ");
            buf.append(cc.getName() + " ");
            genInterfaceList(buf, "extends", cc.getInterfaces());
            buf.append(" {\n");
            switch (cc.kind) {
                case IDEFAULT:
                    // Interface supplying a default implementation of m().
                    buf.append(" default String m() { return \"\"; }\n");
                    defaultRef.add(cc);
                    break;
                case IPRESENT:
                    // Interface declaring m() abstractly.
                    buf.append(" String m();\n");
                    break;
                case IVAC:
                    // "Vacant" interface: declares nothing.
                    break;
                default:
                    throw new AssertionError("Unexpected kind");
            }
            buf.append("}\n\n");
        } else {
            buf.append((cc.isAbstract()? "abstract " : ""));
            buf.append(" class " + cc.getName());
            if (cc.getSuperclass() != null) {
                buf.append(" extends " + cc.getSuperclass());
            }
            genInterfaceList(buf, "implements", cc.getInterfaces());
            buf.append(" {\n");
            switch (cc.kind) {
                case CCONCRETE:
                    // Concrete class providing m().
                    buf.append(" public String m() { return \"\"; }\n");
                    break;
                case CABSTRACT:
                    // Abstract class declaring m() abstractly.
                    buf.append(" public abstract String m();\n");
                    break;
                case CNONE:
                    // Class neither declares nor defines m().
                    break;
                default:
                    throw new AssertionError("Unexpected kind");
            }
            buf.append("}\n\n");
        }
    }

    /**
     * Two hierarchies are equal iff their roots have the same ID; consistent
     * with {@link #hashCode()} below.
     */
    @Override
    public boolean equals(Object obj) {
        return obj instanceof Hierarchy && root.getID().equals(((Hierarchy)obj).root.getID());
    }

    @Override
    public int hashCode() {
        return root.getID().hashCode();
    }

    @Override
    public String toString() {
        return root.getName();
    }

    // Pools of single-letter names handed out by assignNames(): classes draw
    // from classNames, interfaces from interfaceNames, in order.
    private static String classNames[] = {
        "C", "D", "E", "F", "G", "H", "S", "T", "U", "V"
    };

    private static String interfaceNames[] = {
        "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R"
    };

    // Indices into the int[] counter array used by assignNames().
    private static int CLASS_INDEX = 0;
    private static int INTERFACE_INDEX = 1;
    private static int NUM_INDICIES = 2;

    /**
     * Returns a human-readable description of the hierarchy: one string per
     * case with supertypes (e.g. "Cc(Id,Jd)"), or a single "<name><kind>()"
     * entry when the root has no supertypes.
     */
    public List<String> getDescription() {
        Map<ClassCase,String> nameMap = new HashMap<>();
        assignNames(root, new int[NUM_INDICIES], nameMap);
        ArrayList<String> res = new ArrayList<>();
        if (root.getSupertypes().size() == 0) {
            res.add(nameMap.get(root) + root.kind.getPrefix() + "()");
        } else {
            genCaseDescription(root, res, new HashSet<ClassCase>(), nameMap);
        }
        return res;
    }

    /**
     * Recursively assigns a short display name to cc and all of its
     * supertypes, skipping cases already named. Interfaces and classes draw
     * from separate name pools via the shared indices array.
     *
     * @param cc      case to name (and its supertypes, transitively)
     * @param indices two counters: next class-name slot and next
     *                interface-name slot
     * @param names   accumulating case-to-name map
     */
    private static void assignNames(
            ClassCase cc, int indices[], Map<ClassCase,String> names) {
        String name = names.get(cc);
        if (name == null) {
            if (cc.isInterface()) {
                names.put(cc, interfaceNames[indices[INTERFACE_INDEX]++]);
            } else {
                names.put(cc, classNames[indices[CLASS_INDEX]++]);
            }
            for (int i = 0; i < cc.getSupertypes().size(); ++i) {
                assignNames(cc.getSupertypes().get(i), indices, names);
            }
        }
    }

    /**
     * Appends to res a description line for cc (if it has supertypes) of the
     * form "<name><kind>(<super1><kind1>,<super2><kind2>,...)", recursing into
     * supertypes first so their lines appear before cc's. alreadyDone prevents
     * describing a shared supertype more than once.
     */
    private static void genCaseDescription(
            ClassCase cc, List<String> res, Set<ClassCase> alreadyDone,
            Map<ClassCase,String> nameMap) {
        if (!alreadyDone.contains(cc)) {
            if (cc.getSupertypes().size() > 0) {
                StringBuilder sb = new StringBuilder();
                sb.append(nameMap.get(cc));
                sb.append(cc.kind.getPrefix());
                sb.append("(");
                for (int i = 0; i < cc.getSupertypes().size(); ++i) {
                    ClassCase supertype = cc.getSupertypes().get(i);
                    if (i != 0) {
                        sb.append(",");
                    }
                    // Describe the supertype first so leaf-most cases come
                    // earlier in the output.
                    genCaseDescription(supertype, res, alreadyDone, nameMap);
                    sb.append(nameMap.get(supertype));
                    sb.append(supertype.kind.getPrefix());
                }
                sb.append(")");
                res.add(sb.toString());
            }
        }
        // Mark done even for supertype-less cases so they are never revisited.
        alreadyDone.add(cc);
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client;

import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest;
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest;
import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest;
import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest;
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest;
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest;
import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteRequest;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsRequest;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest;
import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest;
import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest;
import org.elasticsearch.action.admin.cluster.stats.ClusterStatsRequest;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest;
import org.elasticsearch.action.admin.indices.flush.FlushRequest;
import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest;
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.action.admin.indices.open.OpenIndexRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.admin.indices.segments.IndicesSegmentsRequest;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresRequest;
import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeRequest;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchScrollRequest;
import org.elasticsearch.action.suggest.SuggestRequest;
import org.elasticsearch.common.xcontent.XContentType;

/**
 * A handy one stop shop for creating requests (make sure to import static this class).
 *
 * Every method is a thin static factory delegating to the corresponding
 * request constructor; no validation or network I/O happens here.
 */
public class Requests {

    /**
     * The content type used to generate request builders (query / search).
     *
     * NOTE: deliberately a mutable public static so callers can override the
     * default globally — do not make final without auditing usages.
     */
    public static XContentType CONTENT_TYPE = XContentType.SMILE;

    /**
     * The default content type to use to generate source documents when indexing.
     * Mutable public static by design (see {@link #CONTENT_TYPE}).
     */
    public static XContentType INDEX_CONTENT_TYPE = XContentType.JSON;

    /**
     * Creates an empty index request; the index (and type/id) must be set on
     * the returned request before execution.
     */
    public static IndexRequest indexRequest() {
        return new IndexRequest();
    }

    /**
     * Create an index request against a specific index. Note the {@link IndexRequest#type(String)} must be
     * set as well and optionally the {@link IndexRequest#id(String)}.
     *
     * @param index The index name to index the request against
     * @return The index request
     * @see org.elasticsearch.client.Client#index(org.elasticsearch.action.index.IndexRequest)
     */
    public static IndexRequest indexRequest(String index) {
        return new IndexRequest(index);
    }

    /**
     * Creates a delete request against a specific index. Note the {@link DeleteRequest#type(String)} and
     * {@link DeleteRequest#id(String)} must be set.
     *
     * @param index The index name to delete from
     * @return The delete request
     * @see org.elasticsearch.client.Client#delete(org.elasticsearch.action.delete.DeleteRequest)
     */
    public static DeleteRequest deleteRequest(String index) {
        return new DeleteRequest(index);
    }

    /**
     * Creates a new bulk request.
     */
    public static BulkRequest bulkRequest() {
        return new BulkRequest();
    }

    /**
     * Creates a get request to get the JSON source from an index based on a type and id. Note, the
     * {@link GetRequest#type(String)} and {@link GetRequest#id(String)} must be set.
     *
     * @param index The index to get the JSON source from
     * @return The get request
     * @see org.elasticsearch.client.Client#get(org.elasticsearch.action.get.GetRequest)
     */
    public static GetRequest getRequest(String index) {
        return new GetRequest(index);
    }

    /**
     * Creates a suggest request for getting suggestions from provided <code>indices</code>.
     * The suggest query has to be set using the JSON source using {@link org.elasticsearch.action.suggest.SuggestRequest#suggest(org.elasticsearch.common.bytes.BytesReference)}.
     *
     * @param indices The indices to suggest from. Use <tt>null</tt> or <tt>_all</tt> to execute against all indices
     * @see org.elasticsearch.client.Client#suggest(org.elasticsearch.action.suggest.SuggestRequest)
     */
    public static SuggestRequest suggestRequest(String... indices) {
        return new SuggestRequest(indices);
    }

    /**
     * Creates a search request against one or more indices. Note, the search source must be set either using the
     * actual JSON search source, or the {@link org.elasticsearch.search.builder.SearchSourceBuilder}.
     *
     * @param indices The indices to search against. Use <tt>null</tt> or <tt>_all</tt> to execute against all indices
     * @return The search request
     * @see org.elasticsearch.client.Client#search(org.elasticsearch.action.search.SearchRequest)
     */
    public static SearchRequest searchRequest(String... indices) {
        return new SearchRequest(indices);
    }

    /**
     * Creates a search scroll request allowing to continue searching a previous search request.
     *
     * @param scrollId The scroll id representing the scrollable search
     * @return The search scroll request
     * @see org.elasticsearch.client.Client#searchScroll(org.elasticsearch.action.search.SearchScrollRequest)
     */
    public static SearchScrollRequest searchScrollRequest(String scrollId) {
        return new SearchScrollRequest(scrollId);
    }

    /**
     * Creates an indices segments request, reporting low-level segment
     * information for the given indices.
     *
     * @param indices The indices to get segment information on. Use <tt>null</tt> or <tt>_all</tt> to execute against all indices
     * @return The indices segments request
     */
    public static IndicesSegmentsRequest indicesSegmentsRequest(String... indices) {
        return new IndicesSegmentsRequest(indices);
    }

    /**
     * Creates an indices shard stores info request.
     *
     * @param indices The indices to get shard store information on
     * @return The indices shard stores request
     * @see org.elasticsearch.client.IndicesAdminClient#shardStores(IndicesShardStoresRequest)
     */
    public static IndicesShardStoresRequest indicesShardStoresRequest(String... indices) {
        return new IndicesShardStoresRequest(indices);
    }

    /**
     * Creates an indices exists request.
     *
     * @param indices The indices to check if they exists or not.
     * @return The indices exists request
     * @see org.elasticsearch.client.IndicesAdminClient#exists(org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest)
     */
    public static IndicesExistsRequest indicesExistsRequest(String... indices) {
        return new IndicesExistsRequest(indices);
    }

    /**
     * Creates a create index request.
     *
     * @param index The index to create
     * @return The index create request
     * @see org.elasticsearch.client.IndicesAdminClient#create(org.elasticsearch.action.admin.indices.create.CreateIndexRequest)
     */
    public static CreateIndexRequest createIndexRequest(String index) {
        return new CreateIndexRequest(index);
    }

    /**
     * Creates a delete index request.
     *
     * @param index The index to delete
     * @return The delete index request
     * @see org.elasticsearch.client.IndicesAdminClient#delete(org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest)
     */
    public static DeleteIndexRequest deleteIndexRequest(String index) {
        return new DeleteIndexRequest(index);
    }

    /**
     * Creates a close index request.
     *
     * @param index The index to close
     * @return The close index request
     * @see org.elasticsearch.client.IndicesAdminClient#close(org.elasticsearch.action.admin.indices.close.CloseIndexRequest)
     */
    public static CloseIndexRequest closeIndexRequest(String index) {
        return new CloseIndexRequest(index);
    }

    /**
     * Creates an open index request.
     *
     * @param index The index to open
     * @return The open index request
     * @see org.elasticsearch.client.IndicesAdminClient#open(org.elasticsearch.action.admin.indices.open.OpenIndexRequest)
     */
    public static OpenIndexRequest openIndexRequest(String index) {
        return new OpenIndexRequest(index);
    }

    /**
     * Create a create mapping request against one or more indices.
     *
     * @param indices The indices to create mapping. Use <tt>null</tt> or <tt>_all</tt> to execute against all indices
     * @return The create mapping request
     * @see org.elasticsearch.client.IndicesAdminClient#putMapping(org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest)
     */
    public static PutMappingRequest putMappingRequest(String... indices) {
        return new PutMappingRequest(indices);
    }

    /**
     * Creates an index aliases request allowing to add and remove aliases.
     *
     * @return The index aliases request
     */
    public static IndicesAliasesRequest indexAliasesRequest() {
        return new IndicesAliasesRequest();
    }

    /**
     * Creates a refresh indices request.
     *
     * @param indices The indices to refresh. Use <tt>null</tt> or <tt>_all</tt> to execute against all indices
     * @return The refresh request
     * @see org.elasticsearch.client.IndicesAdminClient#refresh(org.elasticsearch.action.admin.indices.refresh.RefreshRequest)
     */
    public static RefreshRequest refreshRequest(String... indices) {
        return new RefreshRequest(indices);
    }

    /**
     * Creates a flush indices request.
     *
     * @param indices The indices to flush. Use <tt>null</tt> or <tt>_all</tt> to execute against all indices
     * @return The flush request
     * @see org.elasticsearch.client.IndicesAdminClient#flush(org.elasticsearch.action.admin.indices.flush.FlushRequest)
     */
    public static FlushRequest flushRequest(String... indices) {
        return new FlushRequest(indices);
    }

    /**
     * Creates a synced flush indices request.
     *
     * @param indices The indices to sync flush. Use <tt>null</tt> or <tt>_all</tt> to execute against all indices
     * @return The synced flush request
     * @see org.elasticsearch.client.IndicesAdminClient#syncedFlush(SyncedFlushRequest)
     */
    public static SyncedFlushRequest syncedFlushRequest(String... indices) {
        return new SyncedFlushRequest(indices);
    }

    /**
     * Creates a force merge request.
     *
     * @param indices The indices to force merge. Use <tt>null</tt> or <tt>_all</tt> to execute against all indices
     * @return The force merge request
     * @see org.elasticsearch.client.IndicesAdminClient#forceMerge(org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest)
     */
    public static ForceMergeRequest forceMergeRequest(String... indices) {
        return new ForceMergeRequest(indices);
    }

    /**
     * Creates an upgrade request.
     *
     * @param indices The indices to upgrade. Use <tt>null</tt> or <tt>_all</tt> to execute against all indices
     * @return The upgrade request
     * @see org.elasticsearch.client.IndicesAdminClient#upgrade(UpgradeRequest)
     */
    public static UpgradeRequest upgradeRequest(String... indices) {
        return new UpgradeRequest(indices);
    }

    /**
     * Creates a clean indices cache request.
     *
     * @param indices The indices to clean their caches. Use <tt>null</tt> or <tt>_all</tt> to execute against all indices
     * @return The request
     */
    public static ClearIndicesCacheRequest clearIndicesCacheRequest(String... indices) {
        return new ClearIndicesCacheRequest(indices);
    }

    /**
     * A request to update indices settings.
     *
     * @param indices The indices to update the settings for. Use <tt>null</tt> or <tt>_all</tt> to executed against all indices.
     * @return The request
     */
    public static UpdateSettingsRequest updateSettingsRequest(String... indices) {
        return new UpdateSettingsRequest(indices);
    }

    /**
     * Creates a cluster state request.
     *
     * @return The cluster state request.
     * @see org.elasticsearch.client.ClusterAdminClient#state(org.elasticsearch.action.admin.cluster.state.ClusterStateRequest)
     */
    public static ClusterStateRequest clusterStateRequest() {
        return new ClusterStateRequest();
    }

    /**
     * Creates a cluster reroute request allowing explicit shard allocation commands.
     *
     * @return The cluster reroute request
     */
    public static ClusterRerouteRequest clusterRerouteRequest() {
        return new ClusterRerouteRequest();
    }

    /**
     * Creates a request to update cluster-wide (persistent or transient) settings.
     *
     * @return The cluster update settings request
     */
    public static ClusterUpdateSettingsRequest clusterUpdateSettingsRequest() {
        return new ClusterUpdateSettingsRequest();
    }

    /**
     * Creates a cluster health request.
     *
     * @param indices The indices to provide additional cluster health information for. Use <tt>null</tt> or <tt>_all</tt> to execute against all indices
     * @return The cluster health request
     * @see org.elasticsearch.client.ClusterAdminClient#health(org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest)
     */
    public static ClusterHealthRequest clusterHealthRequest(String... indices) {
        return new ClusterHealthRequest(indices);
    }

    /**
     * List all shards for the given search across all indices.
     */
    public static ClusterSearchShardsRequest clusterSearchShardsRequest() {
        return new ClusterSearchShardsRequest();
    }

    /**
     * List all shards for the given search, restricted to the given indices.
     */
    public static ClusterSearchShardsRequest clusterSearchShardsRequest(String... indices) {
        return new ClusterSearchShardsRequest(indices);
    }

    /**
     * Creates a nodes info request against all the nodes.
     *
     * @return The nodes info request
     * @see org.elasticsearch.client.ClusterAdminClient#nodesInfo(org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest)
     */
    public static NodesInfoRequest nodesInfoRequest() {
        return new NodesInfoRequest();
    }

    /**
     * Creates a nodes info request against one or more nodes. Pass <tt>null</tt> or an empty array for all nodes.
     *
     * @param nodesIds The nodes ids to get the status for
     * @return The nodes info request
     * @see org.elasticsearch.client.ClusterAdminClient#nodesInfo(org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest)
     */
    public static NodesInfoRequest nodesInfoRequest(String... nodesIds) {
        return new NodesInfoRequest(nodesIds);
    }

    /**
     * Creates a nodes stats request against one or more nodes. Pass <tt>null</tt> or an empty array for all nodes.
     *
     * @param nodesIds The nodes ids to get the stats for
     * @return The nodes info request
     * @see org.elasticsearch.client.ClusterAdminClient#nodesStats(org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest)
     */
    public static NodesStatsRequest nodesStatsRequest(String... nodesIds) {
        return new NodesStatsRequest(nodesIds);
    }

    /**
     * Creates a cluster stats request.
     *
     * @return The cluster stats request
     * @see org.elasticsearch.client.ClusterAdminClient#clusterStats(org.elasticsearch.action.admin.cluster.stats.ClusterStatsRequest)
     */
    public static ClusterStatsRequest clusterStatsRequest() {
        return new ClusterStatsRequest();
    }

    /**
     * Creates a nodes tasks request against all the nodes.
     *
     * @return The nodes tasks request
     * @see org.elasticsearch.client.ClusterAdminClient#listTasks(ListTasksRequest)
     */
    public static ListTasksRequest listTasksRequest() {
        return new ListTasksRequest();
    }

    /**
     * Creates a nodes tasks request against one or more nodes. Pass <tt>null</tt> or an empty array for all nodes.
     *
     * @return The nodes tasks request
     * @see org.elasticsearch.client.ClusterAdminClient#cancelTasks(CancelTasksRequest)
     */
    public static CancelTasksRequest cancelTasksRequest() {
        return new CancelTasksRequest();
    }

    /**
     * Registers snapshot repository
     *
     * @param name repository name
     * @return repository registration request
     */
    public static PutRepositoryRequest putRepositoryRequest(String name) {
        return new PutRepositoryRequest(name);
    }

    /**
     * Gets snapshot repository
     *
     * @param repositories names of repositories
     * @return get repository request
     */
    public static GetRepositoriesRequest getRepositoryRequest(String... repositories) {
        return new GetRepositoriesRequest(repositories);
    }

    /**
     * Deletes registration for snapshot repository
     *
     * @param name repository name
     * @return delete repository request
     */
    public static DeleteRepositoryRequest deleteRepositoryRequest(String name) {
        return new DeleteRepositoryRequest(name);
    }

    /**
     * Verifies snapshot repository
     *
     * @param name repository name
     * @return repository verification request
     */
    public static VerifyRepositoryRequest verifyRepositoryRequest(String name) {
        return new VerifyRepositoryRequest(name);
    }

    /**
     * Creates new snapshot
     *
     * @param repository repository name
     * @param snapshot   snapshot name
     * @return create snapshot request
     */
    public static CreateSnapshotRequest createSnapshotRequest(String repository, String snapshot) {
        return new CreateSnapshotRequest(repository, snapshot);
    }

    /**
     * Gets snapshots from repository
     *
     * @param repository repository name
     * @return get snapshot request
     */
    public static GetSnapshotsRequest getSnapshotsRequest(String repository) {
        return new GetSnapshotsRequest(repository);
    }

    /**
     * Restores new snapshot
     *
     * @param repository repository name
     * @param snapshot   snapshot name
     * @return snapshot restore request
     */
    public static RestoreSnapshotRequest restoreSnapshotRequest(String repository, String snapshot) {
        return new RestoreSnapshotRequest(repository, snapshot);
    }

    /**
     * Deletes a snapshot
     *
     * @param repository repository name
     * @param snapshot   snapshot name
     * @return delete snapshot request
     */
    public static DeleteSnapshotRequest deleteSnapshotRequest(String repository, String snapshot) {
        return new DeleteSnapshotRequest(repository, snapshot);
    }

    /**
     * Get status of snapshots
     *
     * @param repository repository name
     * @return snapshot status request
     */
    public static SnapshotsStatusRequest snapshotsStatusRequest(String repository) {
        return new SnapshotsStatusRequest(repository);
    }
}
import java.util.Queue; import java.util.LinkedList; import java.util.ArrayList; /** * TST<Value> * @param <Value> */ public class TST<Value> { //Atributos generales private int N; // size private Node root; // root of TST private int current; /** * Node * Usada para crear el arbol */ private class Node { private char c; // character private Node left, mid, right; // left, middle, and right subtries private Value val; // value associated with string } /** * Devuelve el numero de elementos * @return int */ public int size() { return N; } /** * Post: Indica si existe o no valor en la clave indicada * @param key * @return boolean */ public boolean contains(String key) { if (key == null || key.length() == 0) return false; try{ return get(key.toLowerCase()) != null; } catch (Exception e) { return false; } } //if not present returns null /** * Devuelve el contendo de la llave indicada * Pre: key debe ser un nombre valido * Post: En caso de existir devuelve el objeto asociado * En caso de no exisiter devuelve null; * @param key * @return * @throws Exception */ public Value get(String key) throws Exception{ if(!Util.checkName(key)) throw new Exception(key + " is not valid"); Node x = get(root, key.toLowerCase(), 0); if (x == null) return null; return x.val; } /** * * @param x * @param key * @param d * @return Node */ private Node get(Node x, String key, int d) { if (x == null) return null; char c = key.charAt(d); if (c < x.c) return get(x.left, key, d); else if (c > x.c) return get(x.right, key, d); else if (d < key.length() - 1) return get(x.mid, key, d + 1); else return x; } /** * Post: Inserta el elemento val con la llave s * @param s * @param val * @throws Exception */ public void put(String s, Value val) throws Exception { root = put(root, s.toLowerCase(), val, 0); } /** * Pre: x no debe ser nulo * Post: Crea un nuevo nodo con la infomacion de val * @param x * @param s * @param val * @param d * @return * @throws Exception */ private Node put(Node x, String s, Value 
val, int d) throws Exception { char c = s.charAt(d); if (x == null) { x = new Node(); x.c = c; } if (c < x.c) x.left = put(x.left, s, val, d); else if (c > x.c) x.right = put(x.right, s, val, d); else if (d < s.length() - 1) x.mid = put(x.mid, s, val, d + 1); else { if (x.val == null) { N++; x.val = val; } else { throw new Exception("The key " + s + " exists"); } } return x; } /** * Devuelve todas las llaves del TST ordenadas * @return */ public Iterable<String> keys() { Queue<String> queue = new LinkedList<String>(); collect(root, "", queue); return queue; } /** * Pre: X no debe ser nulo * @param x * @param prefix * @param queue */ private void collect(Node x, String prefix, Queue<String> queue) { if (x == null) return; collect(x.left, prefix, queue); if (x.val != null) queue.add(prefix + x.c); collect(x.mid, prefix + x.c, queue); collect(x.right, prefix, queue); } /** * Devuelve todos los objetos almacenados en el TST * @return */ public Iterable<Value> values() { Queue<Value> queue = new LinkedList<Value>(); collectValues(root, "", queue); return queue; } /** * Pre: x no debe ser nulo * Post: Guarda en queue los objetos del TST * @param x * @param prefix * @param queue */ private void collectValues(Node x, String prefix, Queue<Value> queue) { if (x == null) return; collectValues(x.left, prefix, queue); if (x.val != null) queue.add(x.val); collectValues(x.mid, prefix + x.c, queue); collectValues(x.right, prefix, queue); } /** * Devuelve los elementos del TST limitados por un max y un min * Pre: X no debe ser nulo * current <= max * Post: Alamcena en queue los emenetos que cumplan los cirterios * @param x * @param prefix * @param key * @param queue * @param max */ private void collectValuesCache(Node x, String prefix, String key, ArrayList<Value> queue, int max) { if (x == null || current > max) return; //Chivato //Console.print("Key: " + key + " Prefix: " + prefix + x.c); collectValuesCache(x.left, prefix, key, queue, max); if (x.val != null && current <= max && 
key.compareTo(prefix+x.c) < 0){ queue.add(x.val); //Chivato //Console.print("--->"+x.val.toString()); ++current; //Chivato //Console.print("--->Current: " + Integer.valueOf(current).toString()); } collectValuesCache(x.mid, prefix + x.c, key, queue, max); collectValuesCache(x.right, prefix, key, queue, max); } /** * * @param key * @param max * @return * @throws Exception */ public ArrayList<Value> valuesCache(String key, int max) throws Exception{ if(max < 1) throw new Exception("Mandatory: max > 0"); Node x = get(root, key.toLowerCase(), 0); if(x == null) throw new Exception("Key not present"); if(!Util.checkName(key)) throw new Exception(key + " is not valid"); current = 1; ArrayList<Value> queue = new ArrayList<Value>(); collectValuesCache(root, "", key, queue, max); return queue; } /** * Devuelve el primer elemento del TST * Pre: El TST debe tener algun valor * @return * @throws Exception */ public Value first() throws Exception{ if(N == 0) throw new Exception ("Empty TST!"); return first(root); } /** * * @param x * @return * @throws Exception */ private Value first(Node x) throws Exception{ //Testear! while(x.left != null) x = x.left; if(x.val != null) return x.val; else if(x.mid != null) return first(x.mid); //else if(x.val != null) return x.val; else throw new Exception("Debugging exception"); } /** * * @return * @throws Exception */ public String firstKey() throws Exception{ if(N == 0) throw new Exception ("Empty TST!"); return firstKey(root); } /** * * @param x * @return * @throws Exception */ public String firstKey(Node x) throws Exception{ //Testear! 
while(x.left != null) x = x.left; if(x.val != null) return ""+x.c; else if(x.mid != null) return x.c+firstKey(x.mid); //else if(x.val != null) return ""+x.c; else throw new Exception("Debugging exception"); } /** * Post: Elimina todo el arbol y el contador */ public void clear() { root = null; N = 0; } /** * Pre: Key debe ser un nombre valido * Post: Elimina el valor asociado a esa key y todas las key inecesarias * @param key * @throws Exception */ public void remove(String key) throws Exception { if(!Util.checkName(key)) throw new Exception(key + " is not valid"); // Remove if (!remove(root, key.toLowerCase(), 0)) throw new Exception("The key " + key + " doesn't exist"); } /** * Pre: X no debe ser nulo * @param x * @param key * @param d * @return */ private boolean remove(Node x, String key, int d) { if (x == null) return false; char c = key.charAt(d); boolean del = false; if (c < x.c) del = remove(x.left, key, d); else if (c > x.c) del = remove(x.right, key, d); else if (d < key.length() - 1) del = remove(x.mid, key, d + 1); else { del = (x.val != null); x.val = null; del = true; N--; } return del; } }
/* Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.andes.test.utils; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.LineNumberReader; import java.io.PrintStream; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import javax.jms.BytesMessage; import javax.jms.Connection; import javax.jms.Destination; import javax.jms.JMSException; import javax.jms.MapMessage; import javax.jms.Message; import javax.jms.MessageConsumer; import javax.jms.MessageProducer; import javax.jms.ObjectMessage; import javax.jms.Queue; import javax.jms.Session; import javax.jms.StreamMessage; import javax.jms.TextMessage; import javax.naming.InitialContext; import javax.naming.NamingException; import org.apache.commons.configuration.ConfigurationException; import org.apache.commons.configuration.XMLConfiguration; import org.apache.commons.lang.StringUtils; import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.wso2.andes.AMQException; import 
org.wso2.andes.client.AMQConnectionFactory;
import org.wso2.andes.client.AMQQueue;
import org.wso2.andes.exchange.ExchangeDefaults;
import org.wso2.andes.jms.BrokerDetails;
import org.wso2.andes.jms.ConnectionURL;
import org.wso2.andes.management.common.mbeans.ConfigurationManagement;
import org.wso2.andes.server.Broker;
import org.wso2.andes.server.BrokerOptions;
import org.wso2.andes.server.ProtocolExclusion;
import org.wso2.andes.server.configuration.ServerConfiguration;
import org.wso2.andes.server.protocol.AmqpProtocolVersion;
import org.wso2.andes.url.URLSyntaxException;
import org.wso2.andes.util.FileUtils;
import org.wso2.andes.util.LogMonitor;

/**
 * Qpid base class for system testing test cases.
 *
 * Manages the lifecycle of one or more brokers (internal to this JVM,
 * spawned as a separate process, or externally started) for each test,
 * redirects test/broker output to per-test files, and provides JMS
 * connection/message helpers for subclasses.
 */
public class QpidBrokerTestCase extends QpidTestCase
{
    public enum BrokerType
    {
        EXTERNAL /** Test case relies on a Broker started independently of the test-suite */,
        INTERNAL /** Test case starts an embedded broker within this JVM */,
        SPAWNED /** Test case spawns a new broker as a separate process */
    }

    protected final String QpidHome = System.getProperty("QPID_HOME");
    protected File _configFile = new File(System.getProperty("broker.config"));

    protected static final Logger _logger = Logger.getLogger(QpidBrokerTestCase.class);
    protected static final int LOGMONITOR_TIMEOUT = 5000;

    protected long RECEIVE_TIMEOUT = 1000l;

    /** System properties that must be propagated to an external broker via QPID_OPTS. */
    private Map<String, String> _propertiesSetForBroker = new HashMap<String, String>();
    /** Original log4j levels recorded so they can be reverted after the test. */
    private Map<Logger, Level> _loggerLevelSetForTest = new HashMap<Logger, Level>();

    private XMLConfiguration _testConfiguration = new XMLConfiguration();
    private XMLConfiguration _testVirtualhosts = new XMLConfiguration();

    protected static final String INDEX = "index";
    protected static final String CONTENT = "content";

    private static final String DEFAULT_INITIAL_CONTEXT = "org.wso2.andes.jndi.PropertiesFileInitialContextFactory";

    static
    {
        // Install a default JNDI initial context factory unless the runner set one.
        String initialContext = System.getProperty(InitialContext.INITIAL_CONTEXT_FACTORY);

        if (initialContext == null || initialContext.length() == 0)
        {
            System.setProperty(InitialContext.INITIAL_CONTEXT_FACTORY, DEFAULT_INITIAL_CONTEXT);
        }
    }

    // system properties
    private static final String BROKER_LANGUAGE = "broker.language";
    private static final String BROKER_TYPE = "broker.type";
    private static final String BROKER_COMMAND = "broker.command";
    private static final String BROKER_CLEAN = "broker.clean";
    private static final String BROKER_CLEAN_BETWEEN_TESTS = "broker.clean.between.tests";
    private static final String BROKER_EXISTING_QPID_WORK = "broker.existing.qpid.work";
    private static final String BROKER_VERSION = "broker.version";
    protected static final String BROKER_READY = "broker.ready";
    private static final String BROKER_STOPPED = "broker.stopped";
    private static final String TEST_OUTPUT = "test.output";
    private static final String BROKER_LOG_INTERLEAVE = "broker.log.interleave";
    private static final String BROKER_LOG_PREFIX = "broker.log.prefix";
    private static final String BROKER_PERSITENT = "broker.persistent";
    private static final String BROKER_PROTOCOL_EXCLUDES = "broker.protocol.excludes";

    // values
    protected static final String JAVA = "java";
    protected static final String CPP = "cpp";

    protected static final String QPID_HOME = "QPID_HOME";

    public static final int DEFAULT_VM_PORT = 1;
    public static final int DEFAULT_PORT = Integer.getInteger("test.port", ServerConfiguration.DEFAULT_PORT);
    public static final int FAILING_PORT = Integer.parseInt(System.getProperty("test.port.alt"));
    public static final int DEFAULT_MANAGEMENT_PORT = Integer.getInteger("test.mport", ServerConfiguration.DEFAULT_JMXPORT);
    public static final int DEFAULT_SSL_PORT = Integer.getInteger("test.sslport", ServerConfiguration.DEFAULT_SSL_PORT);

    protected String _brokerLanguage = System.getProperty(BROKER_LANGUAGE, JAVA);
    protected BrokerType _brokerType = BrokerType.valueOf(System.getProperty(BROKER_TYPE, "").toUpperCase());
    protected String _brokerCommand = System.getProperty(BROKER_COMMAND);
    private String _brokerClean = System.getProperty(BROKER_CLEAN, null);
    private Boolean _brokerCleanBetweenTests = Boolean.getBoolean(BROKER_CLEAN_BETWEEN_TESTS);
    private final AmqpProtocolVersion _brokerVersion = AmqpProtocolVersion.valueOf(System.getProperty(BROKER_VERSION, ""));
    protected String _output = System.getProperty(TEST_OUTPUT);
    protected Boolean _brokerPersistent = Boolean.getBoolean(BROKER_PERSITENT);
    // FIX: default to "" — the replace()/split() calls in getBrokerCommand() and
    // addExcludedPorts() previously threw NullPointerException when the
    // "broker.protocol.excludes" property was not set.
    private String _brokerProtocolExcludes = System.getProperty(BROKER_PROTOCOL_EXCLUDES, "");

    protected static String _brokerLogPrefix = System.getProperty(BROKER_LOG_PREFIX, "BROKER: ");
    protected static boolean _interleaveBrokerLog = Boolean.getBoolean(BROKER_LOG_INTERLEAVE);

    protected File _outputFile;

    protected PrintStream _brokerOutputStream;

    /** Brokers started by this test, keyed by main port. */
    protected Map<Integer, BrokerHolder> _brokers = new HashMap<Integer, BrokerHolder>();

    protected InitialContext _initialContext;
    protected AMQConnectionFactory _connectionFactory;

    protected String _testName;

    // the connections created for a given test
    protected List<Connection> _connections = new ArrayList<Connection>();
    public static final String QUEUE = "queue";
    public static final String TOPIC = "topic";

    /** Map to hold test defined environment properties */
    private Map<String, String> _env;

    /** Ensure our messages have some sort of size */
    protected static final int DEFAULT_MESSAGE_SIZE = 1024;

    /** Size to create our message */
    private int _messageSize = DEFAULT_MESSAGE_SIZE;

    /** Type of message */
    protected enum MessageType
    {
        BYTES, MAP, OBJECT, STREAM, TEXT
    }

    private MessageType _messageType = MessageType.TEXT;

    public QpidBrokerTestCase(String name)
    {
        super(name);
    }

    public QpidBrokerTestCase()
    {
        super();
    }

    public Logger getLogger()
    {
        return QpidBrokerTestCase._logger;
    }

    /**
     * Runs the test, optionally redirecting stdout/stderr and broker output to
     * per-test files, and always stopping (and optionally cleaning) the broker
     * afterwards.
     */
    public void runBare() throws Throwable
    {
        _testName = getClass().getSimpleName() + "." + getName();
        String qname = getClass().getName() + "." + getName();

        // Initialize this for each test run
        _env = new HashMap<String, String>();

        PrintStream oldOut = System.out;
        PrintStream oldErr = System.err;
        PrintStream out = null;
        PrintStream err = null;

        boolean redirected = _output != null && _output.length() > 0;
        if (redirected)
        {
            _outputFile = new File(String.format("%s/TEST-%s.out", _output, qname));
            out = new PrintStream(_outputFile);
            err = new PrintStream(String.format("%s/TEST-%s.err", _output, qname));
            System.setOut(out);
            System.setErr(err);

            // Broker output either interleaves with the test output or goes to
            // its own .broker.out file.
            if (_interleaveBrokerLog)
            {
                _brokerOutputStream = out;
            }
            else
            {
                _brokerOutputStream = new PrintStream(new FileOutputStream(String
                        .format("%s/TEST-%s.broker.out", _output, qname)), true);
            }
        }

        _logger.info("========== start " + _testName + " ==========");
        try
        {
            super.runBare();
        }
        catch (Exception e)
        {
            _logger.error("exception", e);
            throw e;
        }
        finally
        {
            try
            {
                stopBroker();
            }
            catch (Exception e)
            {
                _logger.error("exception stopping broker", e);
            }

            if (_brokerCleanBetweenTests)
            {
                try
                {
                    cleanBroker();
                }
                catch (Exception e)
                {
                    _logger.error("exception cleaning up broker", e);
                }
            }

            _logger.info("========== stop " + _testName + " ==========");

            if (redirected)
            {
                System.setErr(oldErr);
                System.setOut(oldOut);
                err.close();
                out.close();
                if (!_interleaveBrokerLog)
                {
                    _brokerOutputStream.close();
                }
            }
        }
    }

    @Override
    protected void setUp() throws Exception
    {
        if (!_configFile.exists())
        {
            fail("Unable to test without config file:" + _configFile);
        }

        // Optionally seed the broker's QPID_WORK directory from an existing one.
        String existingQpidWorkPath = System.getProperty(BROKER_EXISTING_QPID_WORK);
        if (existingQpidWorkPath != null && !existingQpidWorkPath.equals(""))
        {
            cleanBroker();

            File existing = new File(existingQpidWorkPath);
            File qpidWork = new File(getQpidWork(_brokerType, getPort()));
            FileUtils.copyRecursive(existing, qpidWork);
        }

        startBroker();
    }

    /**
     * Pipes a spawned broker's output to the given stream while watching for a
     * "ready" line (released via a latch) and an optional "stopped" line.
     */
    private static final class Piper extends Thread
    {
        private LineNumberReader in;
        private PrintStream out;
        private String ready;
        private CountDownLatch latch;
        private boolean seenReady;
        private String stopped;
        private String stopLine;

        public Piper(InputStream in, PrintStream out, String ready)
        {
            this(in, out, ready, null);
        }

        public Piper(InputStream in, PrintStream out, String ready, String stopped)
        {
            this.in = new LineNumberReader(new InputStreamReader(in));
            this.out = out;
            this.ready = ready;
            this.stopped = stopped;
            this.seenReady = false;

            if (this.ready != null && !this.ready.equals(""))
            {
                this.latch = new CountDownLatch(1);
            }
            else
            {
                this.latch = null;
            }
        }

        public Piper(InputStream in, PrintStream out)
        {
            this(in, out, null);
        }

        /**
         * Waits for the ready line (or stream end) and reports whether it was seen.
         * When no ready token was configured this returns true immediately.
         */
        public boolean await(long timeout, TimeUnit unit) throws InterruptedException
        {
            if (latch == null)
            {
                return true;
            }
            else
            {
                latch.await(timeout, unit);
                return seenReady;
            }
        }

        public void run()
        {
            try
            {
                String line;
                while ((line = in.readLine()) != null)
                {
                    if (_interleaveBrokerLog)
                    {
                        line = _brokerLogPrefix + line;
                    }
                    out.println(line);

                    if (latch != null && line.contains(ready))
                    {
                        seenReady = true;
                        latch.countDown();
                    }

                    // FIX: guard against a null 'stopped' token. Pipers created
                    // without one (e.g. from cleanBroker()) previously threw
                    // NullPointerException on line.contains(null) here.
                    if (!seenReady && stopped != null && line.contains(stopped))
                    {
                        stopLine = line;
                    }
                }
            }
            catch (IOException e)
            {
                // this seems to happen regularly even when
                // exits are normal
            }
            finally
            {
                if (latch != null)
                {
                    latch.countDown();
                }
            }
        }

        public String getStopLine()
        {
            return stopLine;
        }
    }

    /**
     * Return the management port in use by the broker on this main port
     *
     * @param mainPort the broker's main port.
     *
     * @return the management port that corresponds to the broker on the given port
     */
    protected int getManagementPort(int mainPort)
    {
        return mainPort + (DEFAULT_MANAGEMENT_PORT - DEFAULT_PORT);
    }

    /**
     * Get the Port that is use by the current broker
     *
     * @return the current port
     */
    protected int getPort()
    {
        return getPort(0);
    }

    protected int getPort(int port)
    {
        // For non-external brokers 0 means "use the default test port".
        if (!_brokerType.equals(BrokerType.EXTERNAL))
        {
            return port == 0 ? DEFAULT_PORT : port;
        }
        else
        {
            return port;
        }
    }

    /**
     * Expands the configured broker command template for the given port.
     * NOTE(review): @SSL_PORT is assumed to be main-port minus one — confirm
     * against the test profile that defines broker.command.
     */
    protected String getBrokerCommand(int port) throws MalformedURLException
    {
        final String protocolExcludesList = _brokerProtocolExcludes.replace("@PORT", "" + port);

        return _brokerCommand
                .replace("@PORT", "" + port)
                .replace("@SSL_PORT", "" + (port - 1))
                .replace("@MPORT", "" + getManagementPort(port))
                .replace("@CONFIG_FILE", _configFile.toString())
                .replace("@EXCLUDES", protocolExcludesList);
    }

    public void startBroker() throws Exception
    {
        startBroker(0);
    }

    /**
     * Starts a broker on the given port (0 means the default test port).
     * Internal brokers run in this JVM; spawned brokers are forked with the
     * configured command and watched until they report ready.
     *
     * @throws IllegalStateException if a broker is already registered on the port
     */
    public void startBroker(int port) throws Exception
    {
        port = getPort(port);

        // Save any configuration changes that have been made
        saveTestConfiguration();
        saveTestVirtualhosts();

        if (_brokers.get(port) != null)
        {
            throw new IllegalStateException("There is already an existing broker running on port " + port);
        }

        if (_brokerType.equals(BrokerType.INTERNAL) && !existingInternalBroker())
        {
            setConfigurationProperty(ServerConfiguration.MGMT_CUSTOM_REGISTRY_SOCKET, String.valueOf(false));
            saveTestConfiguration();

            BrokerOptions options = new BrokerOptions();
            options.setConfigFile(_configFile.getAbsolutePath());
            options.addPort(port);

            addExcludedPorts(port, options);

            options.setJmxPort(getManagementPort(port));

            //Set the log config file, relying on the log4j.configuration system property
            //set on the JVM by the JUnit runner task in module.xml.
            options.setLogConfigFile(new URL(System.getProperty("log4j.configuration")).getFile());

            Broker broker = new Broker();
            _logger.info("starting internal broker (same JVM)");
            broker.startup(options);

            _brokers.put(port, new InternalBrokerHolder(broker));
        }
        else if (!_brokerType.equals(BrokerType.EXTERNAL))
        {
            // SPAWNED: fork the broker as a separate process.
            String cmd = getBrokerCommand(port);
            _logger.info("starting external broker: " + cmd);
            ProcessBuilder pb = new ProcessBuilder(cmd.split("\\s+"));
            pb.redirectErrorStream(true);
            Map<String, String> env = pb.environment();
            String qpidHome = System.getProperty(QPID_HOME);
            env.put(QPID_HOME, qpidHome);

            //Augment Path with bin directory in QPID_HOME.
            env.put("PATH", env.get("PATH").concat(File.pathSeparator + qpidHome + "/bin"));

            //Add the test name to the broker run.
            // DON'T change PNAME, qpid.stop needs this value.
            env.put("QPID_PNAME", "-DPNAME=QPBRKR -DTNAME=\"" + _testName + "\"");
            // Add the port to QPID_WORK to ensure unique working dirs for multi broker tests
            env.put("QPID_WORK", getQpidWork(_brokerType, port));

            // Use the environment variable to set amqj.logging.level for the broker
            // The value used is a 'server' value in the test configuration to
            // allow a differentiation between the client and broker logging levels.
            if (System.getProperty("amqj.server.logging.level") != null)
            {
                setBrokerEnvironment("AMQJ_LOGGING_LEVEL", System.getProperty("amqj.server.logging.level"));
            }

            // Add all the environment settings the test requested
            if (!_env.isEmpty())
            {
                for (Map.Entry<String, String> entry : _env.entrySet())
                {
                    env.put(entry.getKey(), entry.getValue());
                }
            }

            // Add default test logging levels that are used by the log4j-test
            // Use the convenience methods to push the current logging setting
            // in to the external broker's QPID_OPTS string.
            if (System.getProperty("amqj.protocol.logging.level") != null)
            {
                setSystemProperty("amqj.protocol.logging.level");
            }
            if (System.getProperty("root.logging.level") != null)
            {
                setSystemProperty("root.logging.level");
            }

            String QPID_OPTS = " ";
            // Add all the specified system properties to QPID_OPTS
            if (!_propertiesSetForBroker.isEmpty())
            {
                for (String key : _propertiesSetForBroker.keySet())
                {
                    QPID_OPTS += "-D" + key + "=" + _propertiesSetForBroker.get(key) + " ";
                }

                if (env.containsKey("QPID_OPTS"))
                {
                    env.put("QPID_OPTS", env.get("QPID_OPTS") + QPID_OPTS);
                }
                else
                {
                    env.put("QPID_OPTS", QPID_OPTS);
                }
            }

            // FIX: removed stray double semicolon after pb.start()
            Process process = pb.start();

            Piper p = new Piper(process.getInputStream(),
                                _brokerOutputStream,
                                System.getProperty(BROKER_READY),
                                System.getProperty(BROKER_STOPPED));

            p.start();

            if (!p.await(30, TimeUnit.SECONDS))
            {
                _logger.info("broker failed to become ready (" + p.ready + "):" + p.getStopLine());
                //Ensure broker has stopped
                process.destroy();
                cleanBroker();
                throw new RuntimeException("broker failed to become ready:" + p.getStopLine());
            }

            try
            {
                //test that the broker is still running and hasn't exited unexpectedly
                int exit = process.exitValue();
                _logger.info("broker aborted: " + exit);
                cleanBroker();
                throw new RuntimeException("broker aborted: " + exit);
            }
            catch (IllegalThreadStateException e)
            {
                // this is expect if the broker started successfully
            }

            _brokers.put(port, new SpawnedBrokerHolder(process));
        }
    }

    /**
     * Parses the protocol-excludes list ("EXCLUDE port EXCLUDE port ...") and
     * registers each exclusion on the broker options.
     */
    private void addExcludedPorts(int port, BrokerOptions options)
    {
        final String protocolExcludesList = _brokerProtocolExcludes.replace("@PORT", "" + port);

        if (protocolExcludesList.equals(""))
        {
            return;
        }
        final String[] toks = protocolExcludesList.split("\\s");

        if (toks.length % 2 != 0)
        {
            throw new IllegalArgumentException("Must be an even number of tokens in '" + protocolExcludesList + "'");
        }
        for (int i = 0; i < toks.length; i = i + 2)
        {
            String excludeArg = toks[i];
            final int excludedPort = Integer.parseInt(toks[i + 1]);
            options.addExcludedPort(ProtocolExclusion.lookup(excludeArg), excludedPort);

            _logger.info("Adding protocol exclusion " + excludeArg + " " + excludedPort);
        }
    }

    private boolean existingInternalBroker()
    {
        for (BrokerHolder holder : _brokers.values())
        {
            if (holder instanceof InternalBrokerHolder)
            {
                return true;
            }
        }

        return false;
    }

    private String getQpidWork(BrokerType broker, int port)
    {
        // Non-external brokers get a per-port working directory so multiple
        // brokers in one test don't collide.
        if (!broker.equals(BrokerType.EXTERNAL))
        {
            return System.getProperty("QPID_WORK") + "/" + port;
        }

        return System.getProperty("QPID_WORK");
    }

    public String getTestConfigFile()
    {
        String path = _output == null ? System.getProperty("java.io.tmpdir") : _output;
        return path + "/" + getTestQueueName() + "-config.xml";
    }

    public String getTestVirtualhostsFile()
    {
        String path = _output == null ? System.getProperty("java.io.tmpdir") : _output;
        return path + "/" + getTestQueueName() + "-virtualhosts.xml";
    }

    protected void saveTestConfiguration() throws ConfigurationException
    {
        // Specify the test config file
        String testConfig = getTestConfigFile();
        setSystemProperty("test.config", testConfig);

        // Create the file if configuration does not exist
        if (_testConfiguration.isEmpty())
        {
            _testConfiguration.addProperty("__ignore", "true");
        }
        _testConfiguration.save(testConfig);
    }

    protected void saveTestVirtualhosts() throws ConfigurationException
    {
        // Specify the test virtualhosts file
        String testVirtualhosts = getTestVirtualhostsFile();
        setSystemProperty("test.virtualhosts", testVirtualhosts);

        // Create the file if configuration does not exist
        if (_testVirtualhosts.isEmpty())
        {
            _testVirtualhosts.addProperty("__ignore", "true");
        }
        _testVirtualhosts.save(testVirtualhosts);
    }

    /** Runs the configured external clean command (broker.clean), if any. */
    public void cleanBroker()
    {
        if (_brokerClean != null)
        {
            _logger.info("clean: " + _brokerClean);

            try
            {
                ProcessBuilder pb = new ProcessBuilder(_brokerClean.split("\\s+"));
                pb.redirectErrorStream(true);
                Process clean = pb.start();
                new Piper(clean.getInputStream(), _brokerOutputStream).start();

                clean.waitFor();

                _logger.info("clean exited: " + clean.exitValue());
            }
            catch (IOException e)
            {
                throw new RuntimeException(e);
            }
            catch (InterruptedException e)
            {
                throw new RuntimeException(e);
            }
        }
    }

    public void stopBroker() throws Exception
    {
        stopBroker(0);
    }

    public void stopBroker(int port) throws Exception
    {
        port = getPort(port);

        _logger.info("stopping broker: " + getBrokerCommand(port));
        BrokerHolder broker = _brokers.remove(port);
        // FIX: runBare() calls stopBroker() unconditionally from its finally
        // block, so there may be no broker registered on this port (e.g. when
        // startup failed); previously this threw NullPointerException.
        if (broker != null)
        {
            broker.shutdown();
        }
    }

    public boolean isBrokerPresent(int port) throws Exception
    {
        port = getPort(port);

        return _brokers.containsKey(port);
    }

    /**
     * Attempt to set the Java Broker to use the BDBMessageStore for persistence
     * Falling back to the DerbyMessageStore if
     *
     * @param virtualhost - The virtualhost to modify
     *
     * @throws ConfigurationException - when reading/writing existing configuration
     * @throws IOException - When creating a temporary file.
     */
    /*
    protected void makeVirtualHostPersistent(String virtualhost)
            throws ConfigurationException, IOException
    {
        Class<?> storeClass = null;
        try
        {
            // Try and lookup the BDB class
            storeClass = Class.forName("org.wso2.andes.server.store.berkeleydb.BDBMessageStore");
        }
        catch (ClassNotFoundException e)
        {
            // No BDB store, we'll use Derby instead.
            storeClass = DerbyMessageStore.class;
        }

        setConfigurationProperty("virtualhosts.virtualhost." + virtualhost + ".store.class",
                storeClass.getName());
        setConfigurationProperty("virtualhosts.virtualhost." + virtualhost + ".store." + DerbyMessageStore.ENVIRONMENT_PATH_PROPERTY,
                "${QPID_WORK}/" + virtualhost);
    }
    */

    /**
     * Get a property value from the current configuration file.
     *
     * @param property the property to lookup
     *
     * @return the requested String Value
     *
     * @throws org.apache.commons.configuration.ConfigurationException
     */
    protected String getConfigurationStringProperty(String property) throws ConfigurationException
    {
        // Call save Configuration to be sure we have saved the test specific
        // file first.
        saveTestConfiguration();
        saveTestVirtualhosts();

        ServerConfiguration configuration = new ServerConfiguration(_configFile);
        // Don't need to configuration.configure() here as we are just pulling
        // values directly by String.
        return configuration.getConfig().getString(property);
    }

    /**
     * Set a configuration Property for this test run.
     *
     * This creates a new configuration based on the current configuration
     * with the specified property change.
     *
     * Multiple calls to this method will result in multiple temporary
     * configuration files being created.
     *
     * @param property the configuration property to set
     * @param value the new value
     *
     * @throws ConfigurationException when loading the current config file
     * @throws IOException when writing the new config file
     */
    protected void setConfigurationProperty(String property, String value)
            throws ConfigurationException, IOException
    {
        // Choose which file to write the property to based on prefix.
        if (property.startsWith("virtualhosts"))
        {
            _testVirtualhosts.setProperty(StringUtils.substringAfter(property, "virtualhosts."), value);
        }
        else
        {
            _testConfiguration.setProperty(property, value);
        }
    }

    /**
     * Set a System property that is to be applied only to the external test
     * broker.
     *
     * This is a convenience method to enable the setting of a -Dproperty=value
     * entry in QPID_OPTS
     *
     * This is only useful for the External Java Broker tests.
     *
     * @param property the property name
     * @param value the value to set the property to
     */
    protected void setBrokerOnlySystemProperty(String property, String value)
    {
        if (!_propertiesSetForBroker.containsKey(property))
        {
            _propertiesSetForBroker.put(property, value);
        }
    }

    /**
     * Set a System (-D) property for this test run.
     *
     * This convenience method copies the current VMs System Property
     * for the external VM Broker.
     *
     * @param property the System property to set
     */
    protected void setSystemProperty(String property)
    {
        setSystemProperty(property, System.getProperty(property));
    }

    /**
     * Set a System property for the duration of this test.
     *
     * When the test run is complete the value will be reverted.
     *
     * The values set using this method will also be propogated to the external
     * Java Broker via a -D value defined in QPID_OPTS.
     *
     * If the value should not be set on the broker then use
     * setTestClientSystemProperty().
     *
     * @param property the property to set
     * @param value the new value to use
     */
    protected void setSystemProperty(String property, String value)
    {
        // Record the value for the external broker
        _propertiesSetForBroker.put(property, value);

        //Set the value for the test client vm aswell.
        setTestClientSystemProperty(property, value);
    }

    /**
     * Set a System property for the client (and broker if using the same vm) of this test.
     *
     * @param property The property to set
     * @param value the value to set it to.
     */
    protected void setTestClientSystemProperty(String property, String value)
    {
        setTestSystemProperty(property, value);
    }

    /**
     * Restore the System property values that were set before this test run.
     */
    protected void revertSystemProperties()
    {
        revertTestSystemProperties();

        // We don't change the current VMs settings for Broker only properties
        // so we can just clear this map
        _propertiesSetForBroker.clear();
    }

    /**
     * Add an environment variable for the external broker environment
     *
     * @param property the property to set
     * @param value the value to set it to
     */
    protected void setBrokerEnvironment(String property, String value)
    {
        _env.put(property, value);
    }

    /**
     * Adjust the VMs Log4j Settings just for this test run
     *
     * @param logger the logger to change
     * @param level the level to set
     */
    protected void setLoggerLevel(Logger logger, Level level)
    {
        assertNotNull("Cannot set level of null logger", logger);
        assertNotNull("Cannot set Logger(" + logger.getName() + ") to null level.", level);

        if (!_loggerLevelSetForTest.containsKey(logger))
        {
            // Record the current value so we can revert it later.
            _loggerLevelSetForTest.put(logger, logger.getLevel());
        }

        logger.setLevel(level);
    }

    /**
     * Restore the logging levels defined by this test.
     */
    protected void revertLoggingLevels()
    {
        for (Logger logger : _loggerLevelSetForTest.keySet())
        {
            logger.setLevel(_loggerLevelSetForTest.get(logger));
        }

        _loggerLevelSetForTest.clear();
    }

    /**
     * Check whether the broker is an 0.8
     *
     * @return true if the broker is an 0_8 version, false otherwise.
     */
    public boolean isBroker08()
    {
        return _brokerVersion.equals(AmqpProtocolVersion.v0_8);
    }

    public boolean isBroker010()
    {
        return _brokerVersion.equals(AmqpProtocolVersion.v0_10);
    }

    protected boolean isJavaBroker()
    {
        // FIX: the previous "_brokerType.equals(\"vm\")" clause compared an enum
        // to a String, which is always false (dead code); dropped.
        return _brokerLanguage.equals("java");
    }

    protected boolean isCppBroker()
    {
        return _brokerLanguage.equals("cpp");
    }

    protected boolean isExternalBroker()
    {
        // FIX: the previous "!_brokerType.equals(\"vm\")" compared an enum to a
        // String and therefore always returned true. External here means the
        // broker runs outside this JVM (spawned or independently started).
        return _brokerType.equals(BrokerType.SPAWNED) || _brokerType.equals(BrokerType.EXTERNAL);
    }

    protected boolean isInternalBroker()
    {
        return _brokerType.equals(BrokerType.INTERNAL);
    }

    protected boolean isBrokerStorePersistent()
    {
        return _brokerPersistent;
    }

    public void restartBroker() throws Exception
    {
        restartBroker(0);
    }

    public void restartBroker(int port) throws Exception
    {
        stopBroker(port);
        startBroker(port);
    }

    /**
     * we assume that the environment is correctly set
     * i.e. -Djava.naming.provider.url="..//example010.properties"
     * TODO should be a way of setting that through maven
     *
     * @return an initial context
     *
     * @throws NamingException if there is an error getting the context
     */
    public InitialContext getInitialContext() throws NamingException
    {
        _logger.info("get InitialContext");
        if (_initialContext == null)
        {
            _initialContext = new InitialContext();
        }
        return _initialContext;
    }

    /**
     * Get the default connection factory for the currently used broker
     * Default factory is "local"
     *
     * @return A connection factory
     *
     * @throws NamingException if there is an error getting the factory
     */
    public AMQConnectionFactory getConnectionFactory() throws NamingException
    {
        _logger.info("get ConnectionFactory");
        if (_connectionFactory == null)
        {
            if (Boolean.getBoolean("profile.use_ssl"))
            {
                _connectionFactory = getConnectionFactory("default.ssl");
            }
            else
            {
                _connectionFactory = getConnectionFactory("default");
            }
        }
        return _connectionFactory;
    }

    /**
     * Get a connection factory for the currently used broker
     *
     * @param factoryName The factory name
     *
     * @return A connection factory
     *
     * @throws NamingException if there is an error getting the factory
     */
    public AMQConnectionFactory getConnectionFactory(String factoryName) throws NamingException
    {
        return (AMQConnectionFactory) getInitialContext().lookup(factoryName);
    }

    public Connection getConnection() throws JMSException, NamingException
    {
        return getConnection("guest", "guest");
    }

    public Connection getConnection(ConnectionURL url) throws JMSException
    {
        _logger.info(url.getURL());
        Connection connection = new AMQConnectionFactory(url).createConnection(url.getUsername(), url.getPassword());

        _connections.add(connection);

        return connection;
    }

    /**
     * Get a connection (remote or in-VM)
     *
     * @param username The user name
     * @param password The user password
     *
     * @return a newly created connection
     *
     * @throws JMSException if there is an error getting the connection
     * @throws NamingException if the connection factory cannot be looked up
     */
    public Connection getConnection(String username, String password) throws JMSException, NamingException
    {
        _logger.info("get connection");
        Connection con = getConnectionFactory().createConnection(username, password);
        //add the connection in the list of connections
        _connections.add(con);
        return con;
    }

    public Connection getClientConnection(String username, String password, String id)
            throws JMSException, URLSyntaxException, AMQException, NamingException
    {
        _logger.info("get Connection");
        Connection con = getConnectionFactory().createConnection(username, password, id);
        //add the connection in the list of connections
        _connections.add(con);
        return con;
    }

    /**
     * Return a uniqueName for this test.
     * In this case it returns a queue Named by the TestCase and TestName
     *
     * @return String name for a queue
     */
    protected String getTestQueueName()
    {
        return getClass().getSimpleName() + "-" + getName();
    }

    /**
     * Return a Queue specific for this test.
     * Uses getTestQueueName() as the name of the queue
     * @return the test queue
     */
    public Queue getTestQueue()
    {
        return new AMQQueue(ExchangeDefaults.DIRECT_EXCHANGE_NAME, getTestQueueName());
    }

    protected void tearDown() throws java.lang.Exception
    {
        try
        {
            // close all the connections used by this test.
            for (Connection c : _connections)
            {
                c.close();
            }
        }
        finally
        {
            // Ensure any problems with close does not interfer with property resets
            revertSystemProperties();
            revertLoggingLevels();
        }
    }

    /**
     * Consume all the messages in the specified queue. Helper to ensure
     * persistent tests don't leave data behind.
     *
     * @param queue the queue to purge
     *
     * @return the count of messages drained
     *
     * @throws Exception if a problem occurs
     */
    protected int drainQueue(Queue queue) throws Exception
    {
        Connection connection = getConnection();

        Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);

        MessageConsumer consumer = session.createConsumer(queue);

        connection.start();

        int count = 0;
        while (consumer.receive(1000) != null)
        {
            count++;
        }

        connection.close();

        return count;
    }

    /**
     * Send messages to the given destination.
     *
     * If session is transacted then messages will be commited before returning
     *
     * @param session the session to use for sending
     * @param destination where to send them to
     * @param count no. of messages to send
     *
     * @return the sent messges
     *
     * @throws Exception on send failure
     */
    public List<Message> sendMessage(Session session, Destination destination,
                                     int count) throws Exception
    {
        return sendMessage(session, destination, count, 0, 0);
    }

    /**
     * Send messages to the given destination.
     *
     * If session is transacted then messages will be commited before returning
     *
     * @param session the session to use for sending
     * @param destination where to send them to
     * @param count no. of messages to send
     *
     * @param batchSize the batchSize in which to commit, 0 means no batching,
     * but a single commit at the end
     * @return the sent messgse
     *
     * @throws Exception on send failure
     */
    public List<Message> sendMessage(Session session, Destination destination,
                                     int count, int batchSize) throws Exception
    {
        return sendMessage(session, destination, count, 0, batchSize);
    }

    /**
     * Send messages to the given destination.
     *
     * If session is transacted then messages will be commited before returning
     *
     * @param session the session to use for sending
     * @param destination where to send them to
     * @param count no. of messages to send
     *
     * @param offset offset allows the INDEX value of the message to be adjusted.
     * @param batchSize the batchSize in which to commit, 0 means no batching,
     * but a single commit at the end
     * @return the sent messgse
     *
     * @throws Exception on send failure
     */
    public List<Message> sendMessage(Session session, Destination destination,
                                     int count, int offset, int batchSize) throws Exception
    {
        List<Message> messages = new ArrayList<Message>(count);

        MessageProducer producer = session.createProducer(destination);

        int i = offset;
        for (; i < (count + offset); i++)
        {
            Message next = createNextMessage(session, i);

            producer.send(next);

            if (session.getTransacted() && batchSize > 0)
            {
                if (i % batchSize == 0)
                {
                    session.commit();
                }
            }

            messages.add(next);
        }

        // Ensure we commit the last messages
        // Commit the session if we are transacted and
        // we have no batchSize or
        // our count is not divible by batchSize.
        if (session.getTransacted() &&
                (batchSize == 0 || (i - 1) % batchSize != 0))
        {
            session.commit();
        }

        return messages;
    }

    public Message createNextMessage(Session session, int msgCount) throws JMSException
    {
        Message message = createMessage(session, _messageSize);
        message.setIntProperty(INDEX, msgCount);

        return message;
    }

    /**
     * Creates a message of the configured MessageType carrying a zero-filled
     * payload of the given size.
     */
    public Message createMessage(Session session, int messageSize) throws JMSException
    {
        String payload = new String(new byte[messageSize]);

        Message message;

        switch (_messageType)
        {
            case BYTES:
                message = session.createBytesMessage();
                ((BytesMessage) message).writeUTF(payload);
                break;
            case MAP:
                message = session.createMapMessage();
                ((MapMessage) message).setString(CONTENT, payload);
                break;
            default: // To keep the compiler happy
            case TEXT:
                message = session.createTextMessage();
                ((TextMessage) message).setText(payload);
                break;
            case OBJECT:
                message = session.createObjectMessage();
                ((ObjectMessage) message).setObject(payload);
                break;
            case STREAM:
                message = session.createStreamMessage();
                ((StreamMessage) message).writeString(payload);
                break;
        }

        return message;
    }

    protected int getMessageSize()
    {
        return _messageSize;
    }

    protected void setMessageSize(int byteSize)
    {
        _messageSize = byteSize;
    }

    public ConnectionURL getConnectionURL() throws NamingException
    {
        return getConnectionFactory().getConnectionURL();
    }

    public BrokerDetails getBroker()
    {
        try
        {
            if (getConnectionFactory().getConnectionURL().getBrokerCount() > 0)
            {
                return getConnectionFactory().getConnectionURL().getBrokerDetails(0);
            }
            else
            {
                fail("No broker details are available.");
            }
        }
        catch (NamingException e)
        {
            fail(e.getMessage());
        }

        //keep compiler happy
        return null;
    }

    /**
     * Reloads the broker security configuration using the ApplicationRegistry (InVM brokers) or the
     * ConfigurationManagementMBean via the JMX interface (Standalone brokers, management must be
     * enabled before calling the method).
     */
    public void reloadBrokerSecurityConfig() throws Exception
    {
        JMXTestUtils jmxu = new JMXTestUtils(this, "admin", "admin");
        jmxu.open();

        try
        {
            ConfigurationManagement configMBean = jmxu.getConfigurationManagement();
            configMBean.reloadSecurityConfiguration();
        }
        finally
        {
            jmxu.close();
        }

        LogMonitor _monitor = new LogMonitor(_outputFile);
        assertTrue("The expected server security configuration reload did not occur",
                _monitor.waitForMessage(ServerConfiguration.SECURITY_CONFIG_RELOADED, LOGMONITOR_TIMEOUT));
    }

    protected int getFailingPort()
    {
        return FAILING_PORT;
    }
}
/*
 * Copyright 2012-2017 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.boot.gradle.repackage;

import java.io.File;
import java.io.IOException;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import org.gradle.api.Action;
import org.gradle.api.DefaultTask;
import org.gradle.api.Project;
import org.gradle.api.Task;
import org.gradle.api.plugins.ExtraPropertiesExtension;
import org.gradle.api.tasks.TaskAction;
import org.gradle.api.tasks.bundling.Jar;

import org.springframework.boot.gradle.SpringBootPluginExtension;
import org.springframework.boot.loader.tools.DefaultLaunchScript;
import org.springframework.boot.loader.tools.LaunchScript;
import org.springframework.boot.loader.tools.Layout;
import org.springframework.boot.loader.tools.Repackager;
import org.springframework.boot.loader.tools.Repackager.MainClassTimeoutWarningListener;
import org.springframework.util.FileCopyUtils;

/**
 * Repackage task.
 *
 * Gradle task that repackages the archive produced by a {@link Jar} task into an
 * executable Spring Boot "fat" jar/war. Most settings fall back to the
 * {@link SpringBootPluginExtension} when not set on the task itself.
 *
 * @author Phillip Webb
 * @author Janne Valkealahti
 * @author Andy Wilkinson
 */
public class RepackageTask extends DefaultTask {

	// Name of an extra Gradle configuration whose dependencies are included;
	// overrides the extension's customConfiguration when set.
	private String customConfiguration;

	// The Jar task (or its name) whose output this task repackages;
	// null means "the default, unclassified jar".
	private Object withJarTask;

	// Main class to launch; when null it is resolved from the extension,
	// the 'run' task, or the 'mainClassName' project property.
	private String mainClass;

	private String classifier;

	// Optional alternative destination; when set, the jar is copied there
	// before repackaging.
	private File outputFile;

	// Boolean (not boolean) so "unset" can fall back to the extension value.
	private Boolean excludeDevtools;

	private Boolean executable;

	private File embeddedLaunchScript;

	private Map<String, String> embeddedLaunchScriptProperties;

	public void setCustomConfiguration(String customConfiguration) {
		this.customConfiguration = customConfiguration;
	}

	public Object getWithJarTask() {
		return this.withJarTask;
	}

	public void setWithJarTask(Object withJarTask) {
		this.withJarTask = withJarTask;
	}

	public void setMainClass(String mainClass) {
		this.mainClass = mainClass;
	}

	public String getMainClass() {
		return this.mainClass;
	}

	public String getClassifier() {
		return this.classifier;
	}

	public void setClassifier(String classifier) {
		this.classifier = classifier;
	}

	void setOutputFile(File file) {
		this.outputFile = file;
	}

	public Boolean getExcludeDevtools() {
		return this.excludeDevtools;
	}

	public void setExcludeDevtools(Boolean excludeDevtools) {
		this.excludeDevtools = excludeDevtools;
	}

	public Boolean getExecutable() {
		return this.executable;
	}

	public void setExecutable(Boolean executable) {
		this.executable = executable;
	}

	public File getEmbeddedLaunchScript() {
		return this.embeddedLaunchScript;
	}

	public void setEmbeddedLaunchScript(File embeddedLaunchScript) {
		this.embeddedLaunchScript = embeddedLaunchScript;
	}

	public Map<String, String> getEmbeddedLaunchScriptProperties() {
		return this.embeddedLaunchScriptProperties;
	}

	public void setEmbeddedLaunchScriptProperties(
			Map<String, String> embeddedLaunchScriptProperties) {
		this.embeddedLaunchScriptProperties = embeddedLaunchScriptProperties;
	}

	/**
	 * Task action: runs the repackage action against every {@link Jar} task in
	 * the project (the action itself filters to the matching task).
	 */
	@TaskAction
	public void repackage() {
		Project project = getProject();
		SpringBootPluginExtension extension = project.getExtensions()
				.getByType(SpringBootPluginExtension.class);
		ProjectLibraries libraries = getLibraries();
		project.getTasks().withType(Jar.class,
				new RepackageAction(extension, libraries));
	}

	/**
	 * Builds the {@link ProjectLibraries} view of the project's dependencies,
	 * applying the task-level excludeDevtools/customConfiguration overrides
	 * before the extension-level ones.
	 */
	public ProjectLibraries getLibraries() {
		Project project = getProject();
		SpringBootPluginExtension extension = project.getExtensions()
				.getByType(SpringBootPluginExtension.class);
		ProjectLibraries libraries = new ProjectLibraries(project, extension,
				this.excludeDevtools == null ? extension.isExcludeDevtools()
						: this.excludeDevtools);
		if (extension.getProvidedConfiguration() != null) {
			libraries.setProvidedConfigurationName(extension.getProvidedConfiguration());
		}
		if (this.customConfiguration != null) {
			libraries.setCustomConfigurationName(this.customConfiguration);
		}
		else if (extension.getCustomConfiguration() != null) {
			libraries.setCustomConfigurationName(extension.getCustomConfiguration());
		}
		return libraries;
	}

	/**
	 * Action to repackage JARs.
	 *
	 * Deliberately a non-static inner class: it needs access to the enclosing
	 * task's settings via RepackageTask.this.
	 */
	private class RepackageAction implements Action<Jar> {

		private final SpringBootPluginExtension extension;

		private final ProjectLibraries libraries;

		RepackageAction(SpringBootPluginExtension extension,
				ProjectLibraries libraries) {
			this.extension = extension;
			this.libraries = libraries;
		}

		@Override
		public void execute(Jar jarTask) {
			if (!RepackageTask.this.isEnabled()) {
				getLogger().info("Repackage disabled");
				return;
			}
			Object withJarTask = RepackageTask.this.withJarTask;
			if (!isTaskMatch(jarTask, withJarTask)) {
				getLogger().info(
						"Jar task not repackaged (didn't match withJarTask): " + jarTask);
				return;
			}
			File file = jarTask.getArchivePath();
			if (file.exists()) {
				repackage(file);
			}
		}

		/**
		 * Decides whether this Jar task is the one to repackage. With no
		 * explicit withJarTask, only the unclassified jar matches — and only
		 * if no sibling RepackageTask has claimed a jar task of its own.
		 */
		private boolean isTaskMatch(Jar task, Object withJarTask) {
			if (withJarTask == null) {
				if ("".equals(task.getClassifier())) {
					Set<Object> tasksWithCustomRepackaging = new HashSet<Object>();
					for (RepackageTask repackageTask : RepackageTask.this.getProject()
							.getTasks().withType(RepackageTask.class)) {
						if (repackageTask.getWithJarTask() != null) {
							tasksWithCustomRepackaging
									.add(repackageTask.getWithJarTask());
						}
					}
					return !tasksWithCustomRepackaging.contains(task);
				}
				return false;
			}
			// withJarTask may be the task instance itself or its name.
			return task.equals(withJarTask) || task.getName().equals(withJarTask);
		}

		/**
		 * Repackages the given archive in place (or at outputFile when set),
		 * wiring main class, layout, backup and launch-script settings.
		 */
		private void repackage(File file) {
			File outputFile = RepackageTask.this.outputFile;
			if (outputFile != null && !file.equals(outputFile)) {
				copy(file, outputFile);
				file = outputFile;
			}
			Repackager repackager = new Repackager(file,
					this.extension.getLayoutFactory());
			repackager.addMainClassTimeoutWarningListener(
					new LoggingMainClassTimeoutWarningListener());
			setMainClass(repackager);
			Layout layout = this.extension.convertLayout();
			if (layout != null) {
				repackager.setLayout(layout);
			}
			repackager.setBackupSource(this.extension.isBackupSource());
			try {
				LaunchScript launchScript = getLaunchScript();
				repackager.repackage(file, this.libraries, launchScript);
			}
			catch (IOException ex) {
				throw new IllegalStateException(ex.getMessage(), ex);
			}
		}

		private void copy(File source, File dest) {
			try {
				FileCopyUtils.copy(source, dest);
			}
			catch (IOException ex) {
				throw new IllegalStateException(ex.getMessage(), ex);
			}
		}

		/**
		 * Resolves the main class in priority order: task setting, extension
		 * setting, the 'run' task's 'main' property, then the project's
		 * 'mainClassName' property (checked first as the baseline).
		 */
		private void setMainClass(Repackager repackager) {
			String mainClassName = getMainClassNameProperty();
			if (RepackageTask.this.mainClass != null) {
				mainClassName = RepackageTask.this.mainClass;
			}
			else if (this.extension.getMainClass() != null) {
				mainClassName = this.extension.getMainClass();
			}
			else {
				Task runTask = getProject().getTasks().findByName("run");
				if (runTask != null && runTask.hasProperty("main")) {
					mainClassName = (String) getProject().getTasks().getByName("run")
							.property("main");
				}
			}
			if (mainClassName != null) {
				getLogger().info("Setting mainClass: " + mainClassName);
				repackager.setMainClass(mainClassName);
			}
			else {
				getLogger().info("No mainClass configured");
			}
		}

		/**
		 * Looks up 'mainClassName' as a project property, falling back to the
		 * 'ext' extra-properties extension.
		 */
		private String getMainClassNameProperty() {
			if (getProject().hasProperty("mainClassName")) {
				return (String) getProject().property("mainClassName");
			}
			ExtraPropertiesExtension extraProperties = (ExtraPropertiesExtension) getProject()
					.getExtensions().getByName("ext");
			if (extraProperties.has("mainClassName")) {
				return (String) extraProperties.get("mainClassName");
			}
			return null;
		}

		/**
		 * Returns the launch script to embed, or null when the archive should
		 * not be made directly executable.
		 */
		private LaunchScript getLaunchScript() throws IOException {
			if (isExecutable() || getEmbeddedLaunchScript() != null) {
				return new DefaultLaunchScript(getEmbeddedLaunchScript(),
						getEmbeddedLaunchScriptProperties());
			}
			return null;
		}

		// The three helpers below prefer the task-level setting and fall back
		// to the extension when the task value is unset (null).

		private boolean isExecutable() {
			return RepackageTask.this.executable != null ? RepackageTask.this.executable
					: this.extension.isExecutable();
		}

		private File getEmbeddedLaunchScript() {
			return RepackageTask.this.embeddedLaunchScript != null
					? RepackageTask.this.embeddedLaunchScript
					: this.extension.getEmbeddedLaunchScript();
		}

		private Map<String, String> getEmbeddedLaunchScriptProperties() {
			return RepackageTask.this.embeddedLaunchScriptProperties != null
					? RepackageTask.this.embeddedLaunchScriptProperties
					: this.extension.getEmbeddedLaunchScriptProperties();
		}

	}

	/**
	 * {@link Repackager} that also logs when searching takes too long.
	 */
	private class LoggingMainClassTimeoutWarningListener
			implements MainClassTimeoutWarningListener {

		@Override
		public void handleTimeoutWarning(long duration, String mainMethod) {
			getLogger().warn("Searching for the main-class is taking "
					+ "some time, consider using setting "
					+ "'springBoot.mainClass'");
		}

	}

}
package nl.siegmann.ehcachetag;

import java.io.IOException;

import javax.servlet.ServletContext;
import javax.servlet.ServletRequest;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.jsp.JspContext;
import javax.servlet.jsp.JspException;
import javax.servlet.jsp.JspWriter;
import javax.servlet.jsp.PageContext;
import javax.servlet.jsp.tagext.BodyContent;
import javax.servlet.jsp.tagext.BodyTagSupport;

import net.sf.ehcache.CacheManager;
import net.sf.ehcache.Ehcache;
import net.sf.ehcache.Element;
import nl.siegmann.ehcachetag.CacheTag.ModifierNotFoundException;
import nl.siegmann.ehcachetag.cachetagmodifier.CacheTagModifier;
import nl.siegmann.ehcachetag.cachetagmodifier.CacheTagModifierFactory;

import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;

/**
 * Unit tests for {@link CacheTag}: doStartTag/doEndTag flow, modifier
 * callbacks (beforeLookup/beforeUpdate/afterRetrieval), cache-manager
 * resolution and cleanup of tag state between uses.
 */
public class CacheTagTest {

	@Mock
	private PageContext pageContext;

	@Mock
	private ServletContext servletContext;

	// NOTE(review): redundant manual Mockito.mock(...) on an @Mock field;
	// MockitoAnnotations.initMocks replaces the value in setUp anyway.
	@Mock
	private JspWriter jspWriter = Mockito.mock(JspWriter.class);

	@Mock
	private CacheTagModifierFactory cacheTagModifierFactory;

	@Mock
	private CacheTagModifier cacheTagModifier;

	@Mock
	private CacheManager cacheManager;

	@Mock
	private CacheManager customCacheManager;

	@Mock
	private BodyContent bodyContent;

	@Mock
	private Ehcache ehcache;

	// spy of the class under test, so internal hooks can be stubbed/verified
	private CacheTag testSubject;

	@Before
	public void setUp() {
		MockitoAnnotations.initMocks(this);
		this.testSubject = Mockito.spy(new CacheTag());
		testSubject.setPageContext(pageContext);
		testSubject.setBodyContent(bodyContent);
		Mockito.when(pageContext.getOut()).thenReturn(jspWriter);
		Mockito.when(pageContext.getPage()).thenReturn(this);
		Mockito.when(testSubject.getDefaultCacheManager()).thenReturn(cacheManager);
		Mockito.when(pageContext.getServletContext()).thenReturn(servletContext);
	}

	/**
	 * Key is null, check that we return EVAL_BODY_INCLUDE.
	 *
	 * @throws JspException
	 */
	@Test
	public void testDoStartTag_null_key() throws JspException {
		// given
		testSubject.setKey(null);
		testSubject.setCache("mycache");

		// when
		int actualResult = testSubject.doStartTag();

		// then
		Assert.assertEquals(BodyTagSupport.EVAL_BODY_INCLUDE, actualResult);

		// verify cleanup
		Assert.assertNull(testSubject.getCache());
		verifyCleanup();
	}

	/**
	 * Cache is null, check that we return EVAL_BODY_INCLUDE.
	 *
	 * @throws JspException
	 */
	@Test
	public void testDoStartTag_null_cache() throws JspException {
		// given
		testSubject.setKey("mykey");
		testSubject.setCache(null);
		testSubject.setModifiers("hi");

		// when
		int actualResult = testSubject.doStartTag();

		// then
		Assert.assertEquals(BodyTagSupport.EVAL_BODY_INCLUDE, actualResult);
		verifyCleanup();
	}

	/**
	 * Cache is null, check that we return EVAL_BODY_INCLUDE.
	 * Also improves the code coverage by adding an HttpServletRequest.
	 *
	 * @throws JspException
	 */
	@Test
	public void testDoStartTag_null_cache_httpservletrequest() throws JspException {
		// given
		testSubject.setKey("mykey");
		testSubject.setCache(null);
		testSubject.setModifiers("hi");
		HttpServletRequest servletRequest = Mockito.mock(HttpServletRequest.class);
		Mockito.when(servletRequest.getRequestURI()).thenReturn("http://example.com/testpage.jsp");
		Mockito.when(pageContext.getRequest()).thenReturn(servletRequest);

		// when
		int actualResult = testSubject.doStartTag();

		// then
		Assert.assertEquals(BodyTagSupport.EVAL_BODY_INCLUDE, actualResult);
		verifyCleanup();
	}

	/**
	 * Cache not found.
	 *
	 * @throws JspException
	 */
	@Test
	public void testDoStartTag_cache_not_found() throws JspException {
		// given
		testSubject.setKey("mykey");
		testSubject.setCache("mycache");
		testSubject.setModifiers("hi");
		Mockito.when(cacheManager.getEhcache(Mockito.anyString())).thenReturn(null);

		// when
		int actualResult = testSubject.doStartTag();

		// then
		Assert.assertEquals(BodyTagSupport.EVAL_BODY_INCLUDE, actualResult);
		Mockito.verify(cacheManager).getEhcache("mycache");
		verifyCleanup();
	}

	/**
	 * Cache found, but the value with the key is not there.
	 *
	 * @throws JspException
	 */
	@Test
	public void testDoStartTag_no_cached_value() throws JspException {
		// given
		testSubject.setKey("mykey");
		testSubject.setCache("mycache");
		Mockito.when(cacheManager.getEhcache(Mockito.anyString())).thenReturn(ehcache);

		// when
		int actualResult = testSubject.doStartTag();

		// then: body must be buffered so doEndTag can cache it later
		Assert.assertEquals(BodyTagSupport.EVAL_BODY_BUFFERED, actualResult);
		Assert.assertEquals("mycache", testSubject.getCache());
		Mockito.verifyNoMoreInteractions(jspWriter);
	}

	/**
	 * We found a cached value
	 *
	 * @throws JspException
	 * @throws IOException
	 */
	@Test
	public void testDoStartTag_cached_value() throws JspException, IOException {
		// given
		ensureCacheContent("mycache", "mykey", "cached_content");
		testSubject.setKey("mykey");
		testSubject.setCache("mycache");

		// when
		int actualResult = testSubject.doStartTag();

		// then: cached content is written straight to the writer, body skipped
		Assert.assertEquals(BodyTagSupport.SKIP_BODY, actualResult);
		Mockito.verify(pageContext).getOut();
		Mockito.verify(jspWriter).write("cached_content");
		Mockito.verifyNoMoreInteractions(jspWriter);
	}

	// Stub a cache hit: cacheName resolves to the mock ehcache, which returns
	// an Element holding cachedContent for cacheKey.
	private void ensureCacheContent(String cacheName, String cacheKey, String cachedContent) {
		Mockito.when(cacheManager.getEhcache(Mockito.eq(cacheName))).thenReturn(ehcache);
		Mockito.when(ehcache.get(Mockito.eq((Object) cacheKey))).thenReturn(new Element(cacheKey, cachedContent));
	}

	@Test
	public void testDoStartTag_null_cache_key() {
		// given

		// when
		int actualResult = -1;
		try {
			actualResult = testSubject.doStartTag();
		} catch (JspException e) {
			Assert.fail(e.getMessage());
		}

		// then
		Assert.assertEquals(BodyTagSupport.EVAL_BODY_INCLUDE, actualResult);
		Mockito.verify(pageContext).findAttribute(EHCacheTagConstants.MODIFIER_FACTORY_ATTRIBUTE);
		Mockito.verifyNoMoreInteractions(jspWriter, pageContext);
	}

	@Test
	public void doTestStartTag_no_such_cache() throws JspException {
		// given
		testSubject.setCache("XXX");
		testSubject.setKey("greeting");
		Mockito.when(cacheManager.getEhcache("XXX")).thenReturn(null);

		// when
		int startTagReturn = testSubject.doStartTag();

		// then
		Assert.assertEquals(BodyTagSupport.EVAL_BODY_INCLUDE, startTagReturn);
		// XXX Mockito.verifyNoMoreInteractions(jspWriter, pageContext);
	}

	/**
	 * Test the case where the before cache lookup causes an exception.
	 * @throws Exception
	 */
	// NOTE(review): method name has a typo ("excepion" instead of "exception")
	@Test
	public void doStartTag_before_lookup_excepion() throws Exception {
		// given
		Mockito.doThrow(new Exception()).when(testSubject).doBeforeLookup();

		// when
		int startTagReturn = testSubject.doStartTag();

		// then: 1 == BodyTagSupport.EVAL_BODY_INCLUDE
		Assert.assertEquals(1, startTagReturn);
		verifyCleanup();
	}

	/**
	 * Test the behaviour when writing the result to the outputstream causes an IOException.
	 * @throws IOException
	 * @throws JspException
	 */
	@Test(expected = JspException.class)
	public void doStartTag_exception_writing_cached_content() throws IOException, JspException {
		// given
		testSubject.setKey("mykey");
		testSubject.setCache("ehcachetag");
		Mockito.doReturn("foo").when(testSubject).getCachedBodyContent("ehcachetag", "mykey");
		Mockito.doThrow(new IOException()).when(jspWriter).write(Mockito.anyString());
		ServletRequest servletRequest = Mockito.mock(ServletRequest.class);
		Mockito.when(pageContext.getRequest()).thenReturn(servletRequest);

		// when
		testSubject.doStartTag();

		// then
		verifyCleanup();
	}

	/**
	 * Test the behaviour when writing the result to the outputstream causes an IOException.
	 * @throws IOException
	 * @throws JspException
	 */
	@Test(expected = JspException.class)
	public void doStartTag_exception_writing_cached_content_log_request_uri() throws IOException, JspException {
		// given
		testSubject.setKey("mykey");
		testSubject.setCache("ehcachetag");
		Mockito.doReturn("foo").when(testSubject).getCachedBodyContent("ehcachetag", "mykey");
		Mockito.doThrow(new IOException()).when(jspWriter).write(Mockito.anyString());

		// when
		testSubject.doStartTag();

		// then
		verifyCleanup();
	}

	/**
	 * Test the behaviour when the afterRetrieval method throws an exception.
	 * @throws Exception
	 */
	@Test
	public void doStartTag_do_after_retrieval_exception() throws Exception {
		// given
		testSubject.setKey("mykey");
		testSubject.setCache("ehcachetag");
		Mockito.doReturn("foo").when(testSubject).getCachedBodyContent("ehcachetag", "mykey");
		Mockito.doThrow(new Exception()).when(testSubject).doAfterRetrieval("foo");

		// when
		int doStartTagResult = testSubject.doStartTag();

		// then: 1 == BodyTagSupport.EVAL_BODY_INCLUDE
		Assert.assertEquals(1, doStartTagResult);
		Mockito.verifyNoMoreInteractions(jspWriter);
		verifyCleanup();
	}

	/**
	 * Test doStartTag where the first modifier throws an exception
	 * @throws JspException
	 */
	@Test
	public void doStartTag_modifier_not_found() throws JspException {
		// given
		testSubject.setKey("testkey");
		CacheTagModifier modifier = Mockito.mock(CacheTagModifier.class);
		Mockito.doThrow(new RuntimeException()).when(modifier).beforeLookup(Mockito.any(CacheTag.class), Mockito.any(JspContext.class));
		Mockito.when(cacheTagModifierFactory.getCacheTagModifier(Mockito.anyString())).thenReturn(modifier);

		// when
		int actualResult = testSubject.doStartTag();

		// then
		Assert.assertEquals(BodyTagSupport.EVAL_BODY_INCLUDE, actualResult);

		// cleanup: tag state reset for reuse by the jsp engine
		Assert.assertNull(testSubject.getKey());
		Assert.assertNull(testSubject.getCache());
		Assert.assertEquals("", testSubject.getModifiers());
	}

	/**
	 * Test doStartTag where the first modifier throws an exception
	 * @throws JspException
	 */
	@Test
	public void doStartTag_modifier_exception() throws JspException {
		// given
		testSubject.setKey("testkey");
		CacheTagModifier modifier = Mockito.mock(CacheTagModifier.class);
		Mockito.doThrow(new RuntimeException()).when(modifier).beforeLookup(Mockito.any(CacheTag.class), Mockito.any(JspContext.class));
		Mockito.when(cacheTagModifierFactory.getCacheTagModifier(Mockito.anyString())).thenReturn(modifier);

		// when
		int actualResult = testSubject.doStartTag();

		// then
		Assert.assertEquals(BodyTagSupport.EVAL_BODY_INCLUDE, actualResult);
		verifyCleanup();
	}

	@Test
	public void testDoBeforeLookup() throws Exception {
		// given: a modifier that appends "B" to the tag's key in beforeLookup
		CacheTagModifier modifierB = new CacheTagModifier() {

			@Override
			public void init(ServletContext servletContext) {
				// TODO Auto-generated method stub

			}

			@Override
			public void destroy() {
				// TODO Auto-generated method stub

			}

			@Override
			public String beforeUpdate(CacheTag cacheTag, JspContext jspContext,
					String content) {
				// TODO Auto-generated method stub
				return null;
			}

			@Override
			public void beforeLookup(CacheTag cacheTag, JspContext jspContext) {
				cacheTag.setKey(cacheTag.getKey().toString() + "B");
			}

			@Override
			public String afterRetrieval(CacheTag cacheTag, JspContext jspContext,
					String content) {
				// TODO Auto-generated method stub
				return null;
			}
		};
		Mockito.when(cacheTagModifierFactory.getCacheTagModifier("modifierB")).thenReturn(modifierB);

		testSubject.setModifiers("modifierB");
		Mockito.when(pageContext.findAttribute(EHCacheTagConstants.MODIFIER_FACTORY_ATTRIBUTE)).thenReturn(cacheTagModifierFactory);

		// when
		testSubject.setKey("A");
		testSubject.doBeforeLookup();

		Assert.assertEquals("AB", testSubject.getKey());
	}

	@Test(expected = ModifierNotFoundException.class)
	public void testGetCacheTagModifier_not_found() throws ModifierNotFoundException {
		// when: factory mock returns null for any name, so lookup must throw
		testSubject.getCacheTagModifier(cacheTagModifierFactory, "test_modifier");
	}

	/**
	 * This tests the behaviour when finding a non-string cached value.
	 *
	 * FIXME however, it also succeeds if the cache contains no value for the given key.
	 */
	@Test
	public void testGetCachedBodyContent_not_String() {
		// given: cached Element holds an Integer, not a String
		Mockito.when(cacheManager.getEhcache("test_cache")).thenReturn(ehcache);
		Mockito.when(ehcache.get(Mockito.any())).thenReturn(new Element("test_key", Integer.valueOf(1)));

		// when
		String cachedBodyContent = testSubject.getCachedBodyContent("test_cache", "test_key");

		// then: identity check on the NO_CACHED_VALUE sentinel is intentional
		Assert.assertTrue(CacheTag.NO_CACHED_VALUE == cachedBodyContent);
		// XXX for some reason this one fails
		// Mockito.verify(ehcache).get(Mockito.eq("test_key"));
	}

	@Test
	public void testDoBeforeUpdate() throws Exception {
		// given: two chained modifiers, "A" -> "AB" -> "CAB"
		String testInput = "A";
		CacheTagModifier modifierB = Mockito.mock(CacheTagModifier.class);
		Mockito.when(modifierB.beforeUpdate(Mockito.any(CacheTag.class), Mockito.any(JspContext.class), Mockito.eq("A"))).thenReturn("AB");
		Mockito.when(cacheTagModifierFactory.getCacheTagModifier("modifierB")).thenReturn(modifierB);

		CacheTagModifier modifierC = Mockito.mock(CacheTagModifier.class);
		Mockito.when(modifierC.beforeUpdate(Mockito.any(CacheTag.class), Mockito.any(JspContext.class), Mockito.eq("AB"))).thenReturn("CAB");
		Mockito.when(cacheTagModifierFactory.getCacheTagModifier("modifierC")).thenReturn(modifierC);

		testSubject.setModifiers("modifierB,modifierC");
		Mockito.when(pageContext.findAttribute(EHCacheTagConstants.MODIFIER_FACTORY_ATTRIBUTE)).thenReturn(cacheTagModifierFactory);

		// when
		String actualResult = testSubject.doBeforeUpdate(testInput);

		// then
		Assert.assertEquals("CAB", actualResult);
		Mockito.verify(modifierB).beforeUpdate(Mockito.any(CacheTag.class), Mockito.any(JspContext.class), Mockito.eq("A"));
		Mockito.verify(modifierC).beforeUpdate(Mockito.any(CacheTag.class), Mockito.any(JspContext.class), Mockito.eq("AB"));
		Mockito.verifyNoMoreInteractions(modifierB, modifierC);
	}

	@Test
	public void testDoBeforeUpdate_exception() throws Exception {
		// given: second modifier throws, so the chain's result is discarded
		String testInput = "A";
		CacheTagModifier modifierB = Mockito.mock(CacheTagModifier.class);
		Mockito.when(modifierB.beforeUpdate(Mockito.any(CacheTag.class), Mockito.any(JspContext.class), Mockito.eq("A"))).thenReturn("AB");
		Mockito.when(cacheTagModifierFactory.getCacheTagModifier("modifierB")).thenReturn(modifierB);

		CacheTagModifier modifierC = Mockito.mock(CacheTagModifier.class);
		Mockito.when(modifierC.beforeUpdate(Mockito.any(CacheTag.class), Mockito.any(JspContext.class), Mockito.eq("AB"))).thenThrow(new RuntimeException());
		Mockito.when(cacheTagModifierFactory.getCacheTagModifier("modifierC")).thenReturn(modifierC);

		testSubject.setModifiers("modifierB,modifierC");
		Mockito.when(pageContext.findAttribute(EHCacheTagConstants.MODIFIER_FACTORY_ATTRIBUTE)).thenReturn(cacheTagModifierFactory);

		// when
		String actualResult = "original_value";
		try {
			actualResult = testSubject.doBeforeUpdate(testInput);
			Assert.fail("Expected RuntimeException");
		} catch(RuntimeException e) {
		}

		// then
		Assert.assertEquals("original_value", actualResult);
		Mockito.verify(modifierB).beforeUpdate(Mockito.any(CacheTag.class), Mockito.any(JspContext.class), Mockito.eq("A"));
		Mockito.verify(modifierC).beforeUpdate(Mockito.any(CacheTag.class), Mockito.any(JspContext.class), Mockito.eq("AB"));
		Mockito.verifyNoMoreInteractions(modifierB, modifierC);
	}

	@Test
	public void testDoAfterRetrieval() throws Exception {
		// given: two chained modifiers, "A" -> "AB" -> "CAB"
		String testInput = "A";
		CacheTagModifier modifierB = Mockito.mock(CacheTagModifier.class);
		Mockito.when(modifierB.afterRetrieval(Mockito.any(CacheTag.class), Mockito.any(JspContext.class), Mockito.eq("A"))).thenReturn("AB");
		Mockito.when(cacheTagModifierFactory.getCacheTagModifier("modifierB")).thenReturn(modifierB);

		CacheTagModifier modifierC = Mockito.mock(CacheTagModifier.class);
		Mockito.when(modifierC.afterRetrieval(Mockito.any(CacheTag.class), Mockito.any(JspContext.class), Mockito.eq("AB"))).thenReturn("CAB");
		Mockito.when(cacheTagModifierFactory.getCacheTagModifier("modifierC")).thenReturn(modifierC);

		testSubject.setModifiers("modifierB,modifierC");
		Mockito.when(pageContext.findAttribute(EHCacheTagConstants.MODIFIER_FACTORY_ATTRIBUTE)).thenReturn(cacheTagModifierFactory);

		// when
		String actualResult = testSubject.doAfterRetrieval(testInput);

		// then
		Assert.assertEquals("CAB", actualResult);
		Mockito.verify(modifierB).afterRetrieval(Mockito.any(CacheTag.class), Mockito.any(JspContext.class), Mockito.eq("A"));
		Mockito.verify(modifierC).afterRetrieval(Mockito.any(CacheTag.class), Mockito.any(JspContext.class), Mockito.eq("AB"));
		Mockito.verifyNoMoreInteractions(modifierB, modifierC);
	}

	@Test
	public void testDoAfterRetrieval_exception() throws Exception {
		// given: second modifier throws, so the chain's result is discarded
		String testInput = "A";
		CacheTagModifier modifierB = Mockito.mock(CacheTagModifier.class);
		Mockito.when(modifierB.afterRetrieval(Mockito.any(CacheTag.class), Mockito.any(JspContext.class), Mockito.eq("A"))).thenReturn("AB");
		Mockito.when(cacheTagModifierFactory.getCacheTagModifier("modifierB")).thenReturn(modifierB);

		CacheTagModifier modifierC = Mockito.mock(CacheTagModifier.class);
		Mockito.when(modifierC.afterRetrieval(Mockito.any(CacheTag.class), Mockito.any(JspContext.class), Mockito.eq("AB"))).thenThrow(new RuntimeException());
		Mockito.when(cacheTagModifierFactory.getCacheTagModifier("modifierC")).thenReturn(modifierC);

		testSubject.setModifiers("modifierB,modifierC");
		Mockito.when(pageContext.findAttribute(EHCacheTagConstants.MODIFIER_FACTORY_ATTRIBUTE)).thenReturn(cacheTagModifierFactory);

		// when
		String actualResult = "original_value";
		try {
			actualResult = testSubject.doAfterRetrieval(testInput);
			Assert.fail("Expected RuntimeException");
		} catch(RuntimeException e) {
		}

		// then
		Assert.assertEquals("original_value", actualResult);
		Mockito.verify(modifierB).afterRetrieval(Mockito.any(CacheTag.class), Mockito.any(JspContext.class), Mockito.eq("A"));
		Mockito.verify(modifierC).afterRetrieval(Mockito.any(CacheTag.class), Mockito.any(JspContext.class), Mockito.eq("AB"));
		Mockito.verifyNoMoreInteractions(modifierB, modifierC);
	}

	/**
	 * There is no cache key.
	 *
	 * @throws JspException
	 * @throws IOException
	 */
	@Test
	public void testEndTag_no_cache_key() throws JspException, IOException {
		// given

		// when
		int endTagReturn = testSubject.doEndTag();

		// then
		Assert.assertEquals(BodyTagSupport.EVAL_PAGE, endTagReturn);
		verifyCleanup();
		Mockito.verifyNoMoreInteractions(jspWriter, pageContext);
	}

	/**
	 * There is a cache, a key and a value to store, but writing the content throws an IOException.
	 * Verify that the value is stored in the cache and that the tag values have been cleaned up.
	 *
	 * @throws JspException
	 * @throws IOException
	 */
	@Test
	public void testEndTag_cache_value_ioexception() throws JspException, IOException {
		// given
		testSubject.setKey("mykey");
		testSubject.setCache("ehcachetag");
		Mockito.when(bodyContent.getString()).thenReturn("body_value");
		Mockito.when(cacheManager.getEhcache("ehcachetag")).thenReturn(ehcache);
		Mockito.doThrow(new IOException()).when(jspWriter).write(Mockito.anyString());

		// when
		int actualReturn = -1;
		try {
			actualReturn = testSubject.doEndTag();
			Assert.fail("Expected JspException");
		} catch(JspException e) {
			// as expected
		}

		// then: the value was still cached even though writing failed
		Assert.assertEquals(-1, actualReturn);
		Mockito.verify(testSubject).doEndTag();

		// get content from bodyContent
		Mockito.verify(bodyContent).getString();

		// cache content
		Mockito.verify(cacheManager).getEhcache("ehcachetag");
		Mockito.verify(ehcache).put(new Element("mykey", "body_value"));

		// write content
		Mockito.verify(pageContext).getOut();
		Mockito.verify(jspWriter).write("body_value");

		verifyCleanup();
		Mockito.verifyNoMoreInteractions(ehcache, jspWriter, bodyContent, cacheManager);
		// XXX this one fails, commented out for now
		// Mockito.verifyNoMoreInteractions(pageContext);
	}

	/**
	 * There is a cache, a key and a value to store.
	 * Verify that the value is stored in the cache and written to the response writer.
	 *
	 * @throws JspException
	 * @throws IOException
	 */
	@Test
	public void testEndTag_cache_value() throws JspException, IOException {
		// given
		testSubject.setKey("mykey");
		testSubject.setCache("ehcachetag");
		Mockito.when(bodyContent.getString()).thenReturn("body_value");
		Mockito.when(cacheManager.getEhcache("ehcachetag")).thenReturn(ehcache);

		// when
		int endTagReturn = testSubject.doEndTag();

		// then
		Assert.assertEquals(BodyTagSupport.EVAL_PAGE, endTagReturn);
		Mockito.verify(testSubject).doEndTag();

		// get content from bodyContent
		Mockito.verify(bodyContent).getString();

		// cache content
		Mockito.verify(cacheManager).getEhcache("ehcachetag");
		Mockito.verify(ehcache).put(new Element("mykey", "body_value"));

		// write content
		Mockito.verify(pageContext).getOut();
		Mockito.verify(jspWriter).write("body_value");

		verifyCleanup();
		Mockito.verifyNoMoreInteractions(ehcache, jspWriter, bodyContent, cacheManager);
		// XXX this one fails, commented out for now
		// Mockito.verifyNoMoreInteractions(pageContext);
	}

	/**
	 * There is a cache name, a key and a value to store, but cache not found.
	 *
	 * @throws JspException
	 * @throws IOException
	 */
	@Test
	public void testEndTag_cache_value_no_cache_found() throws JspException, IOException {
		// given
		testSubject.setKey("mykey");
		testSubject.setCache("ehcachetag");
		Mockito.when(bodyContent.getString()).thenReturn("body_value");
		Mockito.when(cacheManager.getEhcache("ehcachetag")).thenReturn(null);

		// when
		int endTagReturn = testSubject.doEndTag();

		// then: content is still written, just not cached
		Assert.assertEquals(BodyTagSupport.EVAL_PAGE, endTagReturn);
		Mockito.verify(testSubject).doEndTag();

		// get content from bodyContent
		Mockito.verify(bodyContent).getString();

		// cache content
		Mockito.verify(cacheManager).getEhcache("ehcachetag");

		// write content
		Mockito.verify(pageContext).getOut();
		Mockito.verify(jspWriter).write("body_value");

		verifyCleanup();
		Mockito.verifyNoMoreInteractions(ehcache, jspWriter, bodyContent, cacheManager);
		// XXX this one fails, commented out for now
		// Mockito.verifyNoMoreInteractions(pageContext);
	}

	/**
	 * Verify that an exception thrown by the doBeforeUpdate call is properly handled.
	 * @throws Exception
	 */
	@Test(expected = JspException.class)
	public void testEndTag_exception_on_do_before_update() throws Exception {
		// given
		testSubject.setKey("mykey");
		Mockito.when(bodyContent.getString()).thenReturn("cached content");
		Mockito.doThrow(new Exception()).when(testSubject).doBeforeUpdate("cached content");

		// when
		testSubject.doEndTag();

		// then
		Mockito.verify(bodyContent).getString();
		Mockito.verify(testSubject).doBeforeUpdate("cached content");
		verifyCleanup();
		Assert.assertNotNull(testSubject.getKey());
	}

	/**
	 * Verify that the cacheTag is ready for use by another thread.
	 * Tag instances can be reused for another thread by the underlying jsp engine.
	 */
	private void verifyCleanup() {
		Mockito.verify(testSubject).cleanup();
	}

	@Test
	public void setModifiers_null() {
		// when
		testSubject.setModifiers(null);

		// then: null is normalized to the empty string
		Assert.assertNotNull(testSubject.getModifiers());
		Assert.assertEquals(0, testSubject.getModifiers().length());
	}

	@Test
	public void setModifiers_a____b() {
		// when
		testSubject.setModifiers("a, b");

		// then: whitespace around modifier names is stripped
		Assert.assertNotNull(testSubject.getModifiers());
		Assert.assertEquals("a,b", testSubject.getModifiers());
	}

	@Test
	public void getCacheManager_defaults() {
		// given: no cache manager registered in the servlet context
		Mockito.when(servletContext.getAttribute(EHCacheTagConstants.CACHE_MANAGER)).thenReturn(null);

		// when
		CacheManager actualCacheManager = testSubject.getCacheManager();

		// then: falls back to the default cache manager
		Mockito.verify(testSubject).getDefaultCacheManager();
		Assert.assertTrue(actualCacheManager == cacheManager);
	}

	@Test
	public void getCacheManager_invalid_cacheManager_in_servletcontext() {
		// given: the attribute is present but not a CacheManager
		Mockito.when(servletContext.getAttribute(EHCacheTagConstants.CACHE_MANAGER)).thenReturn("Hello");

		// when
		CacheManager actualCacheManager = testSubject.getCacheManager();

		// then: invalid value is ignored, default is used
		Mockito.verify(testSubject).getDefaultCacheManager();
		Assert.assertTrue(actualCacheManager == cacheManager);
	}

	@Test
	public void getCacheManager_customized_cacheManager() {
		// given: a custom CacheManager registered in the servlet context
		Mockito.when(servletContext.getAttribute(EHCacheTagConstants.CACHE_MANAGER)).thenReturn(customCacheManager);

		// when
		CacheManager actualCacheManager = testSubject.getCacheManager();

		// then: custom manager is used, default is never consulted
		Mockito.verify(testSubject, Mockito.times(0)).getDefaultCacheManager();
		Assert.assertTrue(actualCacheManager == customCacheManager);
	}
}
package org.dynmap.hdmap;

import static org.dynmap.JSONUtils.s;

import java.io.IOException;
import java.util.BitSet;
import java.util.List;

import org.dynmap.Color;
import org.dynmap.ConfigurationNode;
import org.dynmap.DynmapCore;
import org.dynmap.MapManager;
import org.dynmap.common.DynmapCommandSender;
import org.dynmap.exporter.OBJExport;
import org.dynmap.renderer.DynmapBlockState;
import org.dynmap.utils.DynLongHashMap;
import org.dynmap.utils.MapChunkCache;
import org.dynmap.utils.MapIterator;
import org.dynmap.utils.BlockStep;
import org.json.simple.JSONObject;

/**
 * HD shader that renders a topographic-style map: surfaces are filled with a
 * color interpolated by block Y level, contour lines are drawn every
 * {@code linespacing} levels, and water can be tinted with a dedicated color.
 */
public class TopoHDShader implements HDShader {
    private final String name;
    private final Color linecolor;   /* Color for topo lines */
    private final Color fillcolor[]; /* Color for nontopo surfaces, indexed by Y (0-255) */
    private final Color watercolor;  /* Optional tint for water blocks (null = no tint) */
    private BitSet hiddenids;        /* Global state indexes of blocks treated as air */
    private final int linespacing;   /* Vertical spacing (in Y levels) between contour lines */

    /**
     * Builds the shader from its configuration node.
     *
     * @param core - dynmap core (unused here, required by shader factory contract)
     * @param configuration - shader configuration ("name", "colorN", "linecolor",
     *     "watercolor", "wateralpha", "hiddennames", "linespacing")
     */
    public TopoHDShader(DynmapCore core, ConfigurationNode configuration) {
        name = (String) configuration.get("name");

        /* Load explicitly defined per-Y colors; unset entries stay null until interpolation below */
        fillcolor = new Color[256];
        for (int i = 0; i < 256; i++) {
            fillcolor[i] = configuration.getColor("color" + i, null);
        }
        linecolor = configuration.getColor("linecolor", null);
        watercolor = configuration.getColor("watercolor", null);
        float wateralpha = configuration.getFloat("wateralpha", 1.0F);
        /* Guard against NPE: wateralpha can be configured without a watercolor,
         * in which case there is nothing to apply the alpha to. */
        if ((wateralpha < 1.0) && (watercolor != null)) {
            watercolor.setAlpha((int) (255 * wateralpha));
        }

        /* Anchor both ends of the ramp so interpolation always has endpoints */
        if (fillcolor[0] == null) {
            fillcolor[0] = new Color(0, 0, 0);
        }
        if (fillcolor[255] == null) {
            fillcolor[255] = new Color(255, 255, 255);
        }
        /* Linearly interpolate every gap between explicitly defined colors */
        int starty = 0;
        for (int i = 1; i < 256; i++) {
            if (fillcolor[i] != null) {  /* Found next defined color */
                int delta = i - starty;
                Color c0 = fillcolor[starty];
                Color c1 = fillcolor[i];
                /* Fill the missing entries since the previous defined color */
                for (int j = 1; j < delta; j++) {
                    fillcolor[starty + j] = new Color(
                            (c0.getRed() * (delta - j) + c1.getRed() * j) / delta,
                            (c0.getGreen() * (delta - j) + c1.getGreen() * j) / delta,
                            (c0.getBlue() * (delta - j) + c1.getBlue() * j) / delta);
                }
                starty = i;  /* New start color */
            }
        }
        hiddenids = new BitSet();
        setHidden(DynmapBlockState.AIR_BLOCK); /* Air is hidden always */
        List<Object> hidden = configuration.getList("hiddennames");
        if (hidden != null) {
            for (Object o : hidden) {
                if (o instanceof String) {
                    setHidden((String) o);
                }
            }
        }
        linespacing = configuration.getInteger("linespacing", 1);
    }

    /** Marks every state of the named base block as hidden (rendered as air). */
    private void setHidden(String bn) {
        DynmapBlockState bs = DynmapBlockState.getBaseStateByName(bn);
        for (int i = 0; i < bs.getStateCount(); i++) {
            DynmapBlockState b = bs.getState(i);
            hiddenids.set(b.globalStateIndex);
        }
    }

    @Override
    public boolean isBiomeDataNeeded() {
        return false;
    }

    @Override
    public boolean isRawBiomeDataNeeded() {
        return false;
    }

    @Override
    public boolean isHightestBlockYDataNeeded() {
        return false;
    }

    @Override
    public boolean isBlockTypeDataNeeded() {
        return true;
    }

    @Override
    public boolean isSkyLightLevelNeeded() {
        return false;
    }

    @Override
    public boolean isEmittedLightLevelNeeded() {
        return false;
    }

    @Override
    public String getName() {
        return name;
    }

    /** Per-tile rendering state: accumulates blended color along each ray. */
    private class OurShaderState implements HDShaderState {
        private Color color[];    /* Accumulated ray color (1 entry, or 2 for night/day) */
        private Color tmpcolor[]; /* Scratch colors for current block's lit contribution */
        private Color c;          /* Scratch color for current block before lighting */
        protected MapIterator mapiter;
        protected HDMap map;
        private HDLighting lighting;
        private int scale;
        private int heightshift;  /* Divide Y by 2^heightshift to keep in 0-255 color range */
        private boolean inWater;  /* True while ray is passing through a water run (tint once) */
        final int[] lightingTable;

        private OurShaderState(MapIterator mapiter, HDMap map, MapChunkCache cache, int scale) {
            this.mapiter = mapiter;
            this.map = map;
            this.lighting = map.getLighting();
            /* Night/day maps carry two parallel color channels */
            if (lighting.isNightAndDayEnabled()) {
                color = new Color[] { new Color(), new Color() };
                tmpcolor = new Color[] { new Color(), new Color() };
            }
            else {
                color = new Color[] { new Color() };
                tmpcolor = new Color[] { new Color() };
            }
            this.scale = scale;
            c = new Color();
            /* Compute divider for Y - to map worlds taller than 256 onto the 256-entry ramp */
            int wh = mapiter.getWorldHeight();
            heightshift = 0;
            while (wh > 256) {
                heightshift++;
                wh >>= 1;
            }
            if (MapManager.mapman.useBrightnessTable()) {
                lightingTable = cache.getWorld().getBrightnessTable();
            }
            else {
                lightingTable = null;
            }
            inWater = false;
        }

        /**
         * Get our shader
         */
        public HDShader getShader() {
            return TopoHDShader.this;
        }

        /**
         * Get our map
         */
        public HDMap getMap() {
            return map;
        }

        /**
         * Get our lighting
         */
        public HDLighting getLighting() {
            return lighting;
        }

        /**
         * Reset renderer state for new ray
         */
        public void reset(HDPerspectiveState ps) {
            for (int i = 0; i < color.length; i++)
                color[i].setTransparent();
            inWater = false;
        }

        private final boolean isHidden(DynmapBlockState blk) {
            return hiddenids.get(blk.globalStateIndex);
        }

        /**
         * Process next ray step - called for each block on route
         * @return true if ray is done, false if ray needs to continue
         */
        public boolean processBlock(HDPerspectiveState ps) {
            DynmapBlockState blocktype = ps.getBlockState();
            if (isHidden(blocktype)) {
                return false;
            }
            int y = mapiter.getY();
            if (y < 0)
                y = 0; // Clamp at zero for now
            /* See if we're close to an edge */
            int[] xyz = ps.getSubblockCoord();
            // Only color lines when spacing is matched
            Color lcolor = ((y % linespacing) == 0) ? linecolor : null;
            /* See which face we're on (only do lines on top face) */
            switch (ps.getLastBlockStep()) {
                case Y_MINUS:
                case Y_PLUS:
                    /* Draw a contour line where the block borders a hidden neighbor on any side */
                    if ((lcolor != null) &&
                            (((xyz[0] == 0) && (isHidden(mapiter.getBlockTypeAt(BlockStep.X_MINUS)))) ||
                             ((xyz[0] == (scale - 1)) && (isHidden(mapiter.getBlockTypeAt(BlockStep.X_PLUS)))) ||
                             ((xyz[2] == 0) && (isHidden(mapiter.getBlockTypeAt(BlockStep.Z_MINUS)))) ||
                             ((xyz[2] == (scale - 1)) && (isHidden(mapiter.getBlockTypeAt(BlockStep.Z_PLUS)))))) {
                        c.setColor(lcolor);
                        inWater = false;
                    }
                    else if ((watercolor != null) && blocktype.isWater()) {
                        /* Tint only the first water block of a run; skip the rest */
                        if (!inWater) {
                            c.setColor(watercolor);
                            inWater = true;
                        }
                        else {
                            return false;
                        }
                    }
                    else {
                        c.setColor(fillcolor[y >> heightshift]);
                        inWater = false;
                    }
                    break;
                default:
                    if ((lcolor != null) && (xyz[1] == (scale - 1))) {
                        c.setColor(lcolor);
                        inWater = false;
                    }
                    else if ((watercolor != null) && blocktype.isWater()) {
                        if (!inWater) {
                            c.setColor(watercolor);
                            inWater = true;
                        }
                        else {
                            return false;
                        }
                    }
                    else {
                        c.setColor(fillcolor[y >> heightshift]);
                        inWater = false;
                    }
                    break;
            }
            /* Handle light level, if needed */
            lighting.applyLighting(ps, this, c, tmpcolor);
            /* If no previous color contribution, use new color */
            if (color[0].isTransparent()) {
                for (int i = 0; i < color.length; i++)
                    color[i].setColor(tmpcolor[i]);
                return (color[0].getAlpha() == 255);
            }
            /* Else, blend and generate new alpha */
            else {
                int alpha = color[0].getAlpha();
                int alpha2 = tmpcolor[0].getAlpha() * (255 - alpha) / 255;
                int talpha = alpha + alpha2;
                if (talpha > 0)
                    for (int i = 0; i < color.length; i++)
                        color[i].setRGBA((tmpcolor[i].getRed() * alpha2 + color[i].getRed() * alpha) / talpha,
                                (tmpcolor[i].getGreen() * alpha2 + color[i].getGreen() * alpha) / talpha,
                                (tmpcolor[i].getBlue() * alpha2 + color[i].getBlue() * alpha) / talpha, talpha);
                else
                    for (int i = 0; i < color.length; i++)
                        color[i].setTransparent();
                return (talpha >= 254); /* If only one short, no meaningful contribution left */
            }
        }

        /**
         * Ray ended - used to report that ray has exited map (called if renderer has not reported complete)
         */
        public void rayFinished(HDPerspectiveState ps) {
        }

        /**
         * Get result color - get resulting color for ray
         * @param c - object to store color value in
         * @param index - index of color to request (renderer specific - 0=default, 1=day for night/day renderer
         */
        public void getRayColor(Color c, int index) {
            c.setColor(color[index]);
        }

        /**
         * Clean up state object - called after last ray completed
         */
        public void cleanup() {
        }

        @Override
        public DynLongHashMap getCTMTextureCache() {
            return null;
        }

        @Override
        public int[] getLightingTable() {
            return lightingTable;
        }

        @Override
        public void setLastBlockState(DynmapBlockState new_lastbs) {
        }
    }

    /**
     * Get renderer state object for use rendering a tile
     * @param map - map being rendered
     * @param cache - chunk cache containing data for tile to be rendered
     * @param mapiter - iterator used when traversing rays in tile
     * @param scale - scale of perspecitve
     * @return state object to use for all rays in tile
     */
    @Override
    public HDShaderState getStateInstance(HDMap map, MapChunkCache cache, MapIterator mapiter, int scale) {
        return new OurShaderState(mapiter, map, cache, scale);
    }

    /* Add shader's contributions to JSON for map object */
    public void addClientConfiguration(JSONObject mapObject) {
        s(mapObject, "shader", name);
    }

    @Override
    public void exportAsMaterialLibrary(DynmapCommandSender sender, OBJExport out) throws IOException {
        throw new IOException("Export unsupported");
    }

    private static final String[] nulllist = new String[0];

    @Override
    public String[] getCurrentBlockMaterials(DynmapBlockState blk, MapIterator mapiter, int[] txtidx,
            BlockStep[] steps) {
        return nulllist;
    }
}
/* * Copyright 2012-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.test.context; import java.lang.annotation.Annotation; import java.lang.reflect.AnnotatedElement; import java.lang.reflect.Constructor; import java.util.Collections; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.Set; import org.springframework.beans.BeansException; import org.springframework.beans.factory.BeanFactory; import org.springframework.beans.factory.BeanFactoryAware; import org.springframework.beans.factory.NoSuchBeanDefinitionException; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.config.ConfigurableListableBeanFactory; import org.springframework.beans.factory.support.BeanDefinitionRegistry; import org.springframework.beans.factory.support.BeanDefinitionRegistryPostProcessor; import org.springframework.boot.context.annotation.DeterminableImports; import org.springframework.context.ApplicationContext; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.context.annotation.AnnotatedBeanDefinitionReader; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; import org.springframework.context.annotation.ImportBeanDefinitionRegistrar; import org.springframework.context.annotation.ImportSelector; import 
org.springframework.context.support.AbstractApplicationContext;
import org.springframework.core.Ordered;
import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.core.annotation.Order;
import org.springframework.core.style.ToStringCreator;
import org.springframework.core.type.AnnotationMetadata;
import org.springframework.test.context.ContextCustomizer;
import org.springframework.test.context.MergedContextConfiguration;
import org.springframework.util.ReflectionUtils;

/**
 * {@link ContextCustomizer} to allow {@code @Import} annotations to be used directly on
 * test classes.
 *
 * @author Phillip Webb
 * @author Andy Wilkinson
 * @see ImportsContextCustomizerFactory
 */
class ImportsContextCustomizer implements ContextCustomizer {

    static final String TEST_CLASS_ATTRIBUTE = "testClass";

    private final Class<?> testClass;

    // Cache key derived from the test class's annotations; drives equals/hashCode
    // so that compatible contexts can be reused by the test framework's cache.
    private final ContextCustomizerKey key;

    ImportsContextCustomizer(Class<?> testClass) {
        this.testClass = testClass;
        this.key = new ContextCustomizerKey(testClass);
    }

    @Override
    public void customizeContext(ConfigurableApplicationContext context,
            MergedContextConfiguration mergedContextConfiguration) {
        BeanDefinitionRegistry registry = getBeanDefinitionRegistry(context);
        AnnotatedBeanDefinitionReader reader = new AnnotatedBeanDefinitionReader(registry);
        // Register the cleanup processor first, then the configuration that
        // triggers import processing of the test class.
        registerCleanupPostProcessor(registry, reader);
        registerImportsConfiguration(registry, reader);
    }

    // Registers ImportsCleanupPostProcessor with the test class as its sole
    // constructor argument so it can later remove the temporary definitions.
    private void registerCleanupPostProcessor(BeanDefinitionRegistry registry, AnnotatedBeanDefinitionReader reader) {
        BeanDefinition definition = registerBean(registry, reader, ImportsCleanupPostProcessor.BEAN_NAME,
                ImportsCleanupPostProcessor.class);
        definition.getConstructorArgumentValues().addIndexedArgumentValue(0, this.testClass);
    }

    // Registers ImportsConfiguration and stashes the test class on the bean
    // definition so ImportsSelector can find it via TEST_CLASS_ATTRIBUTE.
    private void registerImportsConfiguration(BeanDefinitionRegistry registry, AnnotatedBeanDefinitionReader reader) {
        BeanDefinition definition = registerBean(registry, reader, ImportsConfiguration.BEAN_NAME,
                ImportsConfiguration.class);
        definition.setAttribute(TEST_CLASS_ATTRIBUTE, this.testClass);
    }

    // Resolves a BeanDefinitionRegistry from the context, falling back to the
    // underlying bean factory when the context itself is not a registry.
    private BeanDefinitionRegistry getBeanDefinitionRegistry(ApplicationContext context) {
        if (context instanceof BeanDefinitionRegistry) {
            return (BeanDefinitionRegistry) context;
        }
        if (context instanceof AbstractApplicationContext) {
            return (BeanDefinitionRegistry) ((AbstractApplicationContext) context).getBeanFactory();
        }
        throw new IllegalStateException("Could not locate BeanDefinitionRegistry");
    }

    // Registers the bean and returns its definition so callers can customize it
    // (constructor arguments, attributes) after registration.
    private BeanDefinition registerBean(BeanDefinitionRegistry registry, AnnotatedBeanDefinitionReader reader,
            String beanName, Class<?> type) {
        reader.registerBean(type, beanName);
        return registry.getBeanDefinition(beanName);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        if (obj == null || obj.getClass() != getClass()) {
            return false;
        }
        // ImportSelectors are flexible so the only safe cache key is the test class
        ImportsContextCustomizer other = (ImportsContextCustomizer) obj;
        return this.key.equals(other.key);
    }

    @Override
    public int hashCode() {
        return this.key.hashCode();
    }

    @Override
    public String toString() {
        return new ToStringCreator(this).append("key", this.key).toString();
    }

    /**
     * {@link Configuration @Configuration} registered to trigger the
     * {@link ImportsSelector}.
     */
    @Configuration(proxyBeanMethods = false)
    @Import(ImportsSelector.class)
    static class ImportsConfiguration {

        static final String BEAN_NAME = ImportsConfiguration.class.getName();

    }

    /**
     * {@link ImportSelector} that returns the original test class so that direct
     * {@code @Import} annotations are processed.
     */
    static class ImportsSelector implements ImportSelector, BeanFactoryAware {

        private static final String[] NO_IMPORTS = {};

        private ConfigurableListableBeanFactory beanFactory;

        @Override
        public void setBeanFactory(BeanFactory beanFactory) throws BeansException {
            this.beanFactory = (ConfigurableListableBeanFactory) beanFactory;
        }

        @Override
        public String[] selectImports(AnnotationMetadata importingClassMetadata) {
            // Recover the test class stored on the ImportsConfiguration definition
            // by registerImportsConfiguration and import it by name.
            BeanDefinition definition = this.beanFactory.getBeanDefinition(ImportsConfiguration.BEAN_NAME);
            Object testClass = (definition != null) ? definition.getAttribute(TEST_CLASS_ATTRIBUTE) : null;
            return (testClass != null) ? new String[] { ((Class<?>) testClass).getName() } : NO_IMPORTS;
        }

    }

    /**
     * {@link BeanDefinitionRegistryPostProcessor} to cleanup temporary configuration
     * added to load imports.
     */
    @Order(Ordered.LOWEST_PRECEDENCE) // run after all other registry post-processors
    static class ImportsCleanupPostProcessor implements BeanDefinitionRegistryPostProcessor {

        static final String BEAN_NAME = ImportsCleanupPostProcessor.class.getName();

        private final Class<?> testClass;

        ImportsCleanupPostProcessor(Class<?> testClass) {
            this.testClass = testClass;
        }

        @Override
        public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) throws BeansException {
        }

        @Override
        public void postProcessBeanDefinitionRegistry(BeanDefinitionRegistry registry) throws BeansException {
            try {
                // Remove any bean definition whose class is the test class itself,
                // then the temporary ImportsConfiguration bean.
                String[] names = registry.getBeanDefinitionNames();
                for (String name : names) {
                    BeanDefinition definition = registry.getBeanDefinition(name);
                    if (this.testClass.getName().equals(definition.getBeanClassName())) {
                        registry.removeBeanDefinition(name);
                    }
                }
                registry.removeBeanDefinition(ImportsConfiguration.BEAN_NAME);
            }
            catch (NoSuchBeanDefinitionException ex) {
                // Cleanup is best-effort; a definition may already be gone.
            }
        }

    }

    /**
     * The key used to ensure correct application context caching. Keys are generated
     * based on <em>all</em> the annotations used with the test that aren't core Java or
     * Kotlin annotations. We must use something broader than just {@link Import @Import}
     * annotations since an {@code @Import} may use an {@link ImportSelector} which could
     * make decisions based on anything available from {@link AnnotationMetadata}.
     */
    static class ContextCustomizerKey {

        private static final Class<?>[] NO_IMPORTS = {};

        private static final Set<AnnotationFilter> ANNOTATION_FILTERS;

        static {
            Set<AnnotationFilter> filters = new HashSet<>();
            filters.add(new JavaLangAnnotationFilter());
            filters.add(new KotlinAnnotationFilter());
            filters.add(new SpockAnnotationFilter());
            ANNOTATION_FILTERS = Collections.unmodifiableSet(filters);
        }

        private final Set<Object> key;

        ContextCustomizerKey(Class<?> testClass) {
            Set<Annotation> annotations = new HashSet<>();
            Set<Class<?>> seen = new HashSet<>();
            collectClassAnnotations(testClass, annotations, seen);
            // Prefer the deterministic set of imports; when any import cannot be
            // determined (null), fall back to keying on the full annotation set.
            Set<Object> determinedImports = determineImports(annotations, testClass);
            this.key = Collections.unmodifiableSet((determinedImports != null) ? determinedImports : annotations);
        }

        // Recursively collects annotations from the class, its interfaces and
        // superclasses; `seen` guards against annotation cycles.
        private void collectClassAnnotations(Class<?> classType, Set<Annotation> annotations, Set<Class<?>> seen) {
            if (seen.add(classType)) {
                collectElementAnnotations(classType, annotations, seen);
                for (Class<?> interfaceType : classType.getInterfaces()) {
                    collectClassAnnotations(interfaceType, annotations, seen);
                }
                if (classType.getSuperclass() != null) {
                    collectClassAnnotations(classType.getSuperclass(), annotations, seen);
                }
            }
        }

        private void collectElementAnnotations(AnnotatedElement element, Set<Annotation> annotations,
                Set<Class<?>> seen) {
            for (Annotation annotation : element.getDeclaredAnnotations()) {
                if (!isIgnoredAnnotation(annotation)) {
                    annotations.add(annotation);
                    // Also walk meta-annotations on the annotation type itself
                    collectClassAnnotations(annotation.annotationType(), annotations, seen);
                }
            }
        }

        private boolean isIgnoredAnnotation(Annotation annotation) {
            for (AnnotationFilter annotationFilter : ANNOTATION_FILTERS) {
                if (annotationFilter.isIgnored(annotation)) {
                    return true;
                }
            }
            return false;
        }

        // Returns the determined imports, or null when any import's result cannot
        // be determined up front (e.g. a plain ImportSelector).
        private Set<Object> determineImports(Set<Annotation> annotations, Class<?> testClass) {
            Set<Object> determinedImports = new LinkedHashSet<>();
            AnnotationMetadata testClassMetadata = AnnotationMetadata.introspect(testClass);
            for (Annotation annotation : annotations) {
                for (Class<?> source : getImports(annotation)) {
                    Set<Object> determinedSourceImports = determineImports(source, testClassMetadata);
                    if (determinedSourceImports == null) {
                        return null;
                    }
                    determinedImports.addAll(determinedSourceImports);
                }
            }
            return determinedImports;
        }

        private Class<?>[] getImports(Annotation annotation) {
            if (annotation instanceof Import) {
                return ((Import) annotation).value();
            }
            return NO_IMPORTS;
        }

        private Set<Object> determineImports(Class<?> source, AnnotationMetadata metadata) {
            if (DeterminableImports.class.isAssignableFrom(source)) {
                // We can determine the imports
                return ((DeterminableImports) instantiate(source)).determineImports(metadata);
            }
            if (ImportSelector.class.isAssignableFrom(source)
                    || ImportBeanDefinitionRegistrar.class.isAssignableFrom(source)) {
                // Standard ImportSelector and ImportBeanDefinitionRegistrar could
                // use anything to determine the imports so we can't be sure
                return null;
            }
            // The source itself is the import
            return Collections.singleton(source.getName());
        }

        @SuppressWarnings("unchecked")
        private <T> T instantiate(Class<T> source) {
            try {
                Constructor<?> constructor = source.getDeclaredConstructor();
                ReflectionUtils.makeAccessible(constructor);
                return (T) constructor.newInstance();
            }
            catch (Throwable ex) {
                throw new IllegalStateException(
                        "Unable to instantiate DeterminableImportSelector " + source.getName(), ex);
            }
        }

        @Override
        public boolean equals(Object obj) {
            return (obj != null && getClass() == obj.getClass() && this.key.equals(((ContextCustomizerKey) obj).key));
        }

        @Override
        public int hashCode() {
            return this.key.hashCode();
        }

        @Override
        public String toString() {
            return this.key.toString();
        }

    }

    /**
     * Filter used to limit considered annotations.
     */
    private interface AnnotationFilter {

        boolean isIgnored(Annotation annotation);

    }

    /**
     * {@link AnnotationFilter} for {@literal java.lang} annotations.
     */
    private static final class JavaLangAnnotationFilter implements AnnotationFilter {

        @Override
        public boolean isIgnored(Annotation annotation) {
            return AnnotationUtils.isInJavaLangAnnotationPackage(annotation);
        }

    }

    /**
     * {@link AnnotationFilter} for Kotlin annotations.
     */
    private static final class KotlinAnnotationFilter implements AnnotationFilter {

        @Override
        public boolean isIgnored(Annotation annotation) {
            return "kotlin.Metadata".equals(annotation.annotationType().getName())
                    || isInKotlinAnnotationPackage(annotation);
        }

        private boolean isInKotlinAnnotationPackage(Annotation annotation) {
            return annotation.annotationType().getName().startsWith("kotlin.annotation.");
        }

    }

    /**
     * {@link AnnotationFilter} for Spock annotations.
     */
    private static final class SpockAnnotationFilter implements AnnotationFilter {

        @Override
        public boolean isIgnored(Annotation annotation) {
            return annotation.annotationType().getName().startsWith("org.spockframework.")
                    || annotation.annotationType().getName().startsWith("spock.");
        }

    }

}
/* * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.wso2.andes.management.ui.views; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map.Entry; import javax.management.openmbean.CompositeData; import javax.management.openmbean.TabularDataSupport; import static org.wso2.andes.management.ui.Constants.*; import org.wso2.andes.management.ui.ApplicationRegistry; import org.wso2.andes.management.ui.ManagedBean; import org.wso2.andes.management.ui.jmx.MBeanUtility; import org.wso2.andes.management.ui.model.OperationData; import org.wso2.andes.management.ui.model.ParameterData; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.ScrolledComposite; import org.eclipse.swt.events.KeyAdapter; import org.eclipse.swt.events.KeyEvent; import org.eclipse.swt.events.KeyListener; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.events.SelectionListener; import org.eclipse.swt.events.VerifyEvent; import org.eclipse.swt.events.VerifyListener; import org.eclipse.swt.layout.FormAttachment; import org.eclipse.swt.layout.FormData; import org.eclipse.swt.layout.FormLayout; import 
org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Combo;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.TabFolder;
import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.forms.widgets.Form;
import org.eclipse.ui.forms.widgets.FormToolkit;

/**
 * Control class for the MBean operations tab. It creates the required widgets
 * for the selected MBean.
 */
public class OperationTabControl extends TabControl
{
    // Vertical pixel budget for each parameter row in the FormLayout.
    private static final int heightForAParameter = 30;
    // Percentage-based FormAttachment positions for label / value columns.
    private static final int labelWidth = 30;
    private static final int valueWidth = labelWidth + 25;

    private FormToolkit _toolkit;
    private Form _form;
    // Metadata for the MBean operation this tab renders.
    private OperationData _opData;

    // Listeners are created once and shared across widget rebuilds.
    private SelectionListener operationExecutionListener = new OperationExecutionListener();
    private SelectionListener refreshListener = new RefreshListener();
    private SelectionListener parameterSelectionListener = new ParameterSelectionListener();
    private SelectionListener booleanSelectionListener = new BooleanSelectionListener();
    private VerifyListener verifyListener = new VerifyListenerImpl();
    private KeyListener keyListener = new KeyListenerImpl();
    private KeyListener headerBindingListener = new HeaderBindingKeyListener();

    private Composite _headerComposite = null;
    private Composite _paramsComposite = null;
    private Composite _resultsComposite = null;
    private Button _executionButton = null;

    // for customized method in header exchange: maps each "Name" text field to its "Value" text field
    private HashMap<Text, Text> headerBindingHashMap = null;
    private String _virtualHostName = null;

    /**
     * Creates the tab control for the given operation.
     *
     * @param tabFolder the parent tab folder
     * @param opData    metadata of the MBean operation to display
     */
    public OperationTabControl(TabFolder tabFolder, OperationData opData)
    {
        super(tabFolder);
        _toolkit = new FormToolkit(_tabFolder.getDisplay());
        _form = _toolkit.createForm(_tabFolder);
        _form.getBody().setLayout(new GridLayout());
        _opData = opData;

        createComposites();
        setHeader();
    }

    /**
     * Form area is divided in four parts:
     * Header composite    - displays operation information
     * Parameters composite - displays parameters if there
     * Button              - operation execution button
     * Results composite   - displays results for operations, which have
     *                       no parameters but have some return value
     */
    private void createComposites()
    {
        //
        _headerComposite = _toolkit.createComposite(_form.getBody(), SWT.NONE);
        _headerComposite.setLayoutData(new GridData(SWT.FILL, SWT.TOP, true, false));

        // Parameters composite is only created when the operation actually takes parameters.
        List<ParameterData> params = _opData.getParameters();
        if (params != null && !params.isEmpty())
        {
            _paramsComposite = _toolkit.createComposite(_form.getBody(), SWT.NONE);
            _paramsComposite.setLayoutData(new GridData(SWT.FILL, SWT.TOP, true, false));
        }
        _executionButton = _toolkit.createButton(_form.getBody(), BUTTON_EXECUTE, SWT.PUSH | SWT.CENTER);
        _executionButton.setFont(ApplicationRegistry.getFont(FONT_BUTTON));
        GridData layoutData = new GridData(SWT.CENTER, SWT.TOP, true, false);
        layoutData.verticalIndent = 20;
        _executionButton.setLayoutData(layoutData);

        _resultsComposite = _toolkit.createComposite(_form.getBody(), SWT.NONE);
        layoutData = new GridData(SWT.FILL, SWT.FILL, true, true);
        layoutData.verticalIndent = 20;
        _resultsComposite.setLayoutData(layoutData);
        _resultsComposite.setLayout(new GridLayout());
    }

    /**
     * @see TabControl#getControl()
     */
    public Control getControl()
    {
        return _form;
    }

    @Override
    public void refresh(ManagedBean mbean)
    {
        _mbean = mbean;
        _virtualHostName = _mbean.getVirtualHostName();

        // Setting the form to be invisible. Just in case the mbean server connection
        // is done and it takes time in getting the response, then the ui should be blank
        // instead of having half the widgets displayed.
        _form.setVisible(false);

        ViewUtility.disposeChildren(_paramsComposite);
        createParameterWidgets();

        // Set button text and add appropriate listener to button.
        // If there are no parameters and it is info operation, then operation gets executed
        // and result is displayed
        List<ParameterData> params = _opData.getParameters();
        if (params != null && !params.isEmpty())
        {
            setButton(BUTTON_EXECUTE);
        }
        else if (_opData.getImpact() == OPERATION_IMPACT_ACTION)
        {
            setButton(BUTTON_EXECUTE);
        }
        else if (_opData.getImpact() == OPERATION_IMPACT_INFO)
        {
            // Read-only (info) operations with no parameters are executed eagerly.
            setButton(BUTTON_REFRESH);
            executeAndShowResults();
        }

        _form.setVisible(true);
        layout();
    }

    /**
     * Relayouts the form and its body after widgets have been rebuilt.
     */
    public void layout()
    {
        _form.layout(true);
        _form.getBody().layout(true, true);
    }

    /**
     * populates the header composite, containing the operation name and description.
     */
    private void setHeader()
    {
        _form.setText(ViewUtility.getDisplayText(_opData.getName()));
        _headerComposite.setLayout(new GridLayout(2, false));
        //operation description
        Label label = _toolkit.createLabel(_headerComposite, DESCRIPTION + " : ");
        label.setFont(ApplicationRegistry.getFont(FONT_BOLD));
        label.setLayoutData(new GridData(SWT.LEAD, SWT.TOP, false, false));

        label = _toolkit.createLabel(_headerComposite, _opData.getDescription());
        label.setFont(ApplicationRegistry.getFont(FONT_NORMAL));
        label.setLayoutData(new GridData(SWT.LEAD, SWT.TOP, true, false));
        _headerComposite.layout();
    }

    /**
     * Creates the widgets for operation parameters if there are any.
     * Depending on the parameter, the value widget is a selection list, a
     * check box (boolean) or a plain text field.
     */
    private void createParameterWidgets()
    {
        List<ParameterData> params = _opData.getParameters();
        if (params == null || params.isEmpty())
        {
            return;
        }

        // Customised parameter widgets
        if (_mbean.isExchange() && HEADERS_EXCHANGE.equals(_mbean.getProperty(EXCHANGE_TYPE))
            && _opData.getName().equalsIgnoreCase(OPERATION_CREATE_BINDING))
        {
            customCreateNewBinding();
            return;
        }
        // end of Customised parameter widgets

        _paramsComposite.setLayoutData(new GridData(SWT.FILL, SWT.TOP, true, false));
        _paramsComposite.setLayout(new FormLayout());
        int parameterPositionOffset = 0;
        for (ParameterData param : params)
        {
            boolean valueInCombo = false;
            Label label = _toolkit.createLabel(_paramsComposite, ViewUtility.getDisplayText(param.getName()));
            FormData formData = new FormData();
            if (params.indexOf(param) == 0)
            {
                parameterPositionOffset = 0;
            }
            else
            {
                parameterPositionOffset += heightForAParameter;
            }
            formData.top = new FormAttachment(0, parameterPositionOffset + 2);
            formData.right = new FormAttachment(labelWidth);
            label.setLayoutData(formData);
            label.setToolTipText(param.getDescription());

            formData = new FormData();
            formData.top = new FormAttachment(0, parameterPositionOffset);
            formData.left = new FormAttachment(label, 5);
            formData.right = new FormAttachment(valueWidth);

            // this will contain the list of items, if the list is to be made available to choose from
            // e.g. the list of exchanges
            String[] items = null;
            if (param.getName().equals(QUEUE))
            {
                List<String> qList = ApplicationRegistry.getServerRegistry(_mbean).getQueueNames(_virtualHostName);
                // Customization for AMQQueueMBean method OPERATION_MOVE_MESSAGES: a queue
                // cannot be the move target of its own messages, so remove it from the list.
                if (_opData.getName().equals(OPERATION_MOVE_MESSAGES))
                {
                    qList.remove(_mbean.getName());
                }
                // End of Customization
                items = qList.toArray(new String[0]);
            }
            else if (param.getName().equals(EXCHANGE))
            {
                items = ApplicationRegistry.getServerRegistry(_mbean).getExchangeNames(_virtualHostName);
            }
            else if (param.getName().equals(EXCHANGE_TYPE))
            {
                items = DEFAULT_EXCHANGE_TYPE_VALUES.toArray(new String[DEFAULT_EXCHANGE_TYPE_VALUES.size()]);
            }
            else if (isUserListParameter(param))
            {
                List<String> list = ApplicationRegistry.getServerRegistry(_mbean).getUsernames();
                if (list != null && !list.isEmpty())
                {
                    items = list.toArray(new String[0]);
                }
            }

            if (items != null)
            {
                org.eclipse.swt.widgets.List _list =
                    new org.eclipse.swt.widgets.List(_paramsComposite, SWT.BORDER | SWT.V_SCROLL);
                // Cap the list at two thirds of the visible form height.
                int listSize = _form.getClientArea().height * 2 / 3;
                int itemsHeight = items.length * (_list.getItemHeight() + 2);
                // Set a min height for the list widget (set it to min 4 items)
                if (items.length < 4)
                {
                    itemsHeight = 4 * (_list.getItemHeight() + 2);
                }

                listSize = (listSize > itemsHeight) ? itemsHeight : listSize;
                parameterPositionOffset = parameterPositionOffset + listSize;
                formData.bottom = new FormAttachment(0, parameterPositionOffset);
                _list.setLayoutData(formData);
                _list.setData(param);
                _list.setItems(items);
                _list.addSelectionListener(parameterSelectionListener);
                valueInCombo = true;
            }
            else if (param.isBoolean())
            {
                Button booleanButton = _toolkit.createButton(_paramsComposite, "", SWT.CHECK);
                booleanButton.setLayoutData(formData);
                booleanButton.setData(param);
                booleanButton.addSelectionListener(booleanSelectionListener);
                valueInCombo = true;
            }
            else
            {
                int style = SWT.NONE;
                if (PASSWORD.equalsIgnoreCase(param.getName()))
                {
                    style = SWT.PASSWORD;
                }
                Text text = _toolkit.createText(_paramsComposite, "", style);
                formData = new FormData();
                formData.top = new FormAttachment(0, parameterPositionOffset);
                formData.left = new FormAttachment(label, 5);
                formData.right = new FormAttachment(valueWidth);
                text.setLayoutData(formData);
                // Listener to assign value to the parameter
                text.addKeyListener(keyListener);
                // Listener to verify if the entered key is valid
                text.addVerifyListener(verifyListener);
                text.setData(param);
            }

            // display the parameter data type next to the text field
            if (valueInCombo)
            {
                label = _toolkit.createLabel(_paramsComposite, "");
            }
            else if (PASSWORD.equalsIgnoreCase(param.getName()))
            {
                // Password is always shown as String regardless of the underlying char[] type.
                label = _toolkit.createLabel(_paramsComposite, "(String)");
            }
            else
            {
                String str = param.getType();
                // Strip the package prefix from fully-qualified type names.
                if (param.getType().lastIndexOf(".") != -1)
                    str = param.getType().substring(1 + param.getType().lastIndexOf("."));

                label = _toolkit.createLabel(_paramsComposite, "(" + str + ")");
            }
            formData = new FormData();
            formData.top = new FormAttachment(0, parameterPositionOffset);
            formData.left = new FormAttachment(valueWidth, 5);
            label.setLayoutData(formData);
        }
    }

    /**
     * Returns true when the parameter should be offered as a selection from the
     * known user names: admin MBean, parameter is "username", and the operation
     * is not "create user" (where a new name is typed in).
     *
     * @param param the parameter to check
     * @return true if a user-name selection list should be shown
     */
    private boolean isUserListParameter(ParameterData param)
    {
        if (_mbean.isAdmin() && param.getName().equals(OPERATION_PARAM_USERNAME)
            && !_opData.getName().equals(OPERATION_CREATEUSER))
        {
            return true;
        }
        return false;
    }

    /**
     * Creates customized display for a method "CreateNewBinding" for Headers exchange.
     * Builds a queue selection combo plus rows of name/value text fields inside a
     * scrolled composite, with an "Add More" button to append further rows.
     */
    private void customCreateNewBinding()
    {
        headerBindingHashMap = new HashMap<Text, Text>();

        _paramsComposite.setLayout(new GridLayout());
        _paramsComposite.setLayoutData(new GridData(SWT.FILL, SWT.TOP, true, true));
        final ScrolledComposite scrolledComposite =
            new ScrolledComposite(_paramsComposite, SWT.BORDER | SWT.V_SCROLL);
        scrolledComposite.setExpandHorizontal(true);
        scrolledComposite.setExpandVertical(true);
        GridData layoutData = new GridData(SWT.FILL, SWT.TOP, true, true);
        scrolledComposite.setLayoutData(layoutData);
        scrolledComposite.setLayout(new GridLayout());

        final Composite composite = _toolkit.createComposite(scrolledComposite, SWT.NONE);
        scrolledComposite.setContent(composite);
        layoutData = new GridData(SWT.FILL, SWT.FILL, true, true);
        layoutData.verticalIndent = 20;
        composite.setLayoutData(layoutData);
        composite.setLayout(new FormLayout());

        List<ParameterData> params = _opData.getParameters();
        // First parameter is the queue name, second holds the header bindings.
        ParameterData param = params.get(0);
        // Queue selection widget
        Label label = _toolkit.createLabel(composite, ViewUtility.getDisplayText(param.getName()));
        FormData formData = new FormData();
        formData.top = new FormAttachment(0, 2);
        formData.right = new FormAttachment(labelWidth);
        label.setLayoutData(formData);
        label.setToolTipText(param.getDescription());

        formData = new FormData();
        formData.top = new FormAttachment(0);
        formData.left = new FormAttachment(label, 5);
        formData.right = new FormAttachment(valueWidth);

        Combo combo = new Combo(composite, SWT.READ_ONLY | SWT.DROP_DOWN);
        List<String> qList = ApplicationRegistry.getServerRegistry(_mbean).getQueueNames(_virtualHostName);
        combo.setItems(qList.toArray(new String[0]));
        // Placeholder entry at index 0; ParameterSelectionListener ignores index 0.
        combo.add("Select Queue", 0);
        combo.select(0);
        combo.setLayoutData(formData);
        combo.setData(param);
        combo.addSelectionListener(parameterSelectionListener);

        // Binding creation widgets: start with 8 empty name/value rows.
        createARowForCreatingHeadersBinding(composite, 1);
        createARowForCreatingHeadersBinding(composite, 2);
        createARowForCreatingHeadersBinding(composite, 3);
        createARowForCreatingHeadersBinding(composite, 4);
        createARowForCreatingHeadersBinding(composite, 5);
        createARowForCreatingHeadersBinding(composite, 6);
        createARowForCreatingHeadersBinding(composite, 7);
        createARowForCreatingHeadersBinding(composite, 8);

        final Button addMoreButton = _toolkit.createButton(composite, "Add More", SWT.PUSH);
        formData = new FormData();
        formData.top = new FormAttachment(0, heightForAParameter);
        formData.left = new FormAttachment(70, 5);
        addMoreButton.setLayoutData(formData);
        // The current row count is stashed on the button itself.
        addMoreButton.setData("rowCount", 8);
        addMoreButton.addSelectionListener(new SelectionAdapter()
        {
            public void widgetSelected(SelectionEvent e)
            {
                int count = Integer.parseInt(addMoreButton.getData("rowCount").toString());
                createARowForCreatingHeadersBinding(composite, ++count);
                addMoreButton.setData("rowCount", count);
                scrolledComposite.setMinSize(composite.computeSize(SWT.DEFAULT, SWT.DEFAULT));
                composite.layout();
                _form.layout();
            }
        });

        scrolledComposite.setMinSize(composite.computeSize(SWT.DEFAULT, SWT.DEFAULT));
        composite.layout();
    }

    /**
     * Adds a row for adding a binding for Headers Exchange. Used by the method, which creates the customized
     * layout and widgets for Header's exchange method createNewBinding.
     * @param parent composite
     * @param rowCount - row number
     */
    private void createARowForCreatingHeadersBinding(Composite parent, int rowCount)
    {
        Label key = _toolkit.createLabel(parent, "Name");
        FormData formData = new FormData();
        formData.top = new FormAttachment(0, rowCount * heightForAParameter + 2);
        formData.right = new FormAttachment(15);
        key.setLayoutData(formData);

        Text keyText = _toolkit.createText(parent, "", SWT.NONE);
        formData = new FormData();
        formData.top = new FormAttachment(0, rowCount * heightForAParameter);
        formData.left = new FormAttachment(key, 5);
        formData.right = new FormAttachment(40);
        keyText.setLayoutData(formData);
        keyText.addKeyListener(headerBindingListener);

        Label value = _toolkit.createLabel(parent, "Value");
        formData = new FormData();
        formData.top = new FormAttachment(0, rowCount * heightForAParameter + 2);
        formData.right = new FormAttachment(45);
        value.setLayoutData(formData);

        Text valueText = _toolkit.createText(parent, "", SWT.NONE);
        formData = new FormData();
        formData.top = new FormAttachment(0, rowCount * heightForAParameter);
        formData.left = new FormAttachment(value, 5);
        formData.right = new FormAttachment(70);
        valueText.setLayoutData(formData);
        valueText.addKeyListener(headerBindingListener);

        // Add these to the map, to retrieve the values while setting the parameter value
        headerBindingHashMap.put(keyText, valueText);
    }

    /**
     * Sets text and listener for the operation execution button.
     * Both listeners are removed first so the button never carries two.
     * @param text the button label (BUTTON_EXECUTE or BUTTON_REFRESH)
     */
    private void setButton(String text)
    {
        _executionButton.setText(text);
        _executionButton.removeSelectionListener(refreshListener);
        _executionButton.removeSelectionListener(operationExecutionListener);

        if (BUTTON_EXECUTE.equals(text))
        {
            _executionButton.addSelectionListener(operationExecutionListener);
        }
        else
        {
            _executionButton.addSelectionListener(refreshListener);
        }
    }

    /**
     * displays the operation result in a pop-up window.
     * Runs a modal-style event loop until the shell is closed.
     * @param result the value returned by the operation
     */
    private void populateResults(Object result)
    {
        Display display = Display.getCurrent();
        int width = 610;
        int height = 400;
        Shell shell = ViewUtility.createPopupShell(RESULT, width, height);
        shell.setImage(ApplicationRegistry.getImage(CONSOLE_IMAGE));
        ViewUtility.populateCompositeWithData(_toolkit, shell, result);

        shell.open();
        while (!shell.isDisposed())
        {
            if (!display.readAndDispatch())
            {
                display.sleep();
            }
        }
        shell.dispose();
    }

    /**
     * Clears the parameter values of the operation
     */
    private void clearParameters()
    {
        List<ParameterData> params = _opData.getParameters();
        if (params != null && !params.isEmpty())
        {
            for (ParameterData param : params)
            {
                param.setDefaultValue();
            }
        }
    }

    /**
     * Clears the values entered by the user from parameter value widgets.
     * Recurses into child composites.
     * @param control the composite whose children should be reset
     */
    private void clearParameterValues(Composite control)
    {
        if (control == null || (control.isDisposed()))
            return;
        Control[] controls = control.getChildren();
        if (controls == null || controls.length == 0)
            return;

        for (int i = 0; i < controls.length; i++)
        {
            // NOTE(review): this first check is deliberately not part of the else-chain
            // below (Combo is a Composite subclass, so it is also recursed into) — confirm
            // this is intentional before restructuring.
            if (controls[i] instanceof Combo)
                ((Combo)controls[i]).select(0);
            if (controls[i] instanceof org.eclipse.swt.widgets.List)
                ((org.eclipse.swt.widgets.List)controls[i]).deselectAll();
            else if (controls[i] instanceof Text)
                ((Text)controls[i]).setText("");
            else if (controls[i] instanceof Button)
                ((Button)controls[i]).setSelection(false);
            else if (controls[i] instanceof Composite)
                clearParameterValues((Composite)controls[i]);
        }
    }

    /**
     * Listener class for operation execution events.
     * Validates parameters, applies password customizations, confirms
     * "action" operations, executes, then resets the form.
     */
    private class OperationExecutionListener extends SelectionAdapter
    {
        public void widgetSelected(SelectionEvent e)
        {
            List<ParameterData> params = _opData.getParameters();
            if (params != null && !params.isEmpty())
            {
                for (ParameterData param : params)
                {
                    if (param.getValue() == null || param.getValue().toString().length() == 0)
                    {
                        // Customized check, because for this parameter null is allowed
                        if (param.getName().equals(ATTRIBUTE_QUEUE_OWNER)
                            && _opData.getName().equals(OPERATION_CREATE_QUEUE))
                        {
                            continue;
                        }
                        // End of custom code

                        ViewUtility.popupInfoMessage(_form.getText(),
                            "Please select the " + ViewUtility.getDisplayText(param.getName()));
                        return;
                    }

                    //Custom handling for the PASSWORD field
                    if (param.getName().equalsIgnoreCase(PASSWORD))
                    {
                        //Convert the String value to a character array if that is what is required.
                        if (param.getType().equals("[C"))
                        {
                            // Retrieve the mBean type and version.
                            // If we have a version 1 UserManagement class mbean then it expects the password
                            // to be sent as the hashed version.
                            if (_mbean.getType().equals("UserManagement") && _mbean.getVersion() == 1)
                            {
                                try
                                {
                                    param.setValue(ViewUtility.getHash((String) param.getValue()));
                                }
                                catch (Exception hashException)
                                {
                                    ViewUtility.popupErrorMessage(_form.getText(),
                                        "Unable to calculate hash for Password:" + hashException.getMessage());
                                    return;
                                }
                            }
                            else
                            {
                                param.setValue(((String) param.getValue()).toCharArray());
                            }
                        }
                    }
                    // end of customization
                }
            }

            if (_opData.getImpact() == OPERATION_IMPACT_ACTION)
            {
                // Destructive/"action" operations require explicit user confirmation.
                String bean = _mbean.getName() == null ? _mbean.getType() : _mbean.getName();
                int response = ViewUtility.popupConfirmationMessage(bean, "Do you want to " + _form.getText()+ " ?");
                if (response == SWT.YES)
                {
                    executeAndShowResults();
                }
            }
            else
            {
                executeAndShowResults();
            }

            // Deleting a user changes the selectable user list, so rebuild the whole tab.
            if (_mbean.isAdmin() && _opData.getName().equals(OPERATION_DELETEUSER))
            {
                refresh(_mbean);
            }
            else
            {
                clearParameters();
                clearParameterValues(_paramsComposite);
            }
        }
    }

    // Listener for the "Refresh" execution button
    private class RefreshListener extends SelectionAdapter
    {
        public void widgetSelected(SelectionEvent e)
        {
            executeAndShowResults();
        }
    }

    /**
     * Executes the operation, gets the result from server and displays to the user.
     * Exceptions from the server are routed to MBeanUtility.handleException.
     */
    private void executeAndShowResults()
    {
        Object result = null;
        try
        {
            result = MBeanUtility.execute(_mbean, _opData);
        }
        catch(Exception ex)
        {
            MBeanUtility.handleException(_mbean, ex);
            return;
        }

        // Custom code for Admin mbean operation
        /* These custom codes here are to make the GUI look more user friendly.
         * Here we are adding the users to a list, which will be used to list username to be selected on
         * pages like "delete user", "set password" instead of typing the username
         */
        if (_mbean.isAdmin())
        {
            if (_opData.getName().equals(OPERATION_VIEWUSERS))
            {
                ApplicationRegistry.getServerRegistry(_mbean).setUserList(extractUserList(result));
            }
            else if (_opData.getName().equals(OPERATION_DELETEUSER))
            {
                List<String> list = ApplicationRegistry.getServerRegistry(_mbean).getUsernames();
                Object userName = _opData.getParameterValue(OPERATION_PARAM_USERNAME);
                if ((list != null) && !list.isEmpty() && (userName != null))
                {
                    list.remove(userName);
                    ApplicationRegistry.getServerRegistry(_mbean).setUserList(list);
                }
            }
            else if (_opData.getName().equals(OPERATION_CREATEUSER))
            {
                List<String> list = ApplicationRegistry.getServerRegistry(_mbean).getUsernames();
                Object userName = _opData.getParameterValue(OPERATION_PARAM_USERNAME);
                if ((list != null) && !list.isEmpty() && (userName != null))
                {
                    list.add(userName.toString());
                    ApplicationRegistry.getServerRegistry(_mbean).setUserList(list);
                }
            }
        }
        // end of custom code

        // Some mbeans have only "type" and no "name".
        String title = _mbean.getType();
        if (_mbean.getName() != null && _mbean.getName().length() != 0)
        {
            title = _mbean.getName();
        }

        if (_opData.isReturnTypeVoid())
        {
            ViewUtility.popupInfoMessage(title, OPERATION_SUCCESSFUL);
        }
        else if (_opData.isReturnTypeBoolean())
        {
            boolean success = Boolean.parseBoolean(result.toString());
            String message = success ? OPERATION_SUCCESSFUL : OPERATION_UNSUCCESSFUL;
            if(success)
            {
                ViewUtility.popupInfoMessage(title, message);
            }
            else
            {
                ViewUtility.popupErrorMessage(title, message);
            }
        }
        else if (_opData.getParameters() != null && !_opData.getParameters().isEmpty())
        {
            // Parameterized operations show results in a pop-up; parameterless ones inline.
            populateResults(result);
        }
        else
        {
            ViewUtility.disposeChildren(_resultsComposite);
            ViewUtility.populateCompositeWithData(_toolkit, _resultsComposite, result);
            _resultsComposite.layout();
            _form.layout();
        }
    }

    /**
     * Extracts the user names from a JMX tabular result.
     *
     * @param result expected to be a TabularDataSupport of rows containing USERNAME
     * @return the list of user names, or null when result is not tabular data
     */
    private List<String> extractUserList(Object result)
    {
        if (!(result instanceof TabularDataSupport))
        {
            return null;
        }

        TabularDataSupport tabularData = (TabularDataSupport)result;
        Collection<Object> records = tabularData.values();
        List<String> list = new ArrayList<String>();
        for (Object o : records)
        {
            CompositeData data = (CompositeData) o;
            if (data.containsKey(USERNAME))
            {
                list.add(data.get(USERNAME).toString());
            }
        }
        return list;
    }

    /**
     * Listener class for the operation parameters widget
     */
    private static class ParameterSelectionListener extends SelectionAdapter
    {
        public void widgetSelected(SelectionEvent e)
        {
            ParameterData parameter = (ParameterData)e.widget.getData();
            parameter.setValue(null);
            if (e.widget instanceof Combo)
            {
                Combo combo = (Combo)e.widget;
                // Index 0 is the "Select ..." placeholder — leave the value null for it.
                if (combo.getSelectionIndex() > 0)
                {
                    String item = combo.getItem(combo.getSelectionIndex());
                    parameter.setValueFromString(item);
                }
            }
            else if (e.widget instanceof org.eclipse.swt.widgets.List)
            {
                org.eclipse.swt.widgets.List list = (org.eclipse.swt.widgets.List)e.widget;
                String[] selectedItems = list.getSelection();
                if (selectedItems.length > 0)
                {
                    parameter.setValueFromString(selectedItems[0]);
                }
            }
        }
    }

    /**
     * Listener class for boolean parameter widgets
     */
    private static class BooleanSelectionListener extends SelectionAdapter
    {
        public void widgetSelected(SelectionEvent e)
        {
            ParameterData parameter = (ParameterData)(e.widget.getData());
            if (e.widget instanceof Button)
            {
                Button button = (Button)e.widget;
                parameter.setValue(button.getSelection());
            }
            else if (e.widget instanceof Combo)
            {
                Combo combo = (Combo)e.widget;
                String item = combo.getItem(combo.getSelectionIndex());
                parameter.setValueFromString(item);
            }
        }
    }

    /**
     * Listener class for the operation parameter value widget (Text field)
     */
    private static class KeyListenerImpl extends KeyAdapter
    {
        public void keyReleased(KeyEvent e)
        {
            if (!(e.widget instanceof Text))
                return;

            Text text = (Text)e.widget;
            // Get the parameters widget and assign the text to the parameter
            String strValue = text.getText();
            ParameterData parameter = (ParameterData)text.getData();
            try
            {
                parameter.setValueFromString(strValue);
            }
            catch(Exception ex)
            {
                // Exception occurred in setting parameter value.
                // ignore it. The value will not be assigned to the parameter
            }
        }
    }

    /**
     * Listener class for HeaderExchange's new binding widgets. Used when the new bindings are
     * being created for Header's Exchange.
     * Rebuilds the comma-separated "name=value" binding string from all filled rows
     * on every key release.
     */
    private class HeaderBindingKeyListener extends KeyAdapter
    {
        public void keyReleased(KeyEvent e)
        {
            // Parameter at index 1 holds the headers binding expression.
            ParameterData param = _opData.getParameters().get(1);
            StringBuffer paramValue = new StringBuffer();
            for (Entry<Text, Text> entry : headerBindingHashMap.entrySet())
            {
                Text nameText = entry.getKey();
                String name = nameText.getText();
                Text valueText = entry.getValue();
                String value = valueText.getText();
                // Only rows with both a name and a value contribute to the binding.
                if ((name != null) && (name.length() != 0) && (value != null) && (value.length() != 0))
                {
                    if (paramValue.length() != 0)
                    {
                        paramValue.append(",");
                    }
                    paramValue.append(name + "=" + value);
                }
            }

            param.setValue(paramValue.toString());
        }
    }

    /**
     * Listener class for verifying the user input with parameter type.
     * Rejects non-digit characters for integral parameter types.
     */
    private static class VerifyListenerImpl implements VerifyListener
    {
        public void verifyText(VerifyEvent event)
        {
            ParameterData parameter = (ParameterData)event.widget.getData();
            String text = event.text;
            char [] chars = new char [text.length ()];
            text.getChars(0, chars.length, chars, 0);

            String type = parameter.getType();
            if (type.equals("int") || type.equals("java.lang.Integer")
                || type.equals("long") || type.equals("java.lang.Long"))
            {
                for (int i=0; i<chars.length; i++)
                {
                    if (!('0' <= chars [i] && chars [i] <= '9'))
                    {
                        event.doit = false;
                        return;
                    }
                }
            }
        }
    }
}
package org.cfr.restlet.ext.shindig.http;

import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.net.SocketException;
import java.net.SocketTimeoutException;
import java.net.UnknownHostException;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.zip.GZIPInputStream;
import java.util.zip.Inflater;
import java.util.zip.InflaterInputStream;

import javax.servlet.http.HttpServletResponse;

import org.apache.commons.lang.StringUtils;
import org.apache.http.HttpEntity;
import org.apache.http.NoHttpResponseException;
import org.apache.http.conn.ConnectionPoolTimeoutException;
import org.apache.http.conn.HttpHostConnectException;
import org.apache.http.entity.HttpEntityWrapper;
import org.apache.http.util.ByteArrayBuffer;
import org.apache.shindig.common.uri.Uri;
import org.apache.shindig.gadgets.GadgetException;
import org.apache.shindig.gadgets.http.HttpFetcher;
import org.apache.shindig.gadgets.http.HttpRequest;
import org.apache.shindig.gadgets.http.HttpResponse;
import org.apache.shindig.gadgets.http.HttpResponseBuilder;
import org.restlet.Request;
import org.restlet.Response;
import org.restlet.data.Form;
import org.restlet.data.Method;
import org.restlet.data.Parameter;
import org.restlet.representation.InputRepresentation;
import org.restlet.representation.Representation;
import org.restlet.resource.ClientResource;
import org.restlet.util.Series;

import com.google.common.collect.ImmutableSet;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import com.google.inject.internal.Nullable;
import com.google.inject.internal.Preconditions;
import com.google.inject.name.Named;

/**
 * A simple HTTP fetcher implementation based on Apache httpclient. Not recommended for production deployments until
 * the following issues are addressed:
 * <p/>
 * 1. This class potentially allows access to resources behind an organization's firewall.
 * 2. This class does not enforce any limits on what is fetched from remote hosts.
 * Based on {@link org.apache.shindig.gadgets.http.BasicHttpFetcher}
 */
@Singleton
public class ClientHttpFetcher implements HttpFetcher {

    private static final int DEFAULT_CONNECT_TIMEOUT_MS = 5000;

    private static final int DEFAULT_READ_TIMEOUT_MS = 5000;

    private static final int DEFAULT_MAX_OBJECT_SIZE = 0; // no limit

    private static final long DEFAULT_SLOW_RESPONSE_WARNING = 10000;

    // mutable fields must be volatile
    private volatile int maxObjSize;

    private volatile long slowResponseWarning;

    private static final Logger LOG = Logger.getLogger(ClientHttpFetcher.class.getName());

    // Exception classes that are reported as a timeout rather than a server error.
    private final Set<Class<?>> TIMEOUT_EXCEPTIONS = ImmutableSet.<Class<?>> of(ConnectionPoolTimeoutException.class,
        SocketTimeoutException.class, SocketException.class, HttpHostConnectException.class,
        NoHttpResponseException.class, InterruptedException.class, UnknownHostException.class);

    /**
     * Creates a new fetcher using the default maximum object size and timeout --
     * no limit and 5 seconds.
     * @param basicHttpFetcherProxy The http proxy to use.
     */
    @Inject
    public ClientHttpFetcher(@Nullable @Named("org.apache.shindig.gadgets.http.basicHttpFetcherProxy") String basicHttpFetcherProxy) {
        this(DEFAULT_MAX_OBJECT_SIZE, DEFAULT_CONNECT_TIMEOUT_MS, DEFAULT_READ_TIMEOUT_MS, basicHttpFetcherProxy);
    }

    /**
     * Creates a new fetcher for fetching HTTP objects.  Not really suitable
     * for production use. Use of an HTTP proxy for security is also necessary
     * for production deployment.
     *
     * NOTE(review): connectionTimeoutMs, readTimeoutMs and basicHttpFetcherProxy are
     * currently accepted but never applied (see the empty timeout setters below) —
     * confirm whether this is intentional for the Restlet-based implementation.
     *
     * @param maxObjSize          Maximum size, in bytes, of the object we will fetch, 0 if no limit..
     * @param connectionTimeoutMs timeout, in milliseconds, for connecting to hosts.
     * @param readTimeoutMs       timeout, in milliseconds, for unresponsive connections
     * @param basicHttpFetcherProxy The http proxy to use.
     */
    public ClientHttpFetcher(int maxObjSize, int connectionTimeoutMs, int readTimeoutMs, String basicHttpFetcherProxy) {
        // Create and initialize HTTP parameters
        setMaxObjectSizeBytes(maxObjSize);
        setSlowResponseWarning(DEFAULT_SLOW_RESPONSE_WARNING);
    }

    /**
     * Entity wrapper that transparently gunzips the wrapped entity's content.
     */
    static class GzipDecompressingEntity extends HttpEntityWrapper {

        public GzipDecompressingEntity(final HttpEntity entity) {
            super(entity);
        }

        @Override
        public InputStream getContent() throws IOException, IllegalStateException {
            // the wrapped entity's getContent() decides about repeatability
            InputStream wrappedin = wrappedEntity.getContent();
            return new GZIPInputStream(wrappedin);
        }

        @Override
        public long getContentLength() {
            // length of ungzipped content is not known
            return -1;
        }
    }

    /**
     * Entity wrapper that transparently inflates (raw deflate) the wrapped entity's content.
     */
    static class DeflateDecompressingEntity extends HttpEntityWrapper {

        public DeflateDecompressingEntity(final HttpEntity entity) {
            super(entity);
        }

        @Override
        public InputStream getContent() throws IOException, IllegalStateException {
            // the wrapped entity's getContent() decides about repeatability
            InputStream wrappedin = wrappedEntity.getContent();
            return new InflaterInputStream(wrappedin, new Inflater(true));
        }

        @Override
        public long getContentLength() {
            // length of ungzipped content is not known
            return -1;
        }
    }

    /**
     * Fetches the given request through a Restlet {@link ClientResource}.
     * Timeout-class exceptions are mapped to {@link HttpResponse#timeout()};
     * everything else becomes an INTERNAL_SERVER_ERROR GadgetException.
     *
     * @param request the Shindig request to execute
     * @return the fetched response
     * @throws GadgetException on invalid request data or non-timeout failures
     */
    public HttpResponse fetch(org.apache.shindig.gadgets.http.HttpRequest request) throws GadgetException {
        Preconditions.checkNotNull(request);
        final String methodType = request.getMethod();

        final Response response;
        final long started = System.currentTimeMillis();

        // Break the request Uri to its components:
        Uri uri = request.getUri();
        if (StringUtils.isEmpty(uri.getAuthority())) {
            throw new GadgetException(GadgetException.Code.INVALID_USER_DATA, "Missing domain name for request: " + uri,
                HttpServletResponse.SC_BAD_REQUEST);
        }
        if (StringUtils.isEmpty(uri.getScheme())) {
            throw new GadgetException(GadgetException.Code.INVALID_USER_DATA, "Missing schema for request: " + uri,
                HttpServletResponse.SC_BAD_REQUEST);
        }
        String[] hostparts = uri.getAuthority().split(":");
        // NOTE(review): 'port' is validated here but never used afterwards — the full
        // uri string is handed to the Restlet Request below. Presumably kept only for
        // validation; confirm before removing.
        int port = -1; // default port
        if (hostparts.length > 2) {
            throw new GadgetException(GadgetException.Code.INVALID_USER_DATA, "Bad host name in request: "
                + uri.getAuthority(), HttpServletResponse.SC_BAD_REQUEST);
        }
        if (hostparts.length == 2) {
            try {
                port = Integer.parseInt(hostparts[1]);
            } catch (NumberFormatException e) {
                throw new GadgetException(GadgetException.Code.INVALID_USER_DATA, "Bad port number in request: "
                    + uri.getAuthority(), HttpServletResponse.SC_BAD_REQUEST);
            }
        }
        try {
            Method method = null;
            Representation representation = null;
            Series<Parameter> header = new Form();
            if ("POST".equals(methodType) || "PUT".equals(methodType)) {
                if (request.getPostBodyLength() > 0) {
                    representation = new InputRepresentation(request.getPostBody());
                }
                method = Method.valueOf(methodType);
            } else if ("GET".equals(methodType)) {
                method = Method.GET;
            } else if ("HEAD".equals(methodType)) {
                method = Method.HEAD;
            } else if ("DELETE".equals(methodType)) {
                method = Method.DELETE;
            }
            // Multi-valued headers are collapsed into a single comma-joined value.
            for (Map.Entry<String, List<String>> entry : request.getHeaders().entrySet()) {
                header.add(entry.getKey(), StringUtils.join(entry.getValue(), ','));
            }
            Request req = new Request(method, uri.toString());
            // TODO to verify
            req.getAttributes().put("org.restlet.http.headers", header);
            if (representation != null)
                req.setEntity(representation);
            ClientResource clientResource = new ClientResource(req, new Response(req));
            // HttpClient doesn't handle all cases when breaking url (specifically '_' in domain)
            // So lets pass it the url parsed:
            clientResource.handle();
            response = clientResource.getResponse();
            if (response == null) {
                throw new IOException("Unknown problem with request");
            }

            long now = System.currentTimeMillis();
            if (now - started > slowResponseWarning) {
                slowResponseWarning(request, started, now);
            }

            return makeResponse(response);

        } catch (Exception e) {
            long now = System.currentTimeMillis();

            // Find timeout exceptions, respond accordingly
            if (TIMEOUT_EXCEPTIONS.contains(e.getClass())) {
                LOG.info("Timeout for " + request.getUri() + " Exception: " + e.getClass().getName() + " - "
                    + e.getMessage() + " - " + (now - started) + "ms");
                return HttpResponse.timeout();
            }

            LOG.log(Level.INFO, "Got Exception fetching " + request.getUri() + " - " + (now - started) + "ms", e);

            // Separate shindig error from external error
            throw new GadgetException(GadgetException.Code.INTERNAL_SERVER_ERROR, e,
                HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
        }
    }

    /**
     * Called when a request takes too long.  Consider subclassing this if you want to do something other than logging
     * a warning .
     *
     * @param request the request that generated the slowrequest
     * @param started the time the request started, in milliseconds.
     * @param finished the time the request finished, in milliseconds.
     */
    protected void slowResponseWarning(HttpRequest request, long started, long finished) {
        LOG.warning("Slow response from " + request.getUri() + ' ' + (finished - started) + "ms");
    }

    /**
     * Change the global maximum fetch size (in bytes) for all fetches.
     *
     * @param maxObjectSizeBytes value for maximum number of bytes, or 0 for no limit
     */
    @Inject(optional = true)
    public void setMaxObjectSizeBytes(@Named("shindig.http.client.max-object-size-bytes") int maxObjectSizeBytes) {
        this.maxObjSize = maxObjectSizeBytes;
    }

    /**
     * Change the global threshold for warning about slow responses
     *
     * @param slowResponseWarning time in milliseconds after we issue a warning
     */
    @Inject(optional = true)
    public void setSlowResponseWarning(@Named("shindig.http.client.slow-response-warning") long slowResponseWarning) {
        this.slowResponseWarning = slowResponseWarning;
    }

    /**
     * Change the global connection timeout for all new fetches.
     *
     * NOTE(review): intentionally(?) a no-op in this Restlet-based port — the injected
     * value is discarded. Confirm whether Restlet client parameters should be set here.
     *
     * @param connectionTimeoutMs new connection timeout in milliseconds
     */
    @Inject(optional = true)
    public void setConnectionTimeoutMs(@Named("shindig.http.client.connection-timeout-ms") int connectionTimeoutMs) {
    }

    /**
     * Change the global read timeout for all new fetches.
     *
     * NOTE(review): intentionally(?) a no-op in this Restlet-based port — the injected
     * value is discarded. Confirm whether Restlet client parameters should be set here.
     *
     * @param connectionTimeoutMs new read timeout in milliseconds
     */
    @Inject(optional = true)
    public void setReadTimeoutMs(@Named("shindig.http.client.read-timeout-ms") int connectionTimeoutMs) {
    }

    /**
     * @param response The response to parse
     * @return A HttpResponse object made by consuming the response of the
     *         given HttpMethod.
     * @throws IOException when problems occur processing the body content
     */
    @SuppressWarnings("unchecked")
    private HttpResponse makeResponse(Response response) throws IOException {
        HttpResponseBuilder builder = new HttpResponseBuilder();

        Series<Parameter> headers = (Series<Parameter>) response.getAttributes().get("org.restlet.http.headers");
        if (headers != null) {
            for (Parameter param : headers) {
                if (param.getName() != null) {
                    builder.addHeader(param.getName(), param.getValue());
                }
            }
        }
        Representation entity = Preconditions.checkNotNull(response.getEntity());
        // NOTE(review): the 'entity != null' test below is always true — checkNotNull
        // above already throws on null. Kept as-is; the size guard is what matters.
        if (maxObjSize > 0 && entity != null && entity.getSize() > maxObjSize) {
            return HttpResponse.badrequest("Exceeded maximum number of bytes - " + maxObjSize);
        }
        return builder.setHttpStatusCode(response.getStatus().getCode()).setResponse(toByteArraySafe(entity)).create();
    }

    /**
     * This method is Safe replica version of org.apache.http.util.EntityUtils.toByteArray.
     * The try block embedding 'instream.read' has a corresponding catch block for 'EOFException'
     * (that's Ignored) and all other IOExceptions are let pass.
     *
     * @param entity
     * @return byte array containing the entity content. May be empty/null.
     * @throws IOException if an error occurs reading the input stream
     */
    public byte[] toByteArraySafe(final Representation entity) throws IOException {
        if (entity == null) {
            return null;
        }

        InputStream instream = entity.getStream();
        if (instream == null) {
            return new byte[] {};
        }
        Preconditions.checkArgument(entity.getSize() < Integer.MAX_VALUE,
            "HTTP entity too large to be buffered in memory");

        // The raw data stream (inside JDK) is read in a buffer of size '512'. The original code
        // org.apache.http.util.EntityUtils.toByteArray reads the unzipped data in a buffer of
        // 4096 byte. For any data stream that has a compression ratio lesser than 1/8, this may
        // result in the buffer/array overflow. Increasing the buffer size to '16384'. It's highly
        // unlikely to get data compression ratios lesser than 1/32 (3%).
        final int bufferLength = 16384;
        int i = (int) entity.getSize();
        if (i < 0) {
            // Unknown size: start with one buffer's worth; ByteArrayBuffer grows as needed.
            i = bufferLength;
        }
        ByteArrayBuffer buffer = new ByteArrayBuffer(i);
        try {
            byte[] tmp = new byte[bufferLength];
            int l;
            while ((l = instream.read(tmp)) != -1) {
                buffer.append(tmp, 0, l);
            }
        } catch (EOFException eofe) {
            /**
             * Ref: http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4040920
             * Due to a bug in JDK ZLIB (InflaterInputStream), unexpected EOF error can occur.
             * In such cases, even if the input stream is finished reading, the
             * 'Inflater.finished()' call erroneously returns 'false' and
             * 'java.util.zip.InflaterInputStream.fill' throws the 'EOFException'.
             * So for such case, ignore the Exception in case Exception Cause is
             * 'Unexpected end of ZLIB input stream'.
             *
             * Also, ignore this exception in case the exception has no message
             * body as this is the case where {@link GZIPInputStream#readUByte}
             * throws EOFException with empty message. A bug has been filed with Sun
             * and will be mentioned here once it is accepted.
             */
            if (instream.available() == 0
                && (eofe.getMessage() == null || eofe.getMessage().equals("Unexpected end of ZLIB input stream"))) {
                LOG.log(Level.FINE, "EOFException: ", eofe);
            } else {
                throw eofe;
            }
        } finally {
            instream.close();
        }
        return buffer.toByteArray();
    }
}
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.execution.testDiscovery.actions;

import com.intellij.codeInsight.actions.FormatChangedTextUtil;
import com.intellij.execution.ExecutionException;
import com.intellij.execution.Executor;
import com.intellij.execution.JavaTestConfigurationWithDiscoverySupport;
import com.intellij.execution.Location;
import com.intellij.execution.actions.ConfigurationContext;
import com.intellij.execution.actions.RunConfigurationProducer;
import com.intellij.execution.executors.DefaultRunExecutor;
import com.intellij.execution.runners.ExecutionEnvironmentBuilder;
import com.intellij.execution.runners.ExecutionUtil;
import com.intellij.execution.testDiscovery.TestDiscoveryConfigurationProducer;
import com.intellij.execution.testDiscovery.TestDiscoveryExtension;
import com.intellij.execution.testDiscovery.TestDiscoveryProducer;
import com.intellij.featureStatistics.FeatureUsageTracker;
import com.intellij.find.FindUtil;
import com.intellij.find.actions.CompositeActiveComponent;
import com.intellij.icons.AllIcons;
import com.intellij.ide.DataManager;
import com.intellij.lang.Language;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.actionSystem.impl.ActionButton;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.keymap.KeymapUtil;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ProjectFileIndex;
import com.intellij.openapi.ui.popup.JBPopup;
import com.intellij.openapi.ui.popup.PopupChooserBuilder;
import com.intellij.openapi.util.Couple;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.VcsDataKeys;
import com.intellij.openapi.vcs.changes.Change;
import com.intellij.openapi.vcs.changes.ContentRevision;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.ClassUtil;
import com.intellij.psi.util.PsiFormatUtil;
import com.intellij.psi.util.PsiFormatUtilBase;
import com.intellij.rt.coverage.testDiscovery.instrumentation.TestDiscoveryInstrumentationUtils;
import com.intellij.uast.UastMetaLanguage;
import com.intellij.ui.ActiveComponent;
import com.intellij.usages.UsageView;
import com.intellij.util.ArrayUtil;
import com.intellij.util.ObjectUtils;
import com.intellij.util.PsiNavigateUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.JBIterable;
import com.intellij.util.ui.EdtInvocationManager;
import com.intellij.util.ui.JBDimension;
import com.intellij.util.ui.tree.TreeModelAdapter;
import com.intellij.vcsUtil.VcsFileUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.uast.UClass;
import org.jetbrains.uast.UMethod;
import org.jetbrains.uast.UastContextKt;
import org.jetbrains.uast.UastUtils;

import javax.swing.*;
import javax.swing.event.TreeModelEvent;
import javax.swing.tree.TreeModel;
import java.awt.event.ActionEvent;
import java.util.*;
import java.util.stream.Collectors;

import static com.intellij.openapi.actionSystem.CommonDataKeys.*;
import static com.intellij.openapi.util.Pair.pair;

/**
 * Action that shows the tests discovered (by the test-discovery index) to be affected
 * by the current context: a method or class at the caret, selected files, or VCS changes.
 * Results are presented in a {@link DiscoveredTestsTree} popup with "run all" and
 * "pin to Find Usages" buttons.
 */
public class ShowAffectedTestsAction extends AnAction {
  private static final String RUN_ALL_ACTION_TEXT = "Run All Affected Tests";

  @Override
  public void update(@NotNull AnActionEvent e) {
    // Visible/enabled only when discovery is on and at least one context source
    // (caret method, caret class, selected files, or VCS changes) is available.
    e.getPresentation().setEnabledAndVisible(
      isEnabled(e.getProject()) &&
      (findMethodAtCaret(e) != null ||
       findClassAtCaret(e) != null ||
       !findFilesInContext(e).isEmpty() ||
       e.getData(VcsDataKeys.CHANGES) != null)
    );
  }

  @Override
  public void actionPerformed(@NotNull AnActionEvent e) {
    Project project = e.getProject();
    assert project != null;
    // Context resolution order: caret method > caret class > VCS changes > selected files.
    PsiMethod method = findMethodAtCaret(e);
    if (method != null) {
      showDiscoveredTestsByPsiMethod(project, method, e);
      return;
    }
    PsiClass psiClass = findClassAtCaret(e);
    if (psiClass != null) {
      showDiscoveredTestsByPsiClass(project, psiClass, e);
      return;
    }
    if (e.getData(VcsDataKeys.CHANGES) != null) {
      showDiscoveredTestsByChanges(e);
      return;
    }
    List<VirtualFile> virtualFiles = findFilesInContext(e);
    if (!virtualFiles.isEmpty()) {
      showDiscoveredTestsByFile(project, virtualFiles, e);
    }
  }

  /**
   * Shows tests affected by the given files (recursively including children),
   * matched by project-relative paths with a leading "/".
   */
  private static void showDiscoveredTestsByFile(@NotNull Project project, @NotNull List<VirtualFile> files, @NotNull AnActionEvent e) {
    VirtualFile projectBasePath = getBasePathAsVirtualFile(project);
    if (projectBasePath == null) return;
    DiscoveredTestsTree tree = showTree(project, e.getDataContext(), createTitle(files));
    FeatureUsageTracker.getInstance().triggerFeatureUsed("test.discovery");
    // Path collection and index lookup run off the EDT.
    ApplicationManager.getApplication().executeOnPooledThread(() -> {
      JBIterable<String> paths = JBIterable
        .from(files)
        .flatMap(f -> VfsUtil.collectChildrenRecursively(f))
        .map(f -> VcsFileUtil.getRelativeFilePath(f, projectBasePath))
        .filter(Objects::nonNull)
        .map(p -> "/" + p);
      if (paths.isNotEmpty()) {
        processMethodsAsync(project, PsiMethod.EMPTY_ARRAY, paths.toList(), createTreeProcessor(tree), () -> tree.setPaintBusy(false));
      }
    });
  }

  /** Builds a short human-readable popup title from up to two file names. */
  @NotNull
  private static String createTitle(@NotNull List<VirtualFile> files) {
    if (files.isEmpty()) return "Empty Selection";
    String firstName = files.get(0).getName();
    if (files.size() == 1) return firstName;
    if (files.size() == 2) return firstName + " and " + files.get(1).getName();
    return firstName + " et al.";
  }

  /** Shows tests affected by any method of {@code psiClass} (class-level query: method key is null). */
  private static void showDiscoveredTestsByPsiClass(@NotNull Project project, @NotNull PsiClass psiClass, @NotNull AnActionEvent e) {
    if (DumbService.isDumb(project)) return;
    DataContext dataContext = DataManager.getInstance().getDataContext(e.getRequiredData(EDITOR).getContentComponent());
    FeatureUsageTracker.getInstance().triggerFeatureUsed("test.discovery");
    String presentableName = PsiFormatUtil.formatClass(psiClass, PsiFormatUtilBase.SHOW_NAME);
    DiscoveredTestsTree tree = showTree(project, dataContext, presentableName);
    ApplicationManager.getApplication().executeOnPooledThread(() -> {
      // Re-check dumb mode: indexing may have started since the action fired.
      if (DumbService.isDumb(project)) return;
      String className = ReadAction.compute(() -> DiscoveredTestsTreeModel.getClassName(psiClass));
      if (className == null) return;
      // null method name means "all methods of this class" for the discovery query.
      List<Couple<String>> classesAndMethods = ContainerUtil.newSmartList(Couple.of(className, null));
      processTestDiscovery(project, createTreeProcessor(tree), classesAndMethods, Collections.emptyList());
      EdtInvocationManager.getInstance().invokeLater(() -> tree.setPaintBusy(false));
    });
  }

  /** Shows tests affected by a single method at the caret. */
  private static void showDiscoveredTestsByPsiMethod(@NotNull Project project, @NotNull PsiMethod method, @NotNull AnActionEvent e) {
    Couple<String> key = getMethodKey(method);
    if (key == null) return;
    DataContext dataContext = DataManager.getInstance().getDataContext(e.getRequiredData(EDITOR).getContentComponent());
    FeatureUsageTracker.getInstance().triggerFeatureUsed("test.discovery");
    String presentableName = PsiFormatUtil.formatMethod(method, PsiSubstitutor.EMPTY, PsiFormatUtilBase.SHOW_CONTAINING_CLASS | PsiFormatUtilBase.SHOW_NAME, 0);
    DiscoveredTestsTree tree = showTree(project, dataContext, presentableName);
    processMethodsAsync(project, new PsiMethod[]{method}, Collections.emptyList(), createTreeProcessor(tree), () -> tree.setPaintBusy(false));
  }

  /** Processor that feeds each discovered (class, method, parameter) triple into the tree and never stops early. */
  @NotNull
  private static TestDiscoveryProducer.PsiTestProcessor createTreeProcessor(@NotNull DiscoveredTestsTree tree) {
    return (clazz, method, parameter) -> {
      tree.addTest(clazz, method, parameter);
      return true;
    };
  }

  private static void showDiscoveredTestsByChanges(@NotNull AnActionEvent e) {
    Change[] changes = e.getRequiredData(VcsDataKeys.CHANGES);
    Project project = e.getProject();
    assert project != null;
    showDiscoveredTestsByChanges(project, changes, "Selected Changes", e.getDataContext());
  }

  /**
   * Shows tests affected by the given VCS changes: both by the methods the changes touch
   * and by the affected file paths.
   */
  public static void showDiscoveredTestsByChanges(@NotNull Project project,
                                                  @NotNull Change[] changes,
                                                  @NotNull String title,
                                                  @NotNull DataContext dataContext) {
    DiscoveredTestsTree tree = showTree(project, dataContext, title);
    FeatureUsageTracker.getInstance().triggerFeatureUsed("test.discovery.selected.changes");
    ApplicationManager.getApplication().executeOnPooledThread(() -> {
      PsiMethod[] methods = findMethods(project, changes);
      List<String> filePaths = getRelativeAffectedPaths(project, Arrays.asList(changes));
      processMethodsAsync(project, methods, filePaths, createTreeProcessor(tree), () -> tree.setPaintBusy(false));
    });
  }

  /**
   * Collects the physical PSI methods touched by {@code changes}, restricted to source files
   * in a UAST-supported (JVM) language. Runs under a committed-document read action.
   */
  @NotNull
  public static PsiMethod[] findMethods(@NotNull Project project, @NotNull Change... changes) {
    UastMetaLanguage jvmLanguage = Language.findInstance(UastMetaLanguage.class);
    return PsiDocumentManager.getInstance(project).commitAndRunReadAction(
      () -> FormatChangedTextUtil.getInstance().getChangedElements(project, changes, file -> {
        // Returning null skips the file entirely.
        if (DumbService.isDumb(project) || project.isDisposed() || !file.isValid()) return null;
        ProjectFileIndex index = ProjectFileIndex.getInstance(project);
        if (!index.isInSource(file)) return null;
        PsiFile psiFile = PsiManager.getInstance(project).findFile(file);
        if (psiFile == null || !jvmLanguage.matchesLanguage(psiFile.getLanguage())) return null;
        Document document = FileDocumentManager.getInstance().getDocument(file);
        if (document == null) return null;
        // Walk the whole file and record the source PSI of every UAST method.
        List<PsiElement> physicalMethods = ContainerUtil.newSmartList();
        psiFile.accept(new PsiRecursiveElementWalkingVisitor() {
          @Override
          public void visitElement(PsiElement element) {
            UMethod method = UastContextKt.toUElement(element, UMethod.class);
            if (method != null) {
              ContainerUtil.addAllNotNull(physicalMethods, method.getSourcePsi());
            }
            super.visitElement(element);
          }
        });
        return physicalMethods;
      }).stream()
        // Map changed source elements back to Java PSI methods, dropping non-methods.
        .map(m -> UastContextKt.toUElement(m))
        .filter(Objects::nonNull)
        .map(m -> ObjectUtils.tryCast(m.getJavaPsi(), PsiMethod.class))
        .filter(Objects::nonNull)
        .toArray(PsiMethod.ARRAY_FACTORY::create));
  }

  /** The feature is available outside dumb mode when the registry flag is on or the IDE runs in internal mode. */
  public static boolean isEnabled(@Nullable Project project) {
    if (project == null || DumbService.isDumb(project)) return false;
    return Registry.is(TestDiscoveryExtension.TEST_DISCOVERY_REGISTRY_KEY) || ApplicationManager.getApplication().isInternal();
  }

  /** Files from the selection, falling back to the context PSI file; only local-filesystem files are kept. */
  @NotNull
  private static List<VirtualFile> findFilesInContext(@NotNull AnActionEvent event) {
    VirtualFile[] virtualFiles = event.getData(VIRTUAL_FILE_ARRAY);
    if (virtualFiles == null || virtualFiles.length == 0) {
      PsiFile file = event.getData(PSI_FILE);
      if (file != null) {
        virtualFiles = new VirtualFile[]{file.getVirtualFile()};
      }
    }
    return virtualFiles == null
           ? Collections.emptyList()
           : Arrays.stream(virtualFiles).filter(v -> v.isInLocalFileSystem()).collect(Collectors.toList());
  }

  @Nullable
  private static PsiMethod findMethodAtCaret(@NotNull AnActionEvent e) {
    UMethod uMethod = UastUtils.findContaining(findElementAtCaret(e), UMethod.class);
    return uMethod == null ? null : ObjectUtils.tryCast(uMethod.getJavaPsi(), PsiMethod.class);
  }

  @Nullable
  private static PsiClass findClassAtCaret(@NotNull AnActionEvent e) {
    UClass uClass = UastUtils.findContaining(findElementAtCaret(e), UClass.class);
    return uClass == null ? null : ObjectUtils.tryCast(uClass.getJavaPsi(), PsiClass.class);
  }

  /**
   * The PSI element at the caret; when the caret sits on whitespace, prefers the
   * non-whitespace element immediately before it.
   */
  @Nullable
  private static PsiElement findElementAtCaret(@NotNull AnActionEvent e) {
    Editor editor = e.getData(EDITOR);
    PsiFile file = e.getData(PSI_FILE);
    if (editor == null || file == null) return null;
    int offset = editor.getCaretModel().getOffset();
    PsiElement at = file.findElementAt(offset);
    if (at instanceof PsiWhiteSpace && offset > 0) {
      PsiElement prev = file.findElementAt(offset - 1);
      if (!(prev instanceof PsiWhiteSpace)) return prev;
    }
    return at;
  }

  /**
   * Creates and shows the results popup: a {@link DiscoveredTestsTree} with a
   * "run all" button, a "pin to Find Usages" button, and a live caption that
   * tracks the test/class counts as results stream in.
   */
  @NotNull
  private static DiscoveredTestsTree showTree(@NotNull Project project, @NotNull DataContext dataContext, @NotNull String title) {
    DiscoveredTestsTree tree = new DiscoveredTestsTree(title);
    String initTitle = "Tests for " + title;
    Ref<JBPopup> ref = new Ref<>();
    ConfigurationContext context = ConfigurationContext.getFromContext(dataContext);
    ActiveComponent runButton =
      createButton(RUN_ALL_ACTION_TEXT, AllIcons.Actions.Execute, () -> runAllDiscoveredTests(project, tree, ref, context, initTitle), tree);
    // "Pin" moves the current results into the Find Usages toolwindow and closes the popup.
    Runnable pinActionListener = () -> {
      UsageView view = FindUtil.showInUsageView(null, tree.getTestMethods(), param -> param, initTitle, p -> {
        p.setCodeUsages(false); // don't show r/w, imports filtering actions
        p.setUsagesWord("test");
        p.setMergeDupLinesAvailable(false);
        p.setUsageTypeFilteringAvailable(false);
        p.setExcludeAvailable(false);
      }, project);
      if (view != null) {
        view.addButtonToLowerPane(new AbstractAction(RUN_ALL_ACTION_TEXT, AllIcons.Actions.Execute) {
          @Override
          public void actionPerformed(ActionEvent e) {
            runAllDiscoveredTests(project, tree, ref, context, initTitle);
          }
        });
      }
      JBPopup popup = ref.get();
      if (popup != null) {
        popup.cancel();
      }
    };
    KeyStroke findUsageKeyStroke = findUsagesKeyStroke();
    String pinTooltip =
      "Open Find Usages Toolwindow" + (findUsageKeyStroke == null ? "" : " " + KeymapUtil.getKeystrokeText(findUsageKeyStroke));
    ActiveComponent pinButton = createButton(pinTooltip, AllIcons.General.Pin_tab, pinActionListener, tree);
    PopupChooserBuilder builder = new PopupChooserBuilder(tree)
      .setTitle(initTitle)
      .setMovable(true)
      .setResizable(true)
      .setCommandButton(new CompositeActiveComponent(pinButton))
      .setSettingButton(new CompositeActiveComponent(runButton).getComponent())
      .setItemChoosenCallback(() -> PsiNavigateUtil.navigate(tree.getSelectedElement()))
      .registerKeyboardAction(findUsageKeyStroke, __ -> pinActionListener.run())
      .setMinSize(new JBDimension(500, 300))
      .setDimensionServiceKey(ShowAffectedTestsAction.class.getSimpleName());
    JBPopup popup = builder.createPopup();
    ref.set(popup);
    TreeModel model = tree.getModel();
    // Tie the tree's lifetime to the popup.
    Disposer.register(popup, tree);
    model.addTreeModelListener(new TreeModelAdapter() {
      @Override
      protected void process(@NotNull TreeModelEvent event, @NotNull EventType type) {
        // Keep the caption in sync with the (asynchronously growing) result counts.
        int testsCount = tree.getTestCount();
        int classesCount = tree.getTestClassesCount();
        popup.setCaption("Found " + testsCount + " " + StringUtil.pluralize("Test", testsCount) +
                         " in " + classesCount + " " + StringUtil.pluralize("Class", classesCount) + " for " + title);
      }
    });
    popup.showInBestPositionFor(dataContext);
    return tree;
  }

  /**
   * Runs {@link #processMethods} on a pooled thread; {@code doWhenDone} (if any) is
   * invoked on the EDT afterwards. No-op in dumb mode.
   */
  public static void processMethodsAsync(@NotNull Project project,
                                         @NotNull PsiMethod[] methods,
                                         @NotNull List<String> filePaths,
                                         @NotNull TestDiscoveryProducer.PsiTestProcessor processor,
                                         @Nullable Runnable doWhenDone) {
    if (DumbService.isDumb(project)) return;
    ApplicationManager.getApplication().executeOnPooledThread(() -> {
      processMethods(project, methods, filePaths, processor);
      if (doWhenDone != null) {
        EdtInvocationManager.getInstance().invokeLater(doWhenDone);
      }
    });
  }

  public static void processMethods(@NotNull Project project,
                                    @NotNull PsiMethod[] methods,
                                    @NotNull List<String> filePaths,
                                    @NotNull TestDiscoveryProducer.PsiTestProcessor processor) {
    // Method keys (class FQN + signature) must be computed under a read action.
    List<Couple<String>> classesAndMethods = ReadAction.compute(() -> Arrays.stream(methods)
      .map(method -> getMethodKey(method)).filter(Objects::nonNull).collect(Collectors.toList()));
    processTestDiscovery(project, processor, classesAndMethods, filePaths);
  }

  /**
   * Queries the discovery index once per test-framework producer and resolves each
   * discovered test back to PSI (with alternative resolve enabled) before handing it
   * to {@code processor}.
   */
  private static void processTestDiscovery(@NotNull Project project,
                                           @NotNull TestDiscoveryProducer.PsiTestProcessor processor,
                                           @NotNull List<Couple<String>> classesAndMethods,
                                           @NotNull List<String> filePaths) {
    if (DumbService.isDumb(project)) return;
    GlobalSearchScope scope = GlobalSearchScope.projectScope(project);
    for (TestDiscoveryConfigurationProducer producer : getRunConfigurationProducers(project)) {
      byte frameworkId =
        ((JavaTestConfigurationWithDiscoverySupport)producer.getConfigurationFactory().createTemplateConfiguration(project))
          .getTestFrameworkId();
      TestDiscoveryProducer.consumeDiscoveredTests(project, classesAndMethods, frameworkId, filePaths, (testClass, testMethod, parameter) -> {
        // One-element arrays act as mutable holders for the read-action lambda.
        PsiClass[] testClassPsi = {null};
        PsiMethod[] testMethodPsi = {null};
        ReadAction.run(() -> DumbService.getInstance(project).runWithAlternativeResolveEnabled(() -> {
          testClassPsi[0] = ClassUtil.findPsiClass(PsiManager.getInstance(project), testClass, null, true, scope);
          boolean checkBases = parameter != null; // check bases for parameterized tests
          if (testClassPsi[0] != null) {
            testMethodPsi[0] = ArrayUtil.getFirstElement(testClassPsi[0].findMethodsByName(testMethod, checkBases));
          }
        }));
        if (testClassPsi[0] != null) {
          if (!processor.process(testClassPsi[0], testMethodPsi[0], parameter)) return false;
        }
        return true;
      });
    }
  }

  /**
   * Builds a toolbar-style button that starts disabled and enables itself as soon as
   * the tree contains at least one test.
   */
  @NotNull
  private static ActiveComponent createButton(@NotNull String text, @NotNull Icon icon, @NotNull Runnable listener, @NotNull DiscoveredTestsTree tree) {
    return new ActiveComponent.Adapter() {
      @NotNull
      @Override
      public JComponent getComponent() {
        Presentation presentation = new Presentation();
        presentation.setText(text);
        presentation.setDescription(text);
        presentation.setIcon(icon);
        presentation.setEnabled(false);
        tree.getModel().addTreeModelListener(new TreeModelAdapter() {
          @Override
          protected void process(@NotNull TreeModelEvent event, @NotNull EventType type) {
            if (!presentation.isEnabled() && tree.getTestCount() != 0) {
              presentation.setEnabled(true);
            }
          }
        });
        return new ActionButton(new AnAction() {
          @Override
          public void actionPerformed(@NotNull AnActionEvent e) {
            listener.run();
          }
        }, presentation, "ShowDiscoveredTestsToolbar", ActionToolbar.DEFAULT_MINIMUM_BUTTON_SIZE);
      }
    };
  }

  /**
   * Runs every test currently in the tree via the producer that can handle the most of
   * them, then closes the popup.
   */
  private static void runAllDiscoveredTests(@NotNull Project project,
                                            @NotNull DiscoveredTestsTree tree,
                                            @NotNull Ref<? extends JBPopup> ref,
                                            @NotNull ConfigurationContext context,
                                            @NotNull String title) {
    Executor executor = DefaultRunExecutor.getRunExecutorInstance();
    Module targetModule = TestDiscoveryConfigurationProducer.detectTargetModule(tree.getContainingModules(), project);
    //first producer with results will be picked
    List<Location<PsiMethod>> testMethods = Arrays.stream(tree.getTestMethods())
      .map(TestMethodUsage::calculateLocation)
      .filter(Objects::nonNull)
      .collect(Collectors.toList());
    getRunConfigurationProducers(project).stream()
      .map(producer -> pair(producer, ContainerUtil.filter(testMethods, producer::isApplicable)))
      .max(Comparator.comparingInt(p -> p.second.size()))
      .map(p -> {
        @SuppressWarnings("unchecked")
        Location<PsiMethod>[] locations = p.second.toArray(new Location[0]);
        return p.first.createProfile(locations, targetModule, context, title);
      })
      .ifPresent(profile -> {
        try {
          ExecutionEnvironmentBuilder.create(project, executor, profile).buildAndExecute();
        }
        catch (ExecutionException e) {
          ExecutionUtil.handleExecutionError(project, executor.getToolWindowId(), title, e);
        }
        JBPopup popup = ref.get();
        if (popup != null) {
          popup.cancel();
        }
      });
  }

  /**
   * Key identifying a method in the discovery index: (class FQN, signature).
   * Returns null in dumb mode or when the class name cannot be determined.
   */
  @Nullable
  public static Couple<String> getMethodKey(@NotNull PsiMethod method) {
    if (DumbService.isDumb(method.getProject())) return null;
    PsiClass c = method.isValid() ? method.getContainingClass() : null;
    String fqn = c != null ? DiscoveredTestsTreeModel.getClassName(c) : null;
    return fqn == null ? null : Couple.of(fqn, methodSignature(method));
  }

  /** Name + separator + ASM descriptor; constructors are recorded as {@code <init>}. */
  @NotNull
  private static String methodSignature(@NotNull PsiMethod method) {
    String tail = TestDiscoveryInstrumentationUtils.SEPARATOR + ClassUtil.getAsmMethodSignature(method);
    return (method.isConstructor() ? "<init>" : method.getName()) + tail;
  }

  /** The first keystroke bound to Find Usages, if any — reused as the "pin" shortcut. */
  @Nullable
  private static KeyStroke findUsagesKeyStroke() {
    AnAction action = ActionManager.getInstance().getAction(IdeActions.ACTION_FIND_USAGES);
    ShortcutSet shortcutSet = action == null ? null : action.getShortcutSet();
    return shortcutSet == null ? null : KeymapUtil.getKeyStroke(shortcutSet);
  }

  @NotNull
  private static List<TestDiscoveryConfigurationProducer> getRunConfigurationProducers(@NotNull Project project) {
    return RunConfigurationProducer.getProducers(project)
      .stream()
      .filter(producer -> producer instanceof TestDiscoveryConfigurationProducer)
      .map(producer -> (TestDiscoveryConfigurationProducer)producer)
      .collect(Collectors.toList());
  }

  /** Project-relative paths (prefixed with "/") of the files touched by {@code changes}. */
  @NotNull
  public static List<String> getRelativeAffectedPaths(@NotNull Project project, @NotNull Collection<? extends Change> changes) {
    VirtualFile baseDir = getBasePathAsVirtualFile(project);
    return baseDir == null ? Collections.emptyList() :
           changes.stream()
             .map(change -> relativePath(baseDir, change))
             .filter(Objects::nonNull)
             .map(s -> "/" + s)
             .collect(Collectors.toList());
  }

  @Nullable
  static VirtualFile getBasePathAsVirtualFile(@NotNull Project project) {
    String basePath = project.getBasePath();
    return basePath == null ? null : LocalFileSystem.getInstance().findFileByPath(basePath);
  }

  /** Path of a change relative to {@code baseDir}; for deletions, falls back to the before-revision path. */
  @Nullable
  private static String relativePath(@NotNull VirtualFile baseDir, @NotNull Change change) {
    VirtualFile file = change.getVirtualFile();
    if (file == null) {
      ContentRevision before = change.getBeforeRevision();
      if (before != null) {
        return VcsFileUtil.relativePath(baseDir, before.getFile());
      }
    }
    return file == null ? null : VfsUtilCore.getRelativePath(file, baseDir);
  }
}
package com.ctrip.xpipe.redis.keeper.store;

import com.ctrip.xpipe.redis.core.protocal.protocal.EofType;
import com.ctrip.xpipe.redis.core.protocal.protocal.LenEofType;
import com.ctrip.xpipe.redis.core.store.*;
import com.ctrip.xpipe.redis.keeper.config.KeeperConfig;
import com.ctrip.xpipe.redis.keeper.monitor.KeeperMonitor;
import com.ctrip.xpipe.redis.keeper.store.meta.DefaultMetaStore;
import com.ctrip.xpipe.utils.FileUtils;
import io.netty.buffer.ByteBuf;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;

// TODO make methods correctly sequenced
/**
 * Replication store backed by files under {@code baseDir}: an RDB snapshot
 * (current store in {@link #rdbStoreRef}, superseded ones kept in
 * {@link #previousRdbStores} until their ref count drops to zero) plus a
 * command stream ({@link #cmdStore}) and replication metadata ({@link #metaStore}).
 */
public class DefaultReplicationStore extends AbstractStore implements ReplicationStore {

	private final static Logger logger = LoggerFactory.getLogger(DefaultReplicationStore.class);

	// Matches files created by newRdbFileName().
	private final static FileFilter RDB_FILE_FILTER = new FileFilter() {
		@Override
		public boolean accept(File path) {
			return path.isFile() && path.getName().startsWith("rdb_");
		}
	};

	private File baseDir;

	// Current RDB snapshot; may hold null until the first beginRdb()/rdbUpdated().
	private AtomicReference<RdbStore> rdbStoreRef = new AtomicReference<>();

	// Superseded RDB stores awaiting gc() once no reader references them.
	private ConcurrentMap<RdbStore, Boolean> previousRdbStores = new ConcurrentHashMap<>();

	private DefaultCommandStore cmdStore;

	private MetaStore metaStore;

	private int cmdFileSize;

	private KeeperConfig config;

	// Guards the rdb-swap and full-sync-check critical sections.
	private Object lock = new Object();

	private AtomicInteger rdbUpdateCount = new AtomicInteger();

	private KeeperMonitor keeperMonitor;

	/**
	 * Opens (or creates) the store under {@code baseDir}; when persisted metadata
	 * points at an existing RDB file, the previous RDB and command stores are reattached.
	 * Stale rdb_* files not referenced by metadata are deleted.
	 *
	 * @throws IOException if the meta store cannot be read or the stores cannot be opened
	 */
	public DefaultReplicationStore(File baseDir, KeeperConfig config, String keeperRunid, KeeperMonitor keeperMonitor) throws IOException {
		this.baseDir = baseDir;
		this.cmdFileSize = config.getReplicationStoreCommandFileSize();
		this.config = config;
		this.keeperMonitor = keeperMonitor;
		metaStore = new DefaultMetaStore(baseDir, keeperRunid);

		ReplicationStoreMeta meta = metaStore.dupReplicationStoreMeta();

		if (meta != null && meta.getRdbFile() != null) {
			File rdb = new File(baseDir, meta.getRdbFile());
			if (rdb.isFile()) {
				rdbStoreRef.set(new DefaultRdbStore(rdb, meta.getRdbLastOffset(), initEofType(meta)));
				cmdStore = new DefaultCommandStore(new File(baseDir, meta.getCmdFilePrefix()), cmdFileSize,
						config.getReplicationStoreMinTimeMilliToGcAfterCreate(),
						() -> config.getReplicationStoreCommandFileNumToKeep(), keeperMonitor);
			}
		}

		removeUnusedRdbFiles();
	}

	private EofType initEofType(ReplicationStoreMeta meta) {
		// must has length field
		logger.info("[initEofType][leneof]{}", meta);
		return new LenEofType(meta.getRdbFileSize());
	}

	/** Deletes rdb_* files on disk that do not belong to the currently attached RDB store. */
	private void removeUnusedRdbFiles() {

		@SuppressWarnings("resource")
		RdbStore rdbStore = rdbStoreRef.get();

		for (File rdbFile : rdbFilesOnFS()) {
			if (rdbStore == null || !rdbStore.sameRdbFile(rdbFile)) {
				logger.info("[removeUnusedRdbFile] {}", rdbFile);
				rdbFile.delete();
			}
		}
	}

	/**
	 * Starts a brand new RDB + command-file generation for the given replication id
	 * and master offset, replacing the current stores.
	 */
	@Override
	public RdbStore beginRdb(String replId, long rdbOffset, EofType eofType) throws IOException {
		makeSureOpen();

		logger.info("Begin RDB replId:{}, rdbOffset:{}, eof:{}", replId, rdbOffset, eofType);
		baseDir.mkdirs();

		String rdbFile = newRdbFileName();
		String cmdFilePrefix = "cmd_" + UUID.randomUUID().toString() + "_";
		ReplicationStoreMeta newMeta = metaStore.rdbBegun(replId, rdbOffset + 1, rdbFile, eofType, cmdFilePrefix);

		// beginOffset - 1 == masteroffset
		RdbStore rdbStore = new DefaultRdbStore(new File(baseDir, newMeta.getRdbFile()), newMeta.getBeginOffset() - 1, eofType);
		rdbStore.addListener(new ReplicationStoreRdbFileListener(rdbStore));
		rdbStoreRef.set(rdbStore);
		cmdStore = new DefaultCommandStore(new File(baseDir, newMeta.getCmdFilePrefix()), cmdFileSize,
				config.getReplicationStoreMinTimeMilliToGcAfterCreate(),
				() -> config.getReplicationStoreCommandFileNumToKeep(), keeperMonitor);

		return rdbStoreRef.get();
	}

	@Override
	public DumpedRdbStore prepareNewRdb() throws IOException {
		makeSureOpen();
		DumpedRdbStore rdbStore = new DefaultDumpedRdbStore(new File(baseDir, newRdbFileName()));
		return rdbStore;
	}

	/**
	 * Swaps in a freshly dumped RDB as the current snapshot; the previous snapshot
	 * (if any) is parked in {@link #previousRdbStores} until gc() can destroy it.
	 *
	 * @throws IllegalStateException if the dumped file is not located under {@link #baseDir}
	 */
	@Override
	public void rdbUpdated(DumpedRdbStore dumpedRdbStore) throws IOException {
		makeSureOpen();

		synchronized (lock) {
			rdbUpdateCount.incrementAndGet();

			File dumpedRdbFile = dumpedRdbStore.getRdbFile();
			if (!baseDir.equals(dumpedRdbFile.getParentFile())) {
				throw new IllegalStateException(
						"update rdb error, filePath:" + dumpedRdbFile.getAbsolutePath() + ", baseDir:" + baseDir.getAbsolutePath());
			}
			EofType eofType = dumpedRdbStore.getEofType();
			long rdbOffset = dumpedRdbStore.rdbOffset();

			@SuppressWarnings("unused")
			ReplicationStoreMeta metaDup = metaStore.rdbUpdated(dumpedRdbFile.getName(), eofType, rdbOffset);

			dumpedRdbStore.addListener(new ReplicationStoreRdbFileListener(dumpedRdbStore));

			logger.info("[rdbUpdated] new file {}, eofType {}, rdbOffset {}", dumpedRdbFile, eofType, rdbOffset);
			RdbStore oldRdbStore = rdbStoreRef.get();
			rdbStoreRef.set(dumpedRdbStore);
			// BUGFIX: oldRdbStore is null when no RDB existed yet; ConcurrentHashMap
			// rejects null keys with NPE, so only park a real previous store.
			if (oldRdbStore != null) {
				previousRdbStores.put(oldRdbStore, Boolean.TRUE);
			}
		}
	}

	// for test only
	public CommandStore getCommandStore() {
		return cmdStore;
	}

	// for test only
	public RdbStore getRdbStore() {
		return rdbStoreRef.get();
	}

	@Override
	public long beginOffsetWhenCreated() {

		if (metaStore == null || metaStore.beginOffset() == null) {
			throw new IllegalStateException("meta store null:" + this);
		}
		return metaStore.beginOffset();
	}

	/** End offset of the replication stream, or DEFAULT_END_OFFSET before any data is stored. */
	@Override
	public long getEndOffset() {
		if (metaStore == null || metaStore.beginOffset() == null || cmdStore == null) {
			// TODO
			return ReplicationStoreMeta.DEFAULT_END_OFFSET;
		} else {
			long beginOffset = metaStore.beginOffset();
			long totalLength = cmdStore.totalLength();

			logger.debug("[getEndOffset]B:{}, L:{}", beginOffset, totalLength);
			return beginOffset + totalLength - 1;
		}
	}

	@Override
	public long firstAvailableOffset() {
		long beginOffset = metaStore == null ? ReplicationStoreMeta.DEFAULT_BEGIN_OFFSET : metaStore.beginOffset();
		long minCmdOffset = cmdStore == null ? 0 : cmdStore.lowestAvailableOffset();
		long firstAvailableOffset = minCmdOffset + beginOffset;
		return firstAvailableOffset;
	}

	@Override
	public MetaStore getMetaStore() {
		return metaStore;
	}

	@Override
	public void shiftReplicationId(String newReplId) throws IOException {
		logger.info("[shiftReplicationId]{}", newReplId);
		if (newReplId != null) {
			this.metaStore.shiftReplicationId(newReplId, getEndOffset());
		}
	}

	private File[] rdbFilesOnFS() {
		File[] rdbFiles = baseDir.listFiles(RDB_FILE_FILTER);
		return rdbFiles != null ? rdbFiles : new File[0];
	}

	/**
	 * Checks whether a full sync can be served from the current RDB. On success the
	 * returned context holds the RDB store with its ref count already incremented;
	 * the caller must decrement it when done.
	 */
	private FullSyncContext lockAndCheckIfFullSyncPossible() {

		synchronized (lock) {
			RdbStore rdbStore = rdbStoreRef.get();
			if (rdbStore == null || !rdbStore.checkOk()) {
				logger.info("[lockAndCheckIfFullSyncPossible][false]{}", rdbStore);
				return new FullSyncContext(false);
			}

			// Pin the store before releasing the lock so gc() cannot destroy it.
			rdbStore.incrementRefCount();
			long rdbOffset = rdbStore.rdbOffset();
			long minOffset = firstAvailableOffset();
			long maxOffset = getEndOffset();

			// rdb and cmd is continuous AND not so much cmd after rdb
			long cmdAfterRdbThreshold = config.getReplicationStoreMaxCommandsToTransferBeforeCreateRdb();
			boolean fullSyncPossible = minOffset <= rdbOffset + 1 && (maxOffset - rdbOffset <= cmdAfterRdbThreshold);

			logger.info("[isFullSyncPossible] {}, {} <= {} + 1 && {} - {} <= {}", //
					fullSyncPossible, minOffset, rdbOffset, maxOffset, rdbOffset, cmdAfterRdbThreshold);

			if (fullSyncPossible) {
				return new FullSyncContext(true, rdbStore);
			} else {
				// Not usable: release the pin taken above.
				rdbStore.decrementRefCount();
				return new FullSyncContext(false);
			}
		}
	}

	private String newRdbFileName() {
		return "rdb_" + System.currentTimeMillis() + "_" + UUID.randomUUID().toString();
	}

	/**
	 * Serves a full sync from the current RDB when possible.
	 *
	 * @return true if the RDB was streamed to the listener, false if a new RDB is needed
	 */
	@Override
	public boolean fullSyncIfPossible(FullSyncListener fullSyncListener) throws IOException {

		makeSureOpen();

		final FullSyncContext ctx = lockAndCheckIfFullSyncPossible();
		if (ctx.isFullSyncPossible()) {
			logger.info("[fullSyncToSlave][reuse current rdb to full sync]{}", fullSyncListener);
			RdbStore rdbStore = ctx.getRdbStore();
			try {
				// after rdb send over, command will be sent automatically
				rdbStore.readRdbFile(fullSyncListener);
			} finally {
				rdbStore.decrementRefCount();
			}
			return true;
		} else {
			return false;
		}
	}

	@Override
	public void addCommandsListener(long offset, CommandsListener commandsListener) throws IOException {
		makeSureOpen();

		// Translate the replication offset into a command-store-local offset.
		long realOffset = offset - metaStore.beginOffset();
		getCommandStore().addCommandsListener(realOffset, commandsListener);
	}

	@Override
	public boolean isFresh() {
		return metaStore == null || metaStore.isFresh();
	}

	@Override
	public int appendCommands(ByteBuf byteBuf) throws IOException {
		makeSureOpen();
		return cmdStore.appendCommands(byteBuf);
	}

	@Override
	public boolean awaitCommandsOffset(long offset, int timeMilli) throws InterruptedException {
		return cmdStore.awaitCommandsOffset(offset, timeMilli);
	}

	public int getRdbUpdateCount() {
		return rdbUpdateCount.get();
	}

	protected File getBaseDir() {
		return baseDir;
	}

	@Override
	public boolean checkOk() {
		return true;
	}

	@Override
	public String toString() {
		return String.format("ReplicationStore:%s", baseDir);
	}

	/** Propagates the final RDB file size into the meta store once the dump finishes. */
	public class ReplicationStoreRdbFileListener implements RdbStoreListener {

		private RdbStore rdbStore;

		public ReplicationStoreRdbFileListener(RdbStore rdbStore) {
			this.rdbStore = rdbStore;
		}

		@Override
		public void onEndRdb() {
			try {
				logger.info("[onEndRdb]{}, {}", rdbStore, DefaultReplicationStore.this);
				metaStore.setRdbFileSize(rdbStore.rdbFileLength());
			} catch (Exception e) {
				logger.error("[onEndRdb]", e);
			}
		}
	}

	@Override
	public void close() throws IOException {

		if (cmpAndSetClosed()) {
			logger.info("[close]{}", this);
			RdbStore rdbStore = rdbStoreRef.get();
			if (rdbStore != null) {
				rdbStore.close();
			}

			if (cmdStore != null) {
				cmdStore.close();
			}
		} else {
			logger.warn("[close][already closed!]{}", this);
		}
	}

	@Override
	public void destroy() throws Exception {
		logger.info("[destroy]{}", this);
		FileUtils.recursiveDelete(baseDir);
	}

	/** Destroys superseded RDB stores that are no longer referenced and gc's the command store. */
	@Override
	public boolean gc() {
		// delete old rdb files
		for (RdbStore rdbStore : previousRdbStores.keySet()) {
			if (rdbStore.refCount() == 0) {
				try {
					rdbStore.destroy();
				} catch (Exception e) {
					logger.error("[gc]" + rdbStore, e);
				}
				previousRdbStores.remove(rdbStore);
			}
		}

		// delete old command file
		if (cmdStore != null) {
			cmdStore.gc();
		}
		return true;
	}
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.gateway;

import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse;
import org.elasticsearch.action.admin.indices.stats.IndexStats;
import org.elasticsearch.action.admin.indices.stats.ShardStats;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.indices.recovery.RecoveryState;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
import org.elasticsearch.test.InternalTestCluster.RestartCallback;
import org.elasticsearch.test.junit.annotations.TestLogging;
import org.elasticsearch.test.store.MockFSDirectoryService;
import org.elasticsearch.test.store.MockFSIndexStore;

import java.util.Collection;

import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;

/**
 * Integration tests for recovering index data from the on-disk gateway
 * after full-cluster restarts, partial restarts with cleared nodes, and
 * restarts in a different node start-up order.
 */
@ClusterScope(numDataNodes = 0, scope = Scope.TEST)
public class RecoveryFromGatewayIT extends ESIntegTestCase {

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        return pluginList(MockFSIndexStore.TestPlugin.class);
    }

    /**
     * Single node: index a handful of docs, restart the whole cluster twice,
     * and verify the same hits come back each time.
     */
    public void testOneNodeRecoverFromGateway() throws Exception {
        internalCluster().startNode();

        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
                .startObject("properties").startObject("appAccountIds").field("type", "string").endObject().endObject()
                .endObject().endObject().string();
        assertAcked(prepareCreate("test").addMapping("type1", mapping));

        client().prepareIndex("test", "type1", "10990239").setSource(jsonBuilder().startObject()
                .startArray("appAccountIds").value(14).value(179).endArray().endObject()).execute().actionGet();
        client().prepareIndex("test", "type1", "10990473").setSource(jsonBuilder().startObject()
                .startArray("appAccountIds").value(14).endArray().endObject()).execute().actionGet();
        client().prepareIndex("test", "type1", "10990513").setSource(jsonBuilder().startObject()
                .startArray("appAccountIds").value(14).value(179).endArray().endObject()).execute().actionGet();
        client().prepareIndex("test", "type1", "10990695").setSource(jsonBuilder().startObject()
                .startArray("appAccountIds").value(14).endArray().endObject()).execute().actionGet();
        client().prepareIndex("test", "type1", "11026351").setSource(jsonBuilder().startObject()
                .startArray("appAccountIds").value(14).endArray().endObject()).execute().actionGet();

        refresh();
        // two of the five docs contain the value 179
        assertHitCount(client().prepareSearch().setSize(0).setQuery(termQuery("appAccountIds", 179)).execute().actionGet(), 2);
        ensureYellow("test"); // wait for primary allocations here otherwise if we have a lot of shards we might have a
        // shard that is still in post recovery when we restart and the ensureYellow() below will timeout

        internalCluster().fullRestart();

        logger.info("Running Cluster Health (wait for the shards to startup)");
        ensureYellow();

        client().admin().indices().prepareRefresh().execute().actionGet();
        assertHitCount(client().prepareSearch().setSize(0).setQuery(termQuery("appAccountIds", 179)).execute().actionGet(), 2);

        // a second restart verifies recovery is stable, not a one-off
        internalCluster().fullRestart();

        logger.info("Running Cluster Health (wait for the shards to startup)");
        ensureYellow();

        client().admin().indices().prepareRefresh().execute().actionGet();
        assertHitCount(client().prepareSearch().setSize(0).setQuery(termQuery("appAccountIds", 179)).execute().actionGet(), 2);
    }

    /**
     * Single node, no explicit flush before restart: recovery must replay the
     * translog. Randomizes between filling every shard and leaving some empty.
     */
    public void testSingleNodeNoFlush() throws Exception {
        internalCluster().startNode();

        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
                .startObject("properties").startObject("field").field("type", "string").endObject().startObject("num").field("type", "integer").endObject().endObject()
                .endObject().endObject().string();
        // note: default replica settings are tied to #data nodes-1 which is 0 here. We can do with 1 in this test.
        int numberOfShards = numberOfShards();
        assertAcked(prepareCreate("test").setSettings(
                SETTING_NUMBER_OF_SHARDS, numberOfShards(),
                SETTING_NUMBER_OF_REPLICAS, randomIntBetween(0, 1)
        ).addMapping("type1", mapping));

        int value1Docs;
        int value2Docs;
        boolean indexToAllShards = randomBoolean();

        if (indexToAllShards) {
            // insert enough docs so all shards will have a doc
            value1Docs = randomIntBetween(numberOfShards * 10, numberOfShards * 20);
            value2Docs = randomIntBetween(numberOfShards * 10, numberOfShards * 20);

        } else {
            // insert a two docs, some shards will not have anything
            value1Docs = 1;
            value2Docs = 1;
        }

        // indexing the same ids repeatedly (outer loop) exercises updates in the translog
        for (int i = 0; i < 1 + randomInt(100); i++) {
            for (int id = 0; id < Math.max(value1Docs, value2Docs); id++) {
                if (id < value1Docs) {
                    index("test", "type1", "1_" + id,
                            jsonBuilder().startObject().field("field", "value1").startArray("num").value(14).value(179).endArray().endObject()
                    );
                }
                if (id < value2Docs) {
                    index("test", "type1", "2_" + id,
                            jsonBuilder().startObject().field("field", "value2").startArray("num").value(14).endArray().endObject()
                    );
                }
            }
        }

        refresh();

        for (int i = 0; i <= randomInt(10); i++) {
            assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get(), value1Docs + value2Docs);
            assertHitCount(client().prepareSearch().setSize(0).setQuery(termQuery("field", "value1")).get(), value1Docs);
            assertHitCount(client().prepareSearch().setSize(0).setQuery(termQuery("field", "value2")).get(), value2Docs);
            assertHitCount(client().prepareSearch().setSize(0).setQuery(termQuery("num", 179)).get(), value1Docs);
        }
        if (!indexToAllShards) {
            // we have to verify primaries are started for them to be restored
            logger.info("Ensure all primaries have been started");
            ensureYellow();
        }

        internalCluster().fullRestart();

        logger.info("Running Cluster Health (wait for the shards to startup)");
        ensureYellow();

        for (int i = 0; i <= randomInt(10); i++) {
            assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get(), value1Docs + value2Docs);
            assertHitCount(client().prepareSearch().setSize(0).setQuery(termQuery("field", "value1")).get(), value1Docs);
            assertHitCount(client().prepareSearch().setSize(0).setQuery(termQuery("field", "value2")).get(), value2Docs);
            assertHitCount(client().prepareSearch().setSize(0).setQuery(termQuery("num", 179)).get(), value1Docs);
        }

        internalCluster().fullRestart();

        logger.info("Running Cluster Health (wait for the shards to startup)");
        ensureYellow();

        for (int i = 0; i <= randomInt(10); i++) {
            assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get(), value1Docs + value2Docs);
            assertHitCount(client().prepareSearch().setSize(0).setQuery(termQuery("field", "value1")).get(), value1Docs);
            assertHitCount(client().prepareSearch().setSize(0).setQuery(termQuery("field", "value2")).get(), value2Docs);
            assertHitCount(client().prepareSearch().setSize(0).setQuery(termQuery("num", 179)).get(), value1Docs);
        }
    }

    /**
     * Single node with a flush between the two docs: recovery combines the
     * flushed segment with the translog for the unflushed doc.
     */
    public void testSingleNodeWithFlush() throws Exception {
        internalCluster().startNode();
        client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject().field("field", "value1").endObject()).execute().actionGet();
        flush();
        client().prepareIndex("test", "type1", "2").setSource(jsonBuilder().startObject().field("field", "value2").endObject()).execute().actionGet();
        refresh();

        assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).execute().actionGet(), 2);

        ensureYellow("test"); // wait for primary allocations here otherwise if we have a lot of shards we might have a
        // shard that is still in post recovery when we restart and the ensureYellow() below will timeout

        internalCluster().fullRestart();

        logger.info("Running Cluster Health (wait for the shards to startup)");
        ensureYellow();

        for (int i = 0; i < 10; i++) {
            assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).execute().actionGet(), 2);
        }

        internalCluster().fullRestart();

        logger.info("Running Cluster Health (wait for the shards to startup)");
        ensureYellow();

        for (int i = 0; i < 10; i++) {
            assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).execute().actionGet(), 2);
        }
    }

    /**
     * Two nodes; the first node's data directory is wiped during the restart.
     * The data must be restored from the surviving second node.
     */
    public void testTwoNodeFirstNodeCleared() throws Exception {
        final String firstNode = internalCluster().startNode();
        internalCluster().startNode();

        client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject().field("field", "value1").endObject()).execute().actionGet();
        flush();
        client().prepareIndex("test", "type1", "2").setSource(jsonBuilder().startObject().field("field", "value2").endObject()).execute().actionGet();
        refresh();

        logger.info("Running Cluster Health (wait for the shards to startup)");
        ensureGreen();

        for (int i = 0; i < 10; i++) {
            assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).execute().actionGet(), 2);
        }

        internalCluster().fullRestart(new RestartCallback() {
            @Override
            public Settings onNodeStopped(String nodeName) throws Exception {
                // require both nodes before recovery starts
                return settingsBuilder().put("gateway.recover_after_nodes", 2).build();
            }

            @Override
            public boolean clearData(String nodeName) {
                // wipe only the first node's data directory
                return firstNode.equals(nodeName);
            }

        });

        logger.info("Running Cluster Health (wait for the shards to startup)");
        ensureGreen();

        for (int i = 0; i < 10; i++) {
            assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).execute().actionGet(), 2);
        }
    }

    /**
     * While one node is down, the surviving node receives more data plus
     * mapping/template/alias changes. After the restart the cluster must end
     * up with the latest metadata version (and the same cluster UUID).
     */
    public void testLatestVersionLoaded() throws Exception {
        // clean two nodes
        internalCluster().startNodesAsync(2, settingsBuilder().put("gateway.recover_after_nodes", 2).build()).get();

        client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject().field("field", "value1").endObject()).execute().actionGet();
        client().admin().indices().prepareFlush().execute().actionGet();
        client().prepareIndex("test", "type1", "2").setSource(jsonBuilder().startObject().field("field", "value2").endObject()).execute().actionGet();
        client().admin().indices().prepareRefresh().execute().actionGet();

        logger.info("--> running cluster_health (wait for the shards to startup)");
        ensureGreen();

        for (int i = 0; i < 10; i++) {
            assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).execute().actionGet(), 2);
        }

        String metaDataUuid = client().admin().cluster().prepareState().execute().get().getState().getMetaData().clusterUUID();
        assertThat(metaDataUuid, not(equalTo("_na_")));

        logger.info("--> closing first node, and indexing more data to the second node");
        internalCluster().fullRestart(new RestartCallback() {

            @Override
            public void doAfterNodes(int numNodes, Client client) throws Exception {
                if (numNodes == 1) {
                    logger.info("--> one node is closed - start indexing data into the second one");
                    client.prepareIndex("test", "type1", "3").setSource(jsonBuilder().startObject().field("field", "value3").endObject()).execute().actionGet();
                    // TODO: remove once refresh doesn't fail immediately if there a master block:
                    // https://github.com/elasticsearch/elasticsearch/issues/9997
                    client.admin().cluster().prepareHealth("test").setWaitForYellowStatus().get();
                    client.admin().indices().prepareRefresh().execute().actionGet();

                    for (int i = 0; i < 10; i++) {
                        assertHitCount(client.prepareSearch().setSize(0).setQuery(matchAllQuery()).execute().actionGet(), 3);
                    }

                    logger.info("--> add some metadata, additional type and template");
                    client.admin().indices().preparePutMapping("test").setType("type2")
                            .setSource(jsonBuilder().startObject().startObject("type2").endObject().endObject())
                            .execute().actionGet();
                    client.admin().indices().preparePutTemplate("template_1")
                            .setTemplate("te*")
                            .setOrder(0)
                            .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
                                    .startObject("field1").field("type", "string").field("store", "yes").endObject()
                                    .startObject("field2").field("type", "string").field("store", "yes").field("index", "not_analyzed").endObject()
                                    .endObject().endObject().endObject())
                            .execute().actionGet();
                    client.admin().indices().prepareAliases().addAlias("test", "test_alias", QueryBuilders.termQuery("field", "value")).execute().actionGet();
                    logger.info("--> starting two nodes back, verifying we got the latest version");
                }

            }

        });

        logger.info("--> running cluster_health (wait for the shards to startup)");
        ensureGreen();

        // the cluster UUID must survive the restart
        assertThat(client().admin().cluster().prepareState().execute().get().getState().getMetaData().clusterUUID(), equalTo(metaDataUuid));

        for (int i = 0; i < 10; i++) {
            assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).execute().actionGet(), 3);
        }

        // verify the mapping / template / alias changes made while one node was down survived
        ClusterState state = client().admin().cluster().prepareState().execute().actionGet().getState();
        assertThat(state.metaData().index("test").mapping("type2"), notNullValue());
        assertThat(state.metaData().templates().get("template_1").template(), equalTo("te*"));
        assertThat(state.metaData().index("test").getAliases().get("test_alias"), notNullValue());
        assertThat(state.metaData().index("test").getAliases().get("test_alias").filter(), notNullValue());
    }

    /**
     * Verifies that replica shards reuse the files already on disk after a
     * full restart — either via matching file checksums or (randomized) via
     * synced-flush sync ids, in which case nothing is recovered at all.
     */
    public void testReusePeerRecovery() throws Exception {
        final Settings settings = settingsBuilder()
                .put("action.admin.cluster.node.shutdown.delay", "10ms")
                .put(MockFSIndexStore.INDEX_CHECK_INDEX_ON_CLOSE_SETTING.getKey(), false)
                .put("gateway.recover_after_nodes", 4)
                .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING.getKey(), 4)
                .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.getKey(), 4)
                .put(MockFSDirectoryService.CRASH_INDEX_SETTING.getKey(), false).build();

        internalCluster().startNodesAsync(4, settings).get();
        // prevent any rebalance actions during the peer recovery
        // if we run into a relocation the reuse count will be 0 and this fails the test. We are testing here if
        // we reuse the files on disk after full restarts for replicas.
        assertAcked(prepareCreate("test").setSettings(Settings.builder()
                .put(indexSettings())
                .put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), EnableAllocationDecider.Rebalance.NONE)));
        ensureGreen();
        logger.info("--> indexing docs");
        for (int i = 0; i < 1000; i++) {
            client().prepareIndex("test", "type").setSource("field", "value").execute().actionGet();
            if ((i % 200) == 0) {
                client().admin().indices().prepareFlush().execute().actionGet();
            }
        }
        if (randomBoolean()) {
            client().admin().indices().prepareFlush().execute().actionGet();
        }
        logger.info("Running Cluster Health");
        ensureGreen();
        client().admin().indices().prepareForceMerge("test").setMaxNumSegments(100).get(); // just wait for merges
        client().admin().indices().prepareFlush().setWaitIfOngoing(true).setForce(true).get();

        boolean useSyncIds = randomBoolean();
        if (useSyncIds == false) {
            logger.info("--> disabling allocation while the cluster is shut down");

            // Disable allocations while we are closing nodes
            client().admin().cluster().prepareUpdateSettings()
                    .setTransientSettings(settingsBuilder()
                            .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), EnableAllocationDecider.Allocation.NONE))
                    .get();
            logger.info("--> full cluster restart");
            internalCluster().fullRestart();

            logger.info("--> waiting for cluster to return to green after first shutdown");
            ensureGreen();
        } else {
            logger.info("--> trying to sync flush");
            assertEquals(client().admin().indices().prepareSyncedFlush("test").get().failedShards(), 0);
            assertSyncIdsNotNull();
        }

        logger.info("--> disabling allocation while the cluster is shut down", useSyncIds ? "" : " a second time");
        // Disable allocations while we are closing nodes
        client().admin().cluster().prepareUpdateSettings()
                .setTransientSettings(settingsBuilder()
                        .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), EnableAllocationDecider.Allocation.NONE))
                .get();

        logger.info("--> full cluster restart");
        internalCluster().fullRestart();

        logger.info("--> waiting for cluster to return to green after {}shutdown", useSyncIds ? "" : "second ");
        ensureGreen();

        if (useSyncIds) {
            assertSyncIdsNotNull();
        }

        // inspect per-shard recovery stats to verify on-disk files were reused
        RecoveryResponse recoveryResponse = client().admin().indices().prepareRecoveries("test").get();

        for (RecoveryState recoveryState : recoveryResponse.shardRecoveryStates().get("test")) {
            long recovered = 0;
            for (RecoveryState.File file : recoveryState.getIndex().fileDetails()) {
                if (file.name().startsWith("segments")) {
                    recovered += file.length();
                }
            }
            if (!recoveryState.getPrimary() && (useSyncIds == false)) {
                logger.info("--> replica shard {} recovered from {} to {}, recovered {}, reuse {}",
                        recoveryState.getShardId().getId(), recoveryState.getSourceNode().name(), recoveryState.getTargetNode().name(),
                        recoveryState.getIndex().recoveredBytes(), recoveryState.getIndex().reusedBytes());
                assertThat("no bytes should be recovered", recoveryState.getIndex().recoveredBytes(), equalTo(recovered));
                assertThat("data should have been reused", recoveryState.getIndex().reusedBytes(), greaterThan(0l));
                // we have to recover the segments file since we commit the translog ID on engine startup
                assertThat("all bytes should be reused except of the segments file", recoveryState.getIndex().reusedBytes(), equalTo(recoveryState.getIndex().totalBytes() - recovered));
                assertThat("no files should be recovered except of the segments file", recoveryState.getIndex().recoveredFileCount(), equalTo(1));
                assertThat("all files should be reused except of the segments file", recoveryState.getIndex().reusedFileCount(), equalTo(recoveryState.getIndex().totalFileCount() - 1));
                assertThat("> 0 files should be reused", recoveryState.getIndex().reusedFileCount(), greaterThan(0));
            } else {
                if (useSyncIds && !recoveryState.getPrimary()) {
                    logger.info("--> replica shard {} recovered from {} to {} using sync id, recovered {}, reuse {}",
                            recoveryState.getShardId().getId(), recoveryState.getSourceNode().name(), recoveryState.getTargetNode().name(),
                            recoveryState.getIndex().recoveredBytes(), recoveryState.getIndex().reusedBytes());
                }
                // sync-id (or primary) path: nothing recovered, everything reused
                assertThat(recoveryState.getIndex().recoveredBytes(), equalTo(0l));
                assertThat(recoveryState.getIndex().reusedBytes(), equalTo(recoveryState.getIndex().totalBytes()));
                assertThat(recoveryState.getIndex().recoveredFileCount(), equalTo(0));
                assertThat(recoveryState.getIndex().reusedFileCount(), equalTo(recoveryState.getIndex().totalFileCount()));
            }
        }
    }

    /** Asserts every shard commit of "test" carries a synced-flush sync id. */
    public void assertSyncIdsNotNull() {
        IndexStats indexStats = client().admin().indices().prepareStats("test").get().getIndex("test");
        for (ShardStats shardStats : indexStats.getShards()) {
            assertNotNull(shardStats.getCommitStats().getUserData().get(Engine.SYNC_COMMIT_ID));
        }
    }

    /**
     * Restart only the second (data-less at first start) node and verify the
     * index created on the first node is still found regardless of start order.
     */
    public void testRecoveryDifferentNodeOrderStartup() throws Exception {
        // we need different data paths so we make sure we start the second node fresh
        final String node_1 = internalCluster().startNode(settingsBuilder().put(Environment.PATH_DATA_SETTING.getKey(), createTempDir()).build());
        client().prepareIndex("test", "type1", "1").setSource("field", "value").execute().actionGet();
        internalCluster().startNode(settingsBuilder().put(Environment.PATH_DATA_SETTING.getKey(), createTempDir()).build());
        ensureGreen();
        internalCluster().fullRestart(new RestartCallback() {

            @Override
            public boolean doRestart(String nodeName) {
                // keep node_1 down; only restart the other node
                return !node_1.equals(nodeName);
            }
        });
        ensureYellow();
        assertThat(client().admin().indices().prepareExists("test").execute().actionGet().isExists(), equalTo(true));
        assertHitCount(client().prepareSearch("test").setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet(), 1);
    }
}
package at.salzburgresearch.stanbol.enhancer.engines.gosennlp.impl;

import static at.salzburgresearch.stanbol.enhancer.engines.gosennlp.Constants.GOSEN_NER_TAG_SET;
import static at.salzburgresearch.stanbol.enhancer.engines.gosennlp.Constants.GOSEN_POS_TAG_SET;
import static org.apache.stanbol.enhancer.nlp.NlpAnnotations.POS_ANNOTATION;
import static org.apache.stanbol.enhancer.nlp.utils.NlpEngineHelper.getLanguage;
import static org.apache.stanbol.enhancer.nlp.utils.NlpEngineHelper.initAnalysedText;
import static org.apache.stanbol.enhancer.servicesapi.rdf.Properties.DC_TYPE;
import static org.apache.stanbol.enhancer.servicesapi.rdf.Properties.ENHANCER_END;
import static org.apache.stanbol.enhancer.servicesapi.rdf.Properties.ENHANCER_SELECTED_TEXT;
import static org.apache.stanbol.enhancer.servicesapi.rdf.Properties.ENHANCER_SELECTION_CONTEXT;
import static org.apache.stanbol.enhancer.servicesapi.rdf.Properties.ENHANCER_START;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.clerezza.rdf.core.Language;
import org.apache.clerezza.rdf.core.LiteralFactory;
import org.apache.clerezza.rdf.core.MGraph;
import org.apache.clerezza.rdf.core.UriRef;
import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
import org.apache.clerezza.rdf.core.impl.TripleImpl;
import org.apache.commons.io.input.CharSequenceReader;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.ConfigurationPolicy;
import org.apache.felix.scr.annotations.Properties;
import org.apache.felix.scr.annotations.Property;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.ReferenceCardinality;
import org.apache.felix.scr.annotations.Service;
import org.apache.lucene.analysis.CharReader;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.gosen.tokenAttributes.PartOfSpeechAttribute;
import org.apache.lucene.analysis.gosen.tokenAttributes.SentenceStartAttribute;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.util.Version;
import org.apache.solr.analysis.MappingCharFilterFactory;
import org.apache.solr.analysis.TokenizerFactory;
import org.apache.solr.analysis.gosen.OsgiGosenTokenizerFactory;
import org.apache.solr.common.ResourceLoader;
import org.apache.stanbol.commons.solr.utils.StanbolResourceLoader;
import org.apache.stanbol.enhancer.nlp.NlpAnnotations;
import org.apache.stanbol.enhancer.nlp.NlpProcessingRole;
import org.apache.stanbol.enhancer.nlp.NlpServiceProperties;
import org.apache.stanbol.enhancer.nlp.model.AnalysedText;
import org.apache.stanbol.enhancer.nlp.model.AnalysedTextFactory;
import org.apache.stanbol.enhancer.nlp.model.Chunk;
import org.apache.stanbol.enhancer.nlp.model.Sentence;
import org.apache.stanbol.enhancer.nlp.model.Token;
import org.apache.stanbol.enhancer.nlp.model.annotation.Value;
import org.apache.stanbol.enhancer.nlp.ner.NerTag;
import org.apache.stanbol.enhancer.nlp.pos.PosTag;
import org.apache.stanbol.enhancer.nlp.utils.NlpEngineHelper;
import org.apache.stanbol.enhancer.servicesapi.Blob;
import org.apache.stanbol.enhancer.servicesapi.ContentItem;
import org.apache.stanbol.enhancer.servicesapi.EngineException;
import org.apache.stanbol.enhancer.servicesapi.EnhancementEngine;
import org.apache.stanbol.enhancer.servicesapi.ServiceProperties;
import org.apache.stanbol.enhancer.servicesapi.helper.EnhancementEngineHelper;
import org.apache.stanbol.enhancer.servicesapi.impl.AbstractEnhancementEngine;
import org.osgi.framework.Constants;
import org.osgi.service.cm.ConfigurationException;
import org.osgi.service.component.ComponentContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Stanbol {@link EnhancementEngine} that tokenizes Japanese ("ja") text with
 * the Gosen morphological analyzer (via Lucene/Solr factories), adds POS
 * annotations and sentence boundaries to the {@link AnalysedText}, and writes
 * fise:TextAnnotations for detected named entities to the ContentItem metadata.
 */
@Component(immediate = true, metatype = true, policy = ConfigurationPolicy.OPTIONAL) //create a default instance with the default configuration
@Service
@Properties(value={
        @Property(name= EnhancementEngine.PROPERTY_NAME,value="gosen-nlp"),
        @Property(name=Constants.SERVICE_RANKING,intValue=0) //give the default instance a ranking < 0
})
public class GosenNlpEngine extends AbstractEnhancementEngine<RuntimeException,RuntimeException> implements ServiceProperties {

    private Logger log = LoggerFactory.getLogger(GosenNlpEngine.class);

    /*
     * Analyzer configuration constants
     */
    private static final String LUCENE_VERSION = Version.LUCENE_36.toString();
    private static final Map<String,String> CHAR_FILTER_FACTORY_CONFIG = new HashMap<String,String>();
    private static final Map<String,String> TOKENIZER_FACTORY_CONFIG = new HashMap<String,String>();
    static {
        CHAR_FILTER_FACTORY_CONFIG.put("luceneMatchVersion", LUCENE_VERSION);
        CHAR_FILTER_FACTORY_CONFIG.put("mapping", "gosen-mapping-japanese.txt");
        TOKENIZER_FACTORY_CONFIG.put("luceneMatchVersion", LUCENE_VERSION);
    }

    /**
     * Service Properties of this Engine
     */
    private static final Map<String,Object> SERVICE_PROPERTIES;
    static {
        Map<String,Object> props = new HashMap<String,Object>();
        props.put(ServiceProperties.ENHANCEMENT_ENGINE_ORDERING,
            ServiceProperties.ORDERING_NLP_TOKENIZING);
        props.put(NlpServiceProperties.ENHANCEMENT_ENGINE_NLP_ROLE,
            NlpProcessingRole.Tokenizing);
        SERVICE_PROPERTIES = Collections.unmodifiableMap(props);
    }

    // optional parent loader so resources can also be resolved via OSGi
    @Reference(cardinality=ReferenceCardinality.OPTIONAL_UNARY)
    protected ResourceLoader parentResourceLoader;

    protected ResourceLoader resourceLoader;

    private MappingCharFilterFactory charFilterFactory;
    private TokenizerFactory tokenizerFactory;

    @Reference
    protected AnalysedTextFactory analysedTextFactory;

    protected LiteralFactory lf = LiteralFactory.getInstance();

    /**
     * holds {@link PosTag}s that are not contained in the
     * {@link at.salzburgresearch.stanbol.enhancer.engines.gosennlp.Constants#GOSEN_POS_TAG_SET}
     */
    // NOTE(review): plain HashMap mutated from computeEnhancements(..), which
    // runs ENHANCE_ASYNC — concurrent enhancement jobs could race on this map;
    // confirm whether a ConcurrentMap is needed.
    private Map<String,PosTag> adhocTags = new HashMap<String,PosTag>();

    /**
     * Initializes the char-filter and tokenizer factories from the static
     * configuration and the (possibly OSGi-backed) resource loader.
     */
    @Override
    protected void activate(ComponentContext ctx) throws ConfigurationException {
        super.activate(ctx);
        //init the Solr ResourceLoader used for initialising the components
        resourceLoader = new StanbolResourceLoader(parentResourceLoader);
        charFilterFactory = new MappingCharFilterFactory();
        charFilterFactory.init(CHAR_FILTER_FACTORY_CONFIG);
        charFilterFactory.inform(resourceLoader);
        tokenizerFactory = new OsgiGosenTokenizerFactory();
        tokenizerFactory.init(TOKENIZER_FACTORY_CONFIG);
    }

    /** Releases the analyzer components created in {@link #activate}. */
    @Override
    protected void deactivate(ComponentContext ctx) {
        super.deactivate(ctx);
        resourceLoader = null;
        charFilterFactory = null;
        tokenizerFactory = null;
    }

    /**
     * Accepts ContentItems that have a plain-text blob and a detected
     * language of "ja" (or a "ja-*" variant); processed asynchronously.
     */
    @Override
    public int canEnhance(ContentItem ci) throws EngineException {
        // check if content is present
        Map.Entry<UriRef,Blob> entry = NlpEngineHelper.getPlainText(this, ci, false);
        if(entry == null || entry.getValue() == null) {
            return CANNOT_ENHANCE;
        }
        String language = getLanguage(this,ci,false);
        if("ja".equals(language) || (language != null && language.startsWith("ja-"))) {
            log.trace(" > can enhance ContentItem {} with language {}",ci,language);
            return ENHANCE_ASYNC;
        } else {
            return CANNOT_ENHANCE;
        }
    }

    /**
     * Tokenizes the text with Gosen, adding Tokens, Sentences, POS
     * annotations and NER Chunks to the AnalysedText, then writes one
     * fise:TextAnnotation per named entity to the ContentItem metadata
     * (under the write lock).
     */
    @Override
    public void computeEnhancements(ContentItem ci) throws EngineException {
        final AnalysedText at = initAnalysedText(this,analysedTextFactory,ci);
        String language = getLanguage(this,ci,false);
        // defensive re-check of the canEnhance(..) contract
        if(!("ja".equals(language) || (language != null && language.startsWith("ja-")))) {
            throw new IllegalStateException("The detected language is NOT 'ja'! "
                + "As this is also checked within the #canEnhance(..) method this "
                + "indicates an Bug in the used EnhancementJobManager implementation. "
                + "Please report this on the dev@apache.stanbol.org or create an "
                + "JIRA issue about this.");
        }
        // char filter (character normalization) feeds the Gosen tokenizer
        TokenStream tokenizer = tokenizerFactory.create(charFilterFactory.create(
            CharReader.get(new CharSequenceReader(at.getText()))));
        //Sentence data
        int sentStartOffset = -1; // start of the sentence currently being collected (-1 = none yet)
        //NER data
        List<NerData> nerList = new ArrayList<NerData>();
        int nerSentIndex = 0; //the next index where the NerData.context need to be set
        NerData ner = null;   // the named entity currently being extended, if any
        OffsetAttribute offset = null;
        try {
            while (tokenizer.incrementToken()){
                offset = tokenizer.addAttribute(OffsetAttribute.class);
                Token token = at.addToken(offset.startOffset(), offset.endOffset());
                // a sentence-start token closes the previous sentence
                SentenceStartAttribute sentStart = tokenizer.addAttribute(SentenceStartAttribute.class);
                if(sentStart.getSentenceStart()){
                    if(sentStartOffset >= 0){
                        Sentence sent = at.addSentence(sentStartOffset, offset.startOffset());
                        //add the sentence as context to the NerData instances
                        while(nerSentIndex < nerList.size()){
                            nerList.get(nerSentIndex).context = sent.getSpan();
                            nerSentIndex++;
                        }
                    }
                    sentStartOffset = offset.startOffset();
                }
                //POS
                PartOfSpeechAttribute pos = tokenizer.addAttribute(PartOfSpeechAttribute.class);
                PosTag posTag = GOSEN_POS_TAG_SET.getTag(pos.getPartOfSpeech());
                if(posTag == null){
                    // unmapped POS: cache an ad-hoc tag so it is created only once
                    posTag = adhocTags.get(pos.getPartOfSpeech());
                    if(posTag == null){
                        posTag = new PosTag(pos.getPartOfSpeech());
                        adhocTags.put(pos.getPartOfSpeech(), posTag);
                        log.info(" ... missing PosTag mapping for {}",pos.getPartOfSpeech());
                    }
                }
                token.addAnnotation(POS_ANNOTATION, Value.value(posTag));
                //NER
                // the NER type is derived from the POS tag; a change of type
                // (or no type) closes the currently open entity
                NerTag nerTag = GOSEN_NER_TAG_SET.getTag(pos.getPartOfSpeech());
                if(ner != null && (nerTag == null || !ner.tag.getType().equals(nerTag.getType()))){
                    //write NER annotation
                    Chunk chunk = at.addChunk(ner.start, ner.end);
                    chunk.addAnnotation(NlpAnnotations.NER_ANNOTATION, Value.value(ner.tag));
                    //NOTE that the fise:TextAnnotation are written later based on the nerList
                    //clean up
                    ner = null;
                }
                if(nerTag != null){
                    if(ner == null){ // start a new named entity
                        ner = new NerData(nerTag, offset.startOffset());
                        nerList.add(ner);
                    }
                    ner.end = offset.endOffset(); // extend to the current token
                }
            }
            //we still need to write the last sentence
            Sentence lastSent = null;
            if(offset != null && sentStartOffset >= 0 && offset.endOffset() > sentStartOffset){
                lastSent = at.addSentence(sentStartOffset, offset.endOffset());
            }
            //and set the context off remaining named entities
            while(nerSentIndex < nerList.size()){
                if(lastSent != null){
                    nerList.get(nerSentIndex).context = lastSent.getSpan();
                } else { //no sentence detected
                    nerList.get(nerSentIndex).context = at.getSpan();
                }
                nerSentIndex++;
            }
        } catch (IOException e) {
            throw new EngineException(this, ci, "Exception while reading from "
                + "AnalyzedText contentpart",e);
        } finally {
            try {
                tokenizer.close();
            } catch (IOException e) {/* ignore */}
        }
        //finally write the NER annotations to the metadata of the ContentItem
        final MGraph metadata = ci.getMetadata();
        ci.getLock().writeLock().lock();
        try {
            Language lang = new Language("ja");
            for(NerData nerData : nerList){
                UriRef ta = EnhancementEngineHelper.createTextEnhancement(ci, this);
                metadata.add(new TripleImpl(ta, ENHANCER_SELECTED_TEXT, new PlainLiteralImpl(
                    at.getSpan().substring(nerData.start, nerData.end),lang)));
                metadata.add(new TripleImpl(ta, DC_TYPE, nerData.tag.getType()));
                metadata.add(new TripleImpl(ta, ENHANCER_START, lf.createTypedLiteral(nerData.start)));
                metadata.add(new TripleImpl(ta, ENHANCER_END, lf.createTypedLiteral(nerData.end)));
                metadata.add(new TripleImpl(ta, ENHANCER_SELECTION_CONTEXT,
                    new PlainLiteralImpl(nerData.context, lang)));
            }
        } finally{
            ci.getLock().writeLock().unlock();
        }
    }

    @Override
    public Map<String,Object> getServiceProperties() {
        return SERVICE_PROPERTIES;
    }

    /**
     * Used as intermediate representation of NER annotations so that one needs
     * not to obtain a write lock on the {@link ContentItem} for each detected
     * entity
     * @author Rupert Westenthaler
     *
     */
    private class NerData {

        protected final NerTag tag;    // entity type
        protected final int start;     // char offset of the first token
        protected int end;             // char offset past the last token (grows while collecting)
        protected String context;      // span of the containing sentence (or whole text)

        protected NerData(NerTag ner, int start){
            this.tag = ner;
            this.start = start;
        }

    }
}
package authoring_environment.object_editor;

import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.ResourceBundle;

import authoring_environment.PopUpError;
import authoring_environment.Event.EventController;
import authoring_environment.ParamPopups.ParamController;
import authoring_environment.main.IUpdateHandle;
import javafx.collections.MapChangeListener;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.scene.Node;
import javafx.scene.control.ListCell;
import javafx.scene.control.ListView;
import javafx.scene.control.Menu;
import javafx.scene.control.MenuItem;
import javafx.scene.control.TextInputDialog;
import javafx.scene.input.KeyCode;
import javafx.scene.input.KeyEvent;
import javafx.scene.input.MouseEvent;
import javafx.stage.Stage;
import javafx.util.Callback;
import structures.data.DataGame;
import structures.data.DataObject;
import structures.data.DataSprite;
import structures.data.access_restricters.IObjectInterface;
import structures.data.actions.logic.Close;
import structures.data.actions.logic.Else;
import structures.data.actions.logic.Open;
import structures.data.actions.params.IParameter;
import structures.data.interfaces.IAction;
import structures.data.interfaces.IDataEvent;

/**
 * Controller for the object-editor window. Wires the {@link ObjectEditorView}
 * to its {@link ObjectEditorModel}: object creation with duplicate-name
 * checking, event editing popups, action display, sprite selection and
 * close/save handling.
 */
public class ObjectEditorController {

	// Package-private visibility kept for compatibility with existing callers.
	ObjectEditorView view;
	ObjectEditorModel model;
	IUpdateHandle updater;
	private ResourceBundle r =
			ResourceBundle.getBundle("authoring_environment/object_editor/ObjectControllerResources");

	/**
	 * Opens the editor for an existing object.
	 *
	 * @param dataGame the game the object belongs to
	 * @param o the object to edit
	 */
	public ObjectEditorController(IObjectInterface dataGame, DataObject o) {
		view = new ObjectEditorView(dataGame.getName());
		model = new ObjectEditorModel(dataGame, o);
		initAll();
	}

	/**
	 * Prompts the user for a name and creates a new object. If the chosen name
	 * already exists, an error popup is shown and the dialog is re-opened; if
	 * the dialog is cancelled, no editor is created and {@code view}/{@code model}
	 * stay null.
	 *
	 * @param dataGame the game the new object will belong to
	 */
	public ObjectEditorController(IObjectInterface dataGame) {
		TextInputDialog dialog = new TextInputDialog(r.getString("name"));
		dialog.setTitle(r.getString("create"));
		dialog.setHeaderText(r.getString("enter"));
		Optional<String> result = dialog.showAndWait();
		if (result.isPresent()) {
			String name = result.get();
			boolean duplicate = false;
			for (DataObject existing : dataGame.getObjects()) {
				if (existing.getName().equals(name)) {
					duplicate = true;
					break;
				}
			}
			if (!duplicate) {
				model = new ObjectEditorModel(dataGame, name);
				view = new ObjectEditorView(dataGame.getName());
				initAll();
			} else {
				new PopUpError(r.getString("duplicate"));
				// Re-prompt until a unique name is entered or the dialog is cancelled.
				new ObjectEditorController(dataGame);
			}
		}
	}

	/**
	 * Wires all view controls to the model: close/save button, event list
	 * (rendering, keyboard and double-click editing, deletion), the left-hand
	 * action list, the sprite menu, and the event-map change listener.
	 */
	public void initAll() {
		view.getBottomPane().getCloseButton().setOnAction(e -> {
			// Persist name and solidity before closing the window.
			model.changeObjectName(view.getBottomPane().getNameBoxText());
			model.setSolid(view.getBottomPane().getCheckBox().isSelected());
			close(e);
		});
		view.getBottomPane().getCheckBox().setSelected(model.isSolid());
		view.getBottomPane().getNameBox().setText(model.getObject().getName());

		view.getRightPane().getListView().setItems(model.getEvents());
		view.getRightPane().getListView().setCellFactory(listView -> {
			final ListCell<IDataEvent> cell = new ListCell<IDataEvent>() {
				@Override
				public void updateItem(IDataEvent item, boolean empty) {
					super.updateItem(item, empty);
					if (empty) {
						setText(null);
					} else {
						setText(describe(item));
					}
				}
			};
			cell.requestFocus();
			cell.setOnKeyPressed(key -> {
				if (key.getCode().equals(KeyCode.ENTER)) {
					eventPopup(cell.getItem());
				}
			});
			cell.setOnMouseClicked(click -> {
				if (click.getClickCount() == 2) {
					eventPopup(cell.getItem());
				}
			});
			return cell;
		});

		view.getRightPane().getListView().setOnKeyPressed(event -> {
			if (event.getCode().equals(KeyCode.ENTER)) {
				eventPopup(view.getRightPane().getListView().getSelectionModel().getSelectedItem());
			}
			if (event.getCode().equals(KeyCode.DELETE)) {
				model.deleteEvent(view.getRightPane().getListView().getSelectionModel().getSelectedItem());
			}
		});
		view.getRightPane().getDeleteButton().setOnAction(e ->
				model.deleteEvent(view.getRightPane().getListView().getSelectionModel().getSelectedItem()));
		view.getRightPane().getEditButton().setOnAction(e ->
				eventPopup(view.getRightPane().getListView().getSelectionModel().getSelectedItem()));

		view.getLeftPane().getListView().setItems(model.createLeftPaneList());
		view.getLeftPane().getListView().setOnKeyPressed(key -> {
			if (key.getCode().equals(KeyCode.ENTER)) {
				popUpFactory();
			}
		});
		view.getLeftPane().getListView().setOnMouseClicked(click -> {
			if (click.getClickCount() == 2) {
				popUpFactory();
			}
		});
		view.getLeftPane().getAddButton().setOnAction(e -> popUpFactory());

		for (DataSprite s : model.getSprites()) {
			view.getTopPane().addToMenu(view.getTopPane().createMenuItem(s.getName(), e -> {
				model.setSprite(s);
				refreshSprite();
			}));
		}

		// Refresh the event list whenever the event->actions map changes.
		// Anonymous class (not a lambda) to disambiguate the addListener overloads.
		model.getMap().addListener(new MapChangeListener<Object, Object>() {
			@Override
			public void onChanged(Change<?, ?> arg0) {
				model.getEvents();
			}
		});
		refreshSprite();
	}

	/**
	 * Builds the multi-line, indented description of an event and its actions
	 * shown in the right-hand list. Open/Else actions increase the indent of
	 * the actions that follow them, Close/Else actions decrease it.
	 */
	private String describe(IDataEvent item) {
		StringBuilder description = new StringBuilder(item.getName()).append(":");
		int indents = 0;
		for (IAction action : model.getMap().get(item)) {
			description.append("\n    ");
			if (action instanceof Close || action instanceof Else) {
				indents -= 1;
			}
			for (int i = 0; i < indents; i++) {
				description.append("    ");
			}
			description.append(action.getDescription());
			if (action instanceof Open || action instanceof Else) {
				indents += 1;
			}
		}
		return description.toString();
	}

	/** Opens the parameter popup for the currently selected left-pane entry, if any. */
	private void popUpFactory() {
		String selected = view.getLeftPane().getListView().getSelectionModel().getSelectedItem();
		if (selected != null) {
			model.getPopUpFactory().create(selected, model.getObject(), model.getGame());
		}
	}

	/** Opens the event editor for the given event and refreshes the list afterwards. */
	private void eventPopup(IDataEvent e) {
		if (e != null) {
			EventController control = new EventController(e, model.getObject(), model.getGame());
			control.showAndWait();
			// Re-set the items to force the ListView to re-render the edited event.
			List<IDataEvent> itemscopy =
					new ArrayList<IDataEvent>(view.getRightPane().getListView().getItems());
			view.getRightPane().getListView().getItems().setAll(itemscopy);
		}
	}

	/** Updates the centre pane with the currently selected sprite. */
	private void refreshSprite() {
		view.getCenterPane().update(model.getSpriteName());
	}

	/**
	 * Closes the editor window and notifies the registered update handler.
	 * Guarded against a null {@code updater}: {@link #setOnClose(IUpdateHandle)}
	 * may never have been called.
	 */
	private void close(ActionEvent e) {
		Node source = (Node) e.getSource();
		Stage stage = (Stage) source.getScene().getWindow();
		stage.close();
		if (updater != null) {
			updater.update();
		}
	}

	// NOTE(review): currently unused in this file; kept for interface stability.
	private void addSpriteToMenu(DataSprite s, Menu menu) {
		MenuItem m = new MenuItem(s.getName());
		m.setOnAction(e -> model.setSprite(s));
		menu.getItems().add(m);
	}

	/** Registers a callback invoked after the editor window closes. */
	public void setOnClose(IUpdateHandle updateHandle) {
		updater = updateHandle;
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package org.apache.sling.jcr.repository.it;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.ops4j.pax.exam.CoreOptions.junitBundles;
import static org.ops4j.pax.exam.CoreOptions.mavenBundle;
import static org.ops4j.pax.exam.CoreOptions.systemProperty;
import static org.ops4j.pax.exam.CoreOptions.vmOptions;

import java.io.Reader;
import java.io.StringReader;
import java.util.Collection;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;

import javax.inject.Inject;
import javax.jcr.Credentials;
import javax.jcr.Item;
import javax.jcr.Node;
import javax.jcr.NodeIterator;
import javax.jcr.Property;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.jcr.SimpleCredentials;
import javax.jcr.observation.EventIterator;
import javax.jcr.observation.EventListener;
import javax.jcr.observation.ObservationManager;
import javax.jcr.query.Query;

import org.apache.jackrabbit.commons.cnd.CndImporter;
import org.apache.sling.api.SlingConstants;
import org.apache.sling.api.resource.ResourceResolver;
import org.apache.sling.api.resource.ResourceResolverFactory;
import org.apache.sling.jcr.api.SlingRepository;
import org.junit.After;
import org.junit.Test;
import org.ops4j.pax.exam.Option;
import org.osgi.framework.BundleContext;
import org.osgi.framework.ServiceReference;
import org.osgi.framework.ServiceRegistration;
import org.osgi.util.tracker.ServiceTracker;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/** Base class for SlingRepository tests, contains tests
 *  that apply to all implementations.
 *  PaxExamParameterized could also be used in theory to
 *  have single class that tests all implementations, but
 *  in a quick test that didn't work well with variable
 *  @Config annotations.
 */
public abstract class CommonTests {

    private final Logger log = LoggerFactory.getLogger(getClass());

    /** Repository under test, resolved in {@link #setup()} via a ServiceTracker. */
    protected SlingRepository repository;

    @Inject
    protected BundleContext bundleContext;

    /** Check some repository descriptors to make sure we're
     *  testing the expected implementation.
     */
    // TODO: see SLING-4841 why the template method pattern was removed
    // protected abstract void doCheckRepositoryDescriptors();

    /** Paths of test items removed after each test by {@link #deleteTestItems()}. */
    private final List<String> toDelete = new LinkedList<String>();

    /** Counter used by {@link #uniqueName(String)} to avoid name collisions. */
    private final AtomicInteger uniqueNameCounter = new AtomicInteger();

    /** Multiplier for load-style tests, settable via -Dtest.scale=N. */
    protected static final Integer TEST_SCALE = Integer.getInteger("test.scale", 1);

    /** CND used to register the sling:Message / mix:language node types. */
    public static final String I18N_MESSAGE_CND =
        "<sling = 'http://sling.apache.org/jcr/sling/1.0'>\n"
        + "[mix:language]\n"
        + "mixin\n"
        + "- jcr:language (string)\n"
        + "\n"
        + "[sling:Message]\n"
        + "mixin\n"
        + "- sling:key (string)\n"
        + "- sling:message (undefined)\n"
        + "\n"
        + "[sling:MessageEntry] > nt:hierarchyNode, sling:Message\n";

    /** Counts JCR observation events for mix:language / sling:Message nodes.
     *  Holds its own admin session; call {@link #close()} when done. */
    protected class JcrEventsCounter implements EventListener {
        private final Session s;
        private int jcrEventsCounter;

        public JcrEventsCounter() throws RepositoryException {
            s = repository.loginAdministrative(null);
            final ObservationManager om = s.getWorkspace().getObservationManager();
            final int eventTypes = 255; // not sure if that's a recommended value, but common
            final boolean deep = true;
            final String [] uuid = null;
            final String [] nodeTypeNames = new String [] { "mix:language", "sling:Message" };
            final boolean noLocal = true;
            final String root = "/";
            om.addEventListener(this, eventTypes, root, deep, uuid, nodeTypeNames, noLocal);
        }

        void close() {
            s.logout();
        }

        @Override
        public void onEvent(EventIterator it) {
            while(it.hasNext()) {
                it.nextEvent();
                jcrEventsCounter++;
            }
        }

        int get() {
            return jcrEventsCounter;
        }
    }

    /** Pax-exam options shared by all SlingRepository ITs: logging, commons
     *  utilities, Felix/Sling bundles, and (optionally) the webconsole. */
    public Collection<Option> commonOptions() {
        final String localRepo = System.getProperty("maven.repo.local", "");
        final String paxVmOptions = System.getProperty("pax.vm.options", "");
        final boolean webconsole = "true".equals(System.getProperty("webconsole.active", "false"));
        final List<Option> opt = new LinkedList<Option>();
        if(localRepo.length() > 0 ) {
            opt.add(systemProperty("org.ops4j.pax.url.mvn.localRepository").value(localRepo));
        }
        if(paxVmOptions.length() > 0) {
            opt.add(vmOptions(paxVmOptions));
        }

        // Optionally add webconsole
        if(webconsole) {
            opt.add(mavenBundle("org.apache.felix", "org.apache.felix.webconsole", "4.2.6"));
            opt.add(mavenBundle("org.apache.felix", "org.apache.felix.webconsole.plugins.ds", "1.0.0"));
            opt.add(mavenBundle("org.apache.felix", "org.apache.felix.webconsole.plugins.packageadmin", "1.0.0"));
            opt.add(mavenBundle("org.apache.felix", "org.apache.felix.webconsole.plugins.event", "1.1.0"));
            opt.add(mavenBundle("org.apache.sling", "org.apache.sling.jcr.webconsole", "1.0.2"));
            opt.add(mavenBundle("org.apache.geronimo.bundles", "json", "20090211_1"));
        }

        final String SLF4J_VERSION = "1.7.5";
        opt.add(mavenBundle("org.slf4j", "slf4j-api", SLF4J_VERSION));
        opt.add(mavenBundle("org.slf4j", "jcl-over-slf4j", SLF4J_VERSION));
        opt.add(mavenBundle("org.slf4j", "log4j-over-slf4j", SLF4J_VERSION));

        opt.add(mavenBundle("org.apache.sling", "org.apache.sling.fragment.xml", "1.0.2"));
        opt.add(mavenBundle("org.apache.sling", "org.apache.sling.fragment.transaction", "1.0.0"));
        opt.add(mavenBundle("org.apache.sling", "org.apache.sling.fragment.activation", "1.0.2"));
        opt.add(mavenBundle("org.apache.sling", "org.apache.sling.fragment.ws", "1.0.2"));
        opt.add(mavenBundle("org.apache.sling", "org.apache.sling.commons.log", "4.0.0"));
        opt.add(mavenBundle("org.apache.sling", "org.apache.sling.commons.logservice", "1.0.2"));

        opt.add(mavenBundle("commons-io", "commons-io", "2.4"));
        opt.add(mavenBundle("commons-fileupload", "commons-fileupload", "1.3.1"));
        opt.add(mavenBundle("commons-collections", "commons-collections", "3.2.1"));
        opt.add(mavenBundle("commons-codec", "commons-codec", "1.9"));
        opt.add(mavenBundle("commons-lang", "commons-lang", "2.6"));
        opt.add(mavenBundle("commons-pool", "commons-pool", "1.6"));

        opt.add(mavenBundle("org.apache.servicemix.bundles", "org.apache.servicemix.bundles.concurrent", "1.3.4_1"));
        opt.add(mavenBundle("org.apache.geronimo.bundles", "commons-httpclient", "3.1_1"));
        opt.add(mavenBundle("org.apache.tika", "tika-core", "1.9"));
        opt.add(mavenBundle("org.apache.tika", "tika-bundle", "1.9"));

        opt.add(mavenBundle("org.apache.felix", "org.apache.felix.http.jetty", "2.2.2"));
        opt.add(mavenBundle("org.apache.felix", "org.apache.felix.eventadmin", "1.3.2"));
        opt.add(mavenBundle("org.apache.felix", "org.apache.felix.scr", "1.8.2"));
        opt.add(mavenBundle("org.apache.felix", "org.apache.felix.configadmin", "1.8.0"));
        opt.add(mavenBundle("org.apache.felix", "org.apache.felix.inventory", "1.0.4"));

        opt.add(mavenBundle("org.apache.sling", "org.apache.sling.commons.osgi", "2.2.2"));
        opt.add(mavenBundle("org.apache.sling", "org.apache.sling.commons.json", "2.0.10"));
        opt.add(mavenBundle("org.apache.sling", "org.apache.sling.commons.mime", "2.1.8"));
        opt.add(mavenBundle("org.apache.sling", "org.apache.sling.commons.classloader", "1.3.2"));
        opt.add(mavenBundle("org.apache.sling", "org.apache.sling.commons.scheduler", "2.4.6"));
        opt.add(mavenBundle("org.apache.sling", "org.apache.sling.commons.threads", "3.2.0"));
        opt.add(mavenBundle("org.apache.sling", "org.apache.sling.launchpad.api", "1.1.0"));
        opt.add(mavenBundle("org.apache.sling", "org.apache.sling.auth.core", "1.3.6"));
        opt.add(mavenBundle("org.apache.sling", "org.apache.sling.discovery.api", "1.0.0"));
        opt.add(mavenBundle("org.apache.sling", "org.apache.sling.discovery.standalone", "1.0.0"));
        opt.add(mavenBundle("org.apache.sling", "org.apache.sling.api", "2.9.0"));
        opt.add(mavenBundle("org.apache.sling", "org.apache.sling.settings", "1.3.6"));
        opt.add(mavenBundle("org.apache.sling", "org.apache.sling.resourceresolver", "1.2.4"));
        opt.add(mavenBundle("org.apache.sling", "org.apache.sling.adapter", "2.1.2"));
        opt.add(mavenBundle("org.apache.sling", "org.apache.sling.jcr.resource", "2.5.0"));
        opt.add(mavenBundle("org.apache.sling", "org.apache.sling.jcr.classloader", "3.2.2"));
        opt.add(mavenBundle("org.apache.sling", "org.apache.sling.jcr.contentloader", "2.1.10"));
        opt.add(mavenBundle("org.apache.sling", "org.apache.sling.engine", "2.4.0"));
        opt.add(mavenBundle("org.apache.sling", "org.apache.sling.event", "3.6.0"));
        opt.add(mavenBundle("org.apache.sling", "org.apache.sling.serviceusermapper", "1.2.0"));
        opt.add(mavenBundle("org.apache.sling", "org.apache.sling.testing.tools", "1.0.8"));
        opt.add(mavenBundle("org.apache.httpcomponents", "httpcore-osgi", "4.4.1"));
        opt.add(mavenBundle("org.apache.httpcomponents", "httpclient-osgi", "4.4.1"));
        opt.add(mavenBundle("org.apache.sling", "org.apache.sling.jcr.jcr-wrapper", "2.0.0"));
        opt.add(mavenBundle("org.apache.sling", "org.apache.sling.jcr.api", "2.3.1-SNAPSHOT"));
        opt.add(mavenBundle("org.apache.sling", "org.apache.sling.jcr.base", "2.3.1-SNAPSHOT"));
        opt.add(junitBundles());
        return opt;
    }

    /** Register the item for deletion after the current test and return it. */
    private <ItemType extends Item> ItemType deleteAfterTests(ItemType it) throws RepositoryException {
        toDelete.add(it.getPath());
        return it;
    }

    /** Verify that admin can create and retrieve a node of the specified type.
     *  @return the path of the test node that was created.
     */
    private String assertCreateRetrieveNode(String nodeType) throws RepositoryException {
        Session s = repository.loginAdministrative(null);
        try {
            final Node root = s.getRootNode();
            final String name = uniqueName("assertCreateRetrieveNode");
            final String propName = "PN_" + name;
            final String propValue = "PV_" + name;
            final Node child = nodeType == null ? root.addNode(name) : root.addNode(name, nodeType);
            child.setProperty(propName, propValue);
            child.setProperty("foo", child.getPath());
            s.save();
            // Re-login to verify the node is visible from a fresh session
            s.logout();
            s = repository.loginAdministrative(null);
            final Node n = s.getNode("/" + name);
            assertNotNull(n);
            assertEquals(propValue, n.getProperty(propName).getString());
            return n.getPath();
        } finally {
            s.logout();
        }
    }

    /** Build a name that is unique within this test run. */
    protected String uniqueName(String hint) {
        return hint + "_" + uniqueNameCounter.incrementAndGet() + "_" + System.currentTimeMillis();
    }

    /** Remove all items registered via {@link #deleteAfterTests(Item)}. */
    @After
    public void deleteTestItems() throws RepositoryException {
        if(toDelete.isEmpty()) {
            return;
        }
        final Session s = repository.loginAdministrative(null);
        try {
            for(String path : toDelete) {
                if(s.itemExists(path)) {
                    s.getItem(path).remove();
                }
            }
            s.save();
            toDelete.clear();
        } finally {
            s.logout();
        }
    }

    @Test
    public void testRepositoryPresent() {
        assertNotNull(repository);
    }

    @Test
    public void testLoginAdministrative() throws RepositoryException {
        final Session s = repository.loginAdministrative(null);
        assertNotNull(s);
        s.logout();
    }

    @Test
    public void testExplicitAdminLogin() throws RepositoryException {
        final Credentials creds = new SimpleCredentials("admin", "admin".toCharArray());
        repository.login(creds).logout();
    }

    @Test(expected=RepositoryException.class)
    public void testWrongLogin() throws RepositoryException {
        final Credentials creds = new SimpleCredentials("badName", "badPAssword".toCharArray());
        repository.login(creds);
    }

    @Test
    public void testAnonymousLoginA() throws RepositoryException {
        final Session s = repository.login();
        assertNotNull(s);
        s.logout();
    }

    @Test
    public void testAnonymousLoginB() throws RepositoryException {
        final Session s = repository.login(null, null);
        assertNotNull(s);
        s.logout();
    }

    @Test
    public void testCreateRetrieveNode() throws RepositoryException {
        assertCreateRetrieveNode(null);
    }

    @Test
    public void testCreateRetrieveSlingFolder() throws RepositoryException {
        assertCreateRetrieveNode("sling:Folder");
    }

    @Test
    public void testAnonymousHasReadAccess() throws RepositoryException {
        final String path = assertCreateRetrieveNode(null);
        final Session s = repository.login();
        try {
            assertTrue("Expecting anonymous to see " + path, s.itemExists(path));
            final Node n = s.getNode(path);
            assertEquals("Expecting anonymous to see the foo property",
                    path, n.getProperty("foo").getString());
        } finally {
            s.logout();
        }
    }

    @Test
    public void testSqlQuery() throws RepositoryException {
        final Session s = repository.loginAdministrative(null);
        final String id = "ID_" + System.currentTimeMillis();
        final String propName = "PROP_" + id;
        final String value = "VALUE_" + id;
        try {
            // NOTE(review): these nodes are not registered in toDelete and are
            // left behind after the test — confirm whether that is intended.
            final int N_NODES = 100;
            for(int i=0 ; i < N_NODES; i++) {
                final Node root = s.getRootNode();
                root.addNode(id + i).setProperty(propName, value);
            }
            s.save();

            final String stmt = "SELECT * FROM nt:base WHERE " + propName + " IS NOT NULL";

            @SuppressWarnings("deprecation")
            final Query q = s.getWorkspace().getQueryManager().createQuery(stmt, Query.SQL);

            final NodeIterator it = q.execute().getNodes();
            int count = 0;
            while(it.hasNext()) {
                it.next();
                count++;
            }
            assertEquals("Expected " + N_NODES + " result for query " + stmt, N_NODES, count);
        } finally {
            s.logout();
        }
    }

    @Test
    public void testXpathQueryWithMixin() throws RepositoryException {
        Session s = repository.loginAdministrative(null);
        try {
            final String path = "XPATH_QUERY_" + System.currentTimeMillis();
            final String absPath = "/" + path;
            final Node n = deleteAfterTests(s.getRootNode().addNode(path));
            n.addMixin("mix:title");
            s.save();

            final String statement = "/jcr:root//element(*, mix:title)";

            @SuppressWarnings("deprecation")
            final Query q = s.getWorkspace().getQueryManager().createQuery(statement, Query.XPATH);

            final NodeIterator it = q.execute().getNodes();
            assertTrue("Expecting a non-empty result", it.hasNext());
            boolean found = false;
            while(it.hasNext()) {
                if(it.nextNode().getPath().equals(absPath)) {
                    found = true;
                    break;
                }
            }
            assertTrue("Expecting test node " + absPath + " to be found", found);
        } finally {
            s.logout();
        }
    }

    // TODO: see SLING-4841 why the template method pattern was removed
    //@Test
    //public final void checkRepositoryDescriptors() {
    //    doCheckRepositoryDescriptors();
    //}

    @Test
    public void testSingleValueInputStream() throws RepositoryException {
        Session s = repository.loginAdministrative(null);
        try {
            final String path = getClass().getSimpleName() + System.currentTimeMillis();
            final Node child = deleteAfterTests(s.getRootNode().addNode(path));
            final Property p = child.setProperty("foo", "bar");
            s.save();
            assertNotNull(p.getBinary().getStream());
        } finally {
            s.logout();
        }
    }

    @Test
    public void testMultiValueInputStream() throws RepositoryException {
        final Session s = repository.loginAdministrative(null);
        try {
            final String path = getClass().getSimpleName() + System.currentTimeMillis();
            final Node child = deleteAfterTests(s.getRootNode().addNode(path));
            final Property p = child.setProperty("foo", new String[] { "bar", "wii " });
            s.save();
            try {
                p.getBinary().getStream();
                fail("Expecting getStream() to fail on a multi-value Property");
            } catch(RepositoryException asExpected) {
                // expected: getStream() is undefined for multi-value properties
            }
        } finally {
            s.logout();
        }
    }

    @Test
    public void testOsgiResourceEvents() throws RepositoryException {
        final ResourceEventListener listener = new ResourceEventListener();
        final ServiceRegistration reg = listener.register(bundleContext, SlingConstants.TOPIC_RESOURCE_ADDED);
        final Session s = repository.loginAdministrative(null);
        final int nPaths = 2500 * TEST_SCALE;
        final int timeoutMsec = 2 * nPaths;
        final String prefix = uniqueName("testOsgiResourceEvents");

        // Create N nodes with a unique name under /
        // and verify that ResourceEventListener gets an event
        // for each of them
        try {
            for(int i=0; i < nPaths; i++) {
                s.getRootNode().addNode(prefix + i);
            }
            s.save();

            log.info("Added {} nodes, checking what ResourceEventListener got...", nPaths);
            final long timeout = System.currentTimeMillis() + timeoutMsec;
            final Set<String> missing = new HashSet<String>();
            while(System.currentTimeMillis() < timeout) {
                missing.clear();
                final Set<String> paths = listener.getPaths();
                for(int i=0; i < nPaths; i++) {
                    final String path = "/" + prefix + i;
                    if(!paths.contains(path)) {
                        missing.add(path);
                    }
                }
                if(missing.isEmpty()) {
                    break;
                }
            }

            if(!missing.isEmpty()) {
                final String missingStr = missing.size() > 10
                        ? missing.size() + " paths missing"
                        : missing.toString();
                fail("OSGi add resource events are missing for "
                        + missing.size() + "/" + nPaths + " paths after "
                        + timeoutMsec + " msec: " + missingStr);
            }
        } finally {
            reg.unregister();
            s.logout();
        }

        log.info("Successfuly detected OSGi observation events for " + nPaths + " paths");
    }

    @Test
    public void testNodetypeObservation() throws Exception {
        Session s = repository.loginAdministrative(null);
        final Reader cnd = new StringReader(I18N_MESSAGE_CND);
        JcrEventsCounter counter = null;
        final String path = "/" + uniqueName("observation");

        // Add a sling:MessageEntry and verify that we get JCR events
        try {
            CndImporter.registerNodeTypes(cnd, s);
            counter = new JcrEventsCounter();

            final Node n = s.getRootNode().addNode(path.substring(1), "sling:MessageEntry");
            toDelete.add(n.getPath());
            n.setProperty("sling:key", "foo");
            n.setProperty("sling:message", "bar");
            s.save();

            final JcrEventsCounter c = counter;
            new Retry(5000) {
                @Override
                protected void exec() throws Exception {
                    assertTrue("Expecting JCR events after adding " + path, c.get() > 0);
                }
            };
        } finally {
            s.logout();
            cnd.close();
            if(counter != null) {
                counter.close();
            }
        }

        // In a separate session, modify node and verify that we get events
        counter = new JcrEventsCounter();
        s = repository.loginAdministrative(null);
        try {
            final Node n = s.getNode(path);
            n.setProperty("sling:message", "CHANGED now");
            s.save();

            final JcrEventsCounter c = counter;
            new Retry(5000) {
                @Override
                protected void exec() throws Exception {
                    assertTrue("Expecting JCR events after modifying " + path, c.get() > 0);
                }
            };
        } finally {
            s.logout();
            cnd.close();
            counter.close();
        }
    }

    /** Resolve the SlingRepository service and warm up the JcrResourceProvider.
     *  Fails the test run early (instead of NPE-ing later) if either step
     *  does not complete within its timeout. */
    public void setup() throws Exception {
        final ServiceTracker st = new ServiceTracker(bundleContext, SlingRepository.class.getName(), null);
        st.open(true);
        try {
            this.repository = (SlingRepository) st.waitForService(10000);
        } catch (InterruptedException e) {
            // Restore the interrupt status instead of silently swallowing it,
            // and fail fast: no test in this class can run without a repository.
            Thread.currentThread().interrupt();
            fail("Interrupted while waiting for the SlingRepository service");
        }
        assertNotNull("Expecting SlingRepository within 10 seconds", repository);

        // Make sure the JcrResourceProvider is initialized, as it
        // setups conversion of JCR to OSGi events, and some tests use this
        // @Injecting the ResourceResolverFactory fails, haven't found why.
        final int timeout = 10;
        final long timeoutAt = System.currentTimeMillis() + (timeout * 1000L);
        ServiceReference ref = null;
        while(System.currentTimeMillis() < timeoutAt) {
            ref = bundleContext.getServiceReference(ResourceResolverFactory.class.getName());
            if(ref != null) {
                break;
            }
            Thread.sleep(50); // avoid busy-spinning on the service registry
        }

        assertNotNull("Expecting ResourceResolverFactory within " + timeout + " seconds", ref);

        ResourceResolver rr = null;
        try {
            final ResourceResolverFactory f = (ResourceResolverFactory)bundleContext.getService(ref);
            rr = f.getAdministrativeResourceResolver(null);
            rr.getResource("/");
        } finally {
            if(rr != null) {
                rr.close();
            }
            bundleContext.ungetService(ref);
        }
    }
}
/*
 * Copyright 2016 BananaRama.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.bananarama.crud.util;

import com.googlecode.cqengine.query.option.QueryOptions;
import java.io.IOException;
import org.bananarama.crud.CreateOperation;
import org.bananarama.crud.DeleteOperation;
import org.bananarama.crud.ReadOperation;
import org.bananarama.crud.UpdateOperation;
import org.bananarama.crud.Adapter;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * Adapter that "welds" two or more underlying adapters together: every write
 * operation (create, update, delete) is fanned out to all of them, and every
 * read merges the results of all of them.
 * <p>
 * Because a {@link Stream} can be consumed only once, incoming data streams
 * are buffered into a list so that each underlying adapter receives a fresh
 * stream over the full data set.
 *
 * @author Guglielmo De Concini
 */
@SuppressWarnings("unchecked")
public abstract class WeldingAdapter<S> implements Adapter<S> {

    private final List<Adapter<S>> adapters;

    /**
     * @param adapters the adapters to weld together; at least two are required
     * @throws IllegalArgumentException if fewer than two adapters are supplied
     */
    public WeldingAdapter(Adapter<S>... adapters) {
        if (adapters == null || adapters.length < 2)
            throw new IllegalArgumentException("Welding requires at least 2 adapters");

        this.adapters = Arrays.asList(adapters);
    }

    @Override
    public <T extends S> CreateOperation<T> create(Class<T> clazz) {
        return new CreateOperation<T>() {
            @Override
            public CreateOperation<T> from(Stream<T> data) {
                // Buffer: the incoming stream is one-shot, but every adapter
                // must see the full data set.
                final List<T> buffered = data.collect(Collectors.toList());
                adapters.parallelStream()
                        .forEach(adapter -> adapter.create(clazz).from(buffered.stream()));
                return this;
            }

            @Override
            public CreateOperation<T> from(Stream<T> data, QueryOptions options) {
                final List<T> buffered = data.collect(Collectors.toList());
                adapters.parallelStream()
                        .forEach(adapter -> adapter.create(clazz).from(buffered.stream(), options));
                return this;
            }

            @Override
            public void close() throws IOException {
                throw new UnsupportedOperationException("Not supported yet.");
            }
        };
    }

    @Override
    public <T extends S> ReadOperation<T> read(Class<T> clazz) {
        return new ReadOperation<T>() {
            @Override
            public Stream<T> all() {
                // Reads merge the results of every underlying adapter.
                return adapters.parallelStream()
                        .flatMap(adapter -> adapter.read(clazz).all());
            }

            @Override
            public Stream<T> all(QueryOptions options) {
                return adapters.parallelStream()
                        .flatMap(adapter -> adapter.read(clazz).all(options));
            }

            @Override
            public <Q> Stream<T> where(Q whereClause) {
                return adapters.parallelStream()
                        .flatMap(adapter -> adapter.read(clazz).where(whereClause));
            }

            @Override
            public <Q> Stream<T> where(Q whereClause, QueryOptions options) {
                return adapters.parallelStream()
                        .flatMap(adapter -> adapter.read(clazz).where(whereClause, options));
            }

            @Override
            public Stream<T> fromKeys(List<?> keys) {
                return adapters.parallelStream()
                        .flatMap(adapter -> adapter.read(clazz).fromKeys(keys));
            }

            @Override
            public Stream<T> fromKeys(List<?> keys, QueryOptions options) {
                return adapters.parallelStream()
                        .flatMap(adapter -> adapter.read(clazz).fromKeys(keys, options));
            }

            @Override
            public void close() throws IOException {
                throw new UnsupportedOperationException("Not supported yet.");
            }
        };
    }

    @Override
    public <T extends S> UpdateOperation<T> update(Class<T> clazz) {
        return new UpdateOperation<T>() {
            @Override
            public UpdateOperation<T> from(Stream<T> data) {
                final List<T> buffered = data.collect(Collectors.toList());
                adapters.parallelStream()
                        .forEach(adapter -> adapter.update(clazz).from(buffered.stream()));
                return this;
            }

            @Override
            public UpdateOperation<T> from(Stream<T> data, QueryOptions options) {
                final List<T> buffered = data.collect(Collectors.toList());
                adapters.parallelStream()
                        .forEach(adapter -> adapter.update(clazz).from(buffered.stream(), options));
                return this;
            }

            @Override
            public void close() throws IOException {
                throw new UnsupportedOperationException("Not supported yet.");
            }
        };
    }

    @Override
    public <T extends S> DeleteOperation<T> delete(Class<T> clazz) {
        return new DeleteOperation<T>() {
            @Override
            public <Q> DeleteOperation<T> where(Q whereClaus) {
                adapters.parallelStream()
                        .forEach(adapter -> adapter.delete(clazz).where(whereClaus));
                return this;
            }

            @Override
            public <Q> DeleteOperation<T> where(Q whereClaus, QueryOptions options) {
                // Fixed: options were previously dropped when delegating.
                adapters.parallelStream()
                        .forEach(adapter -> adapter.delete(clazz).where(whereClaus, options));
                return this;
            }

            @Override
            public DeleteOperation<T> from(Stream<T> data) {
                final List<T> buffered = data.collect(Collectors.toList());
                adapters.parallelStream()
                        .forEach(adapter -> adapter.delete(clazz).from(buffered.stream()));
                return this;
            }

            @Override
            public DeleteOperation<T> from(Stream<T> data, QueryOptions options) {
                final List<T> buffered = data.collect(Collectors.toList());
                adapters.parallelStream()
                        .forEach(adapter -> adapter.delete(clazz).from(buffered.stream(), options));
                return this;
            }

            @Override
            public void close() throws IOException {
                throw new UnsupportedOperationException("Not supported yet.");
            }

            @Override
            public DeleteOperation<T> all() {
                adapters.parallelStream()
                        .forEach(adapter -> adapter.delete(clazz).all());
                return this;
            }
        };
    }
}
/*************************GO-LICENSE-START********************************* * Copyright 2014 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *************************GO-LICENSE-END***********************************/ package com.thoughtworks.go.presentation.pipelinehistory; import java.util.Date; import com.thoughtworks.go.config.CaseInsensitiveString; import com.thoughtworks.go.config.MingleConfig; import com.thoughtworks.go.config.TrackingTool; import com.thoughtworks.go.config.materials.MaterialConfigs; import com.thoughtworks.go.config.materials.dependency.DependencyMaterialConfig; import com.thoughtworks.go.domain.MaterialRevision; import com.thoughtworks.go.domain.MaterialRevisions; import com.thoughtworks.go.domain.NullMaterialRevision; import com.thoughtworks.go.domain.PipelineIdentifier; import com.thoughtworks.go.domain.PipelineInfo; import com.thoughtworks.go.domain.PipelineTimelineEntry; import com.thoughtworks.go.domain.StageIdentifier; import com.thoughtworks.go.domain.buildcause.BuildCause; import com.thoughtworks.go.domain.materials.MaterialConfig; import com.thoughtworks.go.domain.materials.Revision; import com.thoughtworks.go.util.TimeConverter; public class PipelineInstanceModel implements PipelineInfo { private long id; private String name; private String label; private BuildCause buildCause; private StageInstanceModels stageHistory; protected boolean canRun; private Integer counter; private MaterialRevisions latest = new MaterialRevisions(); 
private MaterialConfigs materialConfigs = new MaterialConfigs(); protected boolean isPreparingToSchedule; private PipelineTimelineEntry pipelineAfter; private PipelineTimelineEntry pipelineBefore; private boolean canUnlock; private boolean lockable; private double naturalOrder; private int previousPipelineCounter; private String previousPipelineLabel; private boolean isCurrentlyLocked; private TrackingTool trackingTool; private MingleConfig mingleConfig; private PipelineInstanceModel() { stageHistory = new StageInstanceModels(); } public PipelineInstanceModel(String name, Integer counter, String label, BuildCause buildCause, StageInstanceModels stageHistory) { this.name = name; this.counter = counter; this.label = label; this.buildCause = buildCause; this.stageHistory = stageHistory; } public boolean hasHistoricalData() { return true; } public BuildCause getBuildCause() { return buildCause; } public void setBuildCause(BuildCause buildCause) { this.buildCause = buildCause; } public long getId() { return id; } public void setId(long id) { this.id = id; } public String getName() { return name; } public String getUri() { return "/" + name + "/" + label; } public String getBuildCauseMessage() { return buildCause == null ? "Unknown" : buildCause.getBuildCauseMessage(); } public boolean hasStageBeenRun(String stageName) { return stageHistory.hasStage(stageName); } public String nextStageName(String stageName) { return stageHistory.nextStageName(stageName); } public void setName(String name) { this.name = name; } public StageInstanceModels getStageHistory() { return stageHistory; } public String getBuildCauseBy() { return buildCause.getBuildCauseMessage(); } public void setStageHistory(StageInstanceModels stageHistory) { this.stageHistory = stageHistory; } public Date getScheduledDate() { return stageHistory == null ? 
null : stageHistory.getScheduledDate(); } public TimeConverter.ConvertedTime getCreatedTimeForDisplay() { return TimeConverter.convertHandleNull(getScheduledDate()); } public String getLabel() { return label; } public void setLabel(String label) { this.label = label; } public boolean isAnyStageActive() { for (StageInstanceModel stage : stageHistory) { if (stage.getState().isActive()) { return true; } } return false; } public boolean hasPreviousStageBeenScheduled(String stageName) { StageInstanceModel stage = stageHistory.byName(stageName); int index = stageHistory.indexOf(stage); if (index == 0) { return true; } if (index > 0) { return stageHistory.get(index - 1).isScheduled(); } return false; } public void selectStage(String selectedStageName) { StageInstanceModel stage = stageHistory.byName(selectedStageName); if (stage != null && stage.isScheduled()) { stage.setSelected(true); } } public boolean isScheduled() { for (StageInstanceModel stageInstanceModel : stageHistory) { if (stageInstanceModel.isScheduled()) { return true; } } return false; } public void setCanRun(boolean canRun) { this.canRun = canRun; } public boolean getCanRun() { return canRun; } public String getRevisionOfLatestModification() { return abbreviate(buildCause.getMaterialRevisions().latestRevision()); } private String abbreviate(String originalRevision) { return originalRevision.length() < 12 ? 
originalRevision : originalRevision.substring(0, 12) + "..."; } public Integer getCounter() { return counter; } public void setCounter(Integer counter) { this.counter = counter; } public PipelineIdentifier getPipelineIdentifier() { return new PipelineIdentifier(name, counter, label); } public void setMaterialRevisionsOnBuildCause(MaterialRevisions materialRevisions) { buildCause.setMaterialRevisions(materialRevisions); } public MaterialRevisions getCurrentRevisions() { return buildCause.getMaterialRevisions(); } public MaterialRevisions getLatestRevisions() { return latest; } public MaterialConfigs getMaterials() { return materialConfigs; } public Revision getLatestRevision(MaterialConfig materialConfig) { return revisionFor(getLatestMaterialRevision(materialConfig)); } private Revision revisionFor(MaterialRevision materialRevision) { return materialRevision.hasModifications() ? materialRevision.getRevision() : UNKNOWN_REVISION; } public MaterialRevision getLatestMaterialRevision(MaterialConfig materialConfig) { return findMaterialRevisionOf(materialConfig, latest); } public Revision getCurrentRevision(MaterialConfig materialConfig) { return revisionFor(findMaterialRevisionOf(materialConfig, buildCause.getMaterialRevisions())); } private MaterialRevision findMaterialRevisionOf(MaterialConfig materialConfig, MaterialRevisions materialRevisions) { for (MaterialRevision materialRevision : materialRevisions) { if (materialRevision.getMaterial().hasSameFingerprint(materialConfig)) { return materialRevision; } } return new NullMaterialRevision(); } public MaterialRevision findCurrentMaterialRevisionForUI(MaterialConfig materialConfig) { MaterialRevision materialRevision = findCurrentMaterialRevisionUsingPipelineUniqueFingerprint(materialConfig); if (materialRevision == null) { materialRevision = findCurrentMaterialRevisionUsingFingerprint(materialConfig); } return materialRevision; } private MaterialRevision findCurrentMaterialRevisionUsingFingerprint(MaterialConfig 
materialConfig) { for (MaterialRevision materialRevision : buildCause.getMaterialRevisions()) { if (materialRevision.getMaterial().hasSameFingerprint(materialConfig)) { return materialRevision; } } return null; } private MaterialRevision findCurrentMaterialRevisionUsingPipelineUniqueFingerprint(MaterialConfig materialConfig) { for (MaterialRevision materialRevision : buildCause.getMaterialRevisions()) { if (materialRevision.getMaterial().getPipelineUniqueFingerprint().equals(materialConfig.getPipelineUniqueFingerprint())) { return materialRevision; } } return null; } public void setLatestRevisions(MaterialRevisions latest) { this.latest = latest; } public void setMaterialConfigs(MaterialConfigs materialConfigs) { this.materialConfigs = materialConfigs; } public boolean hasModificationsFor(MaterialConfig materialConfig) { return getLatestRevision(materialConfig).isRealRevision(); } public Revision getCurrentRevision(String requestedMaterialName) { for (MaterialRevision materialRevision : getCurrentRevisions()) { String materialName = CaseInsensitiveString.str(materialRevision.getMaterial().getName()); if(materialName != null && materialName.equals(requestedMaterialName)) { return materialRevision.getRevision(); } } throw new RuntimeException("material not known for pipeline " + getName()); } public String getApprovedBy() { return getStageHistory().first().getApprovedBy(); } public Boolean isLatestStageUnsuccessful() { return stageHistory.isLatestStageUnsuccessful(); } public Boolean isLatestStageSuccessful() { return stageHistory.isLatestStageSuccessful(); } public StageInstanceModel latestStage() { return stageHistory.latestStage(); } public int indexOf(StageInstanceModel stageInstanceModel) { return stageHistory.indexOf(stageInstanceModel); } public int numberOfStages() { return stageHistory.size(); } public Boolean isRunning() { for (StageInstanceModel model : stageHistory) { if (model instanceof NullStageHistoryItem || model.isRunning()) { return true; } } 
return false; } public boolean hasNeverCheckedForRevisions() { return latest.isEmpty(); } public boolean hasNewRevisions() { for (MaterialConfig materialConfig : materialConfigs) { if (hasNewRevisions(materialConfig)) { return true; } } return false; } public boolean hasNewRevisions(MaterialConfig materialConfig) { Revision currentRevision = getCurrentRevision(materialConfig); Revision revision = getLatestRevision(materialConfig); return !currentRevision.equals(revision); } public boolean isPreparingToSchedule() { return isPreparingToSchedule; } public String getPipelineStatusMessage() { StageInstanceModel latestStage = stageHistory.latestStage(); return String.format("%s: %s", latestStage.getState(), latestStage.getName()); } /** * @depracated use the other construction methods */ public static PipelineInstanceModel createEmptyModel() { return new PipelineInstanceModel(); } public static PipelineInstanceModel createPreparingToSchedule(String name, StageInstanceModels stages) { return new PreparingToScheduleInstance(name, stages); } public static PipelineInstanceModel createPipeline(String name, Integer counter, String label, BuildCause buildCause, StageInstanceModels stageHistory) { return new PipelineInstanceModel(name, counter, label, buildCause, stageHistory); } public static EmptyPipelineInstanceModel createEmptyPipelineInstanceModel(String pipelineName, BuildCause withEmptyModifications, StageInstanceModels stageHistory) { return new EmptyPipelineInstanceModel(pipelineName, withEmptyModifications, stageHistory); } public boolean isLatestStage(StageInstanceModel stage) { return stageHistory.isLatestStage(stage); } public PipelineTimelineEntry getPipelineBefore() { return pipelineBefore; } public PipelineTimelineEntry getPipelineAfter() { return pipelineAfter; } public void setPipelineAfter(PipelineTimelineEntry pipelineAfter) { this.pipelineAfter = pipelineAfter; } public void setPipelineBefore(PipelineTimelineEntry pipelineBefore) { this.pipelineBefore = 
pipelineBefore; } public boolean canUnlock() { return canUnlock; } public void setCanUnlock(boolean canUnlock) { this.canUnlock = canUnlock; } public boolean isLockable() { return lockable; } public void setIsLockable(boolean isLockable) { this.lockable = isLockable; } public StageInstanceModel activeStage() { for (StageInstanceModel stageInstanceModel : stageHistory) { if (stageInstanceModel.getState().isActive()) return stageInstanceModel; } return null; } public void setNaturalOrder(double naturalOrder) { this.naturalOrder = naturalOrder; } public double getNaturalOrder() { return naturalOrder; } public static final Revision UNKNOWN_REVISION = new Revision() { public static final String NO_HISTORICAL_DATA = "No historical data"; public String getRevision() { return NO_HISTORICAL_DATA; } public String getRevisionUrl() { return NO_HISTORICAL_DATA; } public boolean isRealRevision() { return false; } }; public StageInstanceModel stage(String stageName) { return getStageHistory().byName(stageName); } public String getPreviousLabel() { return previousPipelineLabel; } public int getPreviousCounter() { return previousPipelineCounter; } public void setPreviousPipelineCounter(int counter) { this.previousPipelineCounter = counter; } public void setPreviousPipelineLabel(String label) { this.previousPipelineLabel = label; } public boolean hasStage(StageIdentifier identifier) { for (StageInstanceModel instanceModel : stageHistory) { if(identifier.equals(instanceModel.getIdentifier())) return true; } return false; } public String getApprovedByForDisplay() { return "Triggered by " + getApprovedBy(); } public boolean isCurrentlyLocked() { return isCurrentlyLocked; } public void setCurrentlyLocked(boolean isCurrentlyLocked) { this.isCurrentlyLocked = isCurrentlyLocked; } public boolean isBisect() { double naturalOrder = this.naturalOrder; return isBisect(naturalOrder); } public static boolean isBisect(double naturalOrder) { return naturalOrder - new 
Double(naturalOrder).intValue() > 0;//TODO: may be we should be using long, as int can lead to truncation } public TrackingTool getTrackingTool() { return trackingTool; } public void setTrackingTool(TrackingTool trackingTool) { this.trackingTool = trackingTool; } public MingleConfig getMingleConfig() { return mingleConfig; } public void setMingleConfig(MingleConfig mingleConfig) { this.mingleConfig = mingleConfig; } public DependencyMaterialConfig findDependencyMaterial(CaseInsensitiveString pipelineName) { return getMaterials().findDependencyMaterial(pipelineName); } }
/* * The MIT License * * Copyright 2017 mkanis. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package org.fit.vutbr.relaxdms.rest.impl; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import java.io.IOException; import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; import javax.ejb.Stateless; import javax.inject.Inject; import javax.ws.rs.core.Context; import javax.ws.rs.core.Response; import javax.ws.rs.core.SecurityContext; import org.fit.vutbr.relaxdms.api.security.AuthController; import org.fit.vutbr.relaxdms.api.service.DocumentService; import org.fit.vutbr.relaxdms.api.service.WorkflowService; import org.fit.vutbr.relaxdms.api.system.Convert; import org.fit.vutbr.relaxdms.data.db.dao.model.Document; import org.fit.vutbr.relaxdms.data.db.dao.model.DocumentMetadata; import org.fit.vutbr.relaxdms.data.db.dao.model.workflow.Workflow; import org.fit.vutbr.relaxdms.rest.api.DocumentRest; /** * * @author Martin Kanis */ @Stateless public class DocumentRestImpl implements DocumentRest { @Inject private DocumentService documentService; @Inject private WorkflowService workflowService; @Inject private AuthController authController; @Context private SecurityContext sc; @Inject private Convert convert; @Override public List<String> getAllDocIds() { return documentService.getAllDocIds(); } @Override public String getCurrentRevision(String id) { // create valid JSON string by adding "" return "\"" + documentService.getCurrentRevision(id) + "\""; }; @Override public List<String> getRevisions(String id) { return convert.revisionToString(documentService.getRevisions(id)); } @Override public Response read(String id) { if (workflowService.isUserAuthorized(id, authController.getUserName(sc))) { JsonNode doc = documentService.getDocumentById(id); return Response.ok(doc).build(); } else { return Response.status(403).entity("Can't read document due missing permissions!").build(); } } @Override // secured by security-constraint in web.xml in relax-dms-web module public Response create(String 
json) { String author = authController.getUserName(sc); List<JsonNode> templates = documentService.getAllTemplates(); try { JsonNode jsonNode = new ObjectMapper().readValue(json, JsonNode.class); byte[] data = documentService.getDataFromJson(jsonNode); byte[] attachments = documentService.getAttachmentsFromJson(jsonNode); DocumentMetadata metadata = new DocumentMetadata(); metadata.setAuthor(author); metadata.setLastModifiedBy(author); List<JsonNode> matchedTemplates = templates.stream().filter((template) -> (documentService.validateJsonDataWithSchema(jsonNode, template))).collect(Collectors.toList()); if (matchedTemplates.isEmpty()) { return Response.status(400).entity("Provided JSON doesn't match any available template!").build(); } JsonNode schema = matchedTemplates.get(0); String id = schema.get("_id").asText(); String rev = schema.get("_rev").asText(); metadata.setSchemaId(id); metadata.setSchemaRev(rev); Document docData = new Document(data, attachments, metadata, new Workflow()); documentService.storeDocument(jsonNode, docData); } catch (IOException ex) { return Response.status(400).entity("Jackson error: Could not serialize provided object!").build(); } return Response.status(201).entity("Created").build(); } @Override public Response delete(String json) { try { JsonNode jsonNode = new ObjectMapper().readValue(json, JsonNode.class); String docId = jsonNode.get("_id").asText(); // doc is not read-only if (!workflowService.isReadOnly(docId)) { // user is authorized to delete document if (authController.isUserAuthorized(sc, "writer") && workflowService.isUserAuthorized(docId, authController.getUserName(sc))) { documentService.deleteDocument(jsonNode); return Response.ok().build(); } else { return Response.status(403).entity("Forbidden: Can't delete document due missing permissions!").build(); } } } catch (IOException ex) { return Response.status(500).entity("Jackson error: Could not serialize provided object!").build(); } catch (Exception ex) { return 
Response.status(404).entity(ex.getMessage()).build(); } return Response.status(403).entity("Forbidden: Can't delete document due its state!").build(); } @Override public Response update(String json) { try { JsonNode jsonNode = new ObjectMapper().readValue(json, JsonNode.class); byte[] data = documentService.getDataFromJson(jsonNode); byte[] attachments = documentService.getAttachmentsFromJson(jsonNode); DocumentMetadata metadata = documentService.getMetadataFromJson(jsonNode); Workflow workflow = workflowService.getWorkflowFromJson(jsonNode); String docId = jsonNode.get("_id").asText(); byte[] currentData = documentService.getDataFromDoc(docId); // user is authorized to edit document if (authController.isUserAuthorized(sc, "writer") && workflowService.isUserAuthorized(docId, authController.getUserName(sc))) { // if the document is read-only and we want to change data of document, return error if (workflowService.isReadOnly(docId)) { if (!Arrays.equals(currentData, data)) { return Response.status(403).entity("Forbidden: Can't update document due its state!").build(); } } JsonNode diff = documentService.updateDocument(new Document(data, attachments, metadata, workflow)); // no diff, update was succesfull if (diff.isNull()) { return Response.status(200).entity("Updated").build(); } else { return Response.status(409).entity("Conflict: Document has changed!").build(); } } else { return Response.status(403).entity("Forbidden: Can't delete document due missing permissions!").build(); } } catch (IOException ex) { return Response.status(500).entity("Jackson error: Could not serialize provided object!").build(); } } @Override public Response getAllTemplates() { if (authController.isUserAuthorized(sc, "writer")) { return Response.ok(documentService.getAllTemplates()).build(); } return Response.status(403).entity("Forbidden: Can't see templates due missing permissions!").build(); } @Override public Response getDocsHeadersByAuthor() { String author = 
authController.getUserName(sc); return Response.ok(documentService.getDocumentsByAuthor(author)).build(); } @Override public Response getDocsHeadersByAssignee() { String assignee = authController.getUserName(sc); return Response.ok(documentService.getDocumentsByAssignee(assignee)).build(); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.internal.cache.tx;

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.mockito.ArgumentMatchers.argThat;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.anyInt;
import static org.mockito.Mockito.anyLong;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import java.util.concurrent.ExecutorService;

import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;

import org.apache.geode.cache.CacheClosedException;
import org.apache.geode.cache.RegionDestroyedException;
import org.apache.geode.cache.TransactionException;
import org.apache.geode.distributed.DistributedSystemDisconnectedException;
import org.apache.geode.distributed.internal.ClusterDistributionManager;
import org.apache.geode.distributed.internal.InternalDistributedSystem;
import org.apache.geode.distributed.internal.OperationExecutors;
import org.apache.geode.distributed.internal.ReplyException;
import org.apache.geode.distributed.internal.ReplyProcessor21;
import org.apache.geode.distributed.internal.membership.InternalDistributedMember;
import org.apache.geode.internal.cache.GemFireCacheImpl;
import org.apache.geode.internal.cache.LocalRegion;
import org.apache.geode.internal.cache.RemoteOperationException;
import org.apache.geode.internal.cache.TXManagerImpl;
import org.apache.geode.internal.cache.TXStateProxy;
import org.apache.geode.internal.cache.TXStateProxyImpl;
import org.apache.geode.test.fake.Fakes;

/**
 * Unit tests for RemoteOperationMessage, exercising its transaction masquerading,
 * error-to-reply mapping, and dispatch logic against a fully mocked cache/DM.
 * The concrete message under test is the TestableRemoteOperationMessage subclass
 * declared at the bottom of this class.
 */
public class RemoteOperationMessageTest {
  private TestableRemoteOperationMessage msg; // the class under test

  private InternalDistributedMember sender;
  private final String regionPath = "regionPath";

  private GemFireCacheImpl cache;
  private InternalDistributedSystem system;
  private ClusterDistributionManager dm;
  private LocalRegion r;
  private TXManagerImpl txMgr;
  private final long startTime = 0;
  private TXStateProxy tx;

  @Before
  public void setUp() throws Exception {
    // Fakes.cache() wires up a mock cache with a mock system and DM.
    cache = Fakes.cache();
    system = cache.getSystem();
    dm = (ClusterDistributionManager) system.getDistributionManager();
    r = mock(LocalRegion.class);
    txMgr = mock(TXManagerImpl.class);
    tx = mock(TXStateProxyImpl.class);
    OperationExecutors executors = mock(OperationExecutors.class);
    ExecutorService executorService = mock(ExecutorService.class);
    when(cache.getRegionByPathForProcessing(regionPath)).thenReturn(r);
    when(cache.getTxManager()).thenReturn(txMgr);
    when(dm.getExecutors()).thenReturn(executors);
    when(executors.getWaitingThreadPool()).thenReturn(executorService);

    sender = mock(InternalDistributedMember.class);
    InternalDistributedMember recipient = mock(InternalDistributedMember.class);
    ReplyProcessor21 processor = mock(ReplyProcessor21.class);
    // make it a spy to aid verification
    msg = spy(new TestableRemoteOperationMessage(recipient, regionPath, processor));
  }

  @Test
  public void messageWithNoTXPerformsOnRegion() throws Exception {
    // masqueradeAs returning null means no transaction context for this message.
    when(txMgr.masqueradeAs(msg)).thenReturn(null);
    msg.setSender(sender);

    msg.doRemoteOperation(dm, cache);

    verify(msg, times(1)).operateOnRegion(dm, r, startTime);
    verify(dm, times(1)).putOutgoing(any());
  }

  @Test
  public void messageForNotFinishedTXPerformsOnRegion() throws Exception {
    when(txMgr.masqueradeAs(msg)).thenReturn(tx);
    when(tx.isInProgress()).thenReturn(true);
    msg.setSender(sender);

    msg.doRemoteOperation(dm, cache);

    verify(msg, times(1)).operateOnRegion(dm, r, startTime);
    verify(dm, times(1)).putOutgoing(any());
  }

  @Test
  public void messageForFinishedTXDoesNotPerformOnRegion() throws Exception {
    when(txMgr.masqueradeAs(msg)).thenReturn(tx);
    when(tx.isInProgress()).thenReturn(false);
    msg.setSender(sender);

    msg.doRemoteOperation(dm, cache);

    verify(msg, times(0)).operateOnRegion(dm, r, startTime);

    // A reply is sent even though we do not call operationOnRegion
    verify(dm, times(1)).putOutgoing(any());
  }

  @Test
  public void messageForFinishedTXRepliesWithException() throws Exception {
    when(txMgr.masqueradeAs(msg)).thenReturn(tx);
    when(tx.isInProgress()).thenReturn(false);
    msg.setSender(sender);

    msg.doRemoteOperation(dm, cache);

    // the reply for a finished TX must wrap a TransactionException
    verify(msg, times(1)).sendReply(
        eq(sender),
        eq(0),
        eq(dm),
        argThat(ex -> ex != null && ex.getCause() instanceof TransactionException),
        eq(r),
        eq(startTime));
  }

  @Test
  public void noNewTxProcessingAfterTXManagerImplClosed() throws Exception {
    when(txMgr.masqueradeAs(msg)).thenReturn(tx);
    when(txMgr.isClosed()).thenReturn(true);
    msg.setSender(sender);

    msg.doRemoteOperation(dm, cache);

    verify(msg, times(0)).operateOnRegion(dm, r, startTime);

    // If we do not respond what prevents the sender from waiting forever?
    verify(dm, times(0)).putOutgoing(any());
  }

  @Test
  public void processWithNullCacheSendsReplyContainingCacheClosedException() throws Exception {
    when(dm.getExistingCache()).thenReturn(null);
    msg.setSender(sender);

    msg.process(dm);

    verify(msg, times(0)).operateOnRegion(dm, r, startTime);
    verify(dm, times(1)).putOutgoing(any());
    ArgumentCaptor<ReplyException> captor = ArgumentCaptor.forClass(ReplyException.class);
    verify(msg, times(1)).sendReply(any(), anyInt(), eq(dm), captor.capture(), any(),
        eq(startTime));
    assertThat(captor.getValue().getCause()).isInstanceOf(CacheClosedException.class);
  }

  @Test
  public void processWithDisconnectingDSAndClosedCacheSendsReplyContainingCachesClosedException()
      throws Exception {
    CacheClosedException reasonCacheWasClosed = mock(CacheClosedException.class);
    when(system.isDisconnecting()).thenReturn(true);
    when(cache.getCacheClosedException(any())).thenReturn(reasonCacheWasClosed);
    msg.setSender(sender);

    msg.doRemoteOperation(dm, cache);

    verify(msg, times(0)).operateOnRegion(dm, r, startTime);
    verify(dm, times(1)).putOutgoing(any());
    ArgumentCaptor<ReplyException> captor = ArgumentCaptor.forClass(ReplyException.class);
    verify(msg, times(1)).sendReply(any(), anyInt(), eq(dm), captor.capture(), any(),
        eq(startTime));
    // the cache's own "closed" exception instance must be propagated unchanged
    assertThat(captor.getValue().getCause()).isSameAs(reasonCacheWasClosed);
  }

  @Test
  public void processWithNullPointerExceptionFromOperationOnRegionWithNoSystemFailureSendsReplyWithNPE()
      throws Exception {
    when(msg.operateOnRegion(dm, r, startTime)).thenThrow(NullPointerException.class);
    // suppress the system-failure check so the NPE is treated as an ordinary error
    doNothing().when(msg).checkForSystemFailure();
    msg.setSender(sender);

    msg.doRemoteOperation(dm, cache);

    verify(dm, times(1)).putOutgoing(any());
    ArgumentCaptor<ReplyException> captor = ArgumentCaptor.forClass(ReplyException.class);
    verify(msg, times(1)).sendReply(any(), anyInt(), eq(dm), captor.capture(), eq(r),
        eq(startTime));
    assertThat(captor.getValue().getCause()).isInstanceOf(NullPointerException.class);
  }

  @Test
  public void processWithNullPointerExceptionFromOperationOnRegionWithNoSystemFailureAndIsDisconnectingSendsReplyWithRemoteOperationException()
      throws Exception {
    when(msg.operateOnRegion(dm, r, startTime)).thenThrow(NullPointerException.class);
    doNothing().when(msg).checkForSystemFailure();
    // first check says connected, second (after the NPE) says disconnecting
    when(system.isDisconnecting()).thenReturn(false).thenReturn(true);
    msg.setSender(sender);

    msg.doRemoteOperation(dm, cache);

    verify(dm, times(1)).putOutgoing(any());
    ArgumentCaptor<ReplyException> captor = ArgumentCaptor.forClass(ReplyException.class);
    verify(msg, times(1)).sendReply(any(), anyInt(), eq(dm), captor.capture(), eq(r),
        eq(startTime));
    assertThat(captor.getValue().getCause()).isInstanceOf(RemoteOperationException.class);
  }

  @Test
  public void processWithRegionDestroyedExceptionFromOperationOnRegionSendsReplyWithSameRegionDestroyedException()
      throws Exception {
    RegionDestroyedException ex = mock(RegionDestroyedException.class);
    when(msg.operateOnRegion(dm, r, startTime)).thenThrow(ex);
    msg.setSender(sender);

    msg.doRemoteOperation(dm, cache);

    verify(dm, times(1)).putOutgoing(any());
    ArgumentCaptor<ReplyException> captor = ArgumentCaptor.forClass(ReplyException.class);
    verify(msg, times(1)).sendReply(any(), anyInt(), eq(dm), captor.capture(), eq(r),
        eq(startTime));
    assertThat(captor.getValue().getCause()).isSameAs(ex);
  }

  @Test
  public void processWithRegionDoesNotExistSendsReplyWithRegionDestroyedExceptionReply()
      throws Exception {
    // region lookup fails: the message must reply rather than operate
    when(cache.getRegionByPathForProcessing(regionPath)).thenReturn(null);
    msg.setSender(sender);

    msg.doRemoteOperation(dm, cache);

    verify(msg, never()).operateOnRegion(any(), any(), anyLong());
    verify(dm, times(1)).putOutgoing(any());
    ArgumentCaptor<ReplyException> captor = ArgumentCaptor.forClass(ReplyException.class);
    verify(msg, times(1)).sendReply(any(), anyInt(), eq(dm), captor.capture(), eq(null),
        eq(startTime));
    assertThat(captor.getValue().getCause()).isInstanceOf(RegionDestroyedException.class);
  }

  @Test
  public void processWithDistributedSystemDisconnectedExceptionFromOperationOnRegionDoesNotSendReply()
      throws Exception {
    when(msg.operateOnRegion(dm, r, startTime))
        .thenThrow(DistributedSystemDisconnectedException.class);
    msg.setSender(sender);

    msg.doRemoteOperation(dm, cache);

    // a disconnected system is a shutdown condition: no reply is attempted
    verify(dm, never()).putOutgoing(any());
  }

  @Test
  public void processWithOperateOnRegionReturningFalseDoesNotSendReply() {
    // operateOnRegion returning false means the subclass will reply itself later
    msg.setOperationOnRegionResult(false);
    msg.setSender(sender);

    msg.doRemoteOperation(dm, cache);

    verify(dm, never()).putOutgoing(any());
  }

  @Test
  public void processWithRemoteOperationExceptionFromOperationOnRegionSendsReplyWithSameRemoteOperationException()
      throws Exception {
    RemoteOperationException theException = mock(RemoteOperationException.class);
    when(msg.operateOnRegion(dm, r, startTime)).thenThrow(theException);
    msg.setSender(sender);

    msg.doRemoteOperation(dm, cache);

    verify(dm, times(1)).putOutgoing(any());
    ArgumentCaptor<ReplyException> captor = ArgumentCaptor.forClass(ReplyException.class);
    verify(msg, times(1)).sendReply(any(), anyInt(), eq(dm), captor.capture(), eq(r),
        eq(startTime));
    assertThat(captor.getValue().getCause()).isSameAs(theException);
  }

  @Test
  public void processWithNullPointerExceptionFromOperationOnRegionWithSystemFailureSendsReplyWithRemoteOperationException()
      throws Exception {
    when(msg.operateOnRegion(dm, r, startTime)).thenThrow(NullPointerException.class);
    // simulate the system-failure check itself throwing: it must propagate out
    doThrow(new RuntimeException("SystemFailure")).when(msg).checkForSystemFailure();
    msg.setSender(sender);

    assertThatThrownBy(() -> msg.doRemoteOperation(dm, cache)).isInstanceOf(RuntimeException.class)
        .hasMessage("SystemFailure");

    // ...but a reply wrapping a RemoteOperationException is still sent first
    verify(dm, times(1)).putOutgoing(any());
    ArgumentCaptor<ReplyException> captor = ArgumentCaptor.forClass(ReplyException.class);
    verify(msg, times(1)).sendReply(any(), anyInt(), eq(dm), captor.capture(), eq(r),
        eq(startTime));
    assertThat(captor.getValue().getCause()).isInstanceOf(RemoteOperationException.class)
        .hasMessageContaining("system failure");
  }

  @Test
  public void processInvokesDoRemoteOperationIfThreadOwnsResources() {
    when(system.threadOwnsResources()).thenReturn(true);
    doNothing().when(msg).doRemoteOperation(dm, cache);

    msg.process(dm);

    verify(msg).doRemoteOperation(dm, cache);
    // when the thread owns resources, the transactional check is skipped entirely
    verify(msg, never()).isTransactional();
  }

  @Test
  public void processInvokesDoRemoteOperationIfThreadDoesNotOwnResourcesAndNotTransactional() {
    when(system.threadOwnsResources()).thenReturn(false);
    doReturn(false).when(msg).isTransactional();
    doNothing().when(msg).doRemoteOperation(dm, cache);

    msg.process(dm);

    verify(msg).doRemoteOperation(dm, cache);
    verify(msg).isTransactional();
  }

  @Test
  public void isTransactionalReturnsFalseIfTXUniqueIdIsNOTX() {
    // default TX id on a freshly built message is NOTX
    assertThat(msg.getTXUniqId()).isEqualTo(TXManagerImpl.NOTX);

    assertThat(msg.isTransactional()).isFalse();
  }

  @Test
  public void isTransactionalReturnsFalseIfCannotParticipateInTransaction() {
    doReturn(1).when(msg).getTXUniqId();
    doReturn(false).when(msg).canParticipateInTransaction();

    assertThat(msg.isTransactional()).isFalse();
  }

  @Test
  public void isTransactionalReturnsTrueIfHasTXUniqueIdAndCanParticipateInTransaction() {
    doReturn(1).when(msg).getTXUniqId();

    assertThat(msg.canParticipateInTransaction()).isTrue();
    assertThat(msg.isTransactional()).isTrue();
  }

  /**
   * Minimal concrete RemoteOperationMessage whose operateOnRegion result is
   * configurable, so tests can drive both the reply and no-reply paths.
   */
  private static class TestableRemoteOperationMessage extends RemoteOperationMessage {

    // value returned by operateOnRegion; true by default (send a reply)
    private boolean operationOnRegionResult = true;

    TestableRemoteOperationMessage(InternalDistributedMember recipient, String regionPath,
        ReplyProcessor21 processor) {
      super(recipient, regionPath, processor);
    }

    @Override
    public int getDSFID() {
      return 0;
    }

    @Override
    protected boolean operateOnRegion(ClusterDistributionManager dm, LocalRegion r, long startTime)
        throws RemoteOperationException {
      return operationOnRegionResult;
    }

    void setOperationOnRegionResult(boolean v) {
      operationOnRegionResult = v;
    }
  }
}
package org.ovirt.engine.ui.common.widget.uicommon.popup.vm; import java.util.ArrayList; import java.util.Collections; import java.util.Date; import java.util.List; import org.ovirt.engine.core.common.businessentities.Snapshot; import org.ovirt.engine.core.common.businessentities.storage.DiskImage; import org.ovirt.engine.core.common.businessentities.storage.ImageStatus; import org.ovirt.engine.ui.common.CommonApplicationConstants; import org.ovirt.engine.ui.common.CommonApplicationMessages; import org.ovirt.engine.ui.common.CommonApplicationResources; import org.ovirt.engine.ui.common.CommonApplicationTemplates; import org.ovirt.engine.ui.common.gin.AssetProvider; import org.ovirt.engine.ui.common.idhandler.ElementIdHandler; import org.ovirt.engine.ui.common.widget.editor.EntityModelCellTable; import org.ovirt.engine.ui.common.widget.table.cell.RadioboxCell; import org.ovirt.engine.ui.common.widget.table.column.AbstractCheckboxColumn; import org.ovirt.engine.ui.common.widget.table.column.AbstractFullDateTimeColumn; import org.ovirt.engine.ui.common.widget.table.column.AbstractTextColumn; import org.ovirt.engine.ui.common.widget.table.header.ImageResourceHeader; import org.ovirt.engine.ui.common.widget.table.header.SafeHtmlHeader; import org.ovirt.engine.ui.common.widget.uicommon.popup.AbstractModelBoundPopupWidget; import org.ovirt.engine.ui.common.widget.uicommon.vm.VmSnapshotInfoPanel; import org.ovirt.engine.ui.frontend.INewAsyncCallback; import org.ovirt.engine.ui.uicommonweb.Linq; import org.ovirt.engine.ui.uicommonweb.models.EntityModel; import org.ovirt.engine.ui.uicommonweb.models.ListModel; import org.ovirt.engine.ui.uicommonweb.models.vms.PreviewSnapshotModel; import org.ovirt.engine.ui.uicommonweb.models.vms.SnapshotModel; import org.ovirt.engine.ui.uicompat.Event; import org.ovirt.engine.ui.uicompat.EventArgs; import org.ovirt.engine.ui.uicompat.IEventListener; import com.google.gwt.cell.client.Cell.Context; import 
com.google.gwt.cell.client.FieldUpdater;
import com.google.gwt.core.client.GWT;
import com.google.gwt.dom.client.BrowserEvents;
import com.google.gwt.dom.client.NativeEvent;
import com.google.gwt.editor.client.SimpleBeanEditorDriver;
import com.google.gwt.resources.client.ImageResource;
import com.google.gwt.safehtml.shared.SafeHtml;
import com.google.gwt.safehtml.shared.SafeHtmlBuilder;
import com.google.gwt.safehtml.shared.SafeHtmlUtils;
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.uibinder.client.UiField;
import com.google.gwt.user.cellview.client.Column;
import com.google.gwt.user.client.ui.AbstractImagePrototype;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.HTML;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.SimplePanel;
import com.google.gwt.user.client.ui.SplitLayoutPanel;
import com.google.gwt.view.client.CellPreviewEvent;
import com.google.gwt.view.client.NoSelectionModel;

/**
 * Popup widget for a "custom preview" of VM snapshots: a table with one row per snapshot where the
 * user picks which VM configuration, memory state and per-disk images to include in the preview,
 * plus an info panel and warning area below/next to the table.
 */
public class VmSnapshotCustomPreviewPopupWidget extends AbstractModelBoundPopupWidget<PreviewSnapshotModel> {

    // Editor driver binding PreviewSnapshotModel to this widget.
    interface Driver extends SimpleBeanEditorDriver<PreviewSnapshotModel, VmSnapshotCustomPreviewPopupWidget> {
    }

    interface ViewUiBinder extends UiBinder<SplitLayoutPanel, VmSnapshotCustomPreviewPopupWidget> {
        ViewUiBinder uiBinder = GWT.create(ViewUiBinder.class);
    }

    interface ViewIdHandler extends ElementIdHandler<VmSnapshotCustomPreviewPopupWidget> {
        ViewIdHandler idHandler = GWT.create(ViewIdHandler.class);
    }

    @UiField
    @Ignore
    Label previewTableLabel;

    // Provided manually in initTables() rather than created by UiBinder.
    @UiField(provided = true)
    @Ignore
    EntityModelCellTable<ListModel> previewTable;

    @UiField(provided = true)
    SplitLayoutPanel splitLayoutPanel;

    @UiField
    SimplePanel snapshotInfoContainer;

    @UiField
    FlowPanel warningPanel;

    private PreviewSnapshotModel previewSnapshotModel;
    private VmSnapshotInfoPanel vmSnapshotInfoPanel;

    private static final CommonApplicationTemplates templates = AssetProvider.getTemplates();
    private static final CommonApplicationResources resources = AssetProvider.getResources();
    private static final CommonApplicationConstants constants = AssetProvider.getConstants();
    // NOTE(review): 'messages' is not referenced anywhere in this file chunk — confirm it is used
    // elsewhere before removing.
    private static final CommonApplicationMessages messages = AssetProvider.getMessages();

    private final Driver driver = GWT.create(Driver.class);

    public VmSnapshotCustomPreviewPopupWidget() {
        initTables();
        initWidget(ViewUiBinder.uiBinder.createAndBindUi(this));
        localize();
        ViewIdHandler.idHandler.generateAndSetIds(this);
        driver.initialize(this);
    }

    // Creates the provided (non-UiBinder) widgets; must run before createAndBindUi().
    private void initTables() {
        // Create custom preview table
        previewTable = new EntityModelCellTable<>(false, true);
        previewTable.enableColumnResizing();

        // Create Snapshot information tab panel
        vmSnapshotInfoPanel = new VmSnapshotInfoPanel();

        // Create split layout panel
        splitLayoutPanel = new SplitLayoutPanel(4);
    }

    /**
     * Builds all columns of the preview table: creation date, description, a radio column for the
     * VM configuration to preview, a memory checkbox column, and one checkbox column per disk.
     * Called (again) whenever the snapshot items change, so columns track the current disk set.
     */
    private void createPreviewTable() {
        previewTable.addColumn(new AbstractFullDateTimeColumn<SnapshotModel>() {
            @Override
            protected Date getRawValue(SnapshotModel snapshotModel) {
                return snapshotModel.getEntity().getCreationDate();
            }
        }, constants.dateSnapshot(), "140px"); //$NON-NLS-1$

        previewTable.addColumn(new AbstractTextColumn<SnapshotModel>() {
            @Override
            public String getValue(SnapshotModel snapshotModel) {
                return snapshotModel.getEntity().getDescription();
            }
        }, constants.descriptionSnapshot(), "100px"); //$NON-NLS-1$

        // Row selection is not used; all interaction happens through the cell editors.
        previewTable.setSelectionModel(new NoSelectionModel());

        // Radio column: which snapshot's VM configuration is previewed (exactly one can be chosen).
        Column<SnapshotModel, Boolean> vmConfColumn = new Column<SnapshotModel, Boolean>(new RadioboxCell()) {
            @Override
            public Boolean getValue(SnapshotModel model) {
                Snapshot snapshotVmConf = model.getEntity();
                Snapshot toPreviewVmConf = previewSnapshotModel.getSnapshotModel().getEntity();
                if (snapshotVmConf == null && toPreviewVmConf == null) {
                    return true;
                }
                return snapshotVmConf != null && snapshotVmConf.equals(toPreviewVmConf);
            }
        };

        vmConfColumn.setFieldUpdater(new FieldUpdater<SnapshotModel, Boolean>() {
            @Override
            public void update(int index, SnapshotModel snapshotModel, Boolean value) {
                // Switching the previewed configuration invalidates any memory selection.
                previewSnapshotModel.setSnapshotModel(snapshotModel);
                previewSnapshotModel.clearMemorySelection();
                updateWarnings();
                refreshTable(previewTable);

                // Lazily fetch the VM configuration the first time this snapshot is chosen.
                if (snapshotModel.getVm() == null) {
                    snapshotModel.updateVmConfiguration(new INewAsyncCallback() {
                        @Override
                        public void onSuccess(Object model, Object returnValue) {
                            updateInfoPanel();
                        }
                    });
                } else {
                    updateInfoPanel();
                }
            }
        });

        previewTable.addColumn(vmConfColumn, new ImageResourceHeader(resources.vmConfIcon(),
                SafeHtmlUtils.fromTrustedString(constants.vmConfiguration())), "30px"); //$NON-NLS-1$

        // Checkbox column: include the memory state. Editable only for the snapshot whose VM
        // configuration is currently selected and only if that snapshot has a memory volume.
        AbstractCheckboxColumn<SnapshotModel> memoryColumn = new AbstractCheckboxColumn<SnapshotModel>(
                new FieldUpdater<SnapshotModel, Boolean>() {
                    @Override
                    public void update(int index, SnapshotModel snapshotModel, Boolean value) {
                        previewSnapshotModel.getSnapshotModel().getMemory().setEntity(value);
                        refreshTable(previewTable);
                        updateWarnings();
                    }
                }) {
            @Override
            public Boolean getValue(SnapshotModel snapshotModel) {
                return snapshotModel.getMemory().getEntity();
            }

            @Override
            protected boolean canEdit(SnapshotModel snapshotModel) {
                boolean containsMemory = !snapshotModel.getEntity().getMemoryVolume().isEmpty();
                SnapshotModel selectedSnapshotModel = previewSnapshotModel.getSnapshotModel();
                return containsMemory && snapshotModel == selectedSnapshotModel;
            }

            @Override
            public void render(Context context, SnapshotModel snapshotModel, SafeHtmlBuilder sb) {
                // Snapshots without a memory volume render "N/A" instead of a checkbox.
                if (!snapshotModel.getEntity().getMemoryVolume().isEmpty()) {
                    super.render(context, snapshotModel, sb);
                } else {
                    sb.appendEscaped(constants.notAvailableLabel());
                }
            }
        };

        previewTable.addColumn(
                memoryColumn,
                templates.iconWithText(imageResourceToSafeHtml(resources.memorySmallIcon()),
                        constants.memorySnapshot()), "100px"); //$NON-NLS-1$

        List<DiskImage> disks = previewSnapshotModel.getAllDisks();
        Collections.sort(disks, new Linq.DiskByAliasComparer());

        // One checkbox column per disk: which snapshot's image of that disk is included.
        for (final DiskImage disk : disks) {
            previewTable.addColumn(new AbstractCheckboxColumn<SnapshotModel>(new FieldUpdater<SnapshotModel, Boolean>() {
                @Override
                public void update(int index, SnapshotModel snapshotModel, Boolean value) {
                    ListModel diskListModel = previewSnapshotModel.getDiskSnapshotsMap().get(disk.getId());
                    DiskImage image = snapshotModel.getImageByDiskId(disk.getId());
                    // Checking selects this snapshot's image for the disk; unchecking clears it.
                    diskListModel.setSelectedItem(Boolean.TRUE.equals(value) ? image : null);
                    refreshTable(previewTable);
                    updateWarnings();
                    updateInfoPanel();
                }
            }) {
                @Override
                public Boolean getValue(SnapshotModel snapshotModel) {
                    ListModel diskListModel = previewSnapshotModel.getDiskSnapshotsMap().get(disk.getId());
                    DiskImage image = snapshotModel.getImageByDiskId(disk.getId());
                    return image != null ? image.equals(diskListModel.getSelectedItem()) : false;
                }

                @Override
                protected boolean canEdit(SnapshotModel model) {
                    return true;
                }

                @Override
                public void render(Context context, SnapshotModel snapshotModel, SafeHtmlBuilder sb) {
                    DiskImage image = snapshotModel.getImageByDiskId(disk.getId());
                    if (image == null) {
                        // This snapshot has no image for the disk.
                        sb.appendEscaped(constants.notAvailableLabel());
                    } else if (image.getImageStatus() == ImageStatus.ILLEGAL) {
                        // Illegal images are shown as N/A (tooltip below explains why).
                        sb.append(templates.text(constants.notAvailableLabel()));
                    } else {
                        super.render(context, snapshotModel, sb);
                    }
                }

                @Override
                public SafeHtml getTooltip(SnapshotModel model) {
                    if (disk != null && disk.getId() != null) {
                        DiskImage image = model.getImageByDiskId(disk.getId());
                        if (image != null && image.getImageStatus() == ImageStatus.ILLEGAL) {
                            return SafeHtmlUtils.fromSafeConstant(constants.illegalStatus());
                        }
                    }
                    return null;
                }
            }, new SafeHtmlHeader(templates.iconWithText(imageResourceToSafeHtml(resources.diskIcon()),
                    disk.getDiskAlias()), SafeHtmlUtils.fromString(disk.getId().toString())), "120px"); //$NON-NLS-1$

            // Edit preview table
            // NOTE(review): this edit() call sits inside the per-disk loop, so the table is
            // re-edited once per disk column added — presumably harmless but looks like it was
            // meant to run once after the loop; confirm before moving.
            previewTable.asEditor().edit(previewSnapshotModel.getSnapshots());
        }

        // Double-clicking a row selects that snapshot wholesale (configuration + all its images).
        previewTable.addCellPreviewHandler(new CellPreviewEvent.Handler<EntityModel>() {
            long lastClick = -1000;

            @Override
            public void onCellPreview(CellPreviewEvent<EntityModel> event) {
                NativeEvent nativeEvent = event.getNativeEvent();
                long clickAt = System.currentTimeMillis();

                if (BrowserEvents.CLICK.equals(nativeEvent.getType())) {
                    if (clickAt - lastClick < 300) { // double click: 2 clicks detected within 300 ms
                        SnapshotModel selectedSnapshotModel = (SnapshotModel) event.getValue();
                        previewSnapshotModel.clearSelection();
                        previewSnapshotModel.selectSnapshot(selectedSnapshotModel.getEntity().getId());
                        updateWarnings();
                        refreshTable(previewTable);
                    }
                    lastClick = System.currentTimeMillis();
                }
            }
        });
    }

    // Round-trips the table through its editor so checkbox/radio state is re-rendered.
    private void refreshTable(EntityModelCellTable table) {
        table.asEditor().edit(table.asEditor().flush());
        table.redraw();
    }

    // Rebuilds the warning panel based on the current disk/memory selection.
    private void updateWarnings() {
        List<DiskImage> selectedDisks = previewSnapshotModel.getSelectedDisks();
        List<DiskImage> disksOfSelectedSnapshot = previewSnapshotModel.getSnapshotModel().getEntity().getDiskImages();
        List<DiskImage> disksOfActiveSnapshot;
        if (previewSnapshotModel.getActiveSnapshotModel() != null) {
            disksOfActiveSnapshot = previewSnapshotModel.getActiveSnapshotModel().getEntity().getDiskImages();
        } else {
            disksOfActiveSnapshot = Collections.emptyList();
        }
        boolean includeAllDisksOfSnapshot = selectedDisks.containsAll(disksOfSelectedSnapshot);
        boolean includeMemory = previewSnapshotModel.getSnapshotModel().getMemory().getEntity();

        SafeHtml warningImage = SafeHtmlUtils.fromTrustedString(AbstractImagePrototype.create(
                resources.logWarningImage()).getHTML());
        HTML partialSnapshotWarningWidget = new HTML(templates.iconWithText(
                warningImage, constants.snapshotPreviewWithExcludedDisksWarning()));
        HTML memoryWarningWidget = new HTML(templates.iconWithText(
                warningImage, constants.snapshotPreviewWithMemoryAndPartialDisksWarning()));

        warningPanel.clear();

        // Show warning in case of previewing a memory snapshot and excluding disks of the selected snapshot.
        if (!includeAllDisksOfSnapshot && includeMemory) {
            warningPanel.add(memoryWarningWidget);
        }

        // Show warning when excluding disks.
        if (isDisksExcluded(disksOfActiveSnapshot, selectedDisks)) {
            warningPanel.add(partialSnapshotWarningWidget);
        }
    }

    // Search disks by ID (i.e. for each image, determines whether any image from the image-group is selected)
    private boolean isDisksExcluded(List<DiskImage> disks, List<DiskImage> selectedDisks) {
        for (DiskImage disk : disks) {
            if (!containsDisk(disk, selectedDisks)) {
                return true;
            }
        }
        return false;
    }

    // Check whether the specified disk list contains a disk by its ID (image-group)
    private boolean containsDisk(DiskImage snapshotDisk, List<DiskImage> disks) {
        for (DiskImage disk : disks) {
            if (disk.getId().equals(snapshotDisk.getId())) {
                return true;
            }
        }
        return false;
    }

    // Pushes the currently selected disks into the selected snapshot model and refreshes the
    // info panel tabs.
    private void updateInfoPanel() {
        ArrayList<DiskImage> selectedImages = (ArrayList<DiskImage>) previewSnapshotModel.getSelectedDisks();
        Collections.sort(selectedImages, new Linq.DiskByAliasComparer());
        SnapshotModel snapshotModel = previewSnapshotModel.getSnapshotModel();
        snapshotModel.setDisks(selectedImages);
        vmSnapshotInfoPanel.updateTabsData(snapshotModel);
    }

    void localize() {
        previewTableLabel.setText(constants.customPreviewSnapshotTableTitle());
    }

    @Override
    public void edit(PreviewSnapshotModel model) {
        driver.edit(model);
        previewSnapshotModel = model;
        snapshotInfoContainer.add(vmSnapshotInfoPanel);
        previewTable.asEditor().edit(previewSnapshotModel.getSnapshots());

        // Add selection listener
        model.getSnapshots().getSelectedItemChangedEvent().addListener(new IEventListener<EventArgs>() {
            @Override
            public void eventRaised(Event<? extends EventArgs> ev, Object sender, EventArgs args) {
                ListModel snapshots = (ListModel) sender;
                SnapshotModel snapshotModel = (SnapshotModel) snapshots.getSelectedItem();
                if (snapshotModel != null) {
                    vmSnapshotInfoPanel.updatePanel(snapshotModel);
                }
            }
        });

        // Rebuild the table columns whenever the snapshot items change (e.g. after initial load).
        model.getSnapshots().getItemsChangedEvent().addListener(new IEventListener<EventArgs>() {
            @Override
            public void eventRaised(Event<? extends EventArgs> ev, Object sender, EventArgs args) {
                createPreviewTable();
            }
        });
    }

    @Override
    public PreviewSnapshotModel flush() {
        previewTable.flush();
        return driver.flush();
    }

    private SafeHtml imageResourceToSafeHtml(ImageResource resource) {
        return SafeHtmlUtils.fromTrustedString(AbstractImagePrototype.create(resource).getHTML());
    }
}
package cgeo.geocaching.connector.oc; import static android.util.Base64.DEFAULT; import cgeo.geocaching.connector.ConnectorFactory; import cgeo.geocaching.connector.IConnector; import cgeo.geocaching.connector.ImageResult; import cgeo.geocaching.connector.LogResult; import cgeo.geocaching.connector.gc.GCConnector; import cgeo.geocaching.connector.oc.OCApiConnector.ApiSupport; import cgeo.geocaching.connector.oc.OCApiConnector.OAuthLevel; import cgeo.geocaching.connector.oc.UserInfo.UserInfoStatus; import cgeo.geocaching.connector.trackable.TrackableBrand; import cgeo.geocaching.enumerations.CacheAttribute; import cgeo.geocaching.enumerations.CacheSize; import cgeo.geocaching.enumerations.CacheType; import cgeo.geocaching.enumerations.LoadFlags.SaveFlag; import cgeo.geocaching.enumerations.StatusCode; import cgeo.geocaching.enumerations.WaypointType; import cgeo.geocaching.location.Geopoint; import cgeo.geocaching.location.GeopointFormatter; import cgeo.geocaching.location.Viewport; import cgeo.geocaching.log.LogEntry; import cgeo.geocaching.log.LogType; import cgeo.geocaching.models.Geocache; import cgeo.geocaching.models.Image; import cgeo.geocaching.models.Trackable; import cgeo.geocaching.models.Waypoint; import cgeo.geocaching.network.Network; import cgeo.geocaching.network.OAuth; import cgeo.geocaching.network.OAuthTokens; import cgeo.geocaching.network.Parameters; import cgeo.geocaching.settings.Settings; import cgeo.geocaching.storage.DataStore; import cgeo.geocaching.utils.JsonUtils; import cgeo.geocaching.utils.Log; import cgeo.geocaching.utils.SynchronizedDateFormat; import android.net.Uri; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.util.Base64; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.text.ParseException; import java.util.ArrayList; import java.util.Arrays; import java.util.Calendar; import java.util.Collections; import java.util.Date; import 
java.util.EnumSet; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.TimeZone; import java.util.regex.Pattern; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; import okhttp3.Response; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; /** * Client for the OpenCaching API (Okapi). * * @see <a href="http://www.opencaching.de/okapi/introduction.html">Okapi overview</a> * */ final class OkapiClient { private static final String PARAMETER_LOGCOUNT_KEY = "lpc"; private static final String PARAMETER_LOGCOUNT_VALUE = "all"; private static final String PARAMETER_LOG_FIELDS_KEY = "log_fields"; private static final String PARAMETER_LOG_FIELDS_VALUE = "uuid|date|user|type|comment|images"; private static final char SEPARATOR = '|'; private static final String SEPARATOR_STRING = Character.toString(SEPARATOR); private static final SynchronizedDateFormat LOG_DATE_FORMAT = new SynchronizedDateFormat("yyyy-MM-dd HH:mm:ss.SSSZ", TimeZone.getTimeZone("UTC"), Locale.US); private static final SynchronizedDateFormat ISO8601DATEFORMAT = new SynchronizedDateFormat("yyyy-MM-dd'T'HH:mm:ssZ", Locale.getDefault()); private static final String CACHE_ATTRNAMES = "attrnames"; private static final String CACHE_ATTR_ACODES = "attr_acodes"; private static final String WPT_LOCATION = "location"; private static final String WPT_DESCRIPTION = "description"; private static final String WPT_TYPE = "type"; private static final String WPT_NAME = "name"; private static final String CACHE_IS_WATCHED = "is_watched"; private static final String 
CACHE_WPTS = "alt_wpts"; private static final String CACHE_STATUS_ARCHIVED = "Archived"; private static final String CACHE_STATUS_DISABLED = "Temporarily unavailable"; private static final String CACHE_IS_FOUND = "is_found"; private static final String CACHE_SIZE_DEPRECATED = "size"; private static final String CACHE_SIZE2 = "size2"; private static final String CACHE_VOTES = "rating_votes"; private static final String CACHE_NOTFOUNDS = "notfounds"; private static final String CACHE_FOUNDS = "founds"; private static final String CACHE_WILLATTENDS = "willattends"; private static final String CACHE_HIDDEN = "date_hidden"; private static final String CACHE_LATEST_LOGS = "latest_logs"; private static final String CACHE_IMAGE_URL = "url"; private static final String CACHE_IMAGE_CAPTION = "caption"; private static final String CACHE_IMAGES = "images"; private static final String CACHE_HINT = "hint"; private static final String CACHE_DESCRIPTION = "description"; private static final String CACHE_SHORT_DESCRIPTION = "short_description"; private static final String CACHE_RECOMMENDATIONS = "recommendations"; private static final String CACHE_RATING = "rating"; private static final String CACHE_TERRAIN = "terrain"; private static final String CACHE_DIFFICULTY = "difficulty"; private static final String CACHE_OWNER = "owner"; private static final String CACHE_STATUS = "status"; private static final String CACHE_TYPE = "type"; private static final String CACHE_LOCATION = "location"; private static final String CACHE_NAME = "name"; private static final String CACHE_CODE = "code"; private static final String CACHE_REQ_PASSWORD = "req_passwd"; private static final String CACHE_MY_NOTES = "my_notes"; private static final String CACHE_TRACKABLES_COUNT = "trackables_count"; private static final String CACHE_TRACKABLES = "trackables"; private static final String TRK_GEOCODE = "code"; private static final String TRK_NAME = "name"; private static final String LOG_TYPE = "type"; private 
static final String LOG_COMMENT = "comment"; private static final String LOG_DATE = "date"; private static final String LOG_USER = "user"; private static final String LOG_IMAGES = "images"; private static final String USER_UUID = "uuid"; private static final String USER_USERNAME = "username"; private static final String USER_CACHES_FOUND = "caches_found"; private static final String USER_INFO_FIELDS = "username|caches_found"; private static final String IMAGE_CAPTION = "caption"; private static final String IMAGE_URL = "url"; // the several realms of possible fields for cache retrieval: // Core: for livemap requests (L3 - only with level 3 auth) // Additional: additional fields for full cache (L3 - only for level 3 auth, current - only for connectors with current api) private static final String SERVICE_CACHE_CORE_FIELDS = "code|name|location|type|status|difficulty|terrain|size|size2|date_hidden|trackables_count"; private static final String SERVICE_CACHE_CORE_L3_FIELDS = "is_found"; private static final String SERVICE_CACHE_ADDITIONAL_FIELDS = "owner|founds|notfounds|rating|rating_votes|recommendations|description|hint|images|latest_logs|alt_wpts|attrnames|req_passwd|trackables"; private static final String SERVICE_CACHE_ADDITIONAL_CURRENT_FIELDS = "gc_code|attribution_note|attr_acodes|willattends|short_description"; private static final String SERVICE_CACHE_ADDITIONAL_L3_FIELDS = "my_notes"; private static final String SERVICE_CACHE_ADDITIONAL_CURRENT_L3_FIELDS = "is_watched"; private static final String METHOD_SEARCH_ALL = "services/caches/search/all"; private static final String METHOD_SEARCH_BBOX = "services/caches/search/bbox"; private static final String METHOD_SEARCH_NEAREST = "services/caches/search/nearest"; private static final String METHOD_RETRIEVE_CACHES = "services/caches/geocaches"; private static final Pattern PATTERN_TIMEZONE = Pattern.compile("([+-][01][0-9]):([03])0"); private static final ObjectMapper MAPPER = new ObjectMapper(); private 
OkapiClient() { // utility class } @Nullable public static Geocache getCache(final String geoCode) { final IConnector connector = ConnectorFactory.getConnector(geoCode); if (!(connector instanceof OCApiConnector)) { return null; } final OCApiConnector ocapiConn = (OCApiConnector) connector; final Parameters params = new Parameters("cache_code", geoCode); params.add("fields", getFullFields(ocapiConn)); params.add("attribution_append", "none"); params.add(PARAMETER_LOGCOUNT_KEY, PARAMETER_LOGCOUNT_VALUE); params.add(PARAMETER_LOG_FIELDS_KEY, PARAMETER_LOG_FIELDS_VALUE); final JSONResult result = getRequest(ocapiConn, OkapiService.SERVICE_CACHE, params); return result.isSuccess ? parseCache(result.data) : null; } @NonNull public static List<Geocache> getCachesAround(@NonNull final Geopoint center, @NonNull final OCApiConnector connector) { final String centerString = GeopointFormatter.format(GeopointFormatter.Format.LAT_DECDEGREE_RAW, center) + SEPARATOR + GeopointFormatter.format(GeopointFormatter.Format.LON_DECDEGREE_RAW, center); final Parameters params = new Parameters("search_method", METHOD_SEARCH_NEAREST); final Map<String, String> valueMap = new LinkedHashMap<>(); valueMap.put("center", centerString); valueMap.put("limit", getCacheLimit()); valueMap.put("radius", "200"); return requestCaches(connector, params, valueMap, false); } @NonNull public static List<Geocache> getCachesByOwner(@NonNull final String username, @NonNull final OCApiConnector connector) { return getCachesByUser(username, connector, "owner_uuid"); } @NonNull public static List<Geocache> getCachesByFinder(@NonNull final String username, @NonNull final OCApiConnector connector) { return getCachesByUser(username, connector, "found_by"); } @NonNull private static List<Geocache> getCachesByUser(@NonNull final String username, @NonNull final OCApiConnector connector, final String userRequestParam) { final String uuid = getUserUUID(connector, username); if (StringUtils.isEmpty(uuid)) { return 
Collections.emptyList(); } final Parameters params = new Parameters("search_method", METHOD_SEARCH_ALL); final Map<String, String> valueMap = new LinkedHashMap<>(); valueMap.put(userRequestParam, uuid); return requestCaches(connector, params, valueMap, connector.isSearchForMyCaches(username)); } @NonNull public static List<Geocache> getCachesNamed(@Nullable final Geopoint center, final String namePart, @NonNull final OCApiConnector connector) { final Map<String, String> valueMap = new LinkedHashMap<>(); final Parameters params; // search around current position, if there is a position if (center != null) { final String centerString = GeopointFormatter.format(GeopointFormatter.Format.LAT_DECDEGREE_RAW, center) + SEPARATOR + GeopointFormatter.format(GeopointFormatter.Format.LON_DECDEGREE_RAW, center); params = new Parameters("search_method", METHOD_SEARCH_NEAREST); valueMap.put("center", centerString); } else { params = new Parameters("search_method", METHOD_SEARCH_ALL); } valueMap.put("limit", getCacheLimit()); // full wildcard search, maybe we need to change this after some testing and evaluation valueMap.put("name", "*" + namePart + "*"); return requestCaches(connector, params, valueMap, false); } @NonNull private static List<Geocache> requestCaches(@NonNull final OCApiConnector connector, @NonNull final Parameters params, @NonNull final Map<String, String> valueMap, final boolean my) { // if a global type filter is set, and OKAPI does not know that type, then return an empty list instead of all caches if (Settings.getCacheType() != CacheType.ALL && StringUtils.isBlank(getFilterFromType())) { return Collections.emptyList(); } addFilterParams(valueMap, connector, my); try { params.add("search_params", JsonUtils.writer.writeValueAsString(valueMap)); } catch (final JsonProcessingException e) { Log.e("requestCaches", e); return Collections.emptyList(); } addRetrieveParams(params, connector); final ObjectNode data = getRequest(connector, 
OkapiService.SERVICE_SEARCH_AND_RETRIEVE, params).data; if (data == null) { return Collections.emptyList(); } return parseCaches(data); } /** * Assumes level 3 OAuth. */ @NonNull public static List<Geocache> getCachesBBox(final Viewport viewport, @NonNull final OCApiConnector connector) { if (viewport.getLatitudeSpan() == 0 || viewport.getLongitudeSpan() == 0) { return Collections.emptyList(); } final String bboxString = GeopointFormatter.format(GeopointFormatter.Format.LAT_DECDEGREE_RAW, viewport.bottomLeft) + SEPARATOR + GeopointFormatter.format(GeopointFormatter.Format.LON_DECDEGREE_RAW, viewport.bottomLeft) + SEPARATOR + GeopointFormatter.format(GeopointFormatter.Format.LAT_DECDEGREE_RAW, viewport.topRight) + SEPARATOR + GeopointFormatter.format(GeopointFormatter.Format.LON_DECDEGREE_RAW, viewport.topRight); final Parameters params = new Parameters("search_method", METHOD_SEARCH_BBOX); final Map<String, String> valueMap = new LinkedHashMap<>(); valueMap.put("bbox", bboxString); return requestCaches(connector, params, valueMap, false); } public static boolean setWatchState(@NonNull final Geocache cache, final boolean watched, @NonNull final OCApiConnector connector) { final Parameters params = new Parameters("cache_code", cache.getGeocode()); params.add("watched", watched ? 
"true" : "false"); final ObjectNode data = getRequest(connector, OkapiService.SERVICE_MARK_CACHE, params).data; if (data == null) { return false; } cache.setOnWatchlist(watched); return true; } public static boolean setIgnored(@NonNull final Geocache cache, @NonNull final OCApiConnector connector) { final Parameters params = new Parameters("cache_code", cache.getGeocode()); params.add("ignored", "true"); final ObjectNode data = getRequest(connector, OkapiService.SERVICE_MARK_CACHE, params).data; return data != null; } @NonNull public static LogResult postLog(@NonNull final Geocache cache, @NonNull final LogType logType, @NonNull final Calendar date, @NonNull final String log, @Nullable final String logPassword, @NonNull final OCApiConnector connector) { final Parameters params = new Parameters("cache_code", cache.getGeocode()); params.add("logtype", logType.ocType); params.add("comment", log); params.add("comment_format", "plaintext"); params.add("when", LOG_DATE_FORMAT.format(date.getTime())); if (logType == LogType.NEEDS_MAINTENANCE) { params.add("needs_maintenance", "true"); } if (logPassword != null) { params.add("password", logPassword); } final ObjectNode data = getRequest(connector, OkapiService.SERVICE_SUBMIT_LOG, params).data; if (data == null) { return new LogResult(StatusCode.LOG_POST_ERROR, ""); } try { if (data.get("success").asBoolean()) { return new LogResult(StatusCode.NO_ERROR, data.get("log_uuid").asText()); } return new LogResult(StatusCode.LOG_POST_ERROR, ""); } catch (final NullPointerException e) { Log.e("OkapiClient.postLog", e); } return new LogResult(StatusCode.LOG_POST_ERROR, ""); } @NonNull public static ImageResult postLogImage(final String logId, final Image image, @NonNull final OCApiConnector connector) { try { final Parameters params = new Parameters("log_uuid", logId); final File file = image.getFile(); if (file == null) { return new ImageResult(StatusCode.LOGIMAGE_POST_ERROR, ""); } params.add("image", 
Base64.encodeToString(IOUtils.readFully(new FileInputStream(file), (int) file.length()), DEFAULT)); params.add("caption", createImageCaption(image)); final ObjectNode data = postRequest(connector, OkapiService.SERVICE_ADD_LOG_IMAGE, params).data; if (data == null) { return new ImageResult(StatusCode.LOGIMAGE_POST_ERROR, ""); } if (data.get("success").asBoolean()) { return new ImageResult(StatusCode.NO_ERROR, data.get("image_url").asText()); } return new ImageResult(StatusCode.LOGIMAGE_POST_ERROR, ""); } catch (final Exception e) { Log.e("OkapiClient.postLogImage", e); } return new ImageResult(StatusCode.LOGIMAGE_POST_ERROR, ""); } @NonNull private static String createImageCaption(final Image image) { final StringBuilder caption = new StringBuilder(StringUtils.trimToEmpty(image.getTitle())); if (StringUtils.isNotEmpty(caption) && StringUtils.isNotBlank(image.getDescription())) { caption.append(": "); } caption.append(StringUtils.trimToEmpty(image.getDescription())); return caption.toString(); } public static boolean uploadPersonalNotes(@NonNull final OCApiConnector connector, @NonNull final Geocache cache) { Log.d("Uploading personal note for opencaching"); final Parameters notesParam = new Parameters("cache_code", cache.getGeocode(), "fields", CACHE_MY_NOTES); final ObjectNode notesData = getRequest(connector, OkapiService.SERVICE_CACHE, notesParam).data; String prevNote = StringUtils.EMPTY; if (notesData != null && notesData.get(CACHE_MY_NOTES) != null) { prevNote = notesData.get(CACHE_MY_NOTES).asText(); } final String currentNote = StringUtils.defaultString(cache.getPersonalNote()); final Parameters params = new Parameters("cache_code", cache.getGeocode(), "new_value", currentNote, "old_value", prevNote); final ObjectNode data = getRequest(connector, OkapiService.SERVICE_UPLOAD_PERSONAL_NOTE, params).data; if (data == null) { return false; } if (data.get("replaced").asBoolean()) { Log.d("Successfully uploaded"); return true; } return false; } @NonNull private 
static List<Geocache> parseCaches(final ObjectNode response) {
        // Returns the caches listed under "results"; empty list on missing/malformed payload.
        try {
            // Check for empty result
            final JsonNode results = response.path("results");
            if (!results.isObject()) {
                return Collections.emptyList();
            }
            // Get and iterate result list
            final List<Geocache> caches = new ArrayList<>(results.size());
            for (final JsonNode cache: results) {
                caches.add(parseSmallCache((ObjectNode) cache));
            }
            return caches;
        } catch (ClassCastException | NullPointerException e) {
            Log.e("OkapiClient.parseCachesResult", e);
        }
        return Collections.emptyList();
    }

    /** Parses the core fields only and persists the result to the in-memory cache store. */
    @NonNull
    private static Geocache parseSmallCache(final ObjectNode response) {
        final Geocache cache = new Geocache();
        cache.setReliableLatLon(true);
        try {
            parseCoreCache(response, cache);
            DataStore.saveCache(cache, EnumSet.of(SaveFlag.CACHE));
        } catch (final NullPointerException e) {
            // FIXME: here we may return a partially filled cache
            Log.e("OkapiClient.parseSmallCache", e);
        }
        return cache;
    }

    /** Parses a full cache detail response, persists it to the DB and returns the cache. */
    @NonNull
    private static Geocache parseCache(final ObjectNode response) {
        final Geocache cache = new Geocache();
        cache.setReliableLatLon(true);
        try {
            parseCoreCache(response, cache);

            // not used: url
            final String owner = parseUser(response.get(CACHE_OWNER));
            cache.setOwnerDisplayName(owner);
            // OpenCaching has no distinction between user id and user display name. Set the ID anyway to simplify c:geo workflows.
            cache.setOwnerUserId(owner);

            final Map<LogType, Integer> logCounts = cache.getLogCounts();
            logCounts.put(LogType.FOUND_IT, response.get(CACHE_FOUNDS).asInt());
            logCounts.put(LogType.DIDNT_FIND_IT, response.get(CACHE_NOTFOUNDS).asInt());
            // only current Api
            logCounts.put(LogType.WILL_ATTEND, response.path(CACHE_WILLATTENDS).asInt());

            if (response.has(CACHE_RATING)) {
                cache.setRating((float) response.get(CACHE_RATING).asDouble());
            }
            cache.setVotes(response.get(CACHE_VOTES).asInt());
            cache.setFavoritePoints(response.get(CACHE_RECOMMENDATIONS).asInt());
            // not used: req_password

            // Prepend gc-link to description if available
            final StringBuilder description = new StringBuilder(500);
            if (response.hasNonNull("gc_code")) {
                final String gccode = response.get("gc_code").asText();
                description.append(Geocache.getAlternativeListingText(gccode));
            }
            description.append(response.get(CACHE_DESCRIPTION).asText());
            cache.setDescription(description.toString());

            if (response.has(CACHE_SHORT_DESCRIPTION)) {
                final String shortDescription = StringUtils.trim(response.get(CACHE_SHORT_DESCRIPTION).asText());
                if (StringUtils.isNotEmpty(shortDescription)) {
                    cache.setShortDescription(shortDescription);
                }
            }

            // currently the hint is delivered as HTML (contrary to OKAPI documentation), so we can store it directly
            cache.setHint(response.get(CACHE_HINT).asText());
            // not used: hints

            final ArrayNode images = (ArrayNode) response.get(CACHE_IMAGES);
            if (images != null) {
                for (final JsonNode imageResponse: images) {
                    final String title = imageResponse.get(CACHE_IMAGE_CAPTION).asText();
                    final String url = absoluteUrl(imageResponse.get(CACHE_IMAGE_URL).asText(), cache.getGeocode());
                    // all images are added as spoiler images, although OKAPI has spoiler and non spoiler images
                    cache.addSpoiler(new Image.Builder().setUrl(url).setTitle(title).build());
                }
            }

            cache.setAttributes(parseAttributes((ArrayNode) response.path(CACHE_ATTRNAMES), (ArrayNode) response.get(CACHE_ATTR_ACODES)));

            //TODO: Store license per cache
//cache.setLicense(response.getString("attribution_note"));
            cache.setWaypoints(parseWaypoints((ArrayNode) response.path(CACHE_WPTS)), false);

            cache.mergeInventory(parseTrackables((ArrayNode) response.path(CACHE_TRACKABLES)), EnumSet.of(TrackableBrand.GEOKRETY));

            if (response.has(CACHE_IS_WATCHED)) {
                cache.setOnWatchlist(response.get(CACHE_IS_WATCHED).asBoolean());
            }
            if (response.hasNonNull(CACHE_MY_NOTES)) {
                cache.setPersonalNote(response.get(CACHE_MY_NOTES).asText());
            }
            cache.setLogPasswordRequired(response.get(CACHE_REQ_PASSWORD).asBoolean());

            cache.setDetailedUpdatedNow();
            // save full detailed caches
            DataStore.saveCache(cache, EnumSet.of(SaveFlag.DB));
            DataStore.saveLogs(cache.getGeocode(), parseLogs((ArrayNode) response.path(CACHE_LATEST_LOGS), cache.getGeocode()));
        } catch (ClassCastException | NullPointerException e) {
            Log.e("OkapiClient.parseCache", e);
        }
        return cache;
    }

    /** Fills the fields shared by the small and the full cache representation. */
    private static void parseCoreCache(final ObjectNode response, @NonNull final Geocache cache) {
        cache.setGeocode(response.get(CACHE_CODE).asText());
        cache.setName(response.get(CACHE_NAME).asText());
        // not used: names
        setLocation(cache, response.get(CACHE_LOCATION).asText());
        cache.setType(getCacheType(response.get(CACHE_TYPE).asText()));

        final String status = response.get(CACHE_STATUS).asText();
        cache.setDisabled(status.equalsIgnoreCase(CACHE_STATUS_DISABLED));
        cache.setArchived(status.equalsIgnoreCase(CACHE_STATUS_ARCHIVED));

        cache.setSize(getCacheSize(response));
        cache.setDifficulty((float) response.get(CACHE_DIFFICULTY).asDouble());
        cache.setTerrain((float) response.get(CACHE_TERRAIN).asDouble());

        cache.setInventoryItems(response.get(CACHE_TRACKABLES_COUNT).asInt());

        if (response.has(CACHE_IS_FOUND)) {
            cache.setFound(response.get(CACHE_IS_FOUND).asBoolean());
        }
        cache.setHidden(parseDate(response.get(CACHE_HIDDEN).asText()));
    }

    /** Resolves a relative image/log URL against the host of the connector owning the geocode. */
    private static String absoluteUrl(final String url, final String geocode) {
        final Uri uri = Uri.parse(url);
        if (!uri.isAbsolute()) {
            final IConnector connector = ConnectorFactory.getConnector(geocode);
            final String hostUrl = connector.getHostUrl();
            if (StringUtils.isNotBlank(hostUrl)) {
                return hostUrl + "/" + url;
            }
        }
        return url;
    }

    private static String parseUser(final JsonNode user) {
        // OKAPI user objects expose the display name under "username"
        return user.get(USER_USERNAME).asText();
    }

    /** Converts the latest-logs array into LogEntry objects; entries without a parsable date are skipped. */
    @NonNull
    private static List<LogEntry> parseLogs(final ArrayNode logsJSON, final String geocode) {
        final List<LogEntry> result = new LinkedList<>();
        for (final JsonNode logResponse: logsJSON) {
            try {
                final Date date = parseDate(logResponse.get(LOG_DATE).asText());
                if (date == null) {
                    continue;
                }
                final LogEntry log = new LogEntry.Builder()
                        .setAuthor(parseUser(logResponse.get(LOG_USER)))
                        .setDate(date.getTime())
                        .setLogType(parseLogType(logResponse.get(LOG_TYPE).asText()))
                        .setLogImages(parseLogImages((ArrayNode) logResponse.path(LOG_IMAGES), geocode))
                        .setLog(logResponse.get(LOG_COMMENT).asText().trim()).build();
                result.add(log);
            } catch (final NullPointerException e) {
                Log.e("OkapiClient.parseLogs", e);
            }
        }
        return result;
    }

    /** Converts a log's image array into Image objects with absolute URLs. */
    private static List<Image> parseLogImages(final ArrayNode imagesNode, final String geocode) {
        final List<Image> images = new ArrayList<>();
        for (final JsonNode image : imagesNode) {
            images.add(new Image.Builder().setUrl(absoluteUrl(image.get(IMAGE_URL).asText(), geocode)).setTitle(image.get(IMAGE_CAPTION).asText()).build());
        }
        return images;
    }

    /**
     * Parses the waypoint array.  Note: returns {@code null} (not an empty list) when no waypoint
     * could be parsed — callers must handle that.
     */
    @Nullable
    private static List<Waypoint> parseWaypoints(final ArrayNode wptsJson) {
        List<Waypoint> result = null;
        for (final JsonNode wptResponse: wptsJson) {
            try {
                final Waypoint wpt = new Waypoint(wptResponse.get(WPT_NAME).asText(), parseWptType(wptResponse.get(WPT_TYPE).asText()), false);
                wpt.setNote(wptResponse.get(WPT_DESCRIPTION).asText());
                final Geopoint pt = parseCoords(wptResponse.get(WPT_LOCATION).asText());
                if (pt != null) {
                    wpt.setCoords(pt);
                } else {
                    // keep the waypoint, but mark its original coordinates as empty
                    wpt.setOriginalCoordsEmpty(true);
                }
                if (result == null) {
                    result = new ArrayList<>();
                }
                wpt.setPrefix(wpt.getName());
                result.add(wpt);
            } catch (final NullPointerException e) {
Log.e("OkapiClient.parseWaypoints", e); } } return result; } @NonNull private static List<Trackable> parseTrackables(final ArrayNode trackablesJson) { if (trackablesJson.size() == 0) { return Collections.emptyList(); } final List<Trackable> result = new ArrayList<>(); for (final JsonNode trackableResponse: trackablesJson) { try { final Trackable trk = new Trackable(); trk.setGeocode(trackableResponse.get(TRK_GEOCODE).asText()); trk.setName(trackableResponse.get(TRK_NAME).asText()); result.add(trk); } catch (final NullPointerException e) { Log.e("OkapiClient.parseWaypoints", e); } } return result; } @NonNull private static LogType parseLogType(@Nullable final String logType) { if ("Found it".equalsIgnoreCase(logType)) { return LogType.FOUND_IT; } if ("Didn't find it".equalsIgnoreCase(logType)) { return LogType.DIDNT_FIND_IT; } if ("Will attend".equalsIgnoreCase(logType)) { return LogType.WILL_ATTEND; } if ("Attended".equalsIgnoreCase(logType)) { return LogType.ATTENDED; } if ("Temporarily unavailable".equalsIgnoreCase(logType)) { return LogType.TEMP_DISABLE_LISTING; } if ("Ready to search".equalsIgnoreCase(logType)) { return LogType.ENABLE_LISTING; } if ("Archived".equalsIgnoreCase(logType)) { return LogType.ARCHIVE; } if ("Locked".equalsIgnoreCase(logType)) { return LogType.ARCHIVE; } if ("Needs maintenance".equalsIgnoreCase(logType)) { return LogType.NEEDS_MAINTENANCE; } if ("Maintenance performed".equalsIgnoreCase(logType)) { return LogType.OWNER_MAINTENANCE; } if ("Moved".equalsIgnoreCase(logType)) { return LogType.UPDATE_COORDINATES; } if ("OC Team comment".equalsIgnoreCase(logType)) { return LogType.POST_REVIEWER_NOTE; } return LogType.NOTE; } @NonNull private static WaypointType parseWptType(@Nullable final String wptType) { if ("parking".equalsIgnoreCase(wptType)) { return WaypointType.PARKING; } if ("path".equalsIgnoreCase(wptType)) { return WaypointType.TRAILHEAD; } if ("stage".equalsIgnoreCase(wptType)) { return WaypointType.STAGE; } if 
("physical-stage".equalsIgnoreCase(wptType)) { return WaypointType.STAGE; } if ("virtual-stage".equalsIgnoreCase(wptType)) { return WaypointType.PUZZLE; } if ("final".equalsIgnoreCase(wptType)) { return WaypointType.FINAL; } if ("poi".equalsIgnoreCase(wptType)) { return WaypointType.TRAILHEAD; } return WaypointType.WAYPOINT; } @Nullable private static Date parseDate(final String date) { final String strippedDate = PATTERN_TIMEZONE.matcher(date).replaceAll("$1$20"); try { return ISO8601DATEFORMAT.parse(strippedDate); } catch (final ParseException e) { Log.e("OkapiClient.parseDate", e); } return null; } @Nullable private static Geopoint parseCoords(final String location) { final String latitude = StringUtils.substringBefore(location, SEPARATOR_STRING); final String longitude = StringUtils.substringAfter(location, SEPARATOR_STRING); if (StringUtils.isNotBlank(latitude) && StringUtils.isNotBlank(longitude)) { return new Geopoint(Double.parseDouble(latitude), Double.parseDouble(longitude)); } return null; } @NonNull private static List<String> parseAttributes(final ArrayNode nameList, final ArrayNode acodeList) { final List<String> result = new ArrayList<>(); for (int i = 0; i < nameList.size(); i++) { try { final String name = nameList.get(i).asText(); final int acode = acodeList != null ? 
Integer.parseInt(acodeList.get(i).asText().substring(1)) : CacheAttribute.NO_ID; final CacheAttribute attr = CacheAttribute.getByOcACode(acode); if (attr != null) { result.add(attr.rawName); } else { result.add(name); } } catch (final NullPointerException e) { Log.e("OkapiClient.parseAttributes", e); } } return result; } private static void setLocation(@NonNull final Geocache cache, final String location) { final String latitude = StringUtils.substringBefore(location, SEPARATOR_STRING); final String longitude = StringUtils.substringAfter(location, SEPARATOR_STRING); cache.setCoords(new Geopoint(Double.parseDouble(latitude), Double.parseDouble(longitude))); } @NonNull private static CacheSize getCacheSize(final ObjectNode response) { if (!response.has(CACHE_SIZE2)) { return getCacheSizeDeprecated(response); } try { final String size = response.get(CACHE_SIZE2).asText(); return CacheSize.getById(size); } catch (final NullPointerException e) { Log.e("OkapiClient.getCacheSize", e); return getCacheSizeDeprecated(response); } } @NonNull private static CacheSize getCacheSizeDeprecated(final ObjectNode response) { if (!response.has(CACHE_SIZE_DEPRECATED)) { return CacheSize.NOT_CHOSEN; } double size = 0; try { size = response.get(CACHE_SIZE_DEPRECATED).asDouble(); } catch (final NullPointerException e) { Log.e("OkapiClient.getCacheSize", e); } switch ((int) Math.round(size)) { case 1: return CacheSize.MICRO; case 2: return CacheSize.SMALL; case 3: return CacheSize.REGULAR; case 4: return CacheSize.LARGE; case 5: return CacheSize.VERY_LARGE; default: break; } return CacheSize.NOT_CHOSEN; } @NonNull private static CacheType getCacheType(@Nullable final String cacheType) { if ("Traditional".equalsIgnoreCase(cacheType)) { return CacheType.TRADITIONAL; } if ("Multi".equalsIgnoreCase(cacheType)) { return CacheType.MULTI; } if ("Quiz".equalsIgnoreCase(cacheType)) { return CacheType.MYSTERY; } if ("Virtual".equalsIgnoreCase(cacheType)) { return CacheType.VIRTUAL; } if 
("Event".equalsIgnoreCase(cacheType)) { return CacheType.EVENT; } if ("Webcam".equalsIgnoreCase(cacheType)) { return CacheType.WEBCAM; } if ("Math/Physics".equalsIgnoreCase(cacheType)) { return CacheType.MYSTERY; } if ("Drive-In".equalsIgnoreCase(cacheType)) { return CacheType.TRADITIONAL; } return CacheType.UNKNOWN; } @NonNull private static String getCoreFields(@NonNull final OCApiConnector connector) { if (connector.getSupportedAuthLevel() == OAuthLevel.Level3) { return SERVICE_CACHE_CORE_FIELDS + SEPARATOR + SERVICE_CACHE_CORE_L3_FIELDS; } return SERVICE_CACHE_CORE_FIELDS; } @NonNull private static String getFullFields(@NonNull final OCApiConnector connector) { final StringBuilder res = new StringBuilder(500); res.append(SERVICE_CACHE_CORE_FIELDS); res.append(SEPARATOR).append(SERVICE_CACHE_ADDITIONAL_FIELDS); if (connector.getSupportedAuthLevel() == OAuthLevel.Level3) { res.append(SEPARATOR).append(SERVICE_CACHE_CORE_L3_FIELDS); res.append(SEPARATOR).append(SERVICE_CACHE_ADDITIONAL_L3_FIELDS); } if (connector.getApiSupport() == ApiSupport.current) { res.append(SEPARATOR).append(SERVICE_CACHE_ADDITIONAL_CURRENT_FIELDS); if (connector.getSupportedAuthLevel() == OAuthLevel.Level3) { res.append(SEPARATOR).append(SERVICE_CACHE_ADDITIONAL_CURRENT_L3_FIELDS); } } return res.toString(); } @NonNull private static JSONResult request(@NonNull final OCApiConnector connector, @NonNull final OkapiService service, @NonNull final String method, @NonNull final Parameters params) { final String host = connector.getHost(); if (StringUtils.isBlank(host)) { return new JSONResult("unknown OKAPI connector host"); } params.add("langpref", getPreferredLanguage()); switch (connector.getSupportedAuthLevel()) { case Level3: { final OAuthTokens tokens = new OAuthTokens(connector); if (!tokens.isValid()) { return new JSONResult("invalid oauth tokens"); } OAuth.signOAuth(host, service.methodName, method, connector.getHttps(), params, tokens, connector.getCK(), connector.getCS()); break; } 
case Level1 : { connector.addAuthentication(params); break; } default: // do nothing, anonymous access break; } final String uri = connector.getHostUrl() + service.methodName; try { if ("GET".equals(method)) { return new JSONResult(Network.getRequest(uri, params).blockingGet()); } return new JSONResult(Network.postRequest(uri, params).blockingGet()); } catch (final Exception e) { return new JSONResult("connection error"); } } @NonNull private static JSONResult getRequest(@NonNull final OCApiConnector connector, @NonNull final OkapiService service, @NonNull final Parameters params) { return request(connector, service, "GET", params); } @NonNull private static JSONResult postRequest(@NonNull final OCApiConnector connector, @NonNull final OkapiService service, @NonNull final Parameters params) { return request(connector, service, "POST", params); } /** * Return a pipe-separated list of preferred languages. English and the device default language (if different) will * always be in the list. Forcing cgeo language to English will prefer English over the device default language. */ @NonNull static String getPreferredLanguage() { final String defaultLanguage = StringUtils.defaultIfBlank(StringUtils.lowerCase(Locale.getDefault().getLanguage()), "en"); if ("en".equals(defaultLanguage)) { return defaultLanguage; } return Settings.useEnglish() ? 
"en|" + defaultLanguage : defaultLanguage + "|en"; } private static void addFilterParams(@NonNull final Map<String, String> valueMap, @NonNull final OCApiConnector connector, final boolean my) { if (!Settings.isExcludeDisabledCaches()) { valueMap.put("status", "Available|Temporarily unavailable"); } if (!my && Settings.isExcludeMyCaches() && connector.getSupportedAuthLevel() == OAuthLevel.Level3) { valueMap.put("exclude_my_own", "true"); valueMap.put("found_status", "notfound_only"); } // OKAPI returns ignored caches, we have to actively suppress them if (connector.getSupportedAuthLevel() == OAuthLevel.Level3) { valueMap.put("ignored_status", "notignored_only"); } if (Settings.getCacheType() != CacheType.ALL) { valueMap.put("type", getFilterFromType()); } } private static void addRetrieveParams(@NonNull final Parameters params, @NonNull final OCApiConnector connector) { params.add("retr_method", METHOD_RETRIEVE_CACHES); params.add("retr_params", "{\"fields\": \"" + getCoreFields(connector) + "\"}"); params.add("wrap", "true"); } @NonNull private static String getFilterFromType() { switch (Settings.getCacheType()) { case EVENT: return "Event"; case MULTI: return "Multi"; case MYSTERY: return "Quiz"; case TRADITIONAL: return "Traditional"; case VIRTUAL: return "Virtual"; case WEBCAM: return "Webcam"; default: return ""; } } @Nullable public static String getUserUUID(@NonNull final OCApiConnector connector, @NonNull final String userName) { final Parameters params = new Parameters("fields", USER_UUID, USER_USERNAME, userName); final JSONResult result = getRequest(connector, OkapiService.SERVICE_USER_BY_USERNAME, params); if (!result.isSuccess) { final OkapiError error = new OkapiError(result.data); Log.e("OkapiClient.getUserUUID: error getting user info: '" + error.getMessage() + "'"); return null; } return result.data.path(USER_UUID).asText(null); } @NonNull public static UserInfo getUserInfo(@NonNull final OCApiLiveConnector connector) { final Parameters params = 
new Parameters("fields", USER_INFO_FIELDS); final JSONResult result = getRequest(connector, OkapiService.SERVICE_USER, params); if (!result.isSuccess) { final OkapiError error = new OkapiError(result.data); Log.e("OkapiClient.getUserInfo: error getting user info: '" + error.getMessage() + "'"); return new UserInfo(StringUtils.EMPTY, 0, UserInfoStatus.getFromOkapiError(error.getResult())); } final ObjectNode data = result.data; final boolean successUserName = data.has(USER_USERNAME); final String name = data.path(USER_USERNAME).asText(); final boolean successFinds = data.has(USER_CACHES_FOUND); final int finds = data.path(USER_CACHES_FOUND).asInt(); return new UserInfo(name, finds, successUserName && successFinds ? UserInfoStatus.SUCCESSFUL : UserInfoStatus.FAILED); } /** * Retrieves error information from an unsuccessful Okapi-response * * @param response * response containing an error object * @return OkapiError object with detailed information */ @NonNull public static OkapiError decodeErrorResponse(final Response response) { final JSONResult result = new JSONResult(response); if (!result.isSuccess) { return new OkapiError(result.data); } return new OkapiError(new ObjectNode(JsonUtils.factory)); } /** * Encapsulates response state and content of an HTTP-getRequest that expects a JSON result. {@code isSuccess} is * only true, if the response state was success and {@code data} is not null. 
*/
    private static class JSONResult {

        // true only when the HTTP status was successful AND a JSON object body could be parsed
        public final boolean isSuccess;
        // parsed body on success; an error-describing node (possibly null) otherwise
        public final ObjectNode data;

        JSONResult(final Response response) {
            ObjectNode tempData = null;
            try {
                tempData = (ObjectNode) JsonUtils.reader.readTree(response.body().byteStream());
            } catch (final Exception e) {
                // ignore
            } finally {
                // the body stream is consumed (or unusable) either way — always release the response
                response.close();
            }
            data = tempData;
            isSuccess = response.isSuccessful() && tempData != null;
        }

        JSONResult(@NonNull final String errorMessage) {
            // synthesizes an OKAPI-shaped error payload for local failures
            isSuccess = false;
            data = new ObjectNode(JsonUtils.factory);
            data.putObject("error").put("developer_message", errorMessage);
        }
    }

    /**
     * extract the geocode from an URL, by using a backward mapping on the server
     */
    @Nullable
    public static String getGeocodeByUrl(@NonNull final OCApiConnector connector, @NonNull final String url) {
        final Parameters params = new Parameters("urls", url);
        final ObjectNode data = getRequest(connector, OkapiService.SERVICE_RESOLVE_URL, params).data;
        if (data == null) {
            return null;
        }
        return data.path("results").path(0).asText(null);
    }

    /**
     * get the registration url for mobile devices
     */
    public static String getMobileRegistrationUrl(@NonNull final OCApiConnector connector) {
        return getInstallationInformation(connector).mobileRegistrationUrl;
    }

    /**
     * get the normal registration url
     */
    public static String getRegistrationUrl(@NonNull final OCApiConnector connector) {
        return getInstallationInformation(connector).registrationUrl;
    }

    // Jackson deserialization target for the OKAPI installation-information service.
    @JsonIgnoreProperties(ignoreUnknown = true)
    public static class InstallationInformation {
        @JsonProperty("site_url")
        String siteUrl;
        @JsonProperty("okapi_base_url")
        String okapiBaseUrl;
        @JsonProperty("okapi_base_urls")
        String[] okapiBaseUrls;
        @JsonProperty("site_name")
        String siteName;
        @JsonProperty("okapi_version_number")
        String okapiVersionNumber;
        @JsonProperty("okapi_revision")
        String okapiRevision;
        @JsonProperty("git_revision")
        String gitRevision;
        @JsonProperty("registration_url")
        String registrationUrl;
        @JsonProperty("mobile_registration_url")
        String mobileRegistrationUrl;
        @JsonProperty("image_max_upload_size")
        Long imageMaxUploadSize;
        @JsonProperty("image_rcmd_max_pixels")
        Long imageRcmdMaxPixels;

        @Override
        public String toString() {
            return "InstallationInformation{" + "siteUrl='" + siteUrl + '\''
                    + ", okapiBaseUrl='" + okapiBaseUrl + '\''
                    + ", okapiBaseUrls=" + Arrays.toString(okapiBaseUrls)
                    + ", siteName='" + siteName + '\''
                    + ", okapiVersionNumber='" + okapiVersionNumber + '\''
                    + ", okapiRevision='" + okapiRevision + '\''
                    + ", gitRevision='" + gitRevision + '\''
                    + ", registrationUrl='" + registrationUrl + '\''
                    + ", mobileRegistrationUrl='" + mobileRegistrationUrl + '\''
                    + ", imageMaxUploadSize=" + imageMaxUploadSize
                    + ", imageRcmdMaxPixels=" + imageRcmdMaxPixels + '}';
        }
    }

    /** Returns (and lazily fetches, then caches on the connector) the OKAPI installation information. */
    @NonNull
    static InstallationInformation getInstallationInformation(final OCApiConnector connector) {
        if (connector.getInstallationInformation() != null) {
            return connector.getInstallationInformation();
        }
        final ObjectNode data = getRequest(connector, OkapiService.SERVICE_API_INSTALLATION, new Parameters()).data;
        if (data == null) {
            return new InstallationInformation();
        }
        try {
            final InstallationInformation info = MAPPER.readValue(data.traverse(), InstallationInformation.class);
            connector.setInstallationInformation(info);
            Log.i("OkapiClient.getInstallationInformation: " + info);
            return info;
        } catch (final IOException e) {
            Log.e("OkapiClient.getInstallationInformation: Couldn't read InstallationInformation", e);
        }
        return new InstallationInformation();
    }

    /**
     * Fetch more caches, if the GC connector is not active at all.
     */
    private static String getCacheLimit() {
        return GCConnector.getInstance().isActive() ? "20" : "100";
    }
}
package net.daw.service.implementation;

import com.google.gson.Gson;
import java.io.IOException;
import java.io.PrintWriter;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import net.daw.bean.implementation.MunicipioBean;
import net.daw.bean.implementation.UsuarioBean;
import net.daw.connection.publicinterface.ConnectionInterface;
import net.daw.dao.implementation.MunicipioDao;
import net.daw.dao.implementation.UsuarioDao;
import net.daw.helper.statics.AppConfigurationHelper;
import static net.daw.helper.statics.AppConfigurationHelper.getSourceConnection;
import net.daw.helper.statics.ExceptionBooster;
import net.daw.helper.statics.FilterBeanHelper;
import net.daw.helper.statics.JsonMessage;
import net.daw.helper.statics.ParameterCook;
import net.daw.service.publicinterface.TableServiceInterface;
import net.daw.service.publicinterface.ViewServiceInterface;

/**
 * HTTP service layer for the Municipio entity: dispatches count/get/list/page
 * queries and remove/set mutations to {@link MunicipioDao}, wrapping the
 * results as JSON messages. Every method answers a "401 Unauthorized" JSON
 * message when no user is present in the session.
 */
public class MunicipioService implements TableServiceInterface, ViewServiceInterface {

    /** Request of the current invocation; source of all parameters and the session. */
    protected HttpServletRequest oRequest = null;

    public MunicipioService(HttpServletRequest request) {
        oRequest = request;
    }

    /**
     * Checks whether the current session may execute the given method.
     *
     * @param strMethodName name of the service method being guarded; currently
     *                      NOT evaluated — any logged-in user passes.
     *                      TODO(review): confirm per-method permissions are intentionally disabled.
     * @return true when a user is logged in
     */
    private Boolean checkpermission(String strMethodName) throws Exception {
        UsuarioBean oUserBean = (UsuarioBean) oRequest.getSession().getAttribute("userBean");
        return oUserBean != null;
    }

    /**
     * @return JSON message with the number of Municipio rows matching the request filter
     */
    @Override
    public String getcount() throws Exception {
        if (this.checkpermission("getcount")) {
            String data = null;
            ArrayList<FilterBeanHelper> alFilter = ParameterCook.prepareFilter(oRequest);
            Connection oConnection = null;
            ConnectionInterface oDataConnectionSource = null;
            try {
                oDataConnectionSource = getSourceConnection();
                oConnection = oDataConnectionSource.newConnection();
                MunicipioDao oMunicipioDao = new MunicipioDao(oConnection);
                data = JsonMessage.getJson("200", Integer.toString(oMunicipioDao.getCount(alFilter)));
            } catch (Exception ex) {
                ExceptionBooster.boost(new Exception(this.getClass().getName() + ":getCount ERROR: " + ex.getMessage()));
            } finally {
                if (oConnection != null) {
                    oConnection.close();
                }
                if (oDataConnectionSource != null) {
                    oDataConnectionSource.disposeConnection();
                }
            }
            return data;
        } else {
            return JsonMessage.getJsonMsg("401", "Unauthorized");
        }
    }

    /**
     * @return JSON message with one Municipio, identified by the request "id" parameter
     */
    @Override
    public String get() throws Exception {
        if (this.checkpermission("get")) {
            int id = ParameterCook.prepareId(oRequest);
            String data = null;
            Connection oConnection = null;
            ConnectionInterface oDataConnectionSource = null;
            try {
                oDataConnectionSource = getSourceConnection();
                oConnection = oDataConnectionSource.newConnection();
                MunicipioDao oMunicipioDao = new MunicipioDao(oConnection);
                MunicipioBean oMunicipioBean = new MunicipioBean(id);
                oMunicipioBean = oMunicipioDao.get(oMunicipioBean, AppConfigurationHelper.getJsonDepth());
                // note: a dead local Gson instance was removed here; serialization goes through the shared helper
                data = JsonMessage.getJson("200", AppConfigurationHelper.getGson().toJson(oMunicipioBean));
            } catch (Exception ex) {
                ExceptionBooster.boost(new Exception(this.getClass().getName() + ":get ERROR: " + ex.getMessage()));
            } finally {
                if (oConnection != null) {
                    oConnection.close();
                }
                if (oDataConnectionSource != null) {
                    oDataConnectionSource.disposeConnection();
                }
            }
            return data;
        } else {
            return JsonMessage.getJsonMsg("401", "Unauthorized");
        }
    }

    /**
     * @return JSON message with all Municipio rows matching filter/order.
     *         NOTE(review): uses a hard-coded JSON depth of 1 while other methods use
     *         AppConfigurationHelper.getJsonDepth() — confirm whether that is intended.
     */
    @Override
    public String getall() throws Exception {
        if (this.checkpermission("getall")) {
            ArrayList<FilterBeanHelper> alFilter = ParameterCook.prepareFilter(oRequest);
            HashMap<String, String> hmOrder = ParameterCook.prepareOrder(oRequest);
            String data = null;
            Connection oConnection = null;
            ConnectionInterface oDataConnectionSource = null;
            try {
                oDataConnectionSource = getSourceConnection();
                oConnection = oDataConnectionSource.newConnection();
                MunicipioDao oMunicipioDao = new MunicipioDao(oConnection);
                ArrayList<MunicipioBean> arrBeans = oMunicipioDao.getAll(alFilter, hmOrder, 1);
                data = JsonMessage.getJson("200", AppConfigurationHelper.getGson().toJson(arrBeans));
            } catch (Exception ex) {
                ExceptionBooster.boost(new Exception(this.getClass().getName() + ":getAll ERROR: " + ex.getMessage()));
            } finally {
                if (oConnection != null) {
                    oConnection.close();
                }
                if (oDataConnectionSource != null) {
                    oDataConnectionSource.disposeConnection();
                }
            }
            return data;
        } else {
            return JsonMessage.getJsonMsg("401", "Unauthorized");
        }
    }

    /**
     * @return JSON message with one page of Municipio rows (rpp + page request parameters)
     */
    @Override
    public String getpage() throws Exception {
        if (this.checkpermission("getpage")) {
            // stray empty statement (";;") removed, so the @SuppressWarnings("empty-statement") could go too
            int intRegsPerPag = ParameterCook.prepareRpp(oRequest);
            int intPage = ParameterCook.preparePage(oRequest);
            ArrayList<FilterBeanHelper> alFilter = ParameterCook.prepareFilter(oRequest);
            HashMap<String, String> hmOrder = ParameterCook.prepareOrder(oRequest);
            String data = null;
            Connection oConnection = null;
            ConnectionInterface oDataConnectionSource = null;
            try {
                oDataConnectionSource = getSourceConnection();
                oConnection = oDataConnectionSource.newConnection();
                MunicipioDao oMunicipioDao = new MunicipioDao(oConnection);
                List<MunicipioBean> arrBeans = oMunicipioDao.getPage(intRegsPerPag, intPage, alFilter, hmOrder, AppConfigurationHelper.getJsonDepth());
                data = JsonMessage.getJson("200", AppConfigurationHelper.getGson().toJson(arrBeans));
            } catch (Exception ex) {
                ExceptionBooster.boost(new Exception(this.getClass().getName() + ":getPage ERROR: " + ex.getMessage()));
            } finally {
                if (oConnection != null) {
                    oConnection.close();
                }
                if (oDataConnectionSource != null) {
                    oDataConnectionSource.disposeConnection();
                }
            }
            return data;
        } else {
            return JsonMessage.getJsonMsg("401", "Unauthorized");
        }
    }

    /**
     * @return JSON message with the total number of pages for the given rpp and filter
     */
    @Override
    public String getpages() throws Exception {
        if (this.checkpermission("getpages")) {
            int intRegsPerPag = ParameterCook.prepareRpp(oRequest);
            ArrayList<FilterBeanHelper> alFilter = ParameterCook.prepareFilter(oRequest);
            String data = null;
            Connection oConnection = null;
            ConnectionInterface oDataConnectionSource = null;
            try {
                oDataConnectionSource = getSourceConnection();
                oConnection = oDataConnectionSource.newConnection();
                MunicipioDao oMunicipioDao = new MunicipioDao(oConnection);
                data = JsonMessage.getJson("200", Integer.toString(oMunicipioDao.getPages(intRegsPerPag, alFilter)));
            } catch (Exception ex) {
                ExceptionBooster.boost(new Exception(this.getClass().getName() + ":getPages ERROR: " + ex.getMessage()));
            } finally {
                if (oConnection != null) {
                    oConnection.close();
                }
                if (oDataConnectionSource != null) {
                    oDataConnectionSource.disposeConnection();
                }
            }
            return data;
        } else {
            return JsonMessage.getJsonMsg("401", "Unauthorized");
        }
    }

    /**
     * Aggregates page, pages and register count into a single JSON message.
     */
    @Override
    public String getaggregateviewsome() throws Exception {
        if (this.checkpermission("getaggregateviewsome")) {
            String data = null;
            try {
                String page = this.getpage();
                String pages = this.getpages();
                String registers = this.getcount();
                data = "{"
                        + "\"page\":" + page
                        + ",\"pages\":" + pages
                        + ",\"registers\":" + registers
                        + "}";
                data = JsonMessage.getJson("200", data);
            } catch (Exception ex) {
                ExceptionBooster.boost(new Exception(this.getClass().getName() + ":getAggregateViewSome ERROR: " + ex.getMessage()));
            }
            return data;
        } else {
            return JsonMessage.getJsonMsg("401", "Unauthorized");
        }
    }

    /**
     * Deletes the Municipio identified by the request "id" parameter inside a transaction.
     *
     * @return JSON message with the DAO result, or null when an error was boosted
     */
    @Override
    public String remove() throws Exception {
        if (this.checkpermission("remove")) {
            Integer id = ParameterCook.prepareId(oRequest);
            String resultado = null;
            Connection oConnection = null;
            ConnectionInterface oDataConnectionSource = null;
            try {
                oDataConnectionSource = getSourceConnection();
                oConnection = oDataConnectionSource.newConnection();
                oConnection.setAutoCommit(false);
                MunicipioDao oMunicipioDao = new MunicipioDao(oConnection);
                resultado = JsonMessage.getJson("200", oMunicipioDao.remove(id).toString());
                oConnection.commit();
            } catch (Exception ex) {
                // guard the rollback: if newConnection() itself failed, oConnection is still
                // null and an unguarded rollback() would raise an NPE masking the real error
                if (oConnection != null) {
                    oConnection.rollback();
                }
                ExceptionBooster.boost(new Exception(this.getClass().getName() + ":remove ERROR: " + ex.getMessage()));
            } finally {
                if (oConnection != null) {
                    oConnection.close();
                }
                if (oDataConnectionSource != null) {
                    oDataConnectionSource.disposeConnection();
                }
            }
            return resultado;
        } else {
            return JsonMessage.getJsonMsg("401", "Unauthorized");
        }
    }

    /**
     * Inserts or updates a Municipio from the JSON body inside a transaction.
     *
     * @return JSON message with the affected id on success, a 500 message on bad input
     */
    @Override
    public String set() throws Exception {
        if (this.checkpermission("set")) {
            String jason = ParameterCook.prepareJson(oRequest);
            String resultado = null;
            Connection oConnection = null;
            ConnectionInterface oDataConnectionSource = null;
            try {
                oDataConnectionSource = getSourceConnection();
                oConnection = oDataConnectionSource.newConnection();
                oConnection.setAutoCommit(false);
                MunicipioDao oMunicipioDao = new MunicipioDao(oConnection);
                MunicipioBean oMunicipioBean = new MunicipioBean();
                oMunicipioBean = AppConfigurationHelper.getGson().fromJson(jason, oMunicipioBean.getClass());
                if (oMunicipioBean != null) {
                    Integer iResult = oMunicipioDao.set(oMunicipioBean);
                    if (iResult >= 1) {
                        resultado = JsonMessage.getJson("200", iResult.toString());
                    } else {
                        resultado = JsonMessage.getJson("500", "Error during registry set");
                    }
                } else {
                    resultado = JsonMessage.getJson("500", "Error during registry set");
                }
                oConnection.commit();
            } catch (Exception ex) {
                // same null-guard as remove(): avoid NPE when the connection was never opened
                if (oConnection != null) {
                    oConnection.rollback();
                }
                ExceptionBooster.boost(new Exception(this.getClass().getName() + ":set ERROR: " + ex.getMessage()));
            } finally {
                if (oConnection != null) {
                    oConnection.close();
                }
                if (oDataConnectionSource != null) {
                    oDataConnectionSource.disposeConnection();
                }
            }
            return resultado;
        } else {
            return JsonMessage.getJsonMsg("401", "Unauthorized");
        }
    }
}
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/vision/v1p4beta1/image_annotator.proto package com.google.cloud.vision.v1p4beta1; /** * * * <pre> * An offline file annotation request. * </pre> * * Protobuf type {@code google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest} */ public final class AsyncAnnotateFileRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest) AsyncAnnotateFileRequestOrBuilder { private static final long serialVersionUID = 0L; // Use AsyncAnnotateFileRequest.newBuilder() to construct. 
private AsyncAnnotateFileRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private AsyncAnnotateFileRequest() { features_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new AsyncAnnotateFileRequest(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private AsyncAnnotateFileRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.vision.v1p4beta1.InputConfig.Builder subBuilder = null; if (inputConfig_ != null) { subBuilder = inputConfig_.toBuilder(); } inputConfig_ = input.readMessage( com.google.cloud.vision.v1p4beta1.InputConfig.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(inputConfig_); inputConfig_ = subBuilder.buildPartial(); } break; } case 18: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { features_ = new java.util.ArrayList<com.google.cloud.vision.v1p4beta1.Feature>(); mutable_bitField0_ |= 0x00000001; } features_.add( input.readMessage( com.google.cloud.vision.v1p4beta1.Feature.parser(), extensionRegistry)); break; } case 26: { com.google.cloud.vision.v1p4beta1.ImageContext.Builder subBuilder = null; if (imageContext_ != null) { subBuilder = imageContext_.toBuilder(); } imageContext_ = input.readMessage( com.google.cloud.vision.v1p4beta1.ImageContext.parser(), extensionRegistry); if (subBuilder != null) { 
subBuilder.mergeFrom(imageContext_); imageContext_ = subBuilder.buildPartial(); } break; } case 34: { com.google.cloud.vision.v1p4beta1.OutputConfig.Builder subBuilder = null; if (outputConfig_ != null) { subBuilder = outputConfig_.toBuilder(); } outputConfig_ = input.readMessage( com.google.cloud.vision.v1p4beta1.OutputConfig.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(outputConfig_); outputConfig_ = subBuilder.buildPartial(); } break; } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { features_ = java.util.Collections.unmodifiableList(features_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.vision.v1p4beta1.ImageAnnotatorProto .internal_static_google_cloud_vision_v1p4beta1_AsyncAnnotateFileRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.vision.v1p4beta1.ImageAnnotatorProto .internal_static_google_cloud_vision_v1p4beta1_AsyncAnnotateFileRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest.class, com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest.Builder.class); } public static final int INPUT_CONFIG_FIELD_NUMBER = 1; private com.google.cloud.vision.v1p4beta1.InputConfig inputConfig_; /** * * * <pre> * Required. Information about the input file. 
* </pre> * * <code>.google.cloud.vision.v1p4beta1.InputConfig input_config = 1;</code> * * @return Whether the inputConfig field is set. */ @java.lang.Override public boolean hasInputConfig() { return inputConfig_ != null; } /** * * * <pre> * Required. Information about the input file. * </pre> * * <code>.google.cloud.vision.v1p4beta1.InputConfig input_config = 1;</code> * * @return The inputConfig. */ @java.lang.Override public com.google.cloud.vision.v1p4beta1.InputConfig getInputConfig() { return inputConfig_ == null ? com.google.cloud.vision.v1p4beta1.InputConfig.getDefaultInstance() : inputConfig_; } /** * * * <pre> * Required. Information about the input file. * </pre> * * <code>.google.cloud.vision.v1p4beta1.InputConfig input_config = 1;</code> */ @java.lang.Override public com.google.cloud.vision.v1p4beta1.InputConfigOrBuilder getInputConfigOrBuilder() { return getInputConfig(); } public static final int FEATURES_FIELD_NUMBER = 2; private java.util.List<com.google.cloud.vision.v1p4beta1.Feature> features_; /** * * * <pre> * Required. Requested features. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Feature features = 2;</code> */ @java.lang.Override public java.util.List<com.google.cloud.vision.v1p4beta1.Feature> getFeaturesList() { return features_; } /** * * * <pre> * Required. Requested features. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Feature features = 2;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.vision.v1p4beta1.FeatureOrBuilder> getFeaturesOrBuilderList() { return features_; } /** * * * <pre> * Required. Requested features. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Feature features = 2;</code> */ @java.lang.Override public int getFeaturesCount() { return features_.size(); } /** * * * <pre> * Required. Requested features. 
* </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Feature features = 2;</code> */ @java.lang.Override public com.google.cloud.vision.v1p4beta1.Feature getFeatures(int index) { return features_.get(index); } /** * * * <pre> * Required. Requested features. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Feature features = 2;</code> */ @java.lang.Override public com.google.cloud.vision.v1p4beta1.FeatureOrBuilder getFeaturesOrBuilder(int index) { return features_.get(index); } public static final int IMAGE_CONTEXT_FIELD_NUMBER = 3; private com.google.cloud.vision.v1p4beta1.ImageContext imageContext_; /** * * * <pre> * Additional context that may accompany the image(s) in the file. * </pre> * * <code>.google.cloud.vision.v1p4beta1.ImageContext image_context = 3;</code> * * @return Whether the imageContext field is set. */ @java.lang.Override public boolean hasImageContext() { return imageContext_ != null; } /** * * * <pre> * Additional context that may accompany the image(s) in the file. * </pre> * * <code>.google.cloud.vision.v1p4beta1.ImageContext image_context = 3;</code> * * @return The imageContext. */ @java.lang.Override public com.google.cloud.vision.v1p4beta1.ImageContext getImageContext() { return imageContext_ == null ? com.google.cloud.vision.v1p4beta1.ImageContext.getDefaultInstance() : imageContext_; } /** * * * <pre> * Additional context that may accompany the image(s) in the file. * </pre> * * <code>.google.cloud.vision.v1p4beta1.ImageContext image_context = 3;</code> */ @java.lang.Override public com.google.cloud.vision.v1p4beta1.ImageContextOrBuilder getImageContextOrBuilder() { return getImageContext(); } public static final int OUTPUT_CONFIG_FIELD_NUMBER = 4; private com.google.cloud.vision.v1p4beta1.OutputConfig outputConfig_; /** * * * <pre> * Required. The desired output location and metadata (e.g. format). 
* </pre> * * <code>.google.cloud.vision.v1p4beta1.OutputConfig output_config = 4;</code> * * @return Whether the outputConfig field is set. */ @java.lang.Override public boolean hasOutputConfig() { return outputConfig_ != null; } /** * * * <pre> * Required. The desired output location and metadata (e.g. format). * </pre> * * <code>.google.cloud.vision.v1p4beta1.OutputConfig output_config = 4;</code> * * @return The outputConfig. */ @java.lang.Override public com.google.cloud.vision.v1p4beta1.OutputConfig getOutputConfig() { return outputConfig_ == null ? com.google.cloud.vision.v1p4beta1.OutputConfig.getDefaultInstance() : outputConfig_; } /** * * * <pre> * Required. The desired output location and metadata (e.g. format). * </pre> * * <code>.google.cloud.vision.v1p4beta1.OutputConfig output_config = 4;</code> */ @java.lang.Override public com.google.cloud.vision.v1p4beta1.OutputConfigOrBuilder getOutputConfigOrBuilder() { return getOutputConfig(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (inputConfig_ != null) { output.writeMessage(1, getInputConfig()); } for (int i = 0; i < features_.size(); i++) { output.writeMessage(2, features_.get(i)); } if (imageContext_ != null) { output.writeMessage(3, getImageContext()); } if (outputConfig_ != null) { output.writeMessage(4, getOutputConfig()); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (inputConfig_ != null) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getInputConfig()); } for (int i = 0; i < features_.size(); i++) { size += 
com.google.protobuf.CodedOutputStream.computeMessageSize(2, features_.get(i)); } if (imageContext_ != null) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getImageContext()); } if (outputConfig_ != null) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(4, getOutputConfig()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest)) { return super.equals(obj); } com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest other = (com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest) obj; if (hasInputConfig() != other.hasInputConfig()) return false; if (hasInputConfig()) { if (!getInputConfig().equals(other.getInputConfig())) return false; } if (!getFeaturesList().equals(other.getFeaturesList())) return false; if (hasImageContext() != other.hasImageContext()) return false; if (hasImageContext()) { if (!getImageContext().equals(other.getImageContext())) return false; } if (hasOutputConfig() != other.hasOutputConfig()) return false; if (hasOutputConfig()) { if (!getOutputConfig().equals(other.getOutputConfig())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasInputConfig()) { hash = (37 * hash) + INPUT_CONFIG_FIELD_NUMBER; hash = (53 * hash) + getInputConfig().hashCode(); } if (getFeaturesCount() > 0) { hash = (37 * hash) + FEATURES_FIELD_NUMBER; hash = (53 * hash) + getFeaturesList().hashCode(); } if (hasImageContext()) { hash = (37 * hash) + IMAGE_CONTEXT_FIELD_NUMBER; hash = (53 * hash) + getImageContext().hashCode(); } if (hasOutputConfig()) { hash = (37 * hash) + OUTPUT_CONFIG_FIELD_NUMBER; hash = 
(53 * hash) + getOutputConfig().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * An offline file annotation request. 
* </pre> * * Protobuf type {@code google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest) com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.vision.v1p4beta1.ImageAnnotatorProto .internal_static_google_cloud_vision_v1p4beta1_AsyncAnnotateFileRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.vision.v1p4beta1.ImageAnnotatorProto .internal_static_google_cloud_vision_v1p4beta1_AsyncAnnotateFileRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest.class, com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest.Builder.class); } // Construct using com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getFeaturesFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (inputConfigBuilder_ == null) { inputConfig_ = null; } else { inputConfig_ = null; inputConfigBuilder_ = null; } if (featuresBuilder_ == null) { features_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { featuresBuilder_.clear(); } if (imageContextBuilder_ == null) { imageContext_ = null; } else { imageContext_ = null; imageContextBuilder_ = null; } if (outputConfigBuilder_ == null) { outputConfig_ = null; } else 
{ outputConfig_ = null; outputConfigBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.vision.v1p4beta1.ImageAnnotatorProto .internal_static_google_cloud_vision_v1p4beta1_AsyncAnnotateFileRequest_descriptor; } @java.lang.Override public com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest getDefaultInstanceForType() { return com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest build() { com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest buildPartial() { com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest result = new com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest(this); int from_bitField0_ = bitField0_; if (inputConfigBuilder_ == null) { result.inputConfig_ = inputConfig_; } else { result.inputConfig_ = inputConfigBuilder_.build(); } if (featuresBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { features_ = java.util.Collections.unmodifiableList(features_); bitField0_ = (bitField0_ & ~0x00000001); } result.features_ = features_; } else { result.features_ = featuresBuilder_.build(); } if (imageContextBuilder_ == null) { result.imageContext_ = imageContext_; } else { result.imageContext_ = imageContextBuilder_.build(); } if (outputConfigBuilder_ == null) { result.outputConfig_ = outputConfig_; } else { result.outputConfig_ = outputConfigBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, 
value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest) { return mergeFrom((com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest other) { if (other == com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest.getDefaultInstance()) return this; if (other.hasInputConfig()) { mergeInputConfig(other.getInputConfig()); } if (featuresBuilder_ == null) { if (!other.features_.isEmpty()) { if (features_.isEmpty()) { features_ = other.features_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureFeaturesIsMutable(); features_.addAll(other.features_); } onChanged(); } } else { if (!other.features_.isEmpty()) { if (featuresBuilder_.isEmpty()) { featuresBuilder_.dispose(); featuresBuilder_ = null; features_ = other.features_; bitField0_ = (bitField0_ & ~0x00000001); featuresBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getFeaturesFieldBuilder() : null; } else { featuresBuilder_.addAllMessages(other.features_); } } } if (other.hasImageContext()) { mergeImageContext(other.getImageContext()); } if (other.hasOutputConfig()) { mergeOutputConfig(other.getOutputConfig()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private com.google.cloud.vision.v1p4beta1.InputConfig inputConfig_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.vision.v1p4beta1.InputConfig, com.google.cloud.vision.v1p4beta1.InputConfig.Builder, com.google.cloud.vision.v1p4beta1.InputConfigOrBuilder> inputConfigBuilder_; /** * * * <pre> * Required. Information about the input file. * </pre> * * <code>.google.cloud.vision.v1p4beta1.InputConfig input_config = 1;</code> * * @return Whether the inputConfig field is set. */ public boolean hasInputConfig() { return inputConfigBuilder_ != null || inputConfig_ != null; } /** * * * <pre> * Required. Information about the input file. * </pre> * * <code>.google.cloud.vision.v1p4beta1.InputConfig input_config = 1;</code> * * @return The inputConfig. */ public com.google.cloud.vision.v1p4beta1.InputConfig getInputConfig() { if (inputConfigBuilder_ == null) { return inputConfig_ == null ? 
com.google.cloud.vision.v1p4beta1.InputConfig.getDefaultInstance() : inputConfig_; } else { return inputConfigBuilder_.getMessage(); } } /** * * * <pre> * Required. Information about the input file. * </pre> * * <code>.google.cloud.vision.v1p4beta1.InputConfig input_config = 1;</code> */ public Builder setInputConfig(com.google.cloud.vision.v1p4beta1.InputConfig value) { if (inputConfigBuilder_ == null) { if (value == null) { throw new NullPointerException(); } inputConfig_ = value; onChanged(); } else { inputConfigBuilder_.setMessage(value); } return this; } /** * * * <pre> * Required. Information about the input file. * </pre> * * <code>.google.cloud.vision.v1p4beta1.InputConfig input_config = 1;</code> */ public Builder setInputConfig( com.google.cloud.vision.v1p4beta1.InputConfig.Builder builderForValue) { if (inputConfigBuilder_ == null) { inputConfig_ = builderForValue.build(); onChanged(); } else { inputConfigBuilder_.setMessage(builderForValue.build()); } return this; } /** * * * <pre> * Required. Information about the input file. * </pre> * * <code>.google.cloud.vision.v1p4beta1.InputConfig input_config = 1;</code> */ public Builder mergeInputConfig(com.google.cloud.vision.v1p4beta1.InputConfig value) { if (inputConfigBuilder_ == null) { if (inputConfig_ != null) { inputConfig_ = com.google.cloud.vision.v1p4beta1.InputConfig.newBuilder(inputConfig_) .mergeFrom(value) .buildPartial(); } else { inputConfig_ = value; } onChanged(); } else { inputConfigBuilder_.mergeFrom(value); } return this; } /** * * * <pre> * Required. Information about the input file. * </pre> * * <code>.google.cloud.vision.v1p4beta1.InputConfig input_config = 1;</code> */ public Builder clearInputConfig() { if (inputConfigBuilder_ == null) { inputConfig_ = null; onChanged(); } else { inputConfig_ = null; inputConfigBuilder_ = null; } return this; } /** * * * <pre> * Required. Information about the input file. 
* </pre> * * <code>.google.cloud.vision.v1p4beta1.InputConfig input_config = 1;</code> */ public com.google.cloud.vision.v1p4beta1.InputConfig.Builder getInputConfigBuilder() { onChanged(); return getInputConfigFieldBuilder().getBuilder(); } /** * * * <pre> * Required. Information about the input file. * </pre> * * <code>.google.cloud.vision.v1p4beta1.InputConfig input_config = 1;</code> */ public com.google.cloud.vision.v1p4beta1.InputConfigOrBuilder getInputConfigOrBuilder() { if (inputConfigBuilder_ != null) { return inputConfigBuilder_.getMessageOrBuilder(); } else { return inputConfig_ == null ? com.google.cloud.vision.v1p4beta1.InputConfig.getDefaultInstance() : inputConfig_; } } /** * * * <pre> * Required. Information about the input file. * </pre> * * <code>.google.cloud.vision.v1p4beta1.InputConfig input_config = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.vision.v1p4beta1.InputConfig, com.google.cloud.vision.v1p4beta1.InputConfig.Builder, com.google.cloud.vision.v1p4beta1.InputConfigOrBuilder> getInputConfigFieldBuilder() { if (inputConfigBuilder_ == null) { inputConfigBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.vision.v1p4beta1.InputConfig, com.google.cloud.vision.v1p4beta1.InputConfig.Builder, com.google.cloud.vision.v1p4beta1.InputConfigOrBuilder>( getInputConfig(), getParentForChildren(), isClean()); inputConfig_ = null; } return inputConfigBuilder_; } private java.util.List<com.google.cloud.vision.v1p4beta1.Feature> features_ = java.util.Collections.emptyList(); private void ensureFeaturesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { features_ = new java.util.ArrayList<com.google.cloud.vision.v1p4beta1.Feature>(features_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.vision.v1p4beta1.Feature, com.google.cloud.vision.v1p4beta1.Feature.Builder, com.google.cloud.vision.v1p4beta1.FeatureOrBuilder> featuresBuilder_; /** * * * 
<pre> * Required. Requested features. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Feature features = 2;</code> */ public java.util.List<com.google.cloud.vision.v1p4beta1.Feature> getFeaturesList() { if (featuresBuilder_ == null) { return java.util.Collections.unmodifiableList(features_); } else { return featuresBuilder_.getMessageList(); } } /** * * * <pre> * Required. Requested features. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Feature features = 2;</code> */ public int getFeaturesCount() { if (featuresBuilder_ == null) { return features_.size(); } else { return featuresBuilder_.getCount(); } } /** * * * <pre> * Required. Requested features. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Feature features = 2;</code> */ public com.google.cloud.vision.v1p4beta1.Feature getFeatures(int index) { if (featuresBuilder_ == null) { return features_.get(index); } else { return featuresBuilder_.getMessage(index); } } /** * * * <pre> * Required. Requested features. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Feature features = 2;</code> */ public Builder setFeatures(int index, com.google.cloud.vision.v1p4beta1.Feature value) { if (featuresBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFeaturesIsMutable(); features_.set(index, value); onChanged(); } else { featuresBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * Required. Requested features. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Feature features = 2;</code> */ public Builder setFeatures( int index, com.google.cloud.vision.v1p4beta1.Feature.Builder builderForValue) { if (featuresBuilder_ == null) { ensureFeaturesIsMutable(); features_.set(index, builderForValue.build()); onChanged(); } else { featuresBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Required. Requested features. 
* </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Feature features = 2;</code> */ public Builder addFeatures(com.google.cloud.vision.v1p4beta1.Feature value) { if (featuresBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFeaturesIsMutable(); features_.add(value); onChanged(); } else { featuresBuilder_.addMessage(value); } return this; } /** * * * <pre> * Required. Requested features. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Feature features = 2;</code> */ public Builder addFeatures(int index, com.google.cloud.vision.v1p4beta1.Feature value) { if (featuresBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFeaturesIsMutable(); features_.add(index, value); onChanged(); } else { featuresBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * Required. Requested features. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Feature features = 2;</code> */ public Builder addFeatures(com.google.cloud.vision.v1p4beta1.Feature.Builder builderForValue) { if (featuresBuilder_ == null) { ensureFeaturesIsMutable(); features_.add(builderForValue.build()); onChanged(); } else { featuresBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * Required. Requested features. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Feature features = 2;</code> */ public Builder addFeatures( int index, com.google.cloud.vision.v1p4beta1.Feature.Builder builderForValue) { if (featuresBuilder_ == null) { ensureFeaturesIsMutable(); features_.add(index, builderForValue.build()); onChanged(); } else { featuresBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Required. Requested features. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Feature features = 2;</code> */ public Builder addAllFeatures( java.lang.Iterable<? 
extends com.google.cloud.vision.v1p4beta1.Feature> values) { if (featuresBuilder_ == null) { ensureFeaturesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, features_); onChanged(); } else { featuresBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * Required. Requested features. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Feature features = 2;</code> */ public Builder clearFeatures() { if (featuresBuilder_ == null) { features_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { featuresBuilder_.clear(); } return this; } /** * * * <pre> * Required. Requested features. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Feature features = 2;</code> */ public Builder removeFeatures(int index) { if (featuresBuilder_ == null) { ensureFeaturesIsMutable(); features_.remove(index); onChanged(); } else { featuresBuilder_.remove(index); } return this; } /** * * * <pre> * Required. Requested features. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Feature features = 2;</code> */ public com.google.cloud.vision.v1p4beta1.Feature.Builder getFeaturesBuilder(int index) { return getFeaturesFieldBuilder().getBuilder(index); } /** * * * <pre> * Required. Requested features. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Feature features = 2;</code> */ public com.google.cloud.vision.v1p4beta1.FeatureOrBuilder getFeaturesOrBuilder(int index) { if (featuresBuilder_ == null) { return features_.get(index); } else { return featuresBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * Required. Requested features. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Feature features = 2;</code> */ public java.util.List<? 
extends com.google.cloud.vision.v1p4beta1.FeatureOrBuilder> getFeaturesOrBuilderList() { if (featuresBuilder_ != null) { return featuresBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(features_); } } /** * * * <pre> * Required. Requested features. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Feature features = 2;</code> */ public com.google.cloud.vision.v1p4beta1.Feature.Builder addFeaturesBuilder() { return getFeaturesFieldBuilder() .addBuilder(com.google.cloud.vision.v1p4beta1.Feature.getDefaultInstance()); } /** * * * <pre> * Required. Requested features. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Feature features = 2;</code> */ public com.google.cloud.vision.v1p4beta1.Feature.Builder addFeaturesBuilder(int index) { return getFeaturesFieldBuilder() .addBuilder(index, com.google.cloud.vision.v1p4beta1.Feature.getDefaultInstance()); } /** * * * <pre> * Required. Requested features. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.Feature features = 2;</code> */ public java.util.List<com.google.cloud.vision.v1p4beta1.Feature.Builder> getFeaturesBuilderList() { return getFeaturesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.vision.v1p4beta1.Feature, com.google.cloud.vision.v1p4beta1.Feature.Builder, com.google.cloud.vision.v1p4beta1.FeatureOrBuilder> getFeaturesFieldBuilder() { if (featuresBuilder_ == null) { featuresBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.vision.v1p4beta1.Feature, com.google.cloud.vision.v1p4beta1.Feature.Builder, com.google.cloud.vision.v1p4beta1.FeatureOrBuilder>( features_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); features_ = null; } return featuresBuilder_; } private com.google.cloud.vision.v1p4beta1.ImageContext imageContext_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.vision.v1p4beta1.ImageContext, 
com.google.cloud.vision.v1p4beta1.ImageContext.Builder, com.google.cloud.vision.v1p4beta1.ImageContextOrBuilder> imageContextBuilder_; /** * * * <pre> * Additional context that may accompany the image(s) in the file. * </pre> * * <code>.google.cloud.vision.v1p4beta1.ImageContext image_context = 3;</code> * * @return Whether the imageContext field is set. */ public boolean hasImageContext() { return imageContextBuilder_ != null || imageContext_ != null; } /** * * * <pre> * Additional context that may accompany the image(s) in the file. * </pre> * * <code>.google.cloud.vision.v1p4beta1.ImageContext image_context = 3;</code> * * @return The imageContext. */ public com.google.cloud.vision.v1p4beta1.ImageContext getImageContext() { if (imageContextBuilder_ == null) { return imageContext_ == null ? com.google.cloud.vision.v1p4beta1.ImageContext.getDefaultInstance() : imageContext_; } else { return imageContextBuilder_.getMessage(); } } /** * * * <pre> * Additional context that may accompany the image(s) in the file. * </pre> * * <code>.google.cloud.vision.v1p4beta1.ImageContext image_context = 3;</code> */ public Builder setImageContext(com.google.cloud.vision.v1p4beta1.ImageContext value) { if (imageContextBuilder_ == null) { if (value == null) { throw new NullPointerException(); } imageContext_ = value; onChanged(); } else { imageContextBuilder_.setMessage(value); } return this; } /** * * * <pre> * Additional context that may accompany the image(s) in the file. * </pre> * * <code>.google.cloud.vision.v1p4beta1.ImageContext image_context = 3;</code> */ public Builder setImageContext( com.google.cloud.vision.v1p4beta1.ImageContext.Builder builderForValue) { if (imageContextBuilder_ == null) { imageContext_ = builderForValue.build(); onChanged(); } else { imageContextBuilder_.setMessage(builderForValue.build()); } return this; } /** * * * <pre> * Additional context that may accompany the image(s) in the file. 
* </pre> * * <code>.google.cloud.vision.v1p4beta1.ImageContext image_context = 3;</code> */ public Builder mergeImageContext(com.google.cloud.vision.v1p4beta1.ImageContext value) { if (imageContextBuilder_ == null) { if (imageContext_ != null) { imageContext_ = com.google.cloud.vision.v1p4beta1.ImageContext.newBuilder(imageContext_) .mergeFrom(value) .buildPartial(); } else { imageContext_ = value; } onChanged(); } else { imageContextBuilder_.mergeFrom(value); } return this; } /** * * * <pre> * Additional context that may accompany the image(s) in the file. * </pre> * * <code>.google.cloud.vision.v1p4beta1.ImageContext image_context = 3;</code> */ public Builder clearImageContext() { if (imageContextBuilder_ == null) { imageContext_ = null; onChanged(); } else { imageContext_ = null; imageContextBuilder_ = null; } return this; } /** * * * <pre> * Additional context that may accompany the image(s) in the file. * </pre> * * <code>.google.cloud.vision.v1p4beta1.ImageContext image_context = 3;</code> */ public com.google.cloud.vision.v1p4beta1.ImageContext.Builder getImageContextBuilder() { onChanged(); return getImageContextFieldBuilder().getBuilder(); } /** * * * <pre> * Additional context that may accompany the image(s) in the file. * </pre> * * <code>.google.cloud.vision.v1p4beta1.ImageContext image_context = 3;</code> */ public com.google.cloud.vision.v1p4beta1.ImageContextOrBuilder getImageContextOrBuilder() { if (imageContextBuilder_ != null) { return imageContextBuilder_.getMessageOrBuilder(); } else { return imageContext_ == null ? com.google.cloud.vision.v1p4beta1.ImageContext.getDefaultInstance() : imageContext_; } } /** * * * <pre> * Additional context that may accompany the image(s) in the file. 
* </pre> * * <code>.google.cloud.vision.v1p4beta1.ImageContext image_context = 3;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.vision.v1p4beta1.ImageContext, com.google.cloud.vision.v1p4beta1.ImageContext.Builder, com.google.cloud.vision.v1p4beta1.ImageContextOrBuilder> getImageContextFieldBuilder() { if (imageContextBuilder_ == null) { imageContextBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.vision.v1p4beta1.ImageContext, com.google.cloud.vision.v1p4beta1.ImageContext.Builder, com.google.cloud.vision.v1p4beta1.ImageContextOrBuilder>( getImageContext(), getParentForChildren(), isClean()); imageContext_ = null; } return imageContextBuilder_; } private com.google.cloud.vision.v1p4beta1.OutputConfig outputConfig_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.vision.v1p4beta1.OutputConfig, com.google.cloud.vision.v1p4beta1.OutputConfig.Builder, com.google.cloud.vision.v1p4beta1.OutputConfigOrBuilder> outputConfigBuilder_; /** * * * <pre> * Required. The desired output location and metadata (e.g. format). * </pre> * * <code>.google.cloud.vision.v1p4beta1.OutputConfig output_config = 4;</code> * * @return Whether the outputConfig field is set. */ public boolean hasOutputConfig() { return outputConfigBuilder_ != null || outputConfig_ != null; } /** * * * <pre> * Required. The desired output location and metadata (e.g. format). * </pre> * * <code>.google.cloud.vision.v1p4beta1.OutputConfig output_config = 4;</code> * * @return The outputConfig. */ public com.google.cloud.vision.v1p4beta1.OutputConfig getOutputConfig() { if (outputConfigBuilder_ == null) { return outputConfig_ == null ? com.google.cloud.vision.v1p4beta1.OutputConfig.getDefaultInstance() : outputConfig_; } else { return outputConfigBuilder_.getMessage(); } } /** * * * <pre> * Required. The desired output location and metadata (e.g. format). 
* </pre> * * <code>.google.cloud.vision.v1p4beta1.OutputConfig output_config = 4;</code> */ public Builder setOutputConfig(com.google.cloud.vision.v1p4beta1.OutputConfig value) { if (outputConfigBuilder_ == null) { if (value == null) { throw new NullPointerException(); } outputConfig_ = value; onChanged(); } else { outputConfigBuilder_.setMessage(value); } return this; } /** * * * <pre> * Required. The desired output location and metadata (e.g. format). * </pre> * * <code>.google.cloud.vision.v1p4beta1.OutputConfig output_config = 4;</code> */ public Builder setOutputConfig( com.google.cloud.vision.v1p4beta1.OutputConfig.Builder builderForValue) { if (outputConfigBuilder_ == null) { outputConfig_ = builderForValue.build(); onChanged(); } else { outputConfigBuilder_.setMessage(builderForValue.build()); } return this; } /** * * * <pre> * Required. The desired output location and metadata (e.g. format). * </pre> * * <code>.google.cloud.vision.v1p4beta1.OutputConfig output_config = 4;</code> */ public Builder mergeOutputConfig(com.google.cloud.vision.v1p4beta1.OutputConfig value) { if (outputConfigBuilder_ == null) { if (outputConfig_ != null) { outputConfig_ = com.google.cloud.vision.v1p4beta1.OutputConfig.newBuilder(outputConfig_) .mergeFrom(value) .buildPartial(); } else { outputConfig_ = value; } onChanged(); } else { outputConfigBuilder_.mergeFrom(value); } return this; } /** * * * <pre> * Required. The desired output location and metadata (e.g. format). * </pre> * * <code>.google.cloud.vision.v1p4beta1.OutputConfig output_config = 4;</code> */ public Builder clearOutputConfig() { if (outputConfigBuilder_ == null) { outputConfig_ = null; onChanged(); } else { outputConfig_ = null; outputConfigBuilder_ = null; } return this; } /** * * * <pre> * Required. The desired output location and metadata (e.g. format). 
* </pre> * * <code>.google.cloud.vision.v1p4beta1.OutputConfig output_config = 4;</code> */ public com.google.cloud.vision.v1p4beta1.OutputConfig.Builder getOutputConfigBuilder() { onChanged(); return getOutputConfigFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The desired output location and metadata (e.g. format). * </pre> * * <code>.google.cloud.vision.v1p4beta1.OutputConfig output_config = 4;</code> */ public com.google.cloud.vision.v1p4beta1.OutputConfigOrBuilder getOutputConfigOrBuilder() { if (outputConfigBuilder_ != null) { return outputConfigBuilder_.getMessageOrBuilder(); } else { return outputConfig_ == null ? com.google.cloud.vision.v1p4beta1.OutputConfig.getDefaultInstance() : outputConfig_; } } /** * * * <pre> * Required. The desired output location and metadata (e.g. format). * </pre> * * <code>.google.cloud.vision.v1p4beta1.OutputConfig output_config = 4;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.vision.v1p4beta1.OutputConfig, com.google.cloud.vision.v1p4beta1.OutputConfig.Builder, com.google.cloud.vision.v1p4beta1.OutputConfigOrBuilder> getOutputConfigFieldBuilder() { if (outputConfigBuilder_ == null) { outputConfigBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.vision.v1p4beta1.OutputConfig, com.google.cloud.vision.v1p4beta1.OutputConfig.Builder, com.google.cloud.vision.v1p4beta1.OutputConfigOrBuilder>( getOutputConfig(), getParentForChildren(), isClean()); outputConfig_ = null; } return outputConfigBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest) } // 
@@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest) private static final com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest(); } public static com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<AsyncAnnotateFileRequest> PARSER = new com.google.protobuf.AbstractParser<AsyncAnnotateFileRequest>() { @java.lang.Override public AsyncAnnotateFileRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new AsyncAnnotateFileRequest(input, extensionRegistry); } }; public static com.google.protobuf.Parser<AsyncAnnotateFileRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<AsyncAnnotateFileRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
/***
 * ASM: a very small and fast Java bytecode manipulation framework
 * Copyright (c) 2000-2011 INRIA, France Telecom
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of the copyright holders nor the names of its
 *    contributors may be used to endorse or promote products derived from
 *    this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */
package se.jkrau.mclib.org.objectweb.asm.tree;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import se.jkrau.mclib.org.objectweb.asm.MethodVisitor;

/**
 * A node that represents a bytecode instruction. <i>An instruction can appear
 * at most once in at most one {@link InsnList} at a time</i>.
 *
 * @author Eric Bruneton
 */
public abstract class AbstractInsnNode {

    /**
     * The type of {@link InsnNode} instructions.
     */
    public static final int INSN = 0;

    /**
     * The type of {@link IntInsnNode} instructions.
     */
    public static final int INT_INSN = 1;

    /**
     * The type of {@link VarInsnNode} instructions.
     */
    public static final int VAR_INSN = 2;

    /**
     * The type of {@link TypeInsnNode} instructions.
     */
    public static final int TYPE_INSN = 3;

    /**
     * The type of {@link FieldInsnNode} instructions.
     */
    public static final int FIELD_INSN = 4;

    /**
     * The type of {@link MethodInsnNode} instructions.
     */
    public static final int METHOD_INSN = 5;

    /**
     * The type of {@link InvokeDynamicInsnNode} instructions.
     */
    public static final int INVOKE_DYNAMIC_INSN = 6;

    /**
     * The type of {@link JumpInsnNode} instructions.
     */
    public static final int JUMP_INSN = 7;

    /**
     * The type of {@link LabelNode} "instructions".
     */
    public static final int LABEL = 8;

    /**
     * The type of {@link LdcInsnNode} instructions.
     */
    public static final int LDC_INSN = 9;

    /**
     * The type of {@link IincInsnNode} instructions.
     */
    public static final int IINC_INSN = 10;

    /**
     * The type of {@link TableSwitchInsnNode} instructions.
     */
    public static final int TABLESWITCH_INSN = 11;

    /**
     * The type of {@link LookupSwitchInsnNode} instructions.
     */
    public static final int LOOKUPSWITCH_INSN = 12;

    /**
     * The type of {@link MultiANewArrayInsnNode} instructions.
     */
    public static final int MULTIANEWARRAY_INSN = 13;

    /**
     * The type of {@link FrameNode} "instructions".
     */
    public static final int FRAME = 14;

    /**
     * The type of {@link LineNumberNode} "instructions".
     */
    public static final int LINE = 15;

    /**
     * The opcode of this instruction.
     */
    protected int opcode;

    /**
     * The runtime visible type annotations of this instruction. This field is
     * only used for real instructions (i.e. not for labels, frames, or line
     * number nodes). This list is a list of {@link TypeAnnotationNode} objects.
     * May be <tt>null</tt>.
     *
     * @associates org.objectweb.asm.tree.TypeAnnotationNode
     * @label visible
     */
    public List<TypeAnnotationNode> visibleTypeAnnotations;

    /**
     * The runtime invisible type annotations of this instruction. This field is
     * only used for real instructions (i.e. not for labels, frames, or line
     * number nodes). This list is a list of {@link TypeAnnotationNode} objects.
     * May be <tt>null</tt>.
     *
     * @associates org.objectweb.asm.tree.TypeAnnotationNode
     * @label invisible
     */
    public List<TypeAnnotationNode> invisibleTypeAnnotations;

    /**
     * Previous instruction in the list to which this instruction belongs.
     * Package-private: maintained by {@link InsnList}, not by this class.
     */
    AbstractInsnNode prev;

    /**
     * Next instruction in the list to which this instruction belongs.
     * Package-private: maintained by {@link InsnList}, not by this class.
     */
    AbstractInsnNode next;

    /**
     * Index of this instruction in the list to which it belongs. The value of
     * this field is correct only when {@link InsnList#cache} is not null. A
     * value of -1 indicates that this instruction does not belong to any
     * {@link InsnList}.
     */
    int index;

    /**
     * Constructs a new {@link AbstractInsnNode}.
     *
     * @param opcode
     *            the opcode of the instruction to be constructed.
     */
    protected AbstractInsnNode(final int opcode) {
        this.opcode = opcode;
        // -1 marks a node that is not (yet) contained in any InsnList.
        this.index = -1;
    }

    /**
     * Returns the opcode of this instruction.
     *
     * @return the opcode of this instruction.
     */
    public int getOpcode() {
        return opcode;
    }

    /**
     * Returns the type of this instruction.
     *
     * @return the type of this instruction, i.e. one the constants defined in
     *         this class.
     */
    public abstract int getType();

    /**
     * Returns the previous instruction in the list to which this instruction
     * belongs, if any.
     *
     * @return the previous instruction in the list to which this instruction
     *         belongs, if any. May be <tt>null</tt>.
     */
    public AbstractInsnNode getPrevious() {
        return prev;
    }

    /**
     * Returns the next instruction in the list to which this instruction
     * belongs, if any.
     *
     * @return the next instruction in the list to which this instruction
     *         belongs, if any. May be <tt>null</tt>.
     */
    public AbstractInsnNode getNext() {
        return next;
    }

    /**
     * Makes the given code visitor visit this instruction.
     *
     * @param cv
     *            a code visitor.
     */
    public abstract void accept(final MethodVisitor cv);

    /**
     * Makes the given visitor visit the annotations of this instruction.
     * Visits the visible type annotations first, then the invisible ones;
     * a <tt>null</tt> annotation list is treated as empty.
     *
     * @param mv
     *            a method visitor.
     */
    protected final void acceptAnnotations(final MethodVisitor mv) {
        int n = visibleTypeAnnotations == null ? 0 : visibleTypeAnnotations
                .size();
        for (int i = 0; i < n; ++i) {
            TypeAnnotationNode an = visibleTypeAnnotations.get(i);
            an.accept(mv.visitInsnAnnotation(an.typeRef, an.typePath, an.desc,
                    true));
        }
        n = invisibleTypeAnnotations == null ? 0 : invisibleTypeAnnotations
                .size();
        for (int i = 0; i < n; ++i) {
            TypeAnnotationNode an = invisibleTypeAnnotations.get(i);
            an.accept(mv.visitInsnAnnotation(an.typeRef, an.typePath, an.desc,
                    false));
        }
    }

    /**
     * Returns a copy of this instruction.
     *
     * @param labels
     *            a map from LabelNodes to cloned LabelNodes.
     * @return a copy of this instruction. The returned instruction does not
     *         belong to any {@link InsnList}.
     */
    public abstract AbstractInsnNode clone(
            final Map<LabelNode, LabelNode> labels);

    /**
     * Returns the clone of the given label.
     *
     * @param label
     *            a label.
     * @param map
     *            a map from LabelNodes to cloned LabelNodes.
     * @return the clone of the given label.
     */
    static LabelNode clone(final LabelNode label,
            final Map<LabelNode, LabelNode> map) {
        return map.get(label);
    }

    /**
     * Returns the clones of the given labels.
     *
     * @param labels
     *            a list of labels.
     * @param map
     *            a map from LabelNodes to cloned LabelNodes.
     * @return the clones of the given labels.
     */
    static LabelNode[] clone(final List<LabelNode> labels,
            final Map<LabelNode, LabelNode> map) {
        LabelNode[] clones = new LabelNode[labels.size()];
        for (int i = 0; i < clones.length; ++i) {
            clones[i] = map.get(labels.get(i));
        }
        return clones;
    }

    /**
     * Clones the annotations of the given instruction into this instruction.
     * Each annotation is deep-copied (via its accept method), so later
     * mutations of the source instruction's annotations do not affect this
     * node.
     *
     * @param insn
     *            the source instruction.
     * @return this instruction.
     */
    protected final AbstractInsnNode cloneAnnotations(
            final AbstractInsnNode insn) {
        if (insn.visibleTypeAnnotations != null) {
            this.visibleTypeAnnotations = new ArrayList<TypeAnnotationNode>();
            for (int i = 0; i < insn.visibleTypeAnnotations.size(); ++i) {
                TypeAnnotationNode src = insn.visibleTypeAnnotations.get(i);
                TypeAnnotationNode ann = new TypeAnnotationNode(src.typeRef,
                        src.typePath, src.desc);
                src.accept(ann);
                this.visibleTypeAnnotations.add(ann);
            }
        }
        if (insn.invisibleTypeAnnotations != null) {
            this.invisibleTypeAnnotations = new ArrayList<TypeAnnotationNode>();
            for (int i = 0; i < insn.invisibleTypeAnnotations.size(); ++i) {
                TypeAnnotationNode src = insn.invisibleTypeAnnotations.get(i);
                TypeAnnotationNode ann = new TypeAnnotationNode(src.typeRef,
                        src.typePath, src.desc);
                src.accept(ann);
                this.invisibleTypeAnnotations.add(ann);
            }
        }
        return this;
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.management;

import java.util.concurrent.Callable;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import javax.management.Attribute;
import javax.management.MBeanServer;
import javax.management.ObjectName;

import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.builder.NotifyBuilder;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.junit.Test;

/**
 * Integration test for the JMX management of Camel's Throttler EIP: verifies
 * that throttler attributes (TimePeriodMillis, MaximumRequestsPerPeriod) are
 * visible and changeable via the MBean server, and that rejected-execution
 * handling in async-delayed mode behaves as configured.
 *
 * NOTE(review): several assertions here depend on wall-clock timing
 * (TotalProcessingTime thresholds, NotifyBuilder timeouts), so the test is
 * sensitive to slow or loaded machines — hence the platform skips below.
 */
public class ManagedThrottlerTest extends ManagementTestSupport {

    @Test
    public void testManageThrottler() throws Exception {
        // JMX tests dont work well on AIX CI servers (hangs them)
        if (isPlatform("aix")) {
            return;
        }

        getMockEndpoint("mock:result").expectedMessageCount(10);

        // Send in a first batch of 10 messages and check that the endpoint
        // gets them. We'll check the total time of the second and third
        // batches as it seems that there is some time required to prime
        // things, which can vary significantly... particularly on slower
        // machines.
        for (int i = 0; i < 10; i++) {
            template.sendBody("direct:start", "Message " + i);
        }

        assertMockEndpointsSatisfied();

        // get the stats for the route
        MBeanServer mbeanServer = getMBeanServer();

        // get the object name for the delayer
        ObjectName throttlerName = ObjectName.getInstance("org.apache.camel:context=camel-1,type=processors,name=\"mythrottler\"");

        // use route to get the total time
        ObjectName routeName = ObjectName.getInstance("org.apache.camel:context=camel-1,type=routes,name=\"route1\"");

        // reset the counters
        mbeanServer.invoke(routeName, "reset", null, null);

        // send in 10 messages
        for (int i = 0; i < 10; i++) {
            template.sendBody("direct:start", "Message " + i);
        }

        Long completed = (Long) mbeanServer.getAttribute(routeName, "ExchangesCompleted");
        assertEquals(10, completed.longValue());

        Long timePeriod = (Long) mbeanServer.getAttribute(throttlerName, "TimePeriodMillis");
        assertEquals(250, timePeriod.longValue());

        // 10 msgs/250ms window with throttle(10) means no throttling: fast
        Long total = (Long) mbeanServer.getAttribute(routeName, "TotalProcessingTime");
        assertTrue("Should take at most 1.0 sec: was " + total, total < 1000);

        // change the throttler using JMX
        mbeanServer.setAttribute(throttlerName, new Attribute("MaximumRequestsPerPeriod", (long) 2));

        // reset the counters
        mbeanServer.invoke(routeName, "reset", null, null);

        // send in another 10 messages
        for (int i = 0; i < 10; i++) {
            template.sendBody("direct:start", "Message " + i);
        }

        Long period = (Long) mbeanServer.getAttribute(throttlerName, "MaximumRequestsPerPeriod");
        assertNotNull(period);
        assertEquals(2, period.longValue());

        completed = (Long) mbeanServer.getAttribute(routeName, "ExchangesCompleted");
        assertEquals(10, completed.longValue());
        // with 2 msgs per 250ms, 10 messages must now take over a second
        total = (Long) mbeanServer.getAttribute(routeName, "TotalProcessingTime");
        assertTrue("Should be around 1 sec now: was " + total, total > 1000);
    }

    @Test
    public void testThrottleVisableViaJmx() throws Exception {
        // JMX tests dont work well on AIX CI servers (hangs them)
        if (isPlatform("aix")) {
            return;
        }
        if (isPlatform("windows")) {
            // windows needs more sleep to read updated jmx values so we skip as we dont want further delays in core tests
            return;
        }

        // get the stats for the route
        MBeanServer mbeanServer = getMBeanServer();

        // use route to get the total time
        ObjectName routeName = ObjectName.getInstance("org.apache.camel:context=camel-1,type=routes,name=\"route2\"");

        // reset the counters
        mbeanServer.invoke(routeName, "reset", null, null);

        getMockEndpoint("mock:end").expectedMessageCount(10);

        NotifyBuilder notifier = new NotifyBuilder(context).from("seda:throttleCount").whenReceived(5).create();

        for (int i = 0; i < 10; i++) {
            template.sendBody("seda:throttleCount", "Message " + i);
        }

        assertTrue(notifier.matches(2, TimeUnit.SECONDS));
        assertMockEndpointsSatisfied();

        Long completed = (Long) mbeanServer.getAttribute(routeName, "ExchangesCompleted");
        assertEquals(10, completed.longValue());
    }

    @Test
    public void testThrottleAsyncVisableViaJmx() throws Exception {
        // JMX tests dont work well on AIX CI servers (hangs them)
        if (isPlatform("aix")) {
            return;
        }
        if (isPlatform("windows")) {
            // windows needs more sleep to read updated jmx values so we skip as we dont want further delays in core tests
            return;
        }

        // get the stats for the route
        MBeanServer mbeanServer = getMBeanServer();

        // use route to get the total time
        ObjectName routeName = ObjectName.getInstance("org.apache.camel:context=camel-1,type=routes,name=\"route3\"");

        // reset the counters
        mbeanServer.invoke(routeName, "reset", null, null);

        getMockEndpoint("mock:endAsync").expectedMessageCount(10);

        // we pick '5' because we are right in the middle of the number of messages
        // that have been and reduces any race conditions to minimal...
        NotifyBuilder notifier = new NotifyBuilder(context).from("seda:throttleCountAsync").whenReceived(5).create();

        for (int i = 0; i < 10; i++) {
            template.sendBody("seda:throttleCountAsync", "Message " + i);
        }

        assertTrue(notifier.matches(2, TimeUnit.SECONDS));
        assertMockEndpointsSatisfied();

        Long completed = (Long) mbeanServer.getAttribute(routeName, "ExchangesCompleted");
        assertEquals(10, completed.longValue());
    }

    @Test
    public void testThrottleAsyncExceptionVisableViaJmx() throws Exception {
        // JMX tests dont work well on AIX CI servers (hangs them)
        if (isPlatform("aix")) {
            return;
        }
        if (isPlatform("windows")) {
            // windows needs more sleep to read updated jmx values so we skip as we dont want further delays in core tests
            return;
        }

        // get the stats for the route
        MBeanServer mbeanServer = getMBeanServer();

        // use route to get the total time
        ObjectName routeName = ObjectName.getInstance("org.apache.camel:context=camel-1,type=routes,name=\"route4\"");

        // reset the counters
        mbeanServer.invoke(routeName, "reset", null, null);

        getMockEndpoint("mock:endAsyncException").expectedMessageCount(10);

        NotifyBuilder notifier = new NotifyBuilder(context).from("seda:throttleCountAsyncException").whenReceived(5).create();

        for (int i = 0; i < 10; i++) {
            template.sendBody("seda:throttleCountAsyncException", "Message " + i);
        }

        assertTrue(notifier.matches(2, TimeUnit.SECONDS));
        assertMockEndpointsSatisfied();

        // give a sec for exception handling to finish..
        Thread.sleep(500);

        // since all exchanges ended w/ exception, they are not completed
        Long completed = (Long) mbeanServer.getAttribute(routeName, "ExchangesCompleted");
        assertEquals(0, completed.longValue());
    }

    @Test
    public void testRejectedExecution() throws Exception {
        // when delaying async, we can possibly fill up the execution queue
        // which would throw a RejectedExecutionException.. we need to make
        // sure that the delayedCount/throttledCount doesn't leak

        // JMX tests dont work well on AIX CI servers (hangs them)
        if (isPlatform("aix")) {
            return;
        }

        // get the stats for the route
        MBeanServer mbeanServer = getMBeanServer();

        // use route to get the total time
        ObjectName routeName = ObjectName.getInstance("org.apache.camel:context=camel-1,type=routes,name=\"route2\"");

        // reset the counters
        mbeanServer.invoke(routeName, "reset", null, null);

        MockEndpoint mock = getMockEndpoint("mock:endAsyncReject");
        // only one message (the first one) should get through because the rest should get delayed
        mock.expectedMessageCount(1);
        MockEndpoint exceptionMock = getMockEndpoint("mock:rejectedExceptionEndpoint1");
        exceptionMock.expectedMessageCount(9);

        for (int i = 0; i < 10; i++) {
            template.sendBody("seda:throttleCountRejectExecution", "Message " + i);
        }

        assertMockEndpointsSatisfied();
    }

    @Test
    public void testRejectedExecutionCallerRuns() throws Exception {
        // when delaying async, we can possibly fill up the execution queue
        // which would throw a RejectedExecutionException.. we need to make
        // sure that the delayedCount/throttledCount doesn't leak

        // JMX tests dont work well on AIX CI servers (hangs them)
        if (isPlatform("aix")) {
            return;
        }

        // get the stats for the route
        MBeanServer mbeanServer = getMBeanServer();

        // use route to get the total time
        ObjectName routeName = ObjectName.getInstance("org.apache.camel:context=camel-1,type=routes,name=\"route2\"");

        // reset the counters
        mbeanServer.invoke(routeName, "reset", null, null);

        MockEndpoint mock = getMockEndpoint("mock:endAsyncRejectCallerRuns");
        // with callerRunsWhenRejected(true) the rejected delays run on the
        // caller thread instead of failing, so all 10 messages get through
        mock.expectedMessageCount(10);
        MockEndpoint exceptionMock = getMockEndpoint("mock:rejectedExceptionEndpoint");
        exceptionMock.expectedMessageCount(0);

        for (int i = 0; i < 10; i++) {
            template.sendBody("seda:throttleCountRejectExecutionCallerRuns", "Message " + i);
        }

        assertMockEndpointsSatisfied();
    }

    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        // an executor whose schedule() always rejects, to force the
        // RejectedExecutionException path in the async-delayed throttler
        final ScheduledExecutorService badService = new ScheduledThreadPoolExecutor(1) {
            @Override
            public <V> ScheduledFuture<V> schedule(Callable<V> command, long delay, TimeUnit unit) {
                throw new RejectedExecutionException();
            }
        };

        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                from("direct:start")
                    .to("log:foo")
                    .throttle(10).timePeriodMillis(250).id("mythrottler")
                    .to("mock:result");

                from("seda:throttleCount")
                    .throttle(1).timePeriodMillis(250).id("mythrottler2")
                    .to("mock:end");

                from("seda:throttleCountAsync")
                    .throttle(1).asyncDelayed().timePeriodMillis(250).id("mythrottler3")
                    .to("mock:endAsync");

                from("seda:throttleCountAsyncException")
                    .throttle(1).asyncDelayed().timePeriodMillis(250).id("mythrottler4")
                    .to("mock:endAsyncException")
                    .process(new Processor() {
                        @Override
                        public void process(Exchange exchange) throws Exception {
                            throw new RuntimeException("Fail me");
                        }
                    });

                from("seda:throttleCountRejectExecutionCallerRuns")
                    .onException(RejectedExecutionException.class).to("mock:rejectedExceptionEndpoint1").end()
                    .throttle(1)
                    .timePeriodMillis(250)
                    .asyncDelayed()
                    .executorService(badService)
                    .callerRunsWhenRejected(true)
                    .id("mythrottler5")
                    .to("mock:endAsyncRejectCallerRuns");

                from("seda:throttleCountRejectExecution")
                    .onException(RejectedExecutionException.class).to("mock:rejectedExceptionEndpoint1").end()
                    .throttle(1)
                    .timePeriodMillis(250)
                    .asyncDelayed()
                    .executorService(badService)
                    .callerRunsWhenRejected(false)
                    .id("mythrottler6")
                    .to("mock:endAsyncReject");
            }
        };
    }
}
/* * Licensed to Apereo under one or more contributor license * agreements. See the NOTICE file distributed with this work * for additional information regarding copyright ownership. * Apereo licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a * copy of the License at the following location: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.jasig.cas.web.flow; import org.jasig.cas.AbstractCentralAuthenticationServiceTest; import org.jasig.cas.TestUtils; import org.jasig.cas.authentication.Credential; import org.jasig.cas.ticket.TicketGrantingTicket; import org.jasig.cas.web.support.WebUtils; import org.junit.Before; import org.junit.Test; import org.springframework.binding.message.MessageContext; import org.springframework.mock.web.MockHttpServletRequest; import org.springframework.mock.web.MockHttpServletResponse; import org.springframework.mock.web.MockServletContext; import org.springframework.validation.BindException; import org.springframework.web.util.CookieGenerator; import org.springframework.webflow.context.servlet.ServletExternalContext; import org.springframework.webflow.execution.RequestContext; import org.springframework.webflow.test.MockRequestContext; import javax.validation.constraints.NotNull; import static org.junit.Assert.*; import static org.mockito.Mockito.*; /** * @author Scott Battaglia * @since 3.0.0.4 */ public class AuthenticationViaFormActionTests extends AbstractCentralAuthenticationServiceTest { private AuthenticationViaFormAction action; private CookieGenerator warnCookieGenerator; @Before public 
void onSetUp() throws Exception { this.action = new AuthenticationViaFormAction(); this.warnCookieGenerator = new CookieGenerator(); this.warnCookieGenerator.setCookieName("WARN"); this.warnCookieGenerator.setCookieName("TGT"); this.warnCookieGenerator.setCookieDomain("/"); this.warnCookieGenerator.setCookiePath("/"); this.action.setCentralAuthenticationService(getCentralAuthenticationService()); this.action.setWarnCookieGenerator(this.warnCookieGenerator); } @Test public void verifySuccessfulAuthenticationWithNoService() throws Exception { final MockHttpServletRequest request = new MockHttpServletRequest(); final MockRequestContext context = new MockRequestContext(); WebUtils.putLoginTicket(context, "LOGIN"); request.addParameter("lt", "LOGIN"); request.addParameter("username", "test"); request.addParameter("password", "test"); context.setExternalContext(new ServletExternalContext( new MockServletContext(), request, new MockHttpServletResponse())); final Credential c = TestUtils.getCredentialsWithSameUsernameAndPassword(); putCredentialInRequestScope(context, c); final MessageContext messageContext = mock(MessageContext.class); assertEquals("success", this.action.submit(context, c, messageContext).getId()); } @Test public void verifySuccessfulAuthenticationWithNoServiceAndWarn() throws Exception { final MockHttpServletRequest request = new MockHttpServletRequest(); final MockHttpServletResponse response = new MockHttpServletResponse(); final MockRequestContext context = new MockRequestContext(); WebUtils.putLoginTicket(context, "LOGIN"); request.addParameter("lt", "LOGIN"); request.addParameter("username", "test"); request.addParameter("password", "test"); request.addParameter("warn", "true"); context.setExternalContext(new ServletExternalContext( new MockServletContext(), request, response)); final Credential c = TestUtils.getCredentialsWithSameUsernameAndPassword(); putCredentialInRequestScope(context, c); final MessageContext messageContext = 
mock(MessageContext.class); assertEquals("success", this.action.submit(context, c, messageContext).getId()); assertNotNull(WebUtils.getTicketGrantingTicketId(context)); assertNotNull(response.getCookie(this.warnCookieGenerator.getCookieName())); } @Test public void verifySuccessfulAuthenticationWithServiceAndWarn() throws Exception { final MockHttpServletRequest request = new MockHttpServletRequest(); final MockHttpServletResponse response = new MockHttpServletResponse(); final MockRequestContext context = new MockRequestContext(); WebUtils.putLoginTicket(context, "LOGIN"); request.addParameter("lt", "LOGIN"); request.addParameter("username", "test"); request.addParameter("password", "test"); request.addParameter("warn", "true"); request.addParameter("service", "test"); context.setExternalContext(new ServletExternalContext( new MockServletContext(), request, response)); final Credential c = TestUtils.getCredentialsWithSameUsernameAndPassword(); putCredentialInRequestScope(context, c); final MessageContext messageContext = mock(MessageContext.class); assertEquals("success", this.action.submit(context, c, messageContext).getId()); assertNotNull(response.getCookie(this.warnCookieGenerator.getCookieName())); } @Test public void verifyFailedAuthenticationWithNoService() throws Exception { final MockHttpServletRequest request = new MockHttpServletRequest(); final MockRequestContext context = new MockRequestContext(); request.addParameter("username", "test"); request.addParameter("password", "test2"); context.setExternalContext(new ServletExternalContext( new MockServletContext(), request, new MockHttpServletResponse())); final Credential c = TestUtils.getCredentialsWithSameUsernameAndPassword(); putCredentialInRequestScope(context, c); context.getRequestScope().put( "org.springframework.validation.BindException.credentials", new BindException(c, "credentials")); final MessageContext messageContext = mock(MessageContext.class); assertEquals("error", 
this.action.submit(context, c, messageContext).getId()); } @Test public void verifyRenewWithServiceAndSameCredentials() throws Exception { final Credential c = TestUtils.getCredentialsWithSameUsernameAndPassword(); final TicketGrantingTicket ticketGrantingTicket = getCentralAuthenticationService().createTicketGrantingTicket(c); final MockHttpServletRequest request = new MockHttpServletRequest(); final MockRequestContext context = new MockRequestContext(); WebUtils.putTicketGrantingTicketInScopes(context, ticketGrantingTicket); WebUtils.putLoginTicket(context, "LOGIN"); request.addParameter("lt", "LOGIN"); request.addParameter("renew", "true"); request.addParameter("service", "test"); request.addParameter("username", "test"); request.addParameter("password", "test"); context.setExternalContext(new ServletExternalContext( new MockServletContext(), request, new MockHttpServletResponse())); context.getFlowScope().put("service", TestUtils.getService("test")); final MessageContext messageContext = mock(MessageContext.class); assertEquals("warn", this.action.submit(context, c, messageContext).getId()); } @Test public void verifyRenewWithServiceAndDifferentCredentials() throws Exception { final Credential c = TestUtils.getCredentialsWithSameUsernameAndPassword(); final TicketGrantingTicket ticketGrantingTicket = getCentralAuthenticationService().createTicketGrantingTicket(c); final MockHttpServletRequest request = new MockHttpServletRequest(); final MockRequestContext context = new MockRequestContext(); WebUtils.putLoginTicket(context, "LOGIN"); request.addParameter("lt", "LOGIN"); WebUtils.putTicketGrantingTicketInScopes(context, ticketGrantingTicket); request.addParameter("renew", "true"); request.addParameter("service", "test"); request.addParameter("username", "test2"); request.addParameter("password", "test2"); context.setExternalContext(new ServletExternalContext( new MockServletContext(), request, new MockHttpServletResponse())); final MessageContext messageContext 
= mock(MessageContext.class); assertEquals("success", this.action.submit(context, c, messageContext).getId()); } @Test public void verifyRenewWithServiceAndBadCredentials() throws Exception { final Credential c = TestUtils.getCredentialsWithSameUsernameAndPassword(); final TicketGrantingTicket ticketGrantingTicket = getCentralAuthenticationService().createTicketGrantingTicket(c); final MockHttpServletRequest request = new MockHttpServletRequest(); final MockRequestContext context = new MockRequestContext(); WebUtils.putTicketGrantingTicketInScopes(context, ticketGrantingTicket); request.addParameter("renew", "true"); request.addParameter("service", "test"); final Credential c2 = TestUtils.getCredentialsWithDifferentUsernameAndPassword(); context.setExternalContext(new ServletExternalContext( new MockServletContext(), request, new MockHttpServletResponse())); putCredentialInRequestScope(context, c2); context.getRequestScope().put( "org.springframework.validation.BindException.credentials", new BindException(c2, "credentials")); final MessageContext messageContext = mock(MessageContext.class); assertEquals("error", this.action.submit(context, c2, messageContext).getId()); } /** * Put credentials in request scope. * * @param context the context * @param c the credential */ private static void putCredentialInRequestScope( final RequestContext context, @NotNull final Credential c) { context.getRequestScope().put("credentials", c); } }
/*
 * Copyright 2014 NAVER Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.navercorp.pinpoint.plugin.httpclient4.interceptor;

import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;

import org.apache.http.HeaderElement;
import org.apache.http.HttpEntity;
import org.apache.http.HttpEntityEnclosingRequest;
import org.apache.http.HttpHost;
import org.apache.http.HttpMessage;
import org.apache.http.HttpRequest;
import org.apache.http.NameValuePair;
import org.apache.http.ParseException;
import org.apache.http.concurrent.BasicFuture;
import org.apache.http.nio.protocol.HttpAsyncRequestProducer;
import org.apache.http.protocol.HTTP;

import com.navercorp.pinpoint.bootstrap.config.DumpType;
import com.navercorp.pinpoint.bootstrap.config.ProfilerConfig;
import com.navercorp.pinpoint.bootstrap.context.AsyncTraceId;
import com.navercorp.pinpoint.bootstrap.context.Header;
import com.navercorp.pinpoint.bootstrap.context.SpanEventRecorder;
import com.navercorp.pinpoint.bootstrap.context.Trace;
import com.navercorp.pinpoint.bootstrap.context.TraceContext;
import com.navercorp.pinpoint.bootstrap.context.TraceId;
import com.navercorp.pinpoint.bootstrap.interceptor.AsyncTraceIdAccessor;
import com.navercorp.pinpoint.bootstrap.interceptor.MethodDescriptor;
import com.navercorp.pinpoint.bootstrap.interceptor.SimpleAroundInterceptor;
import com.navercorp.pinpoint.bootstrap.logging.PLogger;
import com.navercorp.pinpoint.bootstrap.logging.PLoggerFactory;
import com.navercorp.pinpoint.bootstrap.pair.NameIntValuePair;
import com.navercorp.pinpoint.bootstrap.sampler.SamplingFlagUtils;
import com.navercorp.pinpoint.bootstrap.util.InterceptorUtils;
import com.navercorp.pinpoint.bootstrap.util.SimpleSampler;
import com.navercorp.pinpoint.bootstrap.util.SimpleSamplerFactory;
import com.navercorp.pinpoint.bootstrap.util.StringUtils;
import com.navercorp.pinpoint.common.trace.AnnotationKey;
import com.navercorp.pinpoint.plugin.httpclient4.HttpClient4Constants;
import com.navercorp.pinpoint.plugin.httpclient4.RequestProducerGetter;
import com.navercorp.pinpoint.plugin.httpclient4.ResultFutureGetter;

/**
 * Around-interceptor for HttpClient4's async {@code DefaultClientExchangeHandlerImpl.start()}.
 * In {@link #before} it injects Pinpoint trace-propagation headers into the outgoing
 * {@link HttpRequest} and attaches an async trace id to the result future; in {@link #after}
 * it records the URL, destination, and optional cookie/entity dumps on the span event.
 *
 * @author jaehong.kim
 *
 */
public class DefaultClientExchangeHandlerImplStartMethodInterceptor implements SimpleAroundInterceptor, HttpClient4Constants {

    private final PLogger logger = PLoggerFactory.getLogger(this.getClass());
    private final boolean isDebug = logger.isDebugEnabled();

    private TraceContext traceContext;
    private MethodDescriptor methodDescriptor;

    // Cookie dump options (populated from ProfilerConfig in setTraceContext).
    protected boolean cookie;
    protected DumpType cookieDumpType;
    protected SimpleSampler cookieSampler;

    // Entity dump options (populated from ProfilerConfig in setTraceContext).
    protected boolean entity;
    protected DumpType entityDumpType;
    protected SimpleSampler entitySampler;

    // NOTE(review): appears unused in this class; possibly read by a subclass — confirm.
    protected boolean statusCode;

    public DefaultClientExchangeHandlerImplStartMethodInterceptor(TraceContext traceContext, MethodDescriptor methodDescriptor) {
        setTraceContext(traceContext);
        this.methodDescriptor = methodDescriptor;
    }

    /**
     * Opens a span event and injects trace headers into the outgoing request.
     * When the trace is not sampled, only the sampling-off header is written.
     */
    @Override
    public void before(Object target, Object[] args) {
        if (isDebug) {
            logger.beforeInterceptor(target, "", methodDescriptor.getMethodName(), "", args);
        }

        final Trace trace = traceContext.currentRawTraceObject();
        if (trace == null) {
            return;
        }

        final HttpRequest httpRequest = getHttpRequest(target);
        final boolean sampling = trace.canSampled();
        if (!sampling) {
            if (isDebug) {
                logger.debug("set Sampling flag=false");
            }
            // Propagate the "do not sample" decision downstream.
            if (httpRequest != null) {
                httpRequest.setHeader(Header.HTTP_SAMPLED.toString(), SamplingFlagUtils.SAMPLING_RATE_FALSE);
            }
            return;
        }

        SpanEventRecorder recorder = trace.traceBlockBegin();

        // set remote trace
        final TraceId nextId = trace.getTraceId().getNextTraceId();
        recorder.recordNextSpanId(nextId.getSpanId());
        recorder.recordServiceType(HttpClient4Constants.HTTP_CLIENT_4);
        if (httpRequest != null) {
            // Standard Pinpoint propagation headers so the remote side can continue the trace.
            httpRequest.setHeader(Header.HTTP_TRACE_ID.toString(), nextId.getTransactionId());
            httpRequest.setHeader(Header.HTTP_SPAN_ID.toString(), String.valueOf(nextId.getSpanId()));
            httpRequest.setHeader(Header.HTTP_PARENT_SPAN_ID.toString(), String.valueOf(nextId.getParentSpanId()));
            httpRequest.setHeader(Header.HTTP_FLAGS.toString(), String.valueOf(nextId.getFlags()));
            httpRequest.setHeader(Header.HTTP_PARENT_APPLICATION_NAME.toString(), traceContext.getApplicationName());
            httpRequest.setHeader(Header.HTTP_PARENT_APPLICATION_TYPE.toString(), Short.toString(traceContext.getServerTypeCode()));
            final NameIntValuePair<String> host = getHost(target);
            if (host != null) {
                httpRequest.setHeader(Header.HTTP_HOST.toString(), host.getName());
            }
        }

        try {
            if (isAsynchronousInvocation(target, args)) {
                // set asynchronous trace
                final AsyncTraceId asyncTraceId = trace.getAsyncTraceId();
                recorder.recordNextAsyncId(asyncTraceId.getAsyncId());
                // Store the async trace id on the result future (field injected by bytecode instrumentation)
                // so the completion callback can resume this trace.
                ((AsyncTraceIdAccessor)((ResultFutureGetter)target)._$PINPOINT$_getResultFuture())._$PINPOINT$_setAsyncTraceId(asyncTraceId);
                if (isDebug) {
                    logger.debug("Set asyncTraceId metadata {}", asyncTraceId);
                }
            }
        } catch (Throwable t) {
            // Never let instrumentation failures break the application call.
            logger.warn("Failed to BEFORE process. {}", t.getMessage(), t);
        }
    }

    /**
     * Extracts the HttpRequest from the instrumented target via the injected
     * request-producer getter; returns null on any failure (best effort).
     */
    private HttpRequest getHttpRequest(final Object target) {
        try {
            if (!(target instanceof RequestProducerGetter)) {
                return null;
            }
            final HttpAsyncRequestProducer requestProducer = ((RequestProducerGetter)target)._$PINPOINT$_getRequestProducer();
            return requestProducer.generateRequest();
        } catch (Exception e) {
            return null;
        }
    }

    /**
     * Returns true only when the target's result future exists and carries the
     * injected async-trace-id accessor, i.e. async completion can be traced.
     */
    private boolean isAsynchronousInvocation(final Object target, final Object[] args) {
        if (!(target instanceof ResultFutureGetter)) {
            logger.debug("Invalid target object. Need field accessor({}).", FIELD_RESULT_FUTURE);
            return false;
        }

        BasicFuture<?> future = ((ResultFutureGetter)target)._$PINPOINT$_getResultFuture();
        if (future == null) {
            logger.debug("Invalid target object. field is null({}).", FIELD_RESULT_FUTURE);
            return false;
        }

        if (!(future instanceof AsyncTraceIdAccessor)) {
            logger.debug("Invalid resultFuture field object. Need metadata accessor({}).", METADATA_ASYNC_TRACE_ID);
            return false;
        }
        return true;
    }

    /**
     * Closes the span event, recording URL, destination endpoint, optional
     * cookie/entity dumps, the API descriptor and any thrown exception.
     */
    @Override
    public void after(Object target, Object[] args, Object result, Throwable throwable) {
        if (isDebug) {
            logger.afterInterceptor(target, args);
        }

        final Trace trace = traceContext.currentTraceObject();
        if (trace == null) {
            return;
        }

        try {
            SpanEventRecorder recorder = trace.currentSpanEventRecorder();
            final HttpRequest httpRequest = getHttpRequest(target);
            if (httpRequest != null) {
                // Accessing httpRequest here not BEFORE() because it can cause side effect.
                recorder.recordAttribute(AnnotationKey.HTTP_URL, httpRequest.getRequestLine().getUri());
                final NameIntValuePair<String> host = getHost(target);
                if (host != null) {
                    int port = host.getValue();
                    String endpoint = getEndpoint(host.getName(), port);
                    recorder.recordDestinationId(endpoint);
                }
                recordHttpRequest(recorder, httpRequest, throwable);
            }
            recorder.recordApi(methodDescriptor);
            recorder.recordException(throwable);
        } finally {
            // Always close the span event, even if recording failed.
            trace.traceBlockEnd();
        }
    }

    /**
     * Resolves the target host/port pair from the request producer, or null
     * when the target was not instrumented with the getter.
     */
    private NameIntValuePair<String> getHost(final Object target) {
        if (!(target instanceof RequestProducerGetter)) {
            return null;
        }
        final HttpAsyncRequestProducer producer = ((RequestProducerGetter)target)._$PINPOINT$_getRequestProducer();
        final HttpHost httpHost = producer.getTarget();
        return new NameIntValuePair<String>(httpHost.getHostName(), httpHost.getPort());
    }

    /**
     * Builds "host:port"; omits the port when negative (i.e. unset) and falls
     * back to "UnknownHttpClient" when the host is null.
     */
    private String getEndpoint(String host, int port) {
        if (host == null) {
            return "UnknownHttpClient";
        }
        if (port < 0) {
            return host;
        }
        StringBuilder sb = new StringBuilder(host.length() + 8);
        sb.append(host);
        sb.append(':');
        sb.append(port);
        return sb.toString();
    }

    /**
     * Records cookie/entity dumps according to the configured dump types:
     * ALWAYS dumps unconditionally, EXCEPTION only when the call threw.
     */
    private void recordHttpRequest(SpanEventRecorder recorder, HttpRequest httpRequest, Throwable throwable) {
        final boolean isException = InterceptorUtils.isThrowable(throwable);
        if (cookie) {
            if (DumpType.ALWAYS == cookieDumpType) {
                recordCookie(httpRequest, recorder);
            } else if (DumpType.EXCEPTION == cookieDumpType && isException) {
                recordCookie(httpRequest, recorder);
            }
        }
        if (entity) {
            if (DumpType.ALWAYS == entityDumpType) {
                recordEntity(httpRequest, recorder);
            } else if (DumpType.EXCEPTION == entityDumpType && isException) {
                recordEntity(httpRequest, recorder);
            }
        }
    }

    /**
     * Records the first non-empty Cookie header value (truncated to 1024 chars),
     * subject to the cookie sampler.
     */
    protected void recordCookie(HttpMessage httpMessage, SpanEventRecorder recorder) {
        org.apache.http.Header[] cookies = httpMessage.getHeaders("Cookie");
        for (org.apache.http.Header header : cookies) {
            final String value = header.getValue();
            if (value != null && !value.isEmpty()) {
                if (cookieSampler.isSampling()) {
                    recorder.recordAttribute(AnnotationKey.HTTP_COOKIE, StringUtils.drop(value, 1024));
                }

                // Can a cookie have 2 or more values?
                // PMD complains if we use break here
                return;
            }
        }
    }

    /**
     * Records (a truncated prefix of) the request entity for entity-enclosing
     * requests, only when the entity is repeatable so reading does not consume it.
     * Read failures are logged at debug and swallowed (best-effort dump).
     */
    protected void recordEntity(HttpMessage httpMessage, SpanEventRecorder recorder) {
        if (httpMessage instanceof HttpEntityEnclosingRequest) {
            final HttpEntityEnclosingRequest entityRequest = (HttpEntityEnclosingRequest) httpMessage;
            try {
                final HttpEntity entity = entityRequest.getEntity();
                if (entity != null && entity.isRepeatable() && entity.getContentLength() > 0) {
                    if (entitySampler.isSampling()) {
                        // "UTF8" is a valid java.io charset alias for UTF-8.
                        final String entityString = entityUtilsToString(entity, "UTF8", 1024);
                        recorder.recordAttribute(AnnotationKey.HTTP_PARAM_ENTITY, StringUtils.drop(entityString, 1024));
                    }
                }
            } catch (IOException e) {
                logger.debug("HttpEntityEnclosingRequest entity record fail. Caused:{}", e.getMessage(), e);
            }
        }
    }

    /**
     * copy: EntityUtils
     * Get the entity content as a String, using the provided default character set
     * if none is found in the entity. If defaultCharset is null, the default
     * "ISO-8859-1" is used. Reading stops once maxLength characters have been
     * buffered, so at most roughly maxLength (+ one read chunk) characters are returned.
     *
     * @param entity
     *            must not be null
     * @param defaultCharset
     *            character set to be applied if none found in the entity
     * @param maxLength
     *            soft cap on the number of characters to buffer
     * @return the entity content as a String. May be null if {@link HttpEntity#getContent()} is null.
     * @throws ParseException
     *             if header elements cannot be parsed
     * @throws IllegalArgumentException
     *             if entity is null or if content length &gt; Integer.MAX_VALUE
     * @throws IOException
     *             if an error occurs reading the input stream
     */
    @SuppressWarnings("deprecation")
    public static String entityUtilsToString(final HttpEntity entity, final String defaultCharset, int maxLength) throws IOException, ParseException {
        if (entity == null) {
            throw new IllegalArgumentException("HTTP entity may not be null");
        }
        final InputStream instream = entity.getContent();
        if (instream == null) {
            return null;
        }
        try {
            if (entity.getContentLength() > Integer.MAX_VALUE) {
                return "HTTP entity too large to be buffered in memory length:" + entity.getContentLength();
            }
            String charset = getContentCharSet(entity);
            if (charset == null) {
                charset = defaultCharset;
            }
            if (charset == null) {
                charset = HTTP.DEFAULT_CONTENT_CHARSET;
            }
            Reader reader = new InputStreamReader(instream, charset);
            final StringBuilder buffer = new StringBuilder(maxLength * 2);
            char[] tmp = new char[1024];
            int l;
            while ((l = reader.read(tmp)) != -1) {
                buffer.append(tmp, 0, l);
                // Stop early once the cap is reached; the last chunk may overshoot slightly.
                if (buffer.length() >= maxLength) {
                    break;
                }
            }
            return buffer.toString();
        } finally {
            // Closing the underlying stream is sufficient; the Reader wraps it directly.
            instream.close();
        }
    }

    /**
     * copy: EntityUtils
     * Obtains character set of the entity, if known.
     *
     * @param entity
     *            must not be null
     * @return the character set, or null if not found
     * @throws ParseException
     *             if header elements cannot be parsed
     * @throws IllegalArgumentException
     *             if entity is null
     */
    public static String getContentCharSet(final HttpEntity entity) throws ParseException {
        if (entity == null) {
            throw new IllegalArgumentException("HTTP entity may not be null");
        }
        String charset = null;
        if (entity.getContentType() != null) {
            // charset comes from the first Content-Type header element, e.g. "text/html; charset=utf-8".
            HeaderElement values[] = entity.getContentType().getElements();
            if (values.length > 0) {
                NameValuePair param = values[0].getParameterByName("charset");
                if (param != null) {
                    charset = param.getValue();
                }
            }
        }
        return charset;
    }

    /**
     * Stores the trace context and caches all cookie/entity dump settings
     * from the profiler configuration. Called from the constructor.
     */
    public void setTraceContext(TraceContext traceContext) {
        this.traceContext = traceContext;
        final ProfilerConfig profilerConfig = traceContext.getProfilerConfig();

        this.cookie = profilerConfig.isApacheHttpClient4ProfileCookie();
        this.cookieDumpType = profilerConfig.getApacheHttpClient4ProfileCookieDumpType();
        if (cookie) {
            this.cookieSampler = SimpleSamplerFactory.createSampler(cookie, profilerConfig.getApacheHttpClient4ProfileCookieSamplingRate());
        }

        this.entity = profilerConfig.isApacheHttpClient4ProfileEntity();
        this.entityDumpType = profilerConfig.getApacheHttpClient4ProfileEntityDumpType();
        if (entity) {
            this.entitySampler = SimpleSamplerFactory.createSampler(entity, profilerConfig.getApacheHttpClient4ProfileEntitySamplingRate());
        }
    }
}
/*
 * Copyright 2012 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.eureka;

import javax.servlet.ServletContext;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
import java.util.Date;

import com.netflix.appinfo.ApplicationInfoManager;
import com.netflix.appinfo.CloudInstanceConfig;
import com.netflix.appinfo.DataCenterInfo;
import com.netflix.appinfo.EurekaInstanceConfig;
import com.netflix.appinfo.InstanceInfo;
import com.netflix.appinfo.MyDataCenterInstanceConfig;
import com.netflix.appinfo.providers.EurekaConfigBasedInstanceInfoProvider;
import com.netflix.config.ConfigurationManager;
import com.netflix.config.DeploymentContext;
import com.netflix.discovery.DefaultEurekaClientConfig;
import com.netflix.discovery.DiscoveryClient;
import com.netflix.discovery.EurekaClient;
import com.netflix.discovery.EurekaClientConfig;
import com.netflix.discovery.converters.JsonXStream;
import com.netflix.discovery.converters.XmlXStream;
import com.netflix.eureka.aws.AwsBinder;
import com.netflix.eureka.aws.AwsBinderDelegate;
import com.netflix.eureka.cluster.PeerEurekaNodes;
import com.netflix.eureka.registry.AwsInstanceRegistry;
import com.netflix.eureka.registry.PeerAwareInstanceRegistry;
import com.netflix.eureka.registry.PeerAwareInstanceRegistryImpl;
import com.netflix.eureka.resources.DefaultServerCodecs;
import com.netflix.eureka.resources.ServerCodecs;
import com.netflix.eureka.util.EurekaMonitors;
import com.thoughtworks.xstream.XStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * The class that kick starts the eureka server.
 *
 * <p>
 * The eureka server is configured by using the configuration
 * {@link EurekaServerConfig} specified by <em>eureka.server.props</em> in the
 * classpath. The eureka client component is also initialized by using the
 * configuration {@link EurekaInstanceConfig} specified by
 * <em>eureka.client.props</em>. If the server runs in the AWS cloud, the eureka
 * server binds it to the elastic ip as specified.
 * </p>
 *
 * @author Karthik Ranganathan, Greg Kim, David Liu
 *
 */
public class EurekaBootStrap implements ServletContextListener {
    private static final Logger logger = LoggerFactory.getLogger(EurekaBootStrap.class);

    // Fallback value for the archaius deployment environment when eureka.environment is unset.
    private static final String TEST = "test";

    private static final String ARCHAIUS_DEPLOYMENT_ENVIRONMENT = "archaius.deployment.environment";

    private static final String EUREKA_ENVIRONMENT = "eureka.environment";

    // Datacenter value that marks a cloud deployment (see isCloud()).
    private static final String CLOUD = "cloud";
    private static final String DEFAULT = "default";

    private static final String ARCHAIUS_DEPLOYMENT_DATACENTER = "archaius.deployment.datacenter";

    private static final String EUREKA_DATACENTER = "eureka.datacenter";

    // volatile: written in contextInitialized, read during contextDestroyed (possibly another thread).
    protected volatile EurekaServerContext serverContext;
    protected volatile AwsBinder awsBinder;
    
    private EurekaClient eurekaClient;

    /**
     * Construct a default instance of Eureka boostrap
     */
    public EurekaBootStrap() {
        this(null);
    }
    
    /**
     * Construct an instance of eureka bootstrap with the supplied eureka client
     * 
     * @param eurekaClient the eureka client to bootstrap
     */
    public EurekaBootStrap(EurekaClient eurekaClient) {
        this.eurekaClient = eurekaClient;
    }

    /**
     * Initializes Eureka, including syncing up with other Eureka peers and publishing the registry.
     *
     * @see
     * javax.servlet.ServletContextListener#contextInitialized(javax.servlet.ServletContextEvent)
     */
    @Override
    public void contextInitialized(ServletContextEvent event) {
        try {
            // Order matters: environment properties must be in place before the server context reads config.
            initEurekaEnvironment();
            initEurekaServerContext();

            // Expose the server context to the rest of the web application.
            ServletContext sc = event.getServletContext();
            sc.setAttribute(EurekaServerContext.class.getName(), serverContext);
        } catch (Throwable e) {
            // Fail hard: an un-bootstrapped eureka server must not appear healthy.
            logger.error("Cannot bootstrap eureka server :", e);
            throw new RuntimeException("Cannot bootstrap eureka server :", e);
        }
    }

    /**
     * Users can override to initialize the environment themselves.
     */
    protected void initEurekaEnvironment() throws Exception {
        logger.info("Setting the eureka configuration..");

        // Map eureka.datacenter -> archaius.deployment.datacenter, defaulting to "default".
        String dataCenter = ConfigurationManager.getConfigInstance().getString(EUREKA_DATACENTER);
        if (dataCenter == null) {
            logger.info("Eureka data center value eureka.datacenter is not set, defaulting to default");
            ConfigurationManager.getConfigInstance().setProperty(ARCHAIUS_DEPLOYMENT_DATACENTER, DEFAULT);
        } else {
            ConfigurationManager.getConfigInstance().setProperty(ARCHAIUS_DEPLOYMENT_DATACENTER, dataCenter);
        }
        // Map eureka.environment -> archaius.deployment.environment, defaulting to "test".
        String environment = ConfigurationManager.getConfigInstance().getString(EUREKA_ENVIRONMENT);
        if (environment == null) {
            ConfigurationManager.getConfigInstance().setProperty(ARCHAIUS_DEPLOYMENT_ENVIRONMENT, TEST);
            logger.info("Eureka environment value eureka.environment is not set, defaulting to test");
        }
    }

    /**
     * init hook for server context. Override for custom logic.
     */
    protected void initEurekaServerContext() throws Exception {
        EurekaServerConfig eurekaServerConfig = new DefaultEurekaServerConfig();

        // For backward compatibility
        JsonXStream.getInstance().registerConverter(new V1AwareInstanceInfoConverter(), XStream.PRIORITY_VERY_HIGH);
        XmlXStream.getInstance().registerConverter(new V1AwareInstanceInfoConverter(), XStream.PRIORITY_VERY_HIGH);

        logger.info("Initializing the eureka client...");
        logger.info(eurekaServerConfig.getJsonCodecName());
        ServerCodecs serverCodecs = new DefaultServerCodecs(eurekaServerConfig);

        ApplicationInfoManager applicationInfoManager = null;

        if (eurekaClient == null) {
            // No client supplied: build one from the local instance config
            // (cloud-aware when the deployment datacenter is "cloud").
            EurekaInstanceConfig instanceConfig = isCloud(ConfigurationManager.getDeploymentContext())
                    ? new CloudInstanceConfig()
                    : new MyDataCenterInstanceConfig();
            
            applicationInfoManager = new ApplicationInfoManager(
                    instanceConfig, new EurekaConfigBasedInstanceInfoProvider(instanceConfig).get());
            
            EurekaClientConfig eurekaClientConfig = new DefaultEurekaClientConfig();
            eurekaClient = new DiscoveryClient(applicationInfoManager, eurekaClientConfig);
        } else {
            applicationInfoManager = eurekaClient.getApplicationInfoManager();
        }

        PeerAwareInstanceRegistry registry;
        if (isAws(applicationInfoManager.getInfo())) {
            // AWS: use the AWS-specific registry and bind EIPs/ASG info via the AwsBinder.
            registry = new AwsInstanceRegistry(
                    eurekaServerConfig,
                    eurekaClient.getEurekaClientConfig(),
                    serverCodecs,
                    eurekaClient
            );
            awsBinder = new AwsBinderDelegate(eurekaServerConfig, eurekaClient.getEurekaClientConfig(), registry, applicationInfoManager);
            awsBinder.start();
        } else {
            registry = new PeerAwareInstanceRegistryImpl(
                    eurekaServerConfig,
                    eurekaClient.getEurekaClientConfig(),
                    serverCodecs,
                    eurekaClient
            );
        }

        PeerEurekaNodes peerEurekaNodes = getPeerEurekaNodes(
                registry,
                eurekaServerConfig,
                eurekaClient.getEurekaClientConfig(),
                serverCodecs,
                applicationInfoManager
        );

        serverContext = new DefaultEurekaServerContext(
                eurekaServerConfig,
                serverCodecs,
                registry,
                peerEurekaNodes,
                applicationInfoManager
        );

        EurekaServerContextHolder.initialize(serverContext);

        serverContext.initialize();
        logger.info("Initialized server context");

        // Copy registry from neighboring eureka node
        int registryCount = registry.syncUp();
        registry.openForTraffic(applicationInfoManager, registryCount);

        // Register all monitoring statistics.
        EurekaMonitors.registerAllStats();
    }

    /**
     * Factory for the peer-node manager; override to customize peer handling.
     */
    protected PeerEurekaNodes getPeerEurekaNodes(PeerAwareInstanceRegistry registry, EurekaServerConfig eurekaServerConfig, EurekaClientConfig eurekaClientConfig, ServerCodecs serverCodecs, ApplicationInfoManager applicationInfoManager) {
        PeerEurekaNodes peerEurekaNodes = new PeerEurekaNodes(
                registry,
                eurekaServerConfig,
                eurekaClientConfig,
                serverCodecs,
                applicationInfoManager
        );
        
        return peerEurekaNodes;
    }

    /**
     * Handles Eureka cleanup, including shutting down all monitors and yielding all EIPs.
     *
     * @see javax.servlet.ServletContextListener#contextDestroyed(javax.servlet.ServletContextEvent)
     */
    @Override
    public void contextDestroyed(ServletContextEvent event) {
        try {
            logger.info("{} Shutting down Eureka Server..", new Date());
            ServletContext sc = event.getServletContext();
            sc.removeAttribute(EurekaServerContext.class.getName());

            destroyEurekaServerContext();
            destroyEurekaEnvironment();

        } catch (Throwable e) {
            // Best-effort shutdown: log and continue so the container can finish undeploying.
            logger.error("Error shutting down eureka", e);
        }
        logger.info("{} Eureka Service is now shutdown...", new Date());
    }

    /**
     * Server context shutdown hook. Override for custom logic
     */
    protected void destroyEurekaServerContext() throws Exception {
        EurekaMonitors.shutdown();
        if (awsBinder != null) {
            awsBinder.shutdown();
        }
        if (serverContext != null) {
            serverContext.shutdown();
        }
    }

    /**
     * Users can override to clean up the environment themselves.
     */
    protected void destroyEurekaEnvironment() throws Exception {

    }

    /** True when this instance reports the Amazon data center in its InstanceInfo. */
    protected boolean isAws(InstanceInfo selfInstanceInfo) {
        boolean result = DataCenterInfo.Name.Amazon == selfInstanceInfo.getDataCenterInfo().getName();
        logger.info("isAws returned {}", result);
        return result;
    }

    /** True when the archaius deployment datacenter is "cloud". */
    protected boolean isCloud(DeploymentContext deploymentContext) {
        logger.info("Deployment datacenter is {}", deploymentContext.getDeploymentDatacenter());
        return CLOUD.equals(deploymentContext.getDeploymentDatacenter());
    }
}