gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/* * Autopsy Forensic Browser * * Copyright 2011-2018 Basis Technology Corp. * Contact: carrier <at> sleuthkit <dot> org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.autopsy.imagegallery.datamodel; import java.lang.ref.SoftReference; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.logging.Level; import java.util.stream.Collectors; import javafx.beans.property.SimpleBooleanProperty; import javafx.beans.property.SimpleObjectProperty; import javafx.concurrent.Task; import javafx.concurrent.Worker; import javafx.scene.image.Image; import javafx.util.Pair; import javax.annotation.Nonnull; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.text.WordUtils; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.datamodel.DhsImageCategory; import org.sleuthkit.autopsy.imagegallery.FileTypeUtils; import org.sleuthkit.autopsy.imagegallery.utils.TaskUtils; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; 
import org.sleuthkit.datamodel.ContentTag; import org.sleuthkit.datamodel.DataSource; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.Tag; import org.sleuthkit.datamodel.TagName; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskDataException; /** * A file that contains visual information such as an image or video. */ public abstract class DrawableFile { private static final Logger LOGGER = Logger.getLogger(DrawableFile.class.getName()); public static DrawableFile create(AbstractFile abstractFile, boolean analyzed) { return create(abstractFile, analyzed, FileTypeUtils.hasVideoMIMEType(abstractFile)); } /** * Skip the database query if we have already determined the file type. * * @param file The underlying AbstractFile. * @param analyzed Is the file analyzed. * @param isVideo Is the file a video. * * @return */ public static DrawableFile create(AbstractFile file, boolean analyzed, boolean isVideo) { return isVideo ? new VideoFile(file, analyzed) : new ImageFile(file, analyzed); } public static DrawableFile create(Long fileID, boolean analyzed) throws TskCoreException, NoCurrentCaseException { return create(Case.getCurrentCaseThrows().getSleuthkitCase().getAbstractFileById(fileID), analyzed); } private SoftReference<Image> imageRef; private String drawablePath; private final AbstractFile file; private final SimpleBooleanProperty analyzed; private final SimpleObjectProperty<DhsImageCategory> category = new SimpleObjectProperty<>(null); private String make; private String model; protected DrawableFile(AbstractFile file, Boolean analyzed) { this.analyzed = new SimpleBooleanProperty(analyzed); this.file = file; } public abstract boolean isVideo(); public List<Pair<DrawableAttribute<?>, Collection<?>>> getAttributesList() { return DrawableAttribute.getValues().stream() .map(this::makeAttributeValuePair) .collect(Collectors.toList()); } public String getMIMEType() { return file.getMIMEType(); } public long 
getId() { return file.getId(); } public long getCtime() { return file.getCtime(); } public long getCrtime() { return file.getCrtime(); } public long getAtime() { return file.getAtime(); } public long getMtime() { return file.getMtime(); } public String getMd5Hash() { return file.getMd5Hash(); } public String getName() { return file.getName(); } public String getAtimeAsDate() { return file.getAtimeAsDate(); } public synchronized String getUniquePath() throws TskCoreException { return file.getUniquePath(); } public SleuthkitCase getSleuthkitCase() { return file.getSleuthkitCase(); } public DataSource getDataSource() throws TskCoreException, TskDataException { return getSleuthkitCase().getDataSource(file.getDataSourceObjectId()); } private Pair<DrawableAttribute<?>, Collection<?>> makeAttributeValuePair(DrawableAttribute<?> attribute) { return new Pair<>(attribute, attribute.getValue(this)); } public String getModel() { if (model == null) { model = WordUtils.capitalizeFully((String) getValueOfBBAttribute(ARTIFACT_TYPE.TSK_METADATA_EXIF, ATTRIBUTE_TYPE.TSK_DEVICE_MODEL)); } return model; } public String getMake() { if (make == null) { make = WordUtils.capitalizeFully((String) getValueOfBBAttribute(ARTIFACT_TYPE.TSK_METADATA_EXIF, ATTRIBUTE_TYPE.TSK_DEVICE_MAKE)); } return make; } public Set<TagName> getTagNames() { try { return getContentTags().stream() .map(Tag::getName) .collect(Collectors.toSet()); } catch (TskCoreException ex) { Logger.getAnonymousLogger().log(Level.WARNING, "problem looking up " + DrawableAttribute.TAGS.getDisplayName() + " for " + file.getName(), ex); //NON-NLS } catch (IllegalStateException ex) { Logger.getAnonymousLogger().log(Level.WARNING, "there is no case open; failed to look up " + DrawableAttribute.TAGS.getDisplayName() + " for " + getContentPathSafe(), ex); //NON-NLS } return Collections.emptySet(); } protected Object getValueOfBBAttribute(ARTIFACT_TYPE artType, ATTRIBUTE_TYPE attrType) { try { //why doesn't file.getArtifacts() work? 
//TODO: this seams like overkill, use a more targeted query ArrayList<BlackboardArtifact> artifacts = file.getArtifacts(artType);// getAllArtifacts(); for (BlackboardArtifact artf : artifacts) { if (artf.getArtifactTypeID() == artType.getTypeID()) { for (BlackboardAttribute attr : artf.getAttributes()) { if (attr.getAttributeType().getTypeID() == attrType.getTypeID()) { switch (attr.getAttributeType().getValueType()) { case BYTE: return attr.getValueBytes(); case DOUBLE: return attr.getValueDouble(); case INTEGER: return attr.getValueInt(); case LONG: return attr.getValueLong(); case STRING: return attr.getValueString(); case DATETIME: return attr.getValueLong(); } } } } } } catch (TskCoreException ex) { Logger.getAnonymousLogger().log(Level.WARNING, ex, () -> MessageFormat.format("problem looking up {0}/{1}" + " " + " for {2}", new Object[]{artType.getDisplayName(), attrType.getDisplayName(), getContentPathSafe()})); //NON-NLS } return ""; } public void setCategory(DhsImageCategory category) { categoryProperty().set(category); } public DhsImageCategory getCategory() { updateCategory(); return category.get(); } public SimpleObjectProperty<DhsImageCategory> categoryProperty() { return category; } /** * set the category property to the most severe one found */ private void updateCategory() { try { category.set(getContentTags().stream() .map(Tag::getName).filter(CategoryManager::isCategoryTagName) .map(TagName::getDisplayName) .map(DhsImageCategory::fromDisplayName) .sorted().findFirst() //sort by severity and take the first .orElse(DhsImageCategory.ZERO) ); } catch (TskCoreException ex) { LOGGER.log(Level.WARNING, "problem looking up category for " + this.getContentPathSafe(), ex); //NON-NLS } catch (IllegalStateException ex) { // We get here many times if the case is closed during ingest, so don't print out a ton of warnings. 
} } private List<ContentTag> getContentTags() throws TskCoreException { return getSleuthkitCase().getContentTagsByContent(file); } public Task<Image> getReadFullSizeImageTask() { Image image = (imageRef != null) ? imageRef.get() : null; if (image == null || image.isError()) { Task<Image> readImageTask = getReadFullSizeImageTaskHelper(); readImageTask.stateProperty().addListener(stateProperty -> { if (readImageTask.getState() == Worker.State.SUCCEEDED) { try { imageRef = new SoftReference<>(readImageTask.get()); } catch (InterruptedException | ExecutionException exception) { LOGGER.log(Level.WARNING, getMessageTemplate(exception), getContentPathSafe()); } } }); return readImageTask; } else { return TaskUtils.taskFrom(() -> image); } } abstract String getMessageTemplate(Exception exception); abstract Task<Image> getReadFullSizeImageTaskHelper(); public void setAnalyzed(Boolean analyzed) { this.analyzed.set(analyzed); } public boolean isAnalyzed() { return analyzed.get(); } public AbstractFile getAbstractFile() { return this.file; } /** * Get the width of the visual content. * * @return The width. */ abstract Double getWidth(); /** * Get the height of the visual content. * * @return The height. */ abstract Double getHeight(); public String getDrawablePath() { if (drawablePath != null) { return drawablePath; } else { try { drawablePath = StringUtils.removeEnd(getUniquePath(), getName()); return drawablePath; } catch (TskCoreException ex) { LOGGER.log(Level.WARNING, "failed to get drawablePath from " + getContentPathSafe(), ex); //NON-NLS return ""; } } } public Set<String> getHashSetNames() throws TskCoreException { return file.getHashSetNames(); } @Nonnull public Set<String> getHashSetNamesUnchecked() { try { return getHashSetNames(); } catch (TskCoreException ex) { LOGGER.log(Level.WARNING, "Failed to get hash set names", ex); //NON-NLS return Collections.emptySet(); } } /** * Get the unique path for this DrawableFile, or if that fails, just return * the name. 
* * @param content * * @return */ public String getContentPathSafe() { try { return getUniquePath(); } catch (TskCoreException tskCoreException) { String contentName = this.getName(); LOGGER.log(Level.SEVERE, "Failed to get unique path for " + contentName, tskCoreException); //NOI18N NON-NLS return contentName; } } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.wss4j.dom.message; import java.util.ArrayList; import java.util.List; import javax.crypto.SecretKey; import org.apache.wss4j.common.WSEncryptionPart; import org.apache.wss4j.common.derivedKey.ConversationConstants; import org.apache.wss4j.common.ext.WSSecurityException; import org.apache.wss4j.common.token.Reference; import org.apache.wss4j.common.token.SecurityTokenReference; import org.apache.wss4j.common.util.KeyUtils; import org.apache.wss4j.dom.WSConstants; import org.apache.wss4j.dom.util.WSSecurityUtil; import org.apache.xml.security.encryption.Serializer; import org.apache.xml.security.keys.KeyInfo; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; /** * Encrypts and signs parts of a message with derived keys derived from a * symmetric key. 
This symmetric key will be included as an EncryptedKey */ public class WSSecDKEncrypt extends WSSecDerivedKeyBase { private String symEncAlgo = WSConstants.AES_128; private int derivedKeyLength = -1; private List<Element> attachmentEncryptedDataElements; private Serializer encryptionSerializer; public WSSecDKEncrypt(WSSecHeader securityHeader) { super(securityHeader); } public WSSecDKEncrypt(Document doc) { super(doc); } @Override public void prepare(byte[] ephemeralKey) throws WSSecurityException { super.prepare(ephemeralKey); attachmentEncryptedDataElements = new ArrayList<>(); } public Document build(byte[] ephemeralKey) throws WSSecurityException { // // Setup the encrypted key // prepare(ephemeralKey); // // prepend elements in the right order to the security header // prependDKElementToHeader(); Element externRefList = encrypt(); addAttachmentEncryptedDataElements(); addExternalRefElement(externRefList); return getDocument(); } public void addAttachmentEncryptedDataElements() { if (attachmentEncryptedDataElements != null) { for (int i = 0; i < attachmentEncryptedDataElements.size(); i++) { Element encryptedData = attachmentEncryptedDataElements.get(i); Element securityHeaderElement = getSecurityHeader().getSecurityHeaderElement(); WSSecurityUtil.prependChildElement(securityHeaderElement, encryptedData); } } } public Element encrypt() throws WSSecurityException { if (getParts().isEmpty()) { getParts().add(WSSecurityUtil.getDefaultEncryptionPart(getDocument())); } return encryptForExternalRef(null, getParts()); } /** * Encrypt one or more parts or elements of the message (external). * * This method takes a vector of <code>WSEncryptionPart</code> object that * contain information about the elements to encrypt. The method call the * encryption method, takes the reference information generated during * encryption and add this to the <code>xenc:Reference</code> element. 
* This method can be called after <code>prepare()</code> and can be * called multiple times to encrypt a number of parts or elements. * * The method generates a <code>xenc:Reference</code> element that <i>must</i> * be added to the SecurityHeader. See <code>addExternalRefElement()</code>. * * If the <code>dataRef</code> parameter is <code>null</code> the method * creates and initializes a new Reference element. * * @param dataRef A <code>xenc:Reference</code> element or <code>null</code> * @param references A list containing WSEncryptionPart objects * @return Returns the updated <code>xenc:Reference</code> element * @throws WSSecurityException */ public Element encryptForExternalRef(Element dataRef, List<WSEncryptionPart> references) throws WSSecurityException { KeyInfo keyInfo = createKeyInfo(); SecretKey key = getDerivedKey(symEncAlgo); Encryptor encryptor = new Encryptor(); encryptor.setDoc(getDocument()); encryptor.setSecurityHeader(getSecurityHeader()); encryptor.setIdAllocator(getIdAllocator()); encryptor.setCallbackLookup(callbackLookup); encryptor.setAttachmentCallbackHandler(attachmentCallbackHandler); encryptor.setStoreBytesInAttachment(storeBytesInAttachment); encryptor.setEncryptionSerializer(encryptionSerializer); encryptor.setWsDocInfo(getWsDocInfo()); List<String> encDataRefs = encryptor.doEncryption(keyInfo, key, symEncAlgo, references, attachmentEncryptedDataElements); if (dataRef == null) { dataRef = getDocument().createElementNS( WSConstants.ENC_NS, WSConstants.ENC_PREFIX + ":ReferenceList" ); } return WSSecEncrypt.createDataRefList(getDocument(), dataRef, encDataRefs); } /** * Create a KeyInfo object * @throws ConversationException */ private KeyInfo createKeyInfo() throws WSSecurityException { KeyInfo keyInfo = new KeyInfo(getDocument()); SecurityTokenReference secToken = new SecurityTokenReference(getDocument()); secToken.addWSSENamespace(); if (addWSUNamespace) { secToken.addWSUNamespace(); } Reference ref = new Reference(getDocument()); 
ref.setURI("#" + getId()); String ns = ConversationConstants.getWSCNs(getWscVersion()) + ConversationConstants.TOKEN_TYPE_DERIVED_KEY_TOKEN; ref.setValueType(ns); secToken.setReference(ref); keyInfo.addUnknownElement(secToken.getElement()); Element keyInfoElement = keyInfo.getElement(); keyInfoElement.setAttributeNS( WSConstants.XMLNS_NS, "xmlns:" + WSConstants.SIG_PREFIX, WSConstants.SIG_NS ); return keyInfo; } /** * Adds (prepends) the external Reference element to the Security header. * * The reference element <i>must</i> be created by the * <code>encryptForExternalRef() </code> method. The method adds the * reference element in the SecurityHeader. * * @param referenceList The external <code>enc:Reference</code> element */ public void addExternalRefElement(Element referenceList) { if (referenceList != null) { Node node = getdktElement().getNextSibling(); Element securityHeaderElement = getSecurityHeader().getSecurityHeaderElement(); if (node != null && Node.ELEMENT_NODE == node.getNodeType()) { securityHeaderElement.insertBefore(referenceList, node); } else { // If (at this moment) DerivedKeyToken is the LAST element of // the security header securityHeaderElement.appendChild(referenceList); } } } /** * Set the symmetric encryption algorithm URI to use * @param algo the symmetric encryption algorithm URI to use */ public void setSymmetricEncAlgorithm(String algo) { symEncAlgo = algo; } protected int getDerivedKeyLength() throws WSSecurityException { return derivedKeyLength > 0 ? derivedKeyLength : KeyUtils.getKeyLength(symEncAlgo); } public void setDerivedKeyLength(int keyLength) { derivedKeyLength = keyLength; } public List<Element> getAttachmentEncryptedDataElements() { return attachmentEncryptedDataElements; } public Serializer getEncryptionSerializer() { return encryptionSerializer; } public void setEncryptionSerializer(Serializer encryptionSerializer) { this.encryptionSerializer = encryptionSerializer; } }
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.openapi.paths; import com.intellij.codeHighlighting.HighlightDisplayLevel; import com.intellij.codeInsight.intention.IntentionAction; import com.intellij.lang.annotation.Annotation; import com.intellij.lang.annotation.AnnotationHolder; import com.intellij.lang.annotation.ExternalAnnotator; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.util.TextRange; import com.intellij.psi.PsiAnchor; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiFile; import com.intellij.psi.PsiReference; import com.intellij.util.io.HttpRequests; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.io.IOException; import java.net.UnknownHostException; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.Map; /** * @author Eugene.Kudelevsky */ public abstract class WebReferencesAnnotatorBase extends ExternalAnnotator<WebReferencesAnnotatorBase.MyInfo[], WebReferencesAnnotatorBase.MyInfo[]> { private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.paths.WebReferencesAnnotatorBase"); private final Map<String, MyFetchCacheEntry> myFetchCache = new HashMap<>(); private final Object myFetchCacheLock = new Object(); private static final long FETCH_CACHE_TIMEOUT = 10000; protected static final WebReference[] EMPTY_ARRAY = new WebReference[0]; @NotNull protected abstract WebReference[] collectWebReferences(@NotNull PsiFile file); @Nullable protected static WebReference lookForWebReference(@NotNull PsiElement element) { return lookForWebReference(Arrays.asList(element.getReferences())); } @SuppressWarnings("unchecked") @Nullable private static WebReference lookForWebReference(Collection<PsiReference> references) { for (PsiReference reference : references) { if (reference instanceof WebReference) { 
return (WebReference)reference; } else if (reference instanceof PsiDynaReference) { final WebReference webReference = lookForWebReference(((PsiDynaReference)reference).getReferences()); if (webReference != null) { return webReference; } } } return null; } @Override public MyInfo[] collectInformation(@NotNull PsiFile file) { final WebReference[] references = collectWebReferences(file); final MyInfo[] infos = new MyInfo[references.length]; for (int i = 0; i < infos.length; i++) { final WebReference reference = references[i]; infos[i] = new MyInfo(PsiAnchor.create(reference.getElement()), reference.getRangeInElement(), reference.getValue()); } return infos; } @Override public MyInfo[] doAnnotate(MyInfo[] infos) { final MyFetchResult[] fetchResults = new MyFetchResult[infos.length]; for (int i = 0; i < fetchResults.length; i++) { fetchResults[i] = checkUrl(infos[i].myUrl); } boolean containsAvailableHosts = false; for (MyFetchResult fetchResult : fetchResults) { if (fetchResult != MyFetchResult.UNKNOWN_HOST) { containsAvailableHosts = true; } } for (int i = 0; i < fetchResults.length; i++) { final MyFetchResult result = fetchResults[i]; // if all hosts are not available, internet connection may be disabled, so it's better to not report warnings for unknown hosts if (result == MyFetchResult.OK || (!containsAvailableHosts && result == MyFetchResult.UNKNOWN_HOST)) { infos[i].myResult = true; } } return infos; } @Override public void apply(@NotNull PsiFile file, MyInfo[] infos, @NotNull AnnotationHolder holder) { if (infos == null || infos.length == 0) { return; } final HighlightDisplayLevel displayLevel = getHighlightDisplayLevel(file); for (MyInfo info : infos) { if (!info.myResult) { final PsiElement element = info.myAnchor.retrieve(); if (element != null) { final int start = element.getTextRange().getStartOffset(); final TextRange range = new TextRange(start + info.myRangeInElement.getStartOffset(), start + info.myRangeInElement.getEndOffset()); final String message = 
getErrorMessage(info.myUrl); final Annotation annotation; if (displayLevel == HighlightDisplayLevel.ERROR) { annotation = holder.createErrorAnnotation(range, message); } else if (displayLevel == HighlightDisplayLevel.WARNING) { annotation = holder.createWarningAnnotation(range, message); } else if (displayLevel == HighlightDisplayLevel.WEAK_WARNING) { annotation = holder.createInfoAnnotation(range, message); } else { annotation = holder.createWarningAnnotation(range, message); } for (IntentionAction action : getQuickFixes()) { annotation.registerFix(action); } } } } } @NotNull protected abstract String getErrorMessage(@NotNull String url); @NotNull protected IntentionAction[] getQuickFixes() { return IntentionAction.EMPTY_ARRAY; } @NotNull protected abstract HighlightDisplayLevel getHighlightDisplayLevel(@NotNull PsiElement context); @NotNull private MyFetchResult checkUrl(String url) { synchronized (myFetchCacheLock) { final MyFetchCacheEntry entry = myFetchCache.get(url); final long currentTime = System.currentTimeMillis(); if (entry != null && currentTime - entry.getTime() < FETCH_CACHE_TIMEOUT) { return entry.getFetchResult(); } final MyFetchResult fetchResult = doCheckUrl(url); myFetchCache.put(url, new MyFetchCacheEntry(currentTime, fetchResult)); return fetchResult; } } private static MyFetchResult doCheckUrl(@NotNull String url) { if (url.startsWith("mailto")) { return MyFetchResult.OK; } try { HttpRequests.request(url).connectTimeout(3000).readTimeout(3000).tryConnect(); } catch (UnknownHostException e) { LOG.info(e); return MyFetchResult.UNKNOWN_HOST; } catch (HttpRequests.HttpStatusException e) { LOG.info(e); return MyFetchResult.NONEXISTENCE; } catch (IOException e) { LOG.info(e); } catch (IllegalArgumentException e) { LOG.debug(e); } return MyFetchResult.OK; } private static class MyFetchCacheEntry { private final long myTime; private final MyFetchResult myFetchResult; private MyFetchCacheEntry(long time, @NotNull MyFetchResult fetchResult) { myTime = 
time; myFetchResult = fetchResult; } public long getTime() { return myTime; } @NotNull public MyFetchResult getFetchResult() { return myFetchResult; } } private enum MyFetchResult { OK, UNKNOWN_HOST, NONEXISTENCE } protected static class MyInfo { final PsiAnchor myAnchor; final String myUrl; final TextRange myRangeInElement; volatile boolean myResult; private MyInfo(PsiAnchor anchor, TextRange rangeInElement, String url) { myAnchor = anchor; myRangeInElement = rangeInElement; myUrl = url; } } }
package com.trifork.hotruby.runtime; import java.io.IOException; import java.math.BigInteger; import com.trifork.hotruby.classes.RubyClassObject; import com.trifork.hotruby.marshal.UnmarshalStream; import com.trifork.hotruby.objects.IRubyArray; import com.trifork.hotruby.objects.IRubyClass; import com.trifork.hotruby.objects.IRubyFixnum; import com.trifork.hotruby.objects.IRubyFloat; import com.trifork.hotruby.objects.IRubyHash; import com.trifork.hotruby.objects.IRubyInteger; import com.trifork.hotruby.objects.IRubyMethod; import com.trifork.hotruby.objects.IRubyObject; import com.trifork.hotruby.objects.IRubyRange; import com.trifork.hotruby.objects.IRubyRegexp; import com.trifork.hotruby.objects.IRubyString; import com.trifork.hotruby.objects.IRubySymbol; import com.trifork.hotruby.objects.RubyArray; import com.trifork.hotruby.objects.RubyBignum; import com.trifork.hotruby.objects.RubyClass; import com.trifork.hotruby.objects.RubyFalseClass; import com.trifork.hotruby.objects.RubyFixnum; import com.trifork.hotruby.objects.RubyFloat; import com.trifork.hotruby.objects.RubyHash; import com.trifork.hotruby.objects.RubyIO; import com.trifork.hotruby.objects.RubyInteger; import com.trifork.hotruby.objects.RubyModule; import com.trifork.hotruby.objects.RubyNilClass; import com.trifork.hotruby.objects.RubyObject; import com.trifork.hotruby.objects.RubyProc; import com.trifork.hotruby.objects.RubyRange; import com.trifork.hotruby.objects.RubyRegexp; import com.trifork.hotruby.objects.RubyString; import com.trifork.hotruby.objects.RubyStruct; import com.trifork.hotruby.objects.RubySymbol; import com.trifork.hotruby.objects.RubyTrueClass; public class LoadedRubyRuntime extends RubyRuntime { public static LoadedRubyRuntime instance; public static RubyFalseClass FALSE = null; public static RubyTrueClass TRUE = null; public static RubyNilClass NIL = null; public static MetaClass META_OBJECT = null; private MetaClass meta_class; public LoadedRubyRuntime() { instance = this; 
// NOTE(review): this chunk begins inside a class whose declaration is not
// visible here; main() below constructs LoadedRubyRuntime, so that is
// presumably the enclosing class. The first tokens close the constructor,
// which has just delegated all bootstrap work to init().
init(); }

/** Smoke-test entry point: bootstraps a runtime instance and exits. */
public static void main(String[] args) { new LoadedRubyRuntime(); }

/**
 * One-time bootstrap of the core Ruby object model: wires up the
 * Object/Module/Class meta-circularity, creates the singleton values
 * (true, false, nil), registers the built-in classes and modules by name,
 * and publishes the singletons as constants on Object.
 *
 * Fields such as meta_object, META_OBJECT, meta_class, TRUE/FALSE/NIL and
 * the_true/the_false/the_nil are declared outside this view — TODO confirm
 * their declarations against the full class.
 */
private void init() {
    // Object's meta class is created first with no parents; it is its own
    // context and roots the constant namespace.
    meta_object = new MetaClass(this, null, null, null);
    meta_object.set_context(meta_object, "Object");
    META_OBJECT = meta_object;

    // Module must exist before Class, since Class is defined in terms of it.
    MetaClass _module = new_system_class("Module");
    super.meta_module = _module;
    meta_class = new_system_class("Class", _module);

    meta_object.set_base_level_class(RubyClassObject.class);
    meta_object.const_set("Object", meta_object.get_base_class());

    MetaModule kernel = new_system_module("Kernel");
    MetaModule enumerable = new_system_module("Enumerable");
    MetaModule objectspace = new_system_module("ObjectSpace");

    // include the kernel module in object
    meta_object.include(kernel);

    // Singleton value classes; each singleton instance is created via the
    // base class and also published as a constant on Object.
    MetaClass _true = new_system_class("TrueClass");
    MetaClass _false = new_system_class("FalseClass");
    MetaClass _nil = new_system_class("NilClass");

    TRUE = (RubyTrueClass) (the_true = _true.get_base_class().newInstance());
    meta_object.const_set("TRUE", TRUE);

    FALSE = (RubyFalseClass) (the_false = _false.get_base_class().newInstance());
    meta_object.const_set("FALSE", FALSE);

    NIL = (RubyNilClass) (the_nil = _nil.get_base_class().newInstance());
    meta_object.const_set("NIL", NIL);

    // Numeric tower: Integer/Float under Numeric, Bignum/Fixnum under Integer.
    MetaClass _numeric = new_system_class("Numeric");
    MetaClass _integer = new_system_class("Integer", _numeric);
    MetaClass _bignum = new_system_class("Bignum", _integer);
    MetaClass _fixnum = new_system_class("Fixnum", _integer);
    MetaClass _float = new_system_class("Float", _numeric);

    MetaClass string_class = new_system_class("String");

    // Remaining built-ins only need to be registered by name.
    new_system_class("Symbol");
    new_system_class("Array");
    new_system_class("Struct");
    new_system_class("Regexp");
    new_system_class("MatchData");
    new_system_class("Proc");
    new_system_class("Hash");
    new_system_class("Time");
    new_system_class("Binding");
    new_system_class("Range");
    new_system_class("File");
    new_system_class("Method");
    new_system_class("IO");

    // String needs extra per-class initialization beyond registration.
    RubyString.init(string_class);
}

/** Returns the base-level class object backing Object. */
@Override
public IRubyClass getObject() {
    return META_OBJECT.get_base_class();
}

/** Returns the meta class of Object created in {@link #init()}. */
@Override
public MetaClass meta_Object() {
    return META_OBJECT;
}

/** Returns the meta class of Class created in {@link #init()}. */
@Override
public MetaClass meta_Class() {
    return meta_class;
}

// this is the slow lookup
// Loads the selector class by name through the receiver's class loader,
// then delegates to the Class-based overload below.
public static RubyMethod resolve_method(RubyObject object, Selector sel, String selector_class_name) {
    Class selectorClass;
    try {
        selectorClass = object.getClass().getClassLoader().loadClass(selector_class_name);
    } catch (ClassNotFoundException e) {
        throw new InternalError("cannot load selector class");
    }
    return resolve_method(object, sel, selectorClass);
}

// this is the slow lookup
// Modules dispatch through their meta module; every other receiver
// dispatches through its meta class.
public static RubyMethod resolve_method(RubyObject object, Selector sel, Class selectorClass) {
    if (object instanceof RubyModule) {
        return ((RubyModule)object).get_meta_module().resolve_method(object, sel, selectorClass);
    }
    /// System.out.println("resolving "+object.get_class().inspect()+"."+sel.getName()+" from "+sel.getCaller().getName());
    return object.get_meta_class().resolve_method(object, sel, selectorClass);
}

/** Wraps a Java string in a runtime RubyString. */
@Override
public RubyString newString(String value) {
    return new RubyString(value);
}

// --- Unmarshalling entry points: each delegates to the matching runtime
// --- object type's unmarshalFrom factory.

@Override
public IRubyObject unmarshalArrayFrom(UnmarshalStream stream, CallContext ctx) throws IOException {
    return RubyArray.unmarshalFrom(stream, ctx);
}

@Override
public IRubyObject unmarshalBignumFrom(UnmarshalStream stream) throws IOException {
    return RubyBignum.unmarshalFrom(stream);
}

@Override
public IRubyObject unmarshalClassFrom(UnmarshalStream stream) throws IOException {
    return RubyClass.unmarshalFrom(stream);
}

@Override
public IRubyObject unmarshalFixnumFrom(UnmarshalStream stream) throws IOException {
    return RubyFixnum.unmarshalFrom(stream);
}

@Override
public IRubyObject unmarshalFloatFrom(UnmarshalStream stream) throws IOException {
    return RubyFloat.unmarshalFrom(stream);
}

@Override
public IRubyObject unmarshalHashFrom(UnmarshalStream stream, CallContext ctx) throws IOException {
    return RubyHash.unmarshalFrom(stream, ctx);
}

@Override
public IRubyObject unmarshalModuleFrom(UnmarshalStream stream) throws IOException {
    return RubyModule.unmarshalFrom(stream);
}

@Override
public IRubyObject unmarshalStringFrom(UnmarshalStream stream) throws IOException {
    return RubyString.unmarshalFrom(stream);
}

@Override
public IRubyObject unmarshalStructFrom(UnmarshalStream stream) throws IOException {
    return RubyStruct.unmarshalFrom(stream);
}

@Override
public IRubyObject unmarshalSymbolFrom(UnmarshalStream stream) throws IOException {
    return RubySymbol.unmarshalFrom(stream);
}

// NOTE(review): unimplemented stub — returns null rather than an array
// containing nil; callers will NPE. Confirm whether this is still unused.
@Override
public IRubyArray arrayWithOneNil() {
    // TODO Auto-generated method stub
    return null;
}

// --- Object factories for the core runtime types. ---

@Override
public IRubyArray newArray() {
    return new RubyArray();
}

@Override
public IRubyMethod newMethodObject(RubyMethod m, IRubyObject receiver) {
    return new com.trifork.hotruby.objects.RubyMethod(m, receiver);
}

@Override
public IRubyRange newRange(IRubyObject start, IRubyObject end, boolean inclusive) {
    // The inclusive flag is passed to Ruby as the TRUE/FALSE singleton.
    return new RubyRange().init(start, end, inclusive ? TRUE : FALSE);
}

@Override
public IRubyArray newArray(IRubyObject[] args) {
    return new RubyArray(args);
}

@Override
public IRubyArray newArray(int size) {
    return new RubyArray().initialize(size);
}

@Override
public IRubyObject newIO(int i, String mode) {
    return new RubyIO().initialize(new RubyFixnum(i), new RubyString(mode));
}

@Override
public IRubyFixnum newFixnum(int intvalue) {
    return new RubyFixnum(intvalue);
}

@Override
public IRubyFloat newFloat(double d) {
    return new RubyFloat(d);
}

@Override
public IRubyHash newHash() {
    return new RubyHash();
}

@Override
public IRubyInteger newInteger(String text, int radix) {
    // Always parses through BigInteger; RubyInteger presumably chooses
    // Fixnum vs Bignum representation — TODO confirm.
    return RubyInteger.newInteger(new BigInteger(text, radix));
}

@Override
public IRubyRegexp newRegexp(IRubyObject string, int flags) {
    // TODO Auto-generated method stub
    return new RubyRegexp(string.asSymbol(), flags);
}

@Override
public IRubyRegexp newRegexp(String string, int flags) {
    return new RubyRegexp(string, flags);
}

@Override
public IRubySymbol newSymbol(String sym1) {
    // Symbols are interned by RubySymbol.get, not constructed directly.
    return RubySymbol.get(sym1);
}

@Override
public RubyProc newProc(RubyBlock block) {
    return new RubyProc(block);
}

// NOTE(review): the three new*Error methods are declared to RETURN a
// RuntimeException but actually THROW it, making the declared return
// unreachable. A caller writing `throw newSyntaxError(...)` behaves the
// same either way, but confirm the intent before relying on the signature.
@Override
public RuntimeException newSyntaxError(String string) {
    throw new RuntimeException(string);
}

@Override
public RuntimeException newLoadError(String string) {
    throw new RuntimeException(string);
}

@Override
public RuntimeException newNameError(String string) {
    throw new RuntimeException(string);
}

/** Convenience overload delegating to {@link #newString(String)}. */
public IRubyObject newString(CharSequence seq) {
    return newString(seq.toString());
}

}
/*
 * Copyright (c) 2010-2015 Evolveum
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.evolveum.midpoint.repo.sql.data.common.any;

import com.evolveum.midpoint.prism.Item;
import com.evolveum.midpoint.prism.PrismContainerValue;
import com.evolveum.midpoint.prism.PrismContext;
import com.evolveum.midpoint.repo.sql.data.common.container.RAssignment;
import com.evolveum.midpoint.repo.sql.data.common.id.RAssignmentExtensionId;
import com.evolveum.midpoint.repo.sql.data.common.type.RAssignmentExtensionType;
import com.evolveum.midpoint.repo.sql.query2.definition.NotQueryable;
import com.evolveum.midpoint.repo.sql.util.DtoTranslationException;
import com.evolveum.midpoint.repo.sql.util.RUtil;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ExtensionType;
import org.apache.commons.lang.Validate;
import org.hibernate.annotations.Cascade;
import org.hibernate.annotations.ForeignKey;

import javax.persistence.*;
import java.io.Serializable;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

/**
 * JPA entity holding the extension values of an {@link RAssignment}, persisted
 * in table {@code m_assignment_extension}. Values are split into one Set per
 * primitive kind (string/long/date/reference/polystring/boolean), with a
 * denormalized count column maintained for each set (see copyFromJAXB).
 *
 * @author lazyman
 */
@Entity
@IdClass(RAssignmentExtensionId.class)
@Table(name = "m_assignment_extension")
public class RAssignmentExtension implements Serializable {

    // Owning assignment; the (ownerOid, ownerId) composite id is derived
    // lazily from it in the getters below.
    private RAssignment owner;
    private String ownerOid;
    private Integer ownerId;

    // Denormalized sizes of the value collections, filled in copyFromJAXB.
    private Short stringsCount;
    private Short longsCount;
    private Short datesCount;
    private Short referencesCount;
    private Short polysCount;
    private Short booleansCount;

    // Typed extension value collections, one per primitive kind.
    private Set<RAExtString> strings;
    private Set<RAExtLong> longs;
    private Set<RAExtDate> dates;
    private Set<RAExtReference> references;
    private Set<RAExtPolyString> polys;
    private Set<RAExtBoolean> booleans;

    // "none" disables FK generation for this relation on the DB level.
    @ForeignKey(name = "none")
    @MapsId("owner")
    @ManyToOne(fetch = FetchType.LAZY)
    @NotQueryable
    public RAssignment getOwner() {
        return owner;
    }

    // Id columns are derived from the owner on first access so the entity can
    // be persisted before the ids are explicitly set.
    @Id
    @Column(name = "owner_owner_oid", length = RUtil.COLUMN_LENGTH_OID)
    public String getOwnerOid() {
        if (ownerOid == null && owner != null) {
            ownerOid = owner.getOwnerOid();
        }
        return ownerOid;
    }

    // NOTE(review): declaring a length on a numeric column looks copy-pasted
    // from the OID column above; it is most likely ignored by the dialect —
    // confirm against the generated schema.
    @Id
    @Column(name = "owner_id", length = RUtil.COLUMN_LENGTH_OID)
    public Integer getOwnerId() {
        if (ownerId == null && owner != null) {
            ownerId = owner.getId();
        }
        return ownerId;
    }

    // All collection getters lazily initialize so callers (including
    // copyFromJAXB below) can always add without null checks.
    @OneToMany(mappedBy = RAExtValue.ANY_CONTAINER, orphanRemoval = true)
    @Cascade({org.hibernate.annotations.CascadeType.ALL})
    public Set<RAExtBoolean> getBooleans() {
        if (booleans == null) {
            booleans = new HashSet<>();
        }
        return booleans;
    }

    @OneToMany(mappedBy = RAExtValue.ANY_CONTAINER, orphanRemoval = true)
    @Cascade({org.hibernate.annotations.CascadeType.ALL})
    public Set<RAExtLong> getLongs() {
        if (longs == null) {
            longs = new HashSet<>();
        }
        return longs;
    }

    @OneToMany(mappedBy = RAExtValue.ANY_CONTAINER, orphanRemoval = true)
    @Cascade({org.hibernate.annotations.CascadeType.ALL})
    public Set<RAExtString> getStrings() {
        if (strings == null) {
            strings = new HashSet<>();
        }
        return strings;
    }

    @OneToMany(mappedBy = RAExtValue.ANY_CONTAINER, orphanRemoval = true)
    @Cascade({org.hibernate.annotations.CascadeType.ALL})
    public Set<RAExtDate> getDates() {
        if (dates == null) {
            dates = new HashSet<>();
        }
        return dates;
    }

    @OneToMany(mappedBy = RAExtValue.ANY_CONTAINER, orphanRemoval = true)
    @Cascade({org.hibernate.annotations.CascadeType.ALL})
    public Set<RAExtReference> getReferences() {
        if (references == null) {
            references = new HashSet<>();
        }
        return references;
    }

    @OneToMany(mappedBy = RAExtValue.ANY_CONTAINER, orphanRemoval = true)
    @Cascade({org.hibernate.annotations.CascadeType.ALL})
    public Set<RAExtPolyString> getPolys() {
        if (polys == null) {
            polys = new HashSet<>();
        }
        return polys;
    }

    public Short getBooleansCount() {
        return booleansCount;
    }

    public Short getStringsCount() {
        return stringsCount;
    }

    public Short getLongsCount() {
        return longsCount;
    }

    public Short getDatesCount() {
        return datesCount;
    }

    public Short getReferencesCount() {
        return referencesCount;
    }

    public Short getPolysCount() {
        return polysCount;
    }

    public void setStringsCount(Short stringsCount) {
        this.stringsCount = stringsCount;
    }

    public void setLongsCount(Short longsCount) {
        this.longsCount = longsCount;
    }

    public void setDatesCount(Short datesCount) {
        this.datesCount = datesCount;
    }

    public void setReferencesCount(Short referencesCount) {
        this.referencesCount = referencesCount;
    }

    public void setPolysCount(Short polysCount) {
        this.polysCount = polysCount;
    }

    public void setPolys(Set<RAExtPolyString> polys) {
        this.polys = polys;
    }

    public void setReferences(Set<RAExtReference> references) {
        this.references = references;
    }

    public void setDates(Set<RAExtDate> dates) {
        this.dates = dates;
    }

    public void setLongs(Set<RAExtLong> longs) {
        this.longs = longs;
    }

    public void setOwnerOid(String ownerOid) {
        this.ownerOid = ownerOid;
    }

    public void setStrings(Set<RAExtString> strings) {
        this.strings = strings;
    }

    public void setOwner(RAssignment owner) {
        this.owner = owner;
    }

    public void setOwnerId(Integer ownerId) {
        this.ownerId = ownerId;
    }

    public void setBooleans(Set<RAExtBoolean> booleans) {
        this.booleans = booleans;
    }

    public void setBooleansCount(Short booleansCount) {
        this.booleansCount = booleansCount;
    }

    // Compares all value collections and counts; the owner/ids are
    // deliberately excluded — presumably to avoid touching the lazy owner
    // association, verify against other R* entities in this package.
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        RAssignmentExtension that = (RAssignmentExtension) o;

        if (dates != null ? !dates.equals(that.dates) : that.dates != null) return false;
        if (datesCount != null ? !datesCount.equals(that.datesCount) : that.datesCount != null) return false;
        if (longs != null ? !longs.equals(that.longs) : that.longs != null) return false;
        if (longsCount != null ? !longsCount.equals(that.longsCount) : that.longsCount != null) return false;
        if (polys != null ? !polys.equals(that.polys) : that.polys != null) return false;
        if (polysCount != null ? !polysCount.equals(that.polysCount) : that.polysCount != null) return false;
        if (references != null ? !references.equals(that.references) : that.references != null) return false;
        if (referencesCount != null ? !referencesCount.equals(that.referencesCount) : that.referencesCount != null)
            return false;
        if (strings != null ? !strings.equals(that.strings) : that.strings != null) return false;
        if (stringsCount != null ? !stringsCount.equals(that.stringsCount) : that.stringsCount != null) return false;
        if (booleans != null ? !booleans.equals(that.booleans) : that.booleans != null) return false;
        if (booleansCount != null ? !booleansCount.equals(that.booleansCount) : that.booleansCount != null)
            return false;

        return true;
    }

    // Hashes only the count fields (a subset of the equals fields, which is
    // contract-valid) — cheap and avoids initializing the lazy collections.
    @Override
    public int hashCode() {
        int result = stringsCount != null ? stringsCount.hashCode() : 0;
        result = 31 * result + (longsCount != null ? longsCount.hashCode() : 0);
        result = 31 * result + (datesCount != null ? datesCount.hashCode() : 0);
        result = 31 * result + (referencesCount != null ? referencesCount.hashCode() : 0);
        result = 31 * result + (polysCount != null ? polysCount.hashCode() : 0);
        result = 31 * result + (booleansCount != null ? booleansCount.hashCode() : 0);
        return result;
    }

    /**
     * Copies all extension values from the JAXB {@link ExtensionType} into the
     * repository entity {@code repo}, tagging each value with the given
     * extension {@code type}.
     *
     * @throws DtoTranslationException when value conversion fails
     */
    public static void copyFromJAXB(ExtensionType jaxb, RAssignmentExtension repo, RAssignmentExtensionType type,
                                    PrismContext prismContext) throws DtoTranslationException {
        Validate.notNull(repo, "Repo object must not be null.");
        Validate.notNull(jaxb, "JAXB object must not be null.");

        copyFromJAXB(jaxb.asPrismContainerValue(), repo, type, prismContext);
    }

    // Converts every prism item to R* values, buckets them into the typed
    // collections by instanceof, and refreshes the denormalized counts.
    private static void copyFromJAXB(PrismContainerValue containerValue, RAssignmentExtension repo,
                                     RAssignmentExtensionType type, PrismContext prismContext)
            throws DtoTranslationException {
        RAnyConverter converter = new RAnyConverter(prismContext);

        Set<RAnyValue> values = new HashSet<RAnyValue>();
        try {
            List<Item<?,?>> items = containerValue.getItems();
            for (Item item : items) {
                values.addAll(converter.convertToRValue(item, true));
            }
        } catch (Exception ex) {
            throw new DtoTranslationException(ex.getMessage(), ex);
        }

        for (RAnyValue value : values) {
            ((RAExtValue) value).setAnyContainer(repo);
            ((RAExtValue) value).setExtensionType(type);

            if (value instanceof RAExtDate) {
                repo.getDates().add((RAExtDate) value);
            } else if (value instanceof RAExtLong) {
                repo.getLongs().add((RAExtLong) value);
            } else if (value instanceof RAExtReference) {
                repo.getReferences().add((RAExtReference) value);
            } else if (value instanceof RAExtString) {
                repo.getStrings().add((RAExtString) value);
            } else if (value instanceof RAExtPolyString) {
                repo.getPolys().add((RAExtPolyString) value);
            } else if (value instanceof RAExtBoolean) {
                repo.getBooleans().add((RAExtBoolean) value);
            }
        }

        repo.setStringsCount((short) repo.getStrings().size());
        repo.setDatesCount((short) repo.getDates().size());
        repo.setPolysCount((short) repo.getPolys().size());
        repo.setReferencesCount((short) repo.getReferences().size());
        repo.setLongsCount((short) repo.getLongs().size());
        repo.setBooleansCount((short) repo.getBooleans().size());
    }
}
package com.flaptor.org.apache.lucene.util;

/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * This class was inspired by CGLIB, but provides a better
 * QuickSort algorithm without additional InsertionSort
 * at the end.
 * To use, subclass and override the four abstract methods
 * which compare and modify your data.
 * Allows custom swap so that two arrays can be sorted
 * at the same time.
 * @lucene.internal
 */
public abstract class SorterTemplate {

  // Below this range size mergeSort() falls back to insertionSort().
  private static final int MERGESORT_THRESHOLD = 12;
  // Below this range size quickSort() falls back to insertionSort().
  private static final int QUICKSORT_THRESHOLD = 7;

  /** Implement this method, that swaps slots {@code i} and {@code j} in your data */
  protected abstract void swap(int i, int j);

  /** Compares slots {@code i} and {@code j} of your data.
   * Should be implemented like <code><em>valueOf(i)</em>.compareTo(<em>valueOf(j)</em>)</code> */
  protected abstract int compare(int i, int j);

  /** Implement this method, that stores the value of slot {@code i} as pivot value */
  protected abstract void setPivot(int i);

  /** Implements the compare function for the previously stored pivot value.
   * Should be implemented like <code>pivot.compareTo(<em>valueOf(j)</em>)</code> */
  protected abstract int comparePivot(int j);

  /** Sorts via stable in-place InsertionSort algorithm
   * (ideal for small collections which are mostly presorted).
   * Note: {@code hi} is inclusive here (the loop runs while {@code i <= hi}). */
  public final void insertionSort(int lo, int hi) {
    for (int i = lo + 1 ; i <= hi; i++) {
      // Sink element i leftwards until it meets a smaller-or-equal element.
      for (int j = i; j > lo; j--) {
        if (compare(j - 1, j) > 0) {
          swap(j - 1, j);
        } else {
          break;
        }
      }
    }
  }

  /** Sorts via in-place, but unstable, QuickSort algorithm.
   * For small collections falls back to {@link #insertionSort(int,int)}. */
  public final void quickSort(final int lo, final int hi) {
    if (hi <= lo) return;
    // from Integer's Javadocs: ceil(log2(x)) = 32 - numberOfLeadingZeros(x - 1)
    // Max recursion depth is 2*ceil(log2(n)); past that we switch to mergeSort
    // to keep worst-case O(n log n) (introsort-style).
    quickSort(lo, hi, (Integer.SIZE - Integer.numberOfLeadingZeros(hi - lo)) << 1);
  }

  // NOTE(review): quickSort/insertionSort treat hi as inclusive, while
  // mergeSort's arithmetic below (diff = hi - lo, recursion sharing mid)
  // reads as half-open — confirm the intended bound convention before
  // calling mergeSort directly.
  private void quickSort(int lo, int hi, int maxDepth) {
    // fall back to insertion when array has short length
    final int diff = hi - lo;
    if (diff <= QUICKSORT_THRESHOLD) {
      insertionSort(lo, hi);
      return;
    }

    // fall back to merge sort when recursion depth gets too big
    if (--maxDepth == 0) {
      mergeSort(lo, hi);
      return;
    }

    // Median-of-three pivot selection: order (lo, mid, hi) so that mid holds
    // the median of the three, then use it as the pivot.
    final int mid = lo + (diff >>> 1);

    if (compare(lo, mid) > 0) {
      swap(lo, mid);
    }

    if (compare(mid, hi) > 0) {
      swap(mid, hi);

      if (compare(lo, mid) > 0) {
        swap(lo, mid);
      }
    }

    // Partition the interior (lo and hi already sit on the correct sides).
    int left = lo + 1;
    int right = hi - 1;

    setPivot(mid);
    for (;;) {
      while (comparePivot(right) < 0)
        --right;

      while (left < right && comparePivot(left) >= 0)
        ++left;

      if (left < right) {
        swap(left, right);
        --right;
      } else {
        break;
      }
    }

    quickSort(lo, left, maxDepth);
    quickSort(left + 1, hi, maxDepth);
  }

  /** Sorts via stable in-place MergeSort algorithm
   * For small collections falls back to {@link #insertionSort(int,int)}. */
  public final void mergeSort(int lo, int hi) {
    final int diff = hi - lo;
    if (diff <= MERGESORT_THRESHOLD) {
      insertionSort(lo, hi);
      return;
    }
    final int mid = lo + (diff >>> 1);
    mergeSort(lo, mid);
    mergeSort(mid, hi);
    merge(lo, mid, hi, mid - lo, hi - mid);
  }

  // In-place merge of the two sorted runs [lo, pivot) and [pivot, hi) of
  // lengths len1 and len2: split the longer run in half, binary-search the
  // matching cut in the other run, rotate the middle section so both left
  // parts precede both right parts, then recurse on the two halves.
  private void merge(int lo, int pivot, int hi, int len1, int len2) {
    if (len1 == 0 || len2 == 0) {
      return;
    }
    if (len1 + len2 == 2) {
      // Two elements total: at most one swap needed; "< 0" keeps stability.
      if (compare(pivot, lo) < 0) {
          swap(pivot, lo);
      }
      return;
    }
    int first_cut, second_cut;
    int len11, len22;
    if (len1 > len2) {
      len11 = len1 >>> 1;
      first_cut = lo + len11;
      second_cut = lower(pivot, hi, first_cut);
      len22 = second_cut - pivot;
    } else {
      len22 = len2 >>> 1;
      second_cut = pivot + len22;
      first_cut = upper(lo, pivot, second_cut);
      len11 = first_cut - lo;
    }
    rotate(first_cut, pivot, second_cut);
    final int new_mid = first_cut + len22;
    merge(lo, first_cut, new_mid, len11, len22);
    merge(new_mid, second_cut, hi, len1 - len11, len2 - len22);
  }

  // Rotates [lo, hi) so that the block starting at mid comes first, via the
  // classic triple reversal: reverse both halves, then the whole range.
  private void rotate(int lo, int mid, int hi) {
    int lot = lo;
    int hit = mid - 1;
    while (lot < hit) {
      swap(lot++, hit--);
    }
    lot = mid; hit = hi - 1;
    while (lot < hit) {
      swap(lot++, hit--);
    }
    lot = lo; hit = hi - 1;
    while (lot < hit) {
      swap(lot++, hit--);
    }
  }

  // Binary search: first index in [lo, hi) whose element is NOT less than
  // the element at slot val (lower bound).
  private int lower(int lo, int hi, int val) {
    int len = hi - lo;
    while (len > 0) {
      final int half = len >>> 1,
        mid = lo + half;
      if (compare(mid, val) < 0) {
        lo = mid + 1;
        len = len - half -1;
      } else {
        len = half;
      }
    }
    return lo;
  }

  // Binary search: first index in [lo, hi) whose element is GREATER than
  // the element at slot val (upper bound).
  private int upper(int lo, int hi, int val) {
    int len = hi - lo;
    while (len > 0) {
      final int half = len >>> 1,
        mid = lo + half;
      if (compare(val, mid) < 0) {
        len = half;
      } else {
        lo = mid + 1;
        len = len - half -1;
      }
    }
    return lo;
  }

}
package ro.mihalea.cadets.barebones.logic.units;

import junit.framework.TestCase;
import ro.mihalea.cadets.barebones.logic.Interpreter;
import ro.mihalea.cadets.barebones.Listener;
import ro.mihalea.cadets.barebones.logic.exceptions.BonesException;

/**
 * End-to-end tests for the Barebones {@link Interpreter}: each case feeds a
 * complete program string to {@code run(...)} and checks the variable values
 * in the returned {@code Memory}.
 *
 * Created by mm8g15 on 06/10/2015.
 */
public class InterpreterTest extends TestCase {

    public void testListenerCreation() throws Exception{
        // A fresh interpreter must exist and be able to hand out a listener.
        Interpreter bones = new Interpreter();
        assertNotNull(bones);

        Listener hook = bones.setupListener();
        assertNotNull(hook);
    }

    public void testIncrement() throws Exception {
        Interpreter bones = new Interpreter();

        // Single increment of an unset variable yields 1.
        Memory state = bones.run("incr x;");
        assertEquals(1, state.get("x"));

        // Two statements, two independent variables.
        state = bones.run("incr x; incr y;");
        assertEquals(1, state.get("x"));
        assertEquals(1, state.get("y"));

        // One statement may name multiple (even repeated) targets.
        state = bones.run("incr x x;");
        assertEquals(2, state.get("x"));

        state = bones.run("incr x y;");
        assertEquals(1, state.get("x"));
        assertEquals(1, state.get("y"));
    }

    public void testDecrement() throws Exception{
        Interpreter bones = new Interpreter();

        // Decrement mirrors increment, allowing negative values.
        Memory state = bones.run("decr x;");
        assertEquals(-1, state.get("x"));

        state = bones.run("decr x; decr y;");
        assertEquals(-1, state.get("x"));
        assertEquals(-1, state.get("y"));

        state = bones.run("decr x x;");
        assertEquals(-2, state.get("x"));

        state = bones.run("decr x y;");
        assertEquals(-1, state.get("x"));
        assertEquals(-1, state.get("y"));
    }

    public void testClear() throws Exception {
        Interpreter bones = new Interpreter();

        // clear resets a variable to zero...
        Memory state = bones.run("decr x; clear x;");
        assertEquals(0, state.get("x"));

        // ...and the variable stays usable afterwards.
        state = bones.run("decr x; clear x; incr x;");
        assertEquals(1, state.get("x"));
    }

    public void testInit() throws Exception {
        Interpreter bones = new Interpreter();

        Memory state = bones.run("init x = 25;");
        assertEquals(25, state.get("x"));

        // init overwrites any previous value.
        state = bones.run("incr x; init x = 4761;");
        assertEquals(4761, state.get("x"));
    }

    public void testWhile() throws Exception {
        Interpreter bones = new Interpreter();

        // Transfer loop: y counts up while x counts down to zero.
        Memory state = bones.run("init x = 25; clear y; while x not 0 do; incr y; decr x; end;");
        assertEquals(25, state.get("y"));

        // Nested loops: 25 * 10 increments of z.
        state = bones.run("init x = 25; clear z; " +
                "while x not 0 do; " +
                "init y = 10; " +
                "while y not 0 do; " +
                "incr z; decr y;" +
                "end;" +
                "decr x;" +
                "end;");
        assertEquals(250, state.get("z"));
    }

    public void testCopy() throws Exception {
        Interpreter bones = new Interpreter();

        // Plain copy, then arithmetic expressions on the right-hand side.
        Memory state = bones.run("init x = 25; copy x to y; incr y;");
        assertEquals(26, state.get("y"));

        state = bones.run("init x = 25; copy x + 50 to a;");
        assertEquals(75, state.get("a"));

        state = bones.run("init x = 25; copy x + x / 5 to a;");
        assertEquals(30, state.get("a"));
    }

    public void testMultiply() throws Exception {
        Interpreter bones = new Interpreter();

        // Classic Barebones multiplication: Z = X * Y (24 * 54 = 1296),
        // restoring Y through W on every outer iteration.
        Memory state = bones.run("init X = 24;" +
                "init Y = 54;\n" +
                "clear Z;\n" +
                "while X not 0 do;\n" +
                " clear W;\n" +
                " while Y not 0 do;\n" +
                " incr Z;\n" +
                " incr W;\n" +
                " decr Y;\n" +
                " end;\n" +
                " while W not 0 do;\n" +
                " incr Y;\n" +
                " decr W;\n" +
                " end;\n" +
                " decr X;\n" +
                "end;");
        assertEquals(1296, state.get("Z"));
    }

    public void testFactorial() throws Exception {
        Interpreter bones = new Interpreter();

        // F = N! computed by repeated addition (5! = 120).
        Memory state = bones.run("init N = 5;" +
                "clear F;\n" +
                "incr F;\n" +
                "decr N;\n" +
                "\n" +
                "while N not 0 do;\n" +
                " copy F to G;\n" +
                " while G not 0 do;\n" +
                " copy N to H;\n" +
                " while H not 0 do;\n" +
                " incr F;\n" +
                " decr H;\n" +
                " end;\n" +
                " decr G;\n" +
                " end;\n" +
                " decr N;\n" +
                "end;");
        assertEquals(120, state.get("F"));
    }

    public void testFibonacci() throws Exception {
        Interpreter bones = new Interpreter();

        // F ends up holding fib(10) = 55 after ten iterations of (F, G) -> (G, F+G).
        Memory state = bones.run("init N = 10;" +
                "clear F;\n" +
                "clear G;\n" +
                "incr G;\n" +
                "\n" +
                "while N not 0 do;\n" +
                " copy G to H;\n" +
                " while F not 0 do;\n" +
                " incr H;\n" +
                " decr F;\n" +
                " end;\n" +
                "\n" +
                " copy G to F;\n" +
                " copy H to G;\n" +
                "\n" +
                " decr N;\n" +
                "end;");
        assertEquals(55, state.get("F"));
    }

    public void testComment() throws Exception {
        Interpreter bones = new Interpreter();

        // Line comments introduced by '#'.
        Memory state = bones.run("incr x;\n" +
                "#incr x");
        assertEquals(1, state.get("x"));

        // A comment may trail a statement on the same line.
        state = bones.run("incr x;#incr x;");
        assertEquals(1, state.get("x"));

        // Block comments delimited by '#:' ... ':#' may span lines.
        state = bones.run("#: heya\n" +
                "how you doing?\n" +
                "incr x:#\n" +
                "incr x;");
        assertEquals(1, state.get("x"));

        // Mixed block and line comments interleaved with real statements.
        state = bones.run("#: multiline\n" +
                "abcd:# incr x; #: bestie mica :# incr y; #incr x;\n" +
                "incr x;");
        assertEquals(2, state.get("x"));
        assertEquals(1, state.get("y"));
    }

    public void testConditional() throws Exception {
        Interpreter bones = new Interpreter();

        // if/else: the false branch must run.
        Memory state = bones.run("init x = 2;" +
                "init y = 5;" +
                "if x > y;" +
                "incr y;" +
                "else;" +
                "incr x;" +
                "end;");
        assertEquals(3, state.get("x"));
        assertEquals(5, state.get("y"));

        // if without else.
        state = bones.run("init x = 3;" +
                "if x < 5;" +
                "incr x;" +
                "end;");
        assertEquals(4, state.get("x"));

        // if/elif/else falling through to else.
        state = bones.run("init x = 3; clear y;" +
                "if x > 3;" +
                "init y = 1;" +
                "elif x < 3;" +
                "init y = 2;" +
                "else;" +
                "init y = 3;" +
                "end;");
        assertEquals(3, state.get("y"));

        // Only the first matching elif branch runs.
        state = bones.run("init x = 3; clear y;" +
                "if x > 5;" +
                "init y = 1;" +
                "elif x > 4;" +
                "init y = 2;" +
                "elif x > 3;" +
                "init y = 3;" +
                "elif x > 2;" +
                "init y = 4;" +
                "end;");
        assertEquals(4, state.get("y"));

        // Conditionals nest inside an else branch.
        state = bones.run("init x = 2; init y = 5;" +
                "if x > 2;" +
                "init y = 0;" +
                "else;" +
                "if x < 10;" +
                "init y = 1;" +
                "else;" +
                "init y = 2;" +
                "end;" +
                "end;");
        assertEquals(1, state.get("y"));
    }

    public void testEmptyIfs() throws BonesException {
        Interpreter bones = new Interpreter();

        // A branch with no statements is legal; the next matching branch runs.
        Memory state = bones.run("init x = 2;" +
                "if x >= 3;" +
                "elif x >= 0;" +
                "incr x;" +
                "end;");
        assertEquals(3, state.get("x"));
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.sdk.extensions.sketching; import com.google.auto.value.AutoValue; import com.tdunning.math.stats.MergingDigest; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.nio.ByteBuffer; import java.util.Iterator; import org.apache.beam.sdk.annotations.Experimental; import org.apache.beam.sdk.coders.ByteArrayCoder; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.CoderException; import org.apache.beam.sdk.coders.CoderRegistry; import org.apache.beam.sdk.coders.CustomCoder; import org.apache.beam.sdk.transforms.Combine; import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.transforms.display.DisplayData; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; /** * {@code PTransform}s for getting information about quantiles in a stream. * * <p>This class uses the T-Digest structure introduced by Ted Dunning, and more precisely * the {@link MergingDigest} implementation. 
* * <h2>References</h2> * * <p>The paper and implementation are available on Ted Dunning's * <a href="https://github.com/tdunning/t-digest">Github profile</a> * * <h2>Parameters</h2> * * <p>Only one parameter can be tuned in order to control the tradeoff between * the estimation accuracy and the memory use. <br> * * <p>Stream elements are compressed into a linked list of centroids. * The compression factor {@code cf} is used to limit the number of elements represented by * each centroid as well as the total number of centroids. <br> * The relative error will always be a small fraction of 1% for values at extreme quantiles * and always be less than 3/cf at middle quantiles. <br> * * <p>By default the compression factor is set to 100, * which guarantees a relative error less than 3%. * * <h2>Examples</h2> * * <p>There are 2 ways of using this class: * * <ul> * <li>Use the {@link PTransform}s that return a {@link PCollection} which contains * a {@link MergingDigest} for querying the value at a given quantile or * the approximate quantile position of an element. * <li>Use the {@link TDigestQuantilesFn} {@code CombineFn} that is exposed in order * to make advanced processing involving the {@link MergingDigest}. * </ul> * * <h3>Example 1: Default use</h3> * * <p>The simplest use is to call the {@link #globally()} or {@link #perKey()} method in * order to retrieve the digest, and then to query the structure. * * <pre><code> * {@literal PCollection<Double>} pc = ...; * {@literal PCollection<MergingDigest>} countMinSketch = pc.apply(TDigestQuantiles * .globally()); // .perKey() * </code></pre> * * <h3>Example 2: tune accuracy parameters</h3> * * <p>One can tune the compression factor {@code cf} in order to control accuracy and memory. <br> * This tuning works exactly the same for {@link #globally()} and {@link #perKey()}. 
 *
 * <pre><code>
 * double cf = 500;
 * {@literal PCollection<Double>} pc = ...;
 * {@literal PCollection<MergingDigest>} countMinSketch = pc.apply(TDigestQuantiles
 *        .globally() // .perKey()
 *        .withCompression(cf));
 * </code></pre>
 *
 * <h3>Example 3 : Query the resulting structure</h3>
 *
 * <p>This example shows how to query the resulting structure, for example to
 * build {@code PCollection} of {@link KV}s with each pair corresponding to
 * a couple (quantile, value).
 *
 * <pre><code>
 * {@literal PCollection<MergingDigest>} pc = ...;
 * {@literal PCollection<KV<Double, Double>>} quantiles = pc.apply(ParDo.of(
 *     {@literal new DoFn<MergingDigest, KV<Double, Double>>()} {
 *       {@literal @ProcessElement}
 *       public void processElement(ProcessContext c) {
 *         double[] quantiles = {0.01, 0.25, 0.5, 0.75, 0.99};
 *         for (double q : quantiles) {
 *           c.output(KV.of(q, c.element().quantile(q)));
 *         }
 *       }}));
 * </code></pre>
 *
 * <p>One can also retrieve the approximate quantile position of a given element in the stream
 * using {@code cdf(double)} method instead of {@code quantile(double)}.
 *
 * <h3>Example 4: Using the CombineFn</h3>
 *
 * <p>The {@code CombineFn} does the same thing as the {@code PTransform}s but
 * it can be used for doing stateful processing or in
 * {@link org.apache.beam.sdk.transforms.CombineFns.ComposedCombineFn}.
 *
 * <p>This example is not really interesting but it shows how one can properly
 * create a {@link TDigestQuantilesFn}.
 *
 * <pre><code>
 * double cf = 250;
 * {@literal PCollection<Double>} input = ...;
 * {@literal PCollection<MergingDigest>} output = input.apply(Combine
 *     .globally(TDigestQuantilesFn.create(cf)));
 * </code></pre>
 *
 * <p><b>Warning: this class is experimental.</b> <br>
 * Its API is subject to change in future versions of Beam.
 */
@Experimental
public final class TDigestQuantiles {

  /**
   * Compute the stream in order to build a T-Digest structure (MergingDigest)
   * for keeping track of the stream distribution and returns a {@code PCollection<MergingDigest>}.
* <br> The resulting structure can be queried in order to retrieve the approximate value * at a given quantile or the approximate quantile position of a given element. */ public static GlobalDigest globally() { return GlobalDigest.builder().build(); } /** * Like {@link #globally()}, but builds a digest for each key in the stream. * * @param <K> the type of the keys */ public static <K> PerKeyDigest<K> perKey() { return PerKeyDigest.<K>builder().build(); } /** Implementation of {@link #globally()}. */ @AutoValue public abstract static class GlobalDigest extends PTransform<PCollection<Double>, PCollection<MergingDigest>> { abstract double compression(); abstract Builder toBuilder(); static Builder builder() { return new AutoValue_TDigestQuantiles_GlobalDigest.Builder() .setCompression(100); } @AutoValue.Builder abstract static class Builder { abstract Builder setCompression(double cf); abstract GlobalDigest build(); } /** * Sets the compression factor {@code cf}. * * <p>Keep in mind that a compression factor {@code cf} of c guarantees * a relative error less than 3/c at mid quantiles. <br> * The accuracy will always be significantly less than 1% at extreme quantiles. * * @param cf the bound value for centroid and digest sizes. */ public GlobalDigest withCompression(double cf) { return toBuilder().setCompression(cf).build(); } @Override public PCollection<MergingDigest> expand(PCollection<Double> input) { return input.apply( "Compute T-Digest Structure", Combine.globally(TDigestQuantilesFn.create(this.compression()))); } } /** Implementation of {@link #perKey()}. 
   */
  @AutoValue
  public abstract static class PerKeyDigest<K>
      extends PTransform<PCollection<KV<K, Double>>, PCollection<KV<K, MergingDigest>>> {

    // Compression factor bounding the digest size/accuracy trade-off.
    abstract double compression();

    abstract Builder<K> toBuilder();

    // Default compression of 100 gives roughly 3/100 relative error at mid quantiles.
    static <K> Builder<K> builder() {
      return new AutoValue_TDigestQuantiles_PerKeyDigest.Builder<K>()
          .setCompression(100);
    }

    @AutoValue.Builder
    abstract static class Builder<K> {
      abstract Builder<K> setCompression(double cf);

      abstract PerKeyDigest<K> build();
    }

    /**
     * Sets the compression factor {@code cf}.
     *
     * <p>Keep in mind that a compression factor {@code cf} of c guarantees
     * a relative error less than 3/c at mid quantiles. <br>
     * The accuracy will always be significantly less than 1% at extreme quantiles.
     *
     * @param cf the bound value for centroid and digest sizes.
     */
    public PerKeyDigest<K> withCompression(double cf) {
      return toBuilder().setCompression(cf).build();
    }

    @Override
    public PCollection<KV<K, MergingDigest>> expand(PCollection<KV<K, Double>> input) {
      return input.apply(
          "Compute T-Digest Structure",
          Combine.perKey(TDigestQuantilesFn.create(this.compression())));
    }
  }

  /** Implements the {@link Combine.CombineFn} of {@link TDigestQuantiles} transforms. */
  public static class TDigestQuantilesFn
      extends Combine.CombineFn<Double, MergingDigest, MergingDigest> {

    // Bound on centroid count: larger values mean better accuracy but larger digests.
    private final double compression;

    private TDigestQuantilesFn(double compression) {
      this.compression = compression;
    }

    /**
     * Returns {@link TDigestQuantilesFn} combiner with the given compression factor.
     *
     * <p>Keep in mind that a compression factor {@code cf} of c guarantees
     * a relative error less than 3/c at mid quantiles. <br>
     * The accuracy will always be significantly less than 1% at extreme quantiles.
     *
     * @param compression the bound value for centroid and digest sizes.
     */
    public static TDigestQuantilesFn create(double compression) {
      if (compression > 0) {
        return new TDigestQuantilesFn(compression);
      }
      throw new IllegalArgumentException("Compression factor should be greater than 0.");
    }

    /** Returns a fresh, empty digest bounded by the configured compression factor. */
    @Override
    public MergingDigest createAccumulator() {
      return new MergingDigest(compression);
    }

    // Mutates and returns the accumulator; per the CombineFn contract the framework
    // allows accumulators to be modified in place.
    @Override
    public MergingDigest addInput(MergingDigest accum, Double value) {
      accum.add(value);
      return accum;
    }

    /** Output the whole structure so it can be queried, reused or stored easily. */
    @Override
    public MergingDigest extractOutput(MergingDigest accum) {
      return accum;
    }

    // NOTE(review): folds every digest into the first accumulator. Assumes the iterable is
    // non-empty — it.next() would otherwise throw NoSuchElementException; confirm the runner
    // never calls this with an empty iterable.
    @Override
    public MergingDigest mergeAccumulators(
        Iterable<MergingDigest> accumulators) {
      Iterator<MergingDigest> it = accumulators.iterator();
      MergingDigest merged = it.next();
      while (it.hasNext()) {
        merged.add(it.next());
      }
      return merged;
    }

    @Override
    public Coder<MergingDigest> getAccumulatorCoder(CoderRegistry registry, Coder inputCoder) {
      return new MergingDigestCoder();
    }

    @Override
    public Coder<MergingDigest> getDefaultOutputCoder(CoderRegistry registry, Coder inputCoder) {
      return new MergingDigestCoder();
    }

    // Surfaces the compression factor in the pipeline UI / display data.
    @Override
    public void populateDisplayData(DisplayData.Builder builder) {
      super.populateDisplayData(builder);
      builder.add(DisplayData
          .item("compression", compression)
          .withLabel("Compression factor"));
    }
  }

  /** Coder for {@link MergingDigest} class.
*/ static class MergingDigestCoder extends CustomCoder<MergingDigest> { private static final ByteArrayCoder BYTE_ARRAY_CODER = ByteArrayCoder.of(); @Override public void encode(MergingDigest value, OutputStream outStream) throws IOException { if (value == null) { throw new CoderException("cannot encode a null T-Digest sketch"); } ByteBuffer buf = ByteBuffer.allocate(value.byteSize()); value.asBytes(buf); BYTE_ARRAY_CODER.encode(buf.array(), outStream); } @Override public MergingDigest decode(InputStream inStream) throws IOException { byte[] bytes = BYTE_ARRAY_CODER.decode(inStream); ByteBuffer buf = ByteBuffer.wrap(bytes); return MergingDigest.fromBytes(buf); } @Override public boolean isRegisterByteSizeObserverCheap(MergingDigest value) { return true; } @Override protected long getEncodedElementByteSize(MergingDigest value) throws IOException { if (value == null) { throw new CoderException("cannot encode a null T-Digest sketch"); } return value.byteSize(); } } }
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.skyframe; import static com.google.common.truth.Truth.assertThat; import com.google.common.base.Objects; import com.google.common.base.Supplier; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Sets; import com.google.common.util.concurrent.Callables; import com.google.devtools.build.lib.actions.AbstractAction; import com.google.devtools.build.lib.actions.Action; import com.google.devtools.build.lib.actions.ActionExecutionContext; import com.google.devtools.build.lib.actions.ActionExecutionException; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.Executor; import com.google.devtools.build.lib.actions.ResourceSet; import com.google.devtools.build.lib.actions.util.ActionsTestUtil; import com.google.devtools.build.lib.actions.util.DummyExecutor; import com.google.devtools.build.lib.util.Fingerprint; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.RootedPath; import com.google.devtools.build.skyframe.EvaluationProgressReceiver; import com.google.devtools.build.skyframe.EvaluationProgressReceiver.EvaluationState; import 
com.google.devtools.build.skyframe.SkyFunction.Environment;
import com.google.devtools.build.skyframe.SkyKey;
import com.google.devtools.build.skyframe.SkyValue;

import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicInteger;

import javax.annotation.Nullable;

/** Tests for {@link SkyframeAwareAction}. */
@RunWith(JUnit4.class)
public class SkyframeAwareActionTest extends TimestampBuilderTestCase {
  private Builder builder;
  private Executor executor;
  private TrackingEvaluationProgressReceiver invalidationReceiver;

  @Before
  public final void createBuilder() throws Exception {
    invalidationReceiver = new TrackingEvaluationProgressReceiver();
    builder = createBuilder(inMemoryCache, 1, /*keepGoing=*/ false, invalidationReceiver);
  }

  @Before
  public final void createExecutor() throws Exception {
    executor = new DummyExecutor(rootDirectory);
  }

  /**
   * Progress receiver that records which SkyKeys were invalidated, enqueued and evaluated,
   * so tests can assert whether an action's Skyframe node was dirtied and/or re-evaluated.
   */
  private static final class TrackingEvaluationProgressReceiver
      implements EvaluationProgressReceiver {

    /** A (SkyKey, InvalidationState) pair recorded when a node is invalidated. */
    public static final class InvalidatedKey {
      public final SkyKey skyKey;
      public final InvalidationState state;

      InvalidatedKey(SkyKey skyKey, InvalidationState state) {
        this.skyKey = skyKey;
        this.state = state;
      }

      @Override
      public boolean equals(Object obj) {
        return obj instanceof InvalidatedKey
            && this.skyKey.equals(((InvalidatedKey) obj).skyKey)
            && this.state.equals(((InvalidatedKey) obj).state);
      }

      @Override
      public int hashCode() {
        return Objects.hashCode(skyKey, state);
      }
    }

    /** A (SkyKey, SkyValue, EvaluationState) triple recorded when a node finishes evaluating. */
    public static final class EvaluatedEntry {
      public final SkyKey skyKey;
      public final SkyValue value;
      public final EvaluationState state;

      EvaluatedEntry(SkyKey skyKey, SkyValue value, EvaluationState state) {
        this.skyKey = skyKey;
        this.value = value;
        this.state = state;
      }

      @Override
      public boolean equals(Object obj) {
        return obj instanceof EvaluatedEntry
            && this.skyKey.equals(((EvaluatedEntry) obj).skyKey)
            && this.value.equals(((EvaluatedEntry) obj).value)
            && this.state.equals(((EvaluatedEntry) obj).state);
      }

      @Override
      public int hashCode() {
        return Objects.hashCode(skyKey, value, state);
      }
    }

    // Concurrent sets: Skyframe may report progress from multiple evaluator threads.
    public final Set<InvalidatedKey> invalidated = Sets.newConcurrentHashSet();
    public final Set<SkyKey> enqueued = Sets.newConcurrentHashSet();
    public final Set<EvaluatedEntry> evaluated = Sets.newConcurrentHashSet();

    /** Clears all recorded events, typically between the two builds of a test. */
    public void reset() {
      invalidated.clear();
      enqueued.clear();
      evaluated.clear();
    }

    /** Returns true if {@code skyKey} was invalidated since the last {@link #reset}. */
    public boolean wasInvalidated(SkyKey skyKey) {
      for (InvalidatedKey e : invalidated) {
        if (e.skyKey.equals(skyKey)) {
          return true;
        }
      }
      return false;
    }

    /** Returns the recorded entry for {@code forKey}, or null if it was not evaluated. */
    public EvaluatedEntry getEvalutedEntry(SkyKey forKey) {
      for (EvaluatedEntry e : evaluated) {
        if (e.skyKey.equals(forKey)) {
          return e;
        }
      }
      return null;
    }

    @Override
    public void invalidated(SkyKey skyKey, InvalidationState state) {
      invalidated.add(new InvalidatedKey(skyKey, state));
    }

    @Override
    public void enqueueing(SkyKey skyKey) {
      enqueued.add(skyKey);
    }

    @Override
    public void computed(SkyKey skyKey, long elapsedTimeNanos) {}

    @Override
    public void evaluated(
        SkyKey skyKey, Supplier<SkyValue> skyValueSupplier, EvaluationState state) {
      evaluated.add(new EvaluatedEntry(skyKey, skyValueSupplier.get(), state));
    }
  }

  /** A mock action that counts how many times it was executed.
*/ private static class ExecutionCountingAction extends AbstractAction { private final AtomicInteger executionCounter; ExecutionCountingAction(Artifact input, Artifact output, AtomicInteger executionCounter) { super(ActionsTestUtil.NULL_ACTION_OWNER, ImmutableList.of(input), ImmutableList.of(output)); this.executionCounter = executionCounter; } @Override public void execute(ActionExecutionContext actionExecutionContext) throws ActionExecutionException, InterruptedException { executionCounter.incrementAndGet(); // This action first reads its input file (there can be only one). For the purpose of these // tests we assume that the input file is short, maybe just 10 bytes long. byte[] input = new byte[10]; int inputLen = 0; try (InputStream in = Iterables.getOnlyElement(getInputs()).getPath().getInputStream()) { inputLen = in.read(input); } catch (IOException e) { throw new ActionExecutionException(e, this, false); } // This action then writes the contents of the input to the (only) output file, and appends an // extra "x" character too. try (OutputStream out = getPrimaryOutput().getPath().getOutputStream()) { out.write(input, 0, inputLen); out.write('x'); } catch (IOException e) { throw new ActionExecutionException(e, this, false); } } @Override public String getMnemonic() { return null; } @Override protected String computeKey() { return getPrimaryOutput().getExecPathString() + executionCounter.get(); } @Override public ResourceSet estimateResourceConsumption(Executor executor) { return ResourceSet.ZERO; } } private static class ExecutionCountingCacheBypassingAction extends ExecutionCountingAction { ExecutionCountingCacheBypassingAction( Artifact input, Artifact output, AtomicInteger executionCounter) { super(input, output, executionCounter); } @Override public boolean executeUnconditionally() { return true; } @Override public boolean isVolatile() { return true; } } /** A mock skyframe-aware action that counts how many times it was executed. 
 */
  private static class SkyframeAwareExecutionCountingAction
      extends ExecutionCountingCacheBypassingAction implements SkyframeAwareAction {
    private final SkyKey actionDepKey;

    SkyframeAwareExecutionCountingAction(
        Artifact input, Artifact output, AtomicInteger executionCounter, SkyKey actionDepKey) {
      super(input, output, executionCounter);
      this.actionDepKey = actionDepKey;
    }

    @Override
    public void establishSkyframeDependencies(Environment env) throws ExceptionBase {
      // Establish some Skyframe dependency. A real action would then use this to compute and
      // cache data for the execute(...) method.
      env.getValue(actionDepKey);
    }
  }

  /** Factory so tests can choose which flavor of counting action to instantiate. */
  private interface ExecutionCountingActionFactory {
    ExecutionCountingAction create(Artifact input, Artifact output, AtomicInteger executionCounter);
  }

  /** How (if at all) the action's input file should be mutated between the two builds. */
  private enum ChangeArtifact {
    DONT_CHANGE,
    CHANGE_MTIME {
      @Override
      boolean changeMtime() {
        return true;
      }
    },
    CHANGE_MTIME_AND_CONTENT {
      @Override
      boolean changeMtime() {
        return true;
      }

      @Override
      boolean changeContent() {
        return true;
      }
    };

    // Base implementations: no mutation unless a constant overrides them.
    boolean changeMtime() {
      return false;
    }

    boolean changeContent() {
      return false;
    }
  }

  /** Expected fate of the action's Skyframe node on the second build. */
  private enum ExpectActionIs {
    NOT_DIRTIED {
      @Override
      boolean actuallyClean() {
        return true;
      }
    },
    DIRTIED_BUT_VERIFIED_CLEAN {
      @Override
      boolean dirtied() {
        return true;
      }

      @Override
      boolean actuallyClean() {
        return true;
      }
    },
    // REBUILT_BUT_ACTION_CACHE_HIT,
    // This would be a bug, symptom of a skyframe-aware action that doesn't bypass the action cache
    // and is incorrectly regarded as an action cache hit when its inputs stayed the same but its
    // "skyframe dependencies" changed.
    REEXECUTED {
      @Override
      boolean dirtied() {
        return true;
      }

      @Override
      boolean reexecuted() {
        return true;
      }
    };

    // Base implementations: constants override the flags that apply to them.
    boolean dirtied() {
      return false;
    }

    boolean actuallyClean() {
      return false;
    }

    boolean reexecuted() {
      return false;
    }
  }

  /** Applies the requested mutation to {@code file} and invalidates its Skyframe file state. */
  private void maybeChangeFile(Artifact file, ChangeArtifact changeRequest) throws Exception {
    if (changeRequest == ChangeArtifact.DONT_CHANGE) {
      return;
    }

    if (changeRequest.changeMtime()) {
      // 1000000 should be larger than the filesystem timestamp granularity.
      file.getPath().setLastModifiedTime(file.getPath().getLastModifiedTime() + 1000000);
      tsgm.waitForTimestampGranularity(reporter.getOutErr());
    }

    if (changeRequest.changeContent()) {
      appendToFile(file.getPath());
    }

    // Invalidate the file state value to inform Skyframe that the file may have changed.
    // This will also invalidate the action execution value.
    differencer.invalidate(
        ImmutableList.of(
            FileStateValue.key(
                RootedPath.toRootedPath(file.getRoot().getPath(), file.getRootRelativePath()))));
  }

  /**
   * Builds the action's output twice, mutating its input and/or running {@code betweenBuilds} in
   * between, then asserts the action node was dirtied/re-executed as {@code expectActionIs} says.
   */
  private void assertActionExecutions(
      ExecutionCountingActionFactory actionFactory,
      ChangeArtifact changeActionInput,
      Callable<Void> betweenBuilds,
      ExpectActionIs expectActionIs)
      throws Exception {
    // Set up the action's input, output, owner and most importantly the execution counter.
    Artifact actionInput = createSourceArtifact("foo/action-input.txt");
    Artifact actionOutput = createDerivedArtifact("foo/action-output.txt");
    AtomicInteger executionCounter = new AtomicInteger(0);

    scratch.file(actionInput.getPath().getPathString(), "foo");
    // Generating actions of artifacts are found by looking them up in the graph. The lookup value
    // must be present in the graph before execution.
    Action action = actionFactory.create(actionInput, actionOutput, executionCounter);
    registerAction(action);

    // Build the output for the first time.
    builder.buildArtifacts(
        reporter,
        ImmutableSet.of(actionOutput),
        null,
        null,
        null,
        null,
        executor,
        null,
        false,
        null,
        null);

    // Sanity check that our invalidation receiver is working correctly. We'll rely on it again.
    SkyKey actionKey = ActionExecutionValue.key(action);
    TrackingEvaluationProgressReceiver.EvaluatedEntry evaluatedAction =
        invalidationReceiver.getEvalutedEntry(actionKey);
    assertThat(evaluatedAction).isNotNull();
    SkyValue actionValue = evaluatedAction.value;

    // Mutate the action input if requested.
    maybeChangeFile(actionInput, changeActionInput);

    // Execute user code before next build.
    betweenBuilds.call();

    // Rebuild the output.
    invalidationReceiver.reset();
    builder.buildArtifacts(
        reporter,
        ImmutableSet.of(actionOutput),
        null,
        null,
        null,
        null,
        executor,
        null,
        false,
        null,
        null);

    if (expectActionIs.dirtied()) {
      assertThat(invalidationReceiver.wasInvalidated(actionKey)).isTrue();

      TrackingEvaluationProgressReceiver.EvaluatedEntry newEntry =
          invalidationReceiver.getEvalutedEntry(actionKey);
      assertThat(newEntry).isNotNull();
      if (expectActionIs.actuallyClean()) {
        // Action was dirtied but verified clean.
        assertThat(newEntry.state).isEqualTo(EvaluationState.CLEAN);
        assertThat(newEntry.value).isEqualTo(actionValue);
      } else {
        // Action was dirtied and rebuilt. It was either reexecuted or was an action cache hit,
        // doesn't matter here.
        assertThat(newEntry.state).isEqualTo(EvaluationState.BUILT);
        assertThat(newEntry.value).isNotEqualTo(actionValue);
      }
    } else {
      // Action was not dirtied.
      assertThat(invalidationReceiver.wasInvalidated(actionKey)).isFalse();
    }

    // Assert that the action was executed the right number of times. Whether the action execution
    // function was called again is up for the test method to verify.
    assertThat(executionCounter.get()).isEqualTo(expectActionIs.reexecuted() ?
        2 : 1);
  }

  /** Creates a file under the workspace root for the test action to depend on via Skyframe. */
  private RootedPath createSkyframeDepOfAction() throws Exception {
    scratch.file(rootDirectory.getRelative("action.dep").getPathString(), "blah");
    return RootedPath.toRootedPath(rootDirectory, new PathFragment("action.dep"));
  }

  /** Appends a few bytes to {@code path}, changing its content (and size). */
  private void appendToFile(Path path) throws Exception {
    try (OutputStream stm = path.getOutputStream(/*append=*/ true)) {
      stm.write("blah".getBytes(StandardCharsets.UTF_8));
    }
  }

  @Test
  public void testCacheCheckingActionWithContentChangingInput() throws Exception {
    assertActionWithContentChangingInput(/* unconditionalExecution */ false);
  }

  @Test
  public void testCacheBypassingActionWithContentChangingInput() throws Exception {
    assertActionWithContentChangingInput(/* unconditionalExecution */ true);
  }

  private void assertActionWithContentChangingInput(final boolean unconditionalExecution)
      throws Exception {
    // Assert that a simple, non-skyframe-aware action is executed twice
    // if its input's content changes between builds.
    assertActionExecutions(
        new ExecutionCountingActionFactory() {
          @Override
          public ExecutionCountingAction create(
              Artifact input, Artifact output, AtomicInteger executionCounter) {
            return unconditionalExecution
                ? new ExecutionCountingCacheBypassingAction(input, output, executionCounter)
                : new ExecutionCountingAction(input, output, executionCounter);
          }
        },
        ChangeArtifact.CHANGE_MTIME_AND_CONTENT,
        Callables.<Void>returning(null),
        ExpectActionIs.REEXECUTED);
  }

  @Test
  public void testCacheCheckingActionWithMtimeChangingInput() throws Exception {
    assertActionWithMtimeChangingInput(/* unconditionalExecution */ false);
  }

  @Test
  public void testCacheBypassingActionWithMtimeChangingInput() throws Exception {
    assertActionWithMtimeChangingInput(/* unconditionalExecution */ true);
  }

  private void assertActionWithMtimeChangingInput(final boolean unconditionalExecution)
      throws Exception {
    // Assert that a simple, non-skyframe-aware action is executed only once
    // if its input's mtime changes but its contents stay the same between builds.
assertActionExecutions( new ExecutionCountingActionFactory() { @Override public ExecutionCountingAction create( Artifact input, Artifact output, AtomicInteger executionCounter) { return unconditionalExecution ? new ExecutionCountingCacheBypassingAction(input, output, executionCounter) : new ExecutionCountingAction(input, output, executionCounter); } }, ChangeArtifact.CHANGE_MTIME, Callables.<Void>returning(null), ExpectActionIs.DIRTIED_BUT_VERIFIED_CLEAN); } public void testActionWithNonChangingInput(final boolean unconditionalExecution) throws Exception { // Assert that a simple, non-skyframe-aware action is executed only once // if its input does not change at all between builds. assertActionExecutions( new ExecutionCountingActionFactory() { @Override public ExecutionCountingAction create( Artifact input, Artifact output, AtomicInteger executionCounter) { return unconditionalExecution ? new ExecutionCountingCacheBypassingAction(input, output, executionCounter) : new ExecutionCountingAction(input, output, executionCounter); } }, ChangeArtifact.DONT_CHANGE, Callables.<Void>returning(null), ExpectActionIs.NOT_DIRTIED); } private void assertActionWithMaybeChangingInputAndChangingSkyframeDeps( ChangeArtifact changeInputFile) throws Exception { final RootedPath depPath = createSkyframeDepOfAction(); final SkyKey skyframeDep = FileStateValue.key(depPath); // Assert that an action-cache-check-bypassing action is executed twice if its skyframe deps // change while its input does not. The skyframe dependency is established by making the action // skyframe-aware and updating the value between builds. 
    assertActionExecutions(
        new ExecutionCountingActionFactory() {
          @Override
          public ExecutionCountingAction create(
              Artifact input, Artifact output, AtomicInteger executionCounter) {
            return new SkyframeAwareExecutionCountingAction(
                input, output, executionCounter, skyframeDep);
          }
        },
        changeInputFile,
        new Callable<Void>() {
          @Override
          public Void call() throws Exception {
            // Invalidate the dependency and change what its value will be in the next build. This
            // should enforce rebuilding of the action.
            appendToFile(depPath.asPath());
            differencer.invalidate(ImmutableList.of(skyframeDep));
            return null;
          }
        },
        ExpectActionIs.REEXECUTED);
  }

  @Test
  public void testActionWithNonChangingInputButChangingSkyframeDeps() throws Exception {
    assertActionWithMaybeChangingInputAndChangingSkyframeDeps(ChangeArtifact.DONT_CHANGE);
  }

  @Test
  public void testActionWithChangingInputMtimeAndChangingSkyframeDeps() throws Exception {
    assertActionWithMaybeChangingInputAndChangingSkyframeDeps(ChangeArtifact.CHANGE_MTIME);
  }

  @Test
  public void testActionWithChangingInputAndChangingSkyframeDeps() throws Exception {
    assertActionWithMaybeChangingInputAndChangingSkyframeDeps(
        ChangeArtifact.CHANGE_MTIME_AND_CONTENT);
  }

  @Test
  public void testActionWithNonChangingInputAndNonChangingSkyframeDeps() throws Exception {
    final SkyKey skyframeDep = FileStateValue.key(createSkyframeDepOfAction());

    // Assert that an action-cache-check-bypassing action is executed only once if neither its input
    // nor its Skyframe dependency changes between builds.
    assertActionExecutions(
        new ExecutionCountingActionFactory() {
          @Override
          public ExecutionCountingAction create(
              Artifact input, Artifact output, AtomicInteger executionCounter) {
            return new SkyframeAwareExecutionCountingAction(
                input, output, executionCounter, skyframeDep);
          }
        },
        ChangeArtifact.DONT_CHANGE,
        new Callable<Void>() {
          @Override
          public Void call() throws Exception {
            // Invalidate the dependency but leave its value up-to-date, so the action should not
            // be rebuilt.
differencer.invalidate(ImmutableList.of(skyframeDep)); return null; } }, ExpectActionIs.DIRTIED_BUT_VERIFIED_CLEAN); } private abstract static class SingleOutputAction extends AbstractAction { SingleOutputAction(@Nullable Artifact input, Artifact output) { super( ActionsTestUtil.NULL_ACTION_OWNER, input == null ? ImmutableList.<Artifact>of() : ImmutableList.of(input), ImmutableList.of(output)); } protected static final class Buffer { final int size; final byte[] data; Buffer(byte[] data, int size) { this.data = data; this.size = size; } } protected Buffer readInput() throws ActionExecutionException { byte[] input = new byte[100]; int inputLen = 0; try (InputStream in = getPrimaryInput().getPath().getInputStream()) { inputLen = in.read(input, 0, input.length); } catch (IOException e) { throw new ActionExecutionException(e, this, false); } return new Buffer(input, inputLen); } protected void writeOutput(@Nullable Buffer buf, String data) throws ActionExecutionException { try (OutputStream out = getPrimaryOutput().getPath().getOutputStream()) { if (buf != null) { out.write(buf.data, 0, buf.size); } out.write(data.getBytes(StandardCharsets.UTF_8), 0, data.length()); } catch (IOException e) { throw new ActionExecutionException(e, this, false); } } @Override public String getMnemonic() { return "MockActionMnemonic"; } @Override protected String computeKey() { return new Fingerprint().addInt(42).hexDigestAndReset(); } @Override public ResourceSet estimateResourceConsumption(Executor executor) { return ResourceSet.ZERO; } } private abstract static class SingleOutputSkyframeAwareAction extends SingleOutputAction implements SkyframeAwareAction { SingleOutputSkyframeAwareAction(@Nullable Artifact input, Artifact output) { super(input, output); } @Override public boolean executeUnconditionally() { return true; } @Override public boolean isVolatile() { return true; } } /** * Regression test to avoid a potential race condition in {@link ActionExecutionFunction}. 
   *
   * <p>The test ensures that when ActionExecutionFunction executes a Skyframe-aware action
   * (implementor of {@link SkyframeAwareAction}), ActionExecutionFunction first requests the inputs
   * of the action and ensures they are built before requesting any of its Skyframe dependencies.
   *
   * <p>This strict ordering is very important to avoid the race condition, which could arise if the
   * compute method were too eager to request all dependencies: request input files but even if some
   * are missing, request also the skyframe-dependencies. The race is described in this method's
   * body.
   */
  @Test
  public void testRaceConditionBetweenInputAcquisitionAndSkyframeDeps() throws Exception {
    // Sequence of events on threads A and B, showing SkyFunctions and requested SkyKeys, leading
    // to an InconsistentFilesystemException:
    //
    // _______________[Thread A]_________________|_______________[Thread B]_________________
    // ActionExecutionFunction(gen2_action:      | idle
    //   genfiles/gen1 -> genfiles/foo/bar/gen2) |
    //   ARTIFACT:genfiles/gen1                  |
    //   MOCK_VALUE:dummy_argument               |
    //   env.valuesMissing():yes ==> return      |
    //                                           |
    // ArtifactFunction(genfiles/gen1)           | MockFunction()
    //   CONFIGURED_TARGET://foo:gen1            |   FILE:genfiles/foo
    //   ACTION_EXECUTION:gen1_action            |   env.valuesMissing():yes ==> return
    //   env.valuesMissing():yes ==> return      |
    //                                           | FileFunction(genfiles/foo)
    // ActionExecutionFunction(gen1_action)      |   FILE:genfiles
    //   ARTIFACT:genfiles/gen0                  |   env.valuesMissing():yes ==> return
    //   env.valuesMissing():yes ==> return      |
    //                                           | FileFunction(genfiles)
    // ArtifactFunction(genfiles/gen0)           |   FILE_STATE:genfiles
    //   CONFIGURED_TARGET://foo:gen0            |   env.valuesMissing():yes ==> return
    //   ACTION_EXECUTION:gen0_action            |
    //   env.valuesMissing():yes ==> return      | FileStateFunction(genfiles)
    //                                           |   stat genfiles
    // ActionExecutionFunction(gen0_action)      |   return FileStateValue:non-existent
    //   create output directory: genfiles       |
    //   working                                 | FileFunction(genfiles/foo)
    //                                           |   FILE:genfiles
    //                                           |   FILE_STATE:genfiles/foo
    //                                           |   env.valuesMissing():yes ==> return
    //                                           |
    //                                           | FileStateFunction(genfiles/foo)
    //                                           |   stat genfiles/foo
    //                                           |   return FileStateValue:non-existent
    //                                           |
    // done, created genfiles/gen0               | FileFunction(genfiles/foo)
    // return ActionExecutionValue(gen0_action)  |   FILE:genfiles
    //                                           |   FILE_STATE:genfiles/foo
    // ArtifactFunction(genfiles/gen0)           |   return FileValue(genfiles/foo:non-existent)
    //   CONFIGURED_TARGET://foo:gen0            |
    //   ACTION_EXECUTION:gen0_action            | MockFunction()
    //   return ArtifactValue(genfiles/gen0)     |   FILE:genfiles/foo
    //                                           |   FILE:genfiles/foo/bar/gen1
    // ActionExecutionFunction(gen1_action)      |   env.valuesMissing():yes ==> return
    //   ARTIFACT:genfiles/gen0                  |
    //   create output dir: genfiles/foo/bar     | FileFunction(genfiles/foo/bar/gen1)
    //   done, created genfiles/foo/bar/gen1     |   FILE:genfiles/foo/bar
    // return ActionExecutionValue(gen1_action)  |   env.valuesMissing():yes ==> return
    //                                           |
    // idle                                      | FileFunction(genfiles/foo/bar)
    //                                           |   FILE:genfiles/foo
    //                                           |   FILE_STATE:genfiles/foo/bar
    //                                           |   env.valuesMissing():yes ==> return
    //                                           |
    //                                           | FileStateFunction(genfiles/foo/bar)
    //                                           |   stat genfiles/foo/bar
    //                                           |   return FileStateValue:directory
    //                                           |
    //                                           | FileFunction(genfiles/foo/bar)
    //                                           |   FILE:genfiles/foo
    //                                           |   FILE_STATE:genfiles/foo/bar
    //                                           |   throw InconsistentFilesystemException:
    //                                           |       genfiles/foo doesn't exist but
    //                                           |       genfiles/foo/bar does!
    Artifact genFile1 = createDerivedArtifact("foo/bar/gen1.txt");
    Artifact genFile2 = createDerivedArtifact("gen2.txt");

    // First action: no inputs, writes "gen1" to genfiles/foo/bar/gen1.txt.
    registerAction(
        new SingleOutputAction(null, genFile1) {
          @Override
          public void execute(ActionExecutionContext actionExecutionContext)
              throws ActionExecutionException, InterruptedException {
            writeOutput(null, "gen1");
          }
        });

    // Second action: skyframe-aware, consumes gen1 and appends "gen2". By the time its Skyframe
    // dependencies are established, its inputs must already be built, so valuesMissing() must be
    // false — this is the ordering property the test verifies.
    registerAction(
        new SingleOutputSkyframeAwareAction(genFile1, genFile2) {
          @Override
          public void establishSkyframeDependencies(Environment env) throws ExceptionBase {
            assertThat(env.valuesMissing()).isFalse();
          }

          @Override
          public void execute(ActionExecutionContext actionExecutionContext)
              throws ActionExecutionException, InterruptedException {
            writeOutput(readInput(), "gen2");
          }
        });

    builder.buildArtifacts(
        reporter,
        ImmutableSet.of(genFile2),
        null,
        null,
        null,
        null,
        executor,
        null,
        false,
        null,
        null);
  }
}
package mezz.jei.ingredients;

import com.google.common.collect.Multimap;
import mezz.jei.api.ingredients.IIngredientType;
import mezz.jei.api.ingredients.ITypedIngredient;
import mezz.jei.config.sorting.IngredientTypeSortingConfig;
import mezz.jei.config.sorting.ModNameSortingConfig;
import mezz.jei.gui.ingredients.IListElement;
import net.minecraft.core.HolderSet.ListBacked;
import net.minecraft.core.Registry;
import net.minecraft.resources.ResourceLocation;
import net.minecraft.tags.TagKey;
import net.minecraft.world.entity.EquipmentSlot;
import net.minecraft.world.entity.ai.attributes.Attribute;
import net.minecraft.world.entity.ai.attributes.AttributeModifier;
import net.minecraft.world.entity.ai.attributes.Attributes;
import net.minecraft.world.item.ArmorItem;
import net.minecraft.world.item.Item;
import net.minecraft.world.item.ItemStack;
import net.minecraft.world.item.Tier;
import net.minecraft.world.item.TieredItem;
import net.minecraftforge.common.TierSortingRegistry;
import net.minecraftforge.common.ToolAction;
import net.minecraftforge.common.ToolActions;

import java.util.Collection;
import java.util.Comparator;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

/**
 * Builds {@link Comparator}s over {@link IListElementInfo} entries for the
 * ingredient list, one comparator per {@link IngredientSortStage}, and chains
 * them together in the order the stages are configured.
 *
 * <p>Note on ordering: most stage comparators are built from an ascending key
 * and then {@code reversed()}, so "better" items (tools, weapons, armor,
 * higher tier/damage/durability) sort first.
 */
public class IngredientSorterComparators {
	private final IngredientFilter ingredientFilter;
	private final RegisteredIngredients registeredIngredients;
	private final ModNameSortingConfig modNameSortingConfig;
	private final IngredientTypeSortingConfig ingredientTypeSortingConfig;

	public IngredientSorterComparators(
		IngredientFilter ingredientFilter,
		RegisteredIngredients registeredIngredients,
		ModNameSortingConfig modNameSortingConfig,
		IngredientTypeSortingConfig ingredientTypeSortingConfig
	) {
		this.ingredientFilter = ingredientFilter;
		this.registeredIngredients = registeredIngredients;
		this.modNameSortingConfig = modNameSortingConfig;
		this.ingredientTypeSortingConfig = ingredientTypeSortingConfig;
	}

	/**
	 * Chains one comparator per configured stage, in stage order.
	 * Falls back to {@link #getDefault()} when the stage list is empty.
	 */
	public Comparator<IListElementInfo<?>> getComparator(List<IngredientSortStage> ingredientSorterStages) {
		return ingredientSorterStages.stream()
			.map(this::getComparator)
			.reduce(Comparator::thenComparing)
			.orElseGet(this::getDefault);
	}

	/** Maps a single sort stage to its comparator implementation. */
	public Comparator<IListElementInfo<?>> getComparator(IngredientSortStage ingredientSortStage) {
		return switch (ingredientSortStage) {
			case ALPHABETICAL -> getAlphabeticalComparator();
			case CREATIVE_MENU -> getCreativeMenuComparator();
			case INGREDIENT_TYPE -> getIngredientTypeComparator();
			case MOD_NAME -> getModNameComparator();
			case TOOL_TYPE -> getToolsComparator();
			case TAG -> getTagComparator();
			case WEAPON_DAMAGE -> getWeaponDamageComparator();
			case ARMOR -> getArmorComparator();
			case MAX_DURABILITY -> getMaxDurabilityComparator();
		};
	}

	/** Default ordering used when no stages are configured: mod name, then type, then creative-menu order. */
	public Comparator<IListElementInfo<?>> getDefault() {
		return getModNameComparator()
			.thenComparing(getIngredientTypeComparator())
			.thenComparing(getCreativeMenuComparator());
	}

	/** Sorts by the element's order index (its position in the creative menu listing). */
	private static Comparator<IListElementInfo<?>> getCreativeMenuComparator() {
		return Comparator.comparingInt(o -> {
			IListElement<?> element = o.getElement();
			return element.getOrderIndex();
		});
	}

	// NOTE(review): this uses the display name's natural (case-sensitive) String order.
	private static Comparator<IListElementInfo<?>> getAlphabeticalComparator() {
		return Comparator.comparing(IListElementInfo::getName);
	}

	/** Orders by mod name using the user-configurable mod-name sorting config. */
	private Comparator<IListElementInfo<?>> getModNameComparator() {
		Set<String> modNames = this.ingredientFilter.getModNamesForSorting();
		return this.modNameSortingConfig.getComparatorFromMappedValues(modNames);
	}

	/** Orders by ingredient type (item, fluid, ...) using the type sorting config. */
	private Comparator<IListElementInfo<?>> getIngredientTypeComparator() {
		Collection<IIngredientType<?>> ingredientTypes = this.registeredIngredients.getIngredientTypes();
		Set<String> ingredientTypeStrings = ingredientTypes.stream()
			.map(IngredientTypeSortingConfig::getIngredientTypeString)
			.collect(Collectors.toSet());
		return this.ingredientTypeSortingConfig.getComparatorFromMappedValues(ingredientTypeStrings);
	}

	/** Highest max-damage (durability) first. */
	private static Comparator<IListElementInfo<?>> getMaxDurabilityComparator() {
		Comparator<IListElementInfo<?>> maxDamage = Comparator.comparing(o -> getItemStack(o).getMaxDamage());
		return maxDamage.reversed();
	}

	/** Tagged items first, then grouped by the (most popular) tag path. */
	private Comparator<IListElementInfo<?>> getTagComparator() {
		Comparator<IListElementInfo<?>> isTagged = Comparator.comparing(this::hasTag);
		Comparator<IListElementInfo<?>> tag = Comparator.comparing(this::getTagForSorting);
		return isTagged.reversed().thenComparing(tag);
	}

	/** Tools first (grouped by tool action name), then by descending tier, then descending durability. */
	private static Comparator<IListElementInfo<?>> getToolsComparator() {
		Comparator<IListElementInfo<?>> toolType = Comparator.comparing(o -> getToolClass(getItemStack(o)));
		Comparator<IListElementInfo<?>> tier = Comparator.comparing(o -> getTier(getItemStack(o)));
		Comparator<IListElementInfo<?>> maxDamage = Comparator.comparing(o -> getToolDurability(getItemStack(o)));
		return toolType.reversed() // Sort non-tools after the tools.
			.thenComparing(tier.reversed())
			.thenComparing(maxDamage.reversed());
	}

	/** Weapons first, then by descending attack damage, attack speed, and durability. */
	private static Comparator<IListElementInfo<?>> getWeaponDamageComparator() {
		Comparator<IListElementInfo<?>> isWeaponComp = Comparator.comparing(o -> isWeapon(getItemStack(o)));
		Comparator<IListElementInfo<?>> attackDamage = Comparator.comparing(o -> getWeaponDamage(getItemStack(o)));
		Comparator<IListElementInfo<?>> attackSpeed = Comparator.comparing(o -> getWeaponSpeed(getItemStack(o)));
		Comparator<IListElementInfo<?>> maxDamage = Comparator.comparing(o -> getWeaponDurability(getItemStack(o)));
		return isWeaponComp.reversed()
			.thenComparing(attackDamage.reversed())
			.thenComparing(attackSpeed.reversed())
			.thenComparing(maxDamage.reversed());
	}

	/** Armor first, then by slot, defense, toughness, and durability (all descending). */
	private static Comparator<IListElementInfo<?>> getArmorComparator() {
		Comparator<IListElementInfo<?>> isArmorComp = Comparator.comparing(o -> isArmor(getItemStack(o)));
		Comparator<IListElementInfo<?>> armorSlot = Comparator.comparing(o -> getArmorSlotIndex(getItemStack(o)));
		Comparator<IListElementInfo<?>> armorDamage = Comparator.comparing(o -> getArmorDamageReduce(getItemStack(o)));
		Comparator<IListElementInfo<?>> armorToughness = Comparator.comparing(o -> getArmorToughness(getItemStack(o)));
		Comparator<IListElementInfo<?>> maxDamage = Comparator.comparing(o -> getArmorDurability(getItemStack(o)));
		return isArmorComp.reversed()
			.thenComparing(armorSlot.reversed())
			.thenComparing(armorDamage.reversed())
			.thenComparing(armorToughness.reversed())
			.thenComparing(maxDamage.reversed());
	}

	/**
	 * Returns the item's position in Forge's sorted tier list, or -1 for
	 * non-tiered items (so they sort before/after consistently).
	 */
	private static int getTier(ItemStack itemStack) {
		Item item = itemStack.getItem();
		if (item instanceof TieredItem tieredItem) {
			Tier tier = tieredItem.getTier();
			List<Tier> sortedTiers = TierSortingRegistry.getSortedTiers();
			return sortedTiers.indexOf(tier);
		}
		return -1;
	}

	/** An item is a "tool" if it can perform any currently-known ToolAction. */
	private static boolean isTool(ItemStack itemStack) {
		return getToolActions(itemStack).stream()
			.anyMatch(itemStack::canPerformAction);
	}

	/** Durability for tools only; 0 for everything else. */
	private static int getToolDurability(ItemStack itemStack) {
		if (!isTool(itemStack)) {
			return 0;
		}
		return itemStack.getMaxDamage();
	}

	private static boolean isWeapon(ItemStack itemStack) {
		//Sort Weapons apart from tools, armor, and other random things..
		//AttackDamage also filters out Tools and Armor.  Anything that deals extra damage is a weapon.
		return getWeaponDamage(itemStack) > 0;
	}

	/** Max ATTACK_DAMAGE modifier in the main hand; 0 for tools and armor so they don't count as weapons. */
	private static double getWeaponDamage(ItemStack itemStack) {
		if (isTool(itemStack) || isArmor(itemStack)) {
			return 0;
		}
		Multimap<Attribute, AttributeModifier> multimap = itemStack.getAttributeModifiers(EquipmentSlot.MAINHAND);
		return max(multimap, Attributes.ATTACK_DAMAGE);
	}

	/** Max ATTACK_SPEED modifier in the main hand; 0 for non-weapons. */
	private static double getWeaponSpeed(ItemStack itemStack) {
		if (!isWeapon(itemStack)) {
			return 0;
		}
		Multimap<Attribute, AttributeModifier> multimap = itemStack.getAttributeModifiers(EquipmentSlot.MAINHAND);
		return max(multimap, Attributes.ATTACK_SPEED);
	}

	/** Largest modifier amount registered for the given attribute, or 0 when none. */
	private static double max(Multimap<Attribute, AttributeModifier> multimap, Attribute attribute) {
		Collection<AttributeModifier> modifiers = multimap.get(attribute);
		return max(modifiers);
	}

	private static double max(Collection<AttributeModifier> modifiers) {
		return modifiers.stream()
			.mapToDouble(AttributeModifier::getAmount)
			.max()
			.orElse(0);
	}

	/** Durability for weapons only; 0 for everything else. */
	private static int getWeaponDurability(ItemStack itemStack) {
		if (isWeapon(itemStack)) {
			return itemStack.getMaxDamage();
		}
		return 0;
	}

	private static boolean isArmor(ItemStack itemStack) {
		Item item = itemStack.getItem();
		return item instanceof ArmorItem;
	}

	/** Equipment slot filter flag for armor; 0 for non-armor. */
	private static int getArmorSlotIndex(ItemStack itemStack) {
		Item item = itemStack.getItem();
		if (item instanceof ArmorItem armorItem) {
			return armorItem.getSlot().getFilterFlag();
		}
		return 0;
	}

	/** Armor defense points; 0 for non-armor. */
	private static int getArmorDamageReduce(ItemStack itemStack) {
		Item item = itemStack.getItem();
		if (item instanceof ArmorItem armorItem) {
			return armorItem.getDefense();
		}
		return 0;
	}

	/** Armor toughness; 0 for non-armor. */
	private static float getArmorToughness(ItemStack itemStack) {
		Item item = itemStack.getItem();
		if (item instanceof ArmorItem armorItem) {
			return armorItem.getToughness();
		}
		return 0;
	}

	/** Durability for armor only; 0 for everything else. */
	private static int getArmorDurability(ItemStack itemStack) {
		if (isArmor(itemStack)) {
			return itemStack.getMaxDamage();
		}
		return 0;
	}

	/**
	 * Sort key for the TAG stage: the path of the element's tag with the most
	 * members, or "" when the element has no usable tags.
	 */
	private String getTagForSorting(IListElementInfo<?> elementInfo) {
		Collection<ResourceLocation> tagIds = elementInfo.getTagIds(registeredIngredients);
		// Choose the most popular tag it has.
		return tagIds.stream()
			.max(Comparator.comparing(IngredientSorterComparators::tagCount))
			.map(ResourceLocation::getPath)
			.orElse("");
	}

	/** Number of items in the given item tag; 0 for unknown or blacklisted tags. */
	private static int tagCount(ResourceLocation tagId) {
		//TODO: make a tag blacklist.
		if (tagId.toString().equals("itemfilters:check_nbt")) {
			return 0;
		}
		TagKey<Item> tagKey = TagKey.create(Registry.ITEM_REGISTRY, tagId);
		return Registry.ITEM.getTag(tagKey)
			.map(ListBacked::size)
			.orElse(0);
	}

	private boolean hasTag(IListElementInfo<?> elementInfo) {
		return !getTagForSorting(elementInfo).isEmpty();
	}

	/** Name of the first ToolAction the stack can perform, or "" for non-tools/empty stacks. */
	private static String getToolClass(ItemStack itemStack) {
		if (itemStack.isEmpty()) {
			return "";
		}
		return getToolActions(itemStack).stream()
			.filter(itemStack::canPerformAction)
			.findFirst()
			.map(ToolAction::name)
			.orElse("");
	}

	private static Collection<ToolAction> getToolActions(ItemStack itemStack) {
		// HACK: ensure the actions for the itemStack get loaded before we call ToolAction.getActions(),
		// so the ToolAction.getActions() map is populated with whatever actions the itemStack uses.
		itemStack.canPerformAction(ToolActions.AXE_DIG);
		return ToolAction.getActions();
	}

	/** Unwraps the element's typed ingredient as an ItemStack, or EMPTY for non-item ingredients. */
	public static <V> ItemStack getItemStack(IListElementInfo<V> ingredientInfo) {
		ITypedIngredient<V> ingredient = ingredientInfo.getTypedIngredient();
		if (ingredient.getIngredient() instanceof ItemStack itemStack) {
			return itemStack;
		}
		return ItemStack.EMPTY;
	}
}
/*
 * Copyright 2020 ThoughtWorks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.thoughtworks.go.server.service;

import com.google.common.collect.Streams;
import com.thoughtworks.go.config.*;
import com.thoughtworks.go.config.exceptions.*;
import com.thoughtworks.go.config.update.AgentUpdateValidator;
import com.thoughtworks.go.config.update.AgentsUpdateValidator;
import com.thoughtworks.go.domain.AgentInstance;
import com.thoughtworks.go.domain.AllConfigErrors;
import com.thoughtworks.go.domain.ConfigErrors;
import com.thoughtworks.go.domain.NullAgentInstance;
import com.thoughtworks.go.listener.AgentChangeListener;
import com.thoughtworks.go.listener.DatabaseEntityChangeListener;
import com.thoughtworks.go.remote.AgentIdentifier;
import com.thoughtworks.go.security.Registration;
import com.thoughtworks.go.server.domain.AgentInstances;
import com.thoughtworks.go.server.domain.ElasticAgentMetadata;
import com.thoughtworks.go.server.domain.Username;
import com.thoughtworks.go.server.messaging.notifications.AgentStatusChangeNotifier;
import com.thoughtworks.go.server.persistence.AgentDao;
import com.thoughtworks.go.server.ui.AgentViewModel;
import com.thoughtworks.go.server.ui.AgentsViewModel;
import com.thoughtworks.go.server.util.UuidGenerator;
import com.thoughtworks.go.serverhealth.HealthStateScope;
import com.thoughtworks.go.serverhealth.HealthStateType;
import com.thoughtworks.go.serverhealth.ServerHealthService;
import com.thoughtworks.go.serverhealth.ServerHealthState;
import com.thoughtworks.go.util.SystemEnvironment;
import com.thoughtworks.go.util.TriState;
import com.thoughtworks.go.utils.Timeout;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.util.LinkedMultiValueMap;

import java.util.*;
import java.util.function.Function;

import static com.google.common.base.Strings.isNullOrEmpty;
import static com.thoughtworks.go.CurrentGoCDVersion.docsUrl;
import static com.thoughtworks.go.domain.AgentConfigStatus.Pending;
import static com.thoughtworks.go.domain.AgentInstance.createFromAgent;
import static com.thoughtworks.go.util.CommaSeparatedString.append;
import static com.thoughtworks.go.util.CommaSeparatedString.remove;
import static com.thoughtworks.go.util.ExceptionUtils.bombIfNull;
import static com.thoughtworks.go.util.TriState.TRUE;
import static java.lang.String.format;
import static java.util.Arrays.asList;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static java.util.stream.Collectors.*;
import static java.util.stream.StreamSupport.stream;
import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;
import static org.apache.commons.collections4.ListUtils.union;
import static org.springframework.util.CollectionUtils.isEmpty;

/**
 * Service that manages GoCD agents: registration, attribute/environment/resource
 * updates, enable/disable, deletion, and keeping the in-memory
 * {@link AgentInstances} cache in sync with the {@link AgentDao} database state.
 *
 * <p>It is also a {@link DatabaseEntityChangeListener} for {@link Agent} rows,
 * so DB changes flow back into the cache and out to registered
 * {@link AgentChangeListener}s.
 */
@Service
@SuppressWarnings("deprecation")
public class AgentService implements DatabaseEntityChangeListener<Agent> {
    private final SystemEnvironment systemEnvironment;
    private final UuidGenerator uuidGenerator;
    private final ServerHealthService serverHealthService;
    private AgentStatusChangeNotifier agentStatusChangeNotifier;
    private final AgentDao agentDao;

    // In-memory cache of all agent instances, kept in sync with the DB.
    private AgentInstances agentInstances;

    // Listeners notified on agent change/delete events.
    private Set<AgentChangeListener> listeners = new HashSet<>();

    private static final Logger LOGGER = LoggerFactory.getLogger(AgentService.class);

    @Autowired
    public AgentService(SystemEnvironment systemEnvironment, AgentDao agentDao, UuidGenerator uuidGenerator,
                        ServerHealthService serverHealthService, AgentStatusChangeNotifier agentStatusChangeNotifier) {
        this(systemEnvironment, null, agentDao, uuidGenerator, serverHealthService, agentStatusChangeNotifier);
        this.agentInstances = new AgentInstances(agentStatusChangeNotifier);
    }

    // Package-private constructor used by tests to inject a prepared AgentInstances cache.
    AgentService(SystemEnvironment systemEnvironment, AgentInstances agentInstances, AgentDao agentDao,
                 UuidGenerator uuidGenerator, ServerHealthService serverHealthService,
                 AgentStatusChangeNotifier agentStatusChangeNotifier) {
        this.systemEnvironment = systemEnvironment;
        this.agentInstances = agentInstances;
        this.agentDao = agentDao;
        this.uuidGenerator = uuidGenerator;
        this.serverHealthService = serverHealthService;
        this.agentStatusChangeNotifier = agentStatusChangeNotifier;
    }

    /** Loads all agents from the DB into the cache and subscribes to DB change events. */
    public void initialize() {
        this.syncAgentInstanceCacheFromAgentsInDB();
        agentDao.registerDatabaseAgentEntityChangeListener(this);
    }

    /**
     * not for use externally, created for testing whether listeners are correctly registered or not
     */
    void setAgentChangeListeners(Set<AgentChangeListener> setOfListener) {
        if (setOfListener == null) {
            this.listeners = new HashSet<>();
        } else {
            this.listeners = setOfListener;
        }
    }

    public AgentInstances getAgentInstances() {
        return agentInstances;
    }

    /** Snapshot of all cached agents as an {@link Agents} collection. */
    public Agents agents() {
        return stream(agentInstances.spliterator(), false)
                .map(AgentInstance::getAgent)
                .collect(toCollection(Agents::new));
    }

    /** Maps every cached agent instance to its sorted environment names. */
    public Map<AgentInstance, Collection<String>> getAgentInstanceToSortedEnvMap() {
        return Streams.stream(agentInstances.getAllAgents()).collect(toMap(Function.identity(), AgentService::getSortedEnvironmentList));
    }

    public AgentsViewModel getRegisteredAgentsViewModel() {
        return toAgentViewModels(agentInstances.findRegisteredAgents());
    }

    public AgentInstances findRegisteredAgents() {
        return agentInstances.findRegisteredAgents();
    }

    /**
     * Updates a single agent's hostname/resources/environments/enabled state.
     *
     * @return the refreshed AgentInstance, or null when no operation was requested
     *         (validateAnyOperationPerformedOnAgent throws rather than returning false,
     *         so the null branch is effectively unreachable in practice).
     * @throws RecordNotFoundException when no agent with the uuid exists
     */
    public AgentInstance updateAgentAttributes(String uuid, String hostname, String resources, String environments, TriState state) {
        AgentInstance agentInstance = agentInstances.findAgent(uuid);
        validateThatAgentExists(agentInstance);

        Agent agent = getPendingAgentOrFromDB(agentInstance);
        if (validateAnyOperationPerformedOnAgent(hostname, environments, resources, state)) {
            new AgentUpdateValidator(agentInstance, state).validate();
            setAgentAttributes(hostname, resources, environments, state, agent);
            saveOrUpdate(agent);
            return createFromAgent(agent, systemEnvironment, agentStatusChangeNotifier);
        }
        return null;
    }

    /**
     * Bulk add/remove of resources and environments and/or enable/disable for the
     * given agent uuids. Pending agents are included only when the state tristate
     * is set (they must be enabled/disabled to become real agents).
     */
    public void bulkUpdateAgentAttributes(List<String> uuids, List<String> resourcesToAdd, List<String> resourcesToRemove,
                                          List<String> envsToAdd, List<String> envsToRemove, TriState state,
                                          EnvironmentConfigService environmentConfigService) {
        if (isAnyOperationPerformedOnBulkAgents(resourcesToAdd, resourcesToRemove, envsToAdd, envsToRemove, state)) {
            AgentsUpdateValidator validator = new AgentsUpdateValidator(agentInstances, uuids, state, resourcesToAdd, resourcesToRemove);
            validator.validate();

            List<Agent> agents = agentDao.getAgentsByUUIDs(uuids);
            if (isTriStateSet(state)) {
                agents.addAll(agentInstances.filterPendingAgents(uuids));
            }

            agents.forEach(agent -> setResourcesEnvsAndState(agent, resourcesToAdd, resourcesToRemove, envsToAdd, envsToRemove, state, environmentConfigService));

            updateIdsAndGenerateCookiesForPendingAgents(agents, state);
            agentDao.bulkUpdateAgents(agents);
        }
    }

    /**
     * Makes the given uuids the complete set of agents associated with the
     * environment: agents not in the list have the env removed, agents in the
     * list have it added.
     */
    public void updateAgentsAssociationOfEnvironment(EnvironmentConfig envConfig, List<String> uuids) {
        if (envConfig == null) {
            return;
        }

        AgentsUpdateValidator validator = new AgentsUpdateValidator(agentInstances, uuids, TRUE, emptyList(), emptyList());
        if (isAnyOperationPerformedOnBulkAgents(emptyList(), emptyList(), singletonList(envConfig.name().toString()), emptyList(), TRUE)) {
            validator.validate();
            List<String> uuidsToAssociate = (uuids == null) ? emptyList() : uuids;

            List<Agent> agents = getAgentsToAddEnvToOrRemoveEnvFrom(envConfig, uuidsToAssociate);
            if (agents.isEmpty()) {
                return;
            }
            agentDao.bulkUpdateAgents(agents);
        }
    }

    /** Adds the environment to one explicit list of agents and removes it from another. */
    public void updateAgentsAssociationOfEnvironment(EnvironmentConfig envConfig, List<String> agentUUIDsToAssociate, List<String> agentUUIDsToRemove) {
        if (envConfig == null) {
            return;
        }

        AgentsUpdateValidator validator = new AgentsUpdateValidator(agentInstances, union(agentUUIDsToAssociate, agentUUIDsToRemove), TRUE, emptyList(), emptyList());
        if (isAnyOperationPerformedOnBulkAgents(emptyList(), emptyList(), singletonList(envConfig.name().toString()), emptyList(), TRUE)) {
            validator.validate();
            List<Agent> agents = getAgentsToAddEnvToOrRemoveEnvFrom(envConfig.name().toString(), agentUUIDsToAssociate, agentUUIDsToRemove);
            if (agents.isEmpty()) {
                return;
            }
            agentDao.bulkUpdateAgents(agents);
        }
    }

    /**
     * Soft-deletes the agents after validating that each exists and is deletable
     * (disabled and not building).
     */
    public void deleteAgents(List<String> uuids) {
        if (validateThatAllAgentsExistAndCanBeDeleted(uuids)) {
            agentDao.bulkSoftDelete(uuids);
        }
    }

    /** Soft-deletes without any existence/deletable checks. */
    public void deleteAgentsWithoutValidations(List<String> uuids) {
        if (!isEmpty(uuids)) {
            agentDao.bulkSoftDelete(uuids);
        }
    }

    /**
     * Applies a heartbeat's runtime info: verifies the cookie, persists an IP
     * change if needed, and refreshes the cached runtime state.
     */
    public void updateRuntimeInfo(AgentRuntimeInfo agentRuntimeInfo) {
        bombIfAgentDoesNotHaveCookie(agentRuntimeInfo);
        bombIfAgentHasDuplicateCookie(agentRuntimeInfo);

        AgentInstance agentInstance = findAgentAndRefreshStatus(agentRuntimeInfo.getUUId());
        if (agentInstance.isIpChangeRequired(agentRuntimeInfo.getIpAdress())) {
            LOGGER.warn("Agent with UUID [{}] changed IP Address from [{}] to [{}]", agentRuntimeInfo.getUUId(), agentInstance.getAgent().getIpaddress(), agentRuntimeInfo.getIpAdress());
            Agent agent = (agentInstance.isRegistered() ? agentInstance.getAgent() : null);
            bombIfNull(agent, "Unable to set agent ipAddress; Agent [" + agentInstance.getAgent().getUuid() + "] not found.");
            agent.setIpaddress(agentRuntimeInfo.getIpAdress());
            saveOrUpdate(agent);
        }

        agentInstances.updateAgentRuntimeInfo(agentRuntimeInfo);
    }

    public Username createAgentUsername(String uuId, String ipAddress, String hostNameForDisplay) {
        return new Username(format("agent_%s_%s_%s", uuId, ipAddress, hostNameForDisplay));
    }

    /**
     * Handles an agent's registration request: registers (or re-finds) the
     * instance, issues its certificate, and for a newly-registered agent
     * assigns a cookie and persists it.
     */
    public Registration requestRegistration(AgentRuntimeInfo agentRuntimeInfo) {
        LOGGER.debug("Agent is requesting registration {}", agentRuntimeInfo);

        AgentInstance agentInstance = agentInstances.register(agentRuntimeInfo);
        Registration registration = agentInstance.assignCertification();

        Agent agent = agentInstance.getAgent();
        if (agentInstance.isRegistered() && !agent.cookieAssigned()) {
            generateAndAddCookie(agent);
            saveOrUpdate(agentInstance.getAgent());
            bombIfAgentHasErrors(agent);
            LOGGER.debug("New Agent approved {}", agentRuntimeInfo);
        }

        return registration;
    }

    /** Enables (approves) a pending agent; persists it if it was not already registered. */
    @Deprecated
    public void approve(String uuid) {
        AgentInstance agentInstance = findAgentAndRefreshStatus(uuid);
        boolean doesAgentExistAndIsRegistered = isRegistered(agentInstance.getUuid());
        agentInstance.enable();
        if (doesAgentExistAndIsRegistered) {
            LOGGER.warn("Registered agent with the same uuid [{}] already approved.", agentInstance);
        } else {
            Agent agent = agentInstance.getAgent();
            if (!agent.cookieAssigned()) {
                generateAndAddCookie(agent);
            }
            saveOrUpdate(agent);
        }
    }

    public void notifyJobCancelledEvent(String uuid) {
        agentInstances.updateAgentAboutCancelledBuild(uuid, true);
    }

    public AgentInstance findAgentAndRefreshStatus(String uuid) {
        return agentInstances.findAgentAndRefreshStatus(uuid);
    }

    public AgentInstance findAgent(String uuid) {
        return agentInstances.findAgent(uuid);
    }

    public void clearAll() {
        agentInstances.clearAll();
    }

    /**
     * called from spring timer
     */
    public void refresh() {
        agentInstances.refresh();
    }

    public void building(String uuid, AgentBuildingInfo agentBuildingInfo) {
        agentInstances.building(uuid, agentBuildingInfo);
    }

    /** Generates a fresh cookie for the agent and records it in the DB. */
    public String assignCookie(AgentIdentifier identifier) {
        String cookie = uuidGenerator.randomUuid();
        agentDao.associateCookie(identifier, cookie);
        return cookie;
    }

    /**
     * Looks up an agent by uuid, preferring the cache; falls back to the DB
     * (including soft-deleted rows). Returns null for a null/empty uuid.
     */
    public Agent findAgentByUUID(String uuid) {
        if (isNullOrEmpty(uuid)) {
            return null;
        }

        AgentInstance agentInstance = agentInstances.findAgent(uuid);

        Agent agent;
        if (agentInstance != null && !agentInstance.isNullAgent()) {
            agent = agentInstance.getAgent();
        } else {
            agent = agentDao.fetchAgentFromDBByUUIDIncludingDeleted(uuid);
        }

        return agent;
    }

    public AgentsViewModel filterAgentsViewModel(List<String> uuids) {
        return agentInstances.filter(uuids).stream().map(AgentViewModel::new).collect(toCollection(AgentsViewModel::new));
    }

    public AgentViewModel findAgentViewModel(String uuid) {
        return toAgentViewModel(findAgentAndRefreshStatus(uuid));
    }

    public LinkedMultiValueMap<String, ElasticAgentMetadata> allElasticAgents() {
        return agentInstances.getAllElasticAgentsGroupedByPluginId();
    }

    public AgentInstance findElasticAgent(String elasticAgentId, String elasticPluginId) {
        return agentInstances.findElasticAgent(elasticAgentId, elasticPluginId);
    }

    /** Persists a new agent after assigning it a cookie. */
    public void register(Agent agent) {
        generateAndAddCookie(agent);
        saveOrUpdate(agent);
    }

    public boolean isRegistered(String uuid) {
        AgentInstance agentInstance = agentInstances.findAgent(uuid);
        return !agentInstance.isNullAgent() && agentInstance.isRegistered();
    }

    public Agent getAgentByUUID(String uuid) {
        return agentInstances.findAgent(uuid).getAgent();
    }

    public List<String> getAllRegisteredAgentUUIDs() {
        return stream(agentInstances.spliterator(), false)
                .filter(AgentInstance::isRegistered)
                .map(AgentInstance::getUuid)
                .collect(toList());
    }

    public void disableAgents(List<String> uuids) {
        if (!isEmpty(uuids)) {
            agentDao.disableAgents(uuids);
        }
    }

    public void disableAgents(String... uuids) {
        if (uuids != null) {
            agentDao.disableAgents(asList(uuids));
        }
    }

    /** Validates the agent; persists it only when validation produced no errors. */
    public void saveOrUpdate(Agent agent) {
        agent.validate();
        if (!agent.hasErrors()) {
            agentDao.saveOrUpdate(agent);
        }
    }

    /** Distinct resource names across all known agents. */
    public List<String> getListOfResourcesAcrossAgents() {
        return agents().stream()
                .map(Agent::getResourcesAsList)
                .flatMap(Collection::stream)
                .distinct()
                .collect(toList());
    }

    @Override
    public void bulkEntitiesChanged(List<Agent> agents) {
        agents.forEach(this::entityChanged);
    }

    @Override
    public void bulkEntitiesDeleted(List<String> deletedUuids) {
        deletedUuids.forEach(this::entityDeleted);
    }

    /**
     * DB-change callback: adds a brand new agent to the cache, or syncs an
     * existing cached instance from the updated row and notifies listeners.
     */
    @Override
    public void entityChanged(Agent agentAfterUpdate) {
        AgentInstance agentInstanceBeforeUpdate = agentInstances.findAgent(agentAfterUpdate.getUuid());

        if (agentInstanceBeforeUpdate instanceof NullAgentInstance) {
            createNewAgentInstanceAndAddToCache(agentAfterUpdate);
        } else {
            notifyAgentChangeListenersAndSyncAgentFromUpdatedAgent(agentAfterUpdate, agentInstanceBeforeUpdate);
        }
    }

    private void notifyAgentChangeListenersAndSyncAgentFromUpdatedAgent(Agent agentAfterUpdate, AgentInstance agentInstanceBeforeUpdate) {
        notifyAgentChangeListeners(agentAfterUpdate);
        agentInstanceBeforeUpdate.syncAgentFrom(agentAfterUpdate);
    }

    private void createNewAgentInstanceAndAddToCache(Agent agentAfterUpdate) {
        AgentInstance agentInstanceBeforeUpdate = createFromAgent(agentAfterUpdate, new SystemEnvironment(), agentStatusChangeNotifier);
        this.agentInstances.add(agentInstanceBeforeUpdate);
    }

    void registerAgentChangeListeners(AgentChangeListener listener) {
        if (listener != null) {
            this.listeners.add(listener);
        }
    }

    // Notifies delete listeners BEFORE removing from cache so they can still read the agent.
    void entityDeleted(String uuid) {
        notifyAgentDeleteListeners(uuid);
        this.agentInstances.removeAgent(uuid);
    }

    private void syncAgentInstanceCacheFromAgentsInDB() {
        Agents allAgentsFromDB = new Agents(agentDao.getAllAgents());
        agentInstances.syncAgentInstancesFrom(allAgentsFromDB);
    }

    private void setResourcesEnvsAndState(Agent agent, List<String> resourcesToAdd, List<String> resourcesToRemove,
                                          List<String> envsToAdd, List<String> envsToRemove, TriState state,
                                          EnvironmentConfigService environmentConfigService) {
        addRemoveEnvsAndResources(agent, envsToAdd, envsToRemove, resourcesToAdd, resourcesToRemove, environmentConfigService);
        enableOrDisableAgent(agent, state);
    }

    private void generateAndAddCookie(Agent agent) {
        String cookie = uuidGenerator.randomUuid();
        agent.setCookie(cookie);
    }

    private void bombIfAgentHasErrors(Agent agent) {
        if (agent.hasErrors()) {
            List<ConfigErrors> errors = agent.errorsAsList();
            throw new GoConfigInvalidException(null, new AllConfigErrors(errors));
        }
    }

    /**
     * For a pending agent, returns a copy with a fresh cookie (it has no DB row
     * yet); otherwise fetches the agent from the DB.
     */
    private Agent getPendingAgentOrFromDB(AgentInstance agentInstance) {
        if (agentInstance.isPending()) {
            Agent agent = new Agent(agentInstance.getAgent());
            generateAndAddCookie(agent);
            return agent;
        }
        return agentDao.fetchAgentFromDBByUUID(agentInstance.getUuid());
    }

    // Null arguments mean "leave unchanged"; an unset tristate leaves enabled/disabled alone.
    private void setAgentAttributes(String newHostname, String resources, String environments, TriState state, Agent agent) {
        if (state.isTrue()) {
            agent.enable();
        }

        if (state.isFalse()) {
            agent.disable();
        }

        if (newHostname != null) {
            agent.setHostname(newHostname);
        }

        if (resources != null) {
            agent.setResources(resources);
        }

        if (environments != null) {
            agent.setEnvironments(environments);
        }
    }

    private void notifyAgentChangeListeners(Agent agentAfterUpdate) {
        listeners.forEach(listener -> listener.agentChanged(agentAfterUpdate));
    }

    private void notifyAgentDeleteListeners(String uuid) {
        listeners.forEach(listener -> listener.agentDeleted(agentInstances.findAgent(uuid).getAgent()));
    }

    private static Collection<String> getSortedEnvironmentList(AgentInstance agentInstance) {
        return agentInstance.getAgent().getEnvironmentsAsList().stream().sorted().collect(toList());
    }

    /**
     * Fails the heartbeat when the agent's cookie does not match the one stored
     * for its identifier, recording a server-health warning for the admin.
     */
    private void bombIfAgentHasDuplicateCookie(AgentRuntimeInfo agentRuntimeInfo) {
        if (agentRuntimeInfo.hasDuplicateCookie(agentDao.cookieFor(agentRuntimeInfo.getIdentifier()))) {
            LOGGER.warn("Found agent [{}] with duplicate uuid. Please check the agent installation.", agentRuntimeInfo.agentInfoDebugString());
            serverHealthService.update(
                    ServerHealthState.warning(format("[%s] has duplicate unique identifier which conflicts with [%s]", agentRuntimeInfo.agentInfoForDisplay(), findAgentAndRefreshStatus(agentRuntimeInfo.getUUId()).agentInfoForDisplay()),
                            "Please check the agent installation. Click <a href='" + docsUrl("/faq/agent_guid_issue.html") + "' target='_blank'>here</a> for more info.",
                            HealthStateType.duplicateAgent(HealthStateScope.forAgent(agentRuntimeInfo.getCookie())), Timeout.THIRTY_SECONDS));
            throw new AgentWithDuplicateUUIDException(format("Agent [%s] has invalid cookie", agentRuntimeInfo.agentInfoDebugString()));
        }
    }

    private void bombIfAgentDoesNotHaveCookie(AgentRuntimeInfo agentRuntimeInfo) {
        if (!agentRuntimeInfo.hasCookie()) {
            LOGGER.warn("Agent [{}] has no cookie set", agentRuntimeInfo.agentInfoDebugString());
            throw new AgentNoCookieSetException(format("Agent [%s] has no cookie set", agentRuntimeInfo.agentInfoDebugString()));
        }
    }

    /** uuids requested for the env that are not yet associated with it. */
    private List<String> getUUIDsToAddEnvTo(List<String> uuidsToAssociateWithEnv, List<String> uuidsAssociatedWithEnv) {
        return uuidsToAssociateWithEnv.stream()
                .filter(uuid -> !uuidsAssociatedWithEnv.contains(uuid))
                .collect(toList());
    }

    /** uuids currently associated with the env that are not in the requested set. */
    private List<String> getUUIDsToRemoveEnvFrom(List<String> uuidsToAssociateWithEnv, List<String> uuidsAssociatedWithEnv) {
        return uuidsAssociatedWithEnv.stream()
                .filter(uuid -> !uuidsToAssociateWithEnv.contains(uuid))
                .collect(toList());
    }

    private List<Agent> getAgentsFromDBToAddEnvTo(List<String> UUIDs, List<String> associatedUUIDs, String env) {
        List<String> UUIDsToAddEnvTo = getUUIDsToAddEnvTo(UUIDs, associatedUUIDs);
        return UUIDsToAddEnvTo.stream()
                .map(uuid -> getAgentFromDBAfterAddingEnvToExistingEnvs(env, uuid))
                .collect(toList());
    }

    private Agent getAgentFromDBAfterAddingEnvToExistingEnvs(String env, String uuid) {
        Agent agent = agentDao.getAgentByUUIDFromCacheOrDB(uuid);
        String envsToSet = append(agent.getEnvironments(), singletonList(env));
        agent.setEnvironments(envsToSet);
        return agent;
    }

    private List<Agent> getAgentsFromDBToRemoveEnvFrom(List<String> UUIDs, List<String> associatedUUIDs, String env) {
        List<String> uuidsToRemoveEnvFrom = getUUIDsToRemoveEnvFrom(UUIDs, associatedUUIDs);
        return uuidsToRemoveEnvFrom.stream()
                .map(uuid -> getAgentFromDBAfterRemovingEnvFromExistingEnvs(env, uuid))
                .collect(toList());
    }

    private Agent getAgentFromDBAfterRemovingEnvFromExistingEnvs(String env, String uuid) {
        Agent agent = agentDao.getAgentByUUIDFromCacheOrDB(uuid);
        String envsToSet = remove(agent.getEnvironments(), singletonList(env));
        agent.setEnvironments(envsToSet);
        return agent;
    }

    private List<String> toUUIDList(EnvironmentAgentsConfig asssociatedAgents) {
        List<String> associatedUUIDs = emptyList();

        if (!isEmpty(asssociatedAgents)) {
            associatedUUIDs = asssociatedAgents.stream().map(EnvironmentAgentConfig::getUuid).collect(toList());
        }

        return associatedUUIDs;
    }

    private void enableOrDisableAgent(Agent agent, TriState triState) {
        if (triState.isTrue()) {
            agent.setDisabled(false);
        } else if (triState.isFalse()) {
            agent.setDisabled(true);
        }
    }

    /**
     * Adds the given environments to the agent, skipping any env that already
     * associates this agent remotely through a config repo (that association
     * cannot be managed from here).
     */
    private void addOnlyThoseEnvsThatAreNotAssociatedWithAgentFromConfigRepo(List<String> envsToAdd, Agent agent,
                                                                            EnvironmentConfigService environmentConfigService) {
        if (envsToAdd != null) {
            String uuid = agent.getUuid();
            envsToAdd.forEach(envName -> {
                EnvironmentConfig env = environmentConfigService.find(envName);
                if (env != null && env.containsAgentRemotely(uuid)) {
                    LOGGER.info(format("Not adding Agent [%s] to Environment [%s] as it is already associated from a Config Repo", uuid, envName));
                } else {
                    agent.addEnvironment(envName);
                }
            });
        }
    }

    private void addRemoveEnvsAndResources(Agent agent, List<String> envsToAdd, List<String> envsToRemove,
                                           List<String> resourcesToAdd, List<String> resourcesToRemove,
                                           EnvironmentConfigService environmentConfigService) {
        addOnlyThoseEnvsThatAreNotAssociatedWithAgentFromConfigRepo(envsToAdd, agent, environmentConfigService);
        agent.removeEnvironments(envsToRemove);

        agent.addResources(resourcesToAdd);
        agent.removeResources(resourcesToRemove);
    }

    private boolean validateThatAllAgentsExistAndCanBeDeleted(List<String> uuids) {
        if (isEmpty(uuids)) {
            return true;
        }
        return uuids.stream().allMatch(uuid -> validateThatAgentExistAndCanBeDeleted(uuid, uuids.size()));
    }

    /** @throws RecordNotFoundException when the instance is a NullAgentInstance */
    private boolean validateThatAgentExists(AgentInstance agentInstance) {
        if (agentInstance.isNullAgent()) {
            throw new RecordNotFoundException(EntityType.Agent, agentInstance.getUuid());
        }
        return true;
    }

    private boolean validateThatAgentExistAndCanBeDeleted(String uuid, int totalAgentsToValidate) {
        AgentInstance agentInstance = findAgentAndRefreshStatus(uuid);
        return validateThatAgentExists(agentInstance) && validateThatAgentCanBeDeleted(agentInstance, totalAgentsToValidate);
    }

    /** @throws UnprocessableEntityException when the agent is enabled or still building */
    private boolean validateThatAgentCanBeDeleted(AgentInstance agentInstance, int totalAgentToValidate) {
        if (!agentInstance.canBeDeleted()) {
            throw new UnprocessableEntityException(getFailedToDeleteMessage(totalAgentToValidate));
        }
        return true;
    }

    private AgentsViewModel toAgentViewModels(AgentInstances agentInstances) {
        return stream(agentInstances.spliterator(), false)
                .map(this::toAgentViewModel)
                .collect(toCollection(AgentsViewModel::new));
    }

    private AgentViewModel toAgentViewModel(AgentInstance instance) {
        return new AgentViewModel(instance, instance.getAgent().getEnvironmentsAsList());
    }

    private String getFailedToDeleteMessage(int numOfAgents) {
        if (numOfAgents == 1) {
            return "Failed to delete an agent, as it is not in a disabled state or is still building.";
        } else {
            return "Could not delete any agents, as one or more agents might not be disabled or are still building.";
        }
    }

    /** @throws BadRequestException when no attribute update was requested at all */
    boolean validateAnyOperationPerformedOnAgent(String hostname, String environments, String resources, TriState state) {
        boolean anyOperationPerformed = (resources != null || environments != null || hostname != null || isTriStateSet(state));
        if (!anyOperationPerformed) {
            throw new BadRequestException("Bad Request. No operation is specified in the request to be performed on agent.");
        }
        return true;
    }

    private boolean isTriStateSet(TriState state) {
        return state.isTrue() || state.isFalse();
    }

    /** @throws BadRequestException when the bulk request specifies no operation at all */
    boolean isAnyOperationPerformedOnBulkAgents(List<String> resourcesToAdd, List<String> resourcesToRemove,
                                                List<String> envsToAdd, List<String> envsToRemove,
                                                TriState state) {
        boolean anyOperationPerformed
                = isNotEmpty(resourcesToAdd)
                || isNotEmpty(resourcesToRemove)
                || isNotEmpty(envsToAdd)
                || isNotEmpty(envsToRemove)
                || isTriStateSet(state);

        if (!anyOperationPerformed) {
            throw new BadRequestException("Bad Request. No operation is specified in the request to be performed on agents.");
        }
        return true;
    }

    /** Diffs the env's current agent set against uuids and builds the add/remove lists. */
    private List<Agent> getAgentsToAddEnvToOrRemoveEnvFrom(EnvironmentConfig envConfig, List<String> uuids) {
        List<String> associatedUUIDs = toUUIDList(envConfig.getAgents());
        String envName = envConfig.name().toString();

        List<Agent> removeEnvFromAgents = getAgentsFromDBToRemoveEnvFrom(uuids, associatedUUIDs, envName);
        List<Agent> addEnvToAgents = getAgentsFromDBToAddEnvTo(uuids, associatedUUIDs, envName);

        return union(removeEnvFromAgents, addEnvToAgents);
    }

    private List<Agent> getAgentsToAddEnvToOrRemoveEnvFrom(String envName, List<String> agentUUIDsToAssociate, List<String> agentUUIDsToRemove) {
        List<Agent> removeEnvFromAgents = getAgentsFromDBToRemoveEnvFrom(Collections.emptyList(), agentUUIDsToRemove, envName);
        List<Agent> addEnvToAgents = getAgentsFromDBToAddEnvTo(agentUUIDsToAssociate, Collections.emptyList(), envName);

        return union(removeEnvFromAgents, addEnvToAgents);
    }

    /** Assigns DB ids and cookies to pending agents being enabled/disabled so they can be bulk-saved. */
    void updateIdsAndGenerateCookiesForPendingAgents(List<Agent> agents, TriState state) {
        if (isTriStateSet(state)) {
            agents.stream()
                    .filter(agent -> findAgent(agent.getUuid()).getStatus().getConfigStatus() == Pending)
                    .forEach(this::updateIdAndGenerateCookieForPendingAgent);
        }
    }

    private void updateIdAndGenerateCookieForPendingAgent(Agent pendingAgent) {
        agentDao.updateAgentIdFromDBIfAgentDoesNotHaveAnIdAndAgentExistInDB(pendingAgent);
        generateAndAddCookie(pendingAgent);
    }
}
package com.google.android.apps.common.testing.ui.espresso.matcher;

import static com.jameswald.skinnylatte.common.base.Preconditions.checkNotNull;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.anyOf;

import com.google.android.apps.common.testing.testrunner.ActivityLifecycleMonitor;
import com.google.android.apps.common.testing.testrunner.ActivityLifecycleMonitorRegistry;
import com.google.android.apps.common.testing.testrunner.Stage;
import com.google.android.apps.common.testing.ui.espresso.NoActivityResumedException;
import com.google.android.apps.common.testing.ui.espresso.Root;
import com.jameswald.skinnylatte.common.collect.Lists;

import android.app.Activity;
import android.os.IBinder;
import android.view.View;
import android.view.WindowManager;

import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.TypeSafeMatcher;

import java.util.Collection;
import java.util.List;

/**
 * A collection of matchers for {@link Root} objects.
 */
public final class RootMatchers {

  private RootMatchers() {}

  /**
   * Espresso's default {@link Root} matcher: a focusable root that either is a dialog
   * whose decor view holds window focus, or is a subwindow of the resumed activity.
   */
  @SuppressWarnings("unchecked")
  public static final Matcher<Root> DEFAULT =
      allOf(
          hasWindowLayoutParams(),
          allOf(
              anyOf(
                  allOf(isDialog(), withDecorView(hasWindowFocus())),
                  isSubwindowOfCurrentActivity()),
              isFocusable()));

  /**
   * Matches {@link Root}s that can take window focus.
   */
  public static Matcher<Root> isFocusable() {
    return new TypeSafeMatcher<Root>() {
      @Override
      public void describeTo(Description description) {
        description.appendText("is focusable");
      }

      @Override
      public boolean matchesSafely(Root root) {
        // Focusable unless the window explicitly opted out via FLAG_NOT_FOCUSABLE.
        int windowFlags = root.getWindowLayoutParams().get().flags;
        return (windowFlags & WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE) == 0;
      }
    };
  }

  /**
   * Matches {@link Root}s that can receive touch events.
   */
  public static Matcher<Root> isTouchable() {
    return new TypeSafeMatcher<Root>() {
      @Override
      public void describeTo(Description description) {
        description.appendText("is touchable");
      }

      @Override
      public boolean matchesSafely(Root root) {
        // Touchable unless the window explicitly opted out via FLAG_NOT_TOUCHABLE.
        int windowFlags = root.getWindowLayoutParams().get().flags;
        return (windowFlags & WindowManager.LayoutParams.FLAG_NOT_TOUCHABLE) == 0;
      }
    };
  }

  /**
   * Matches {@link Root}s that are dialogs (i.e. is not a window of the currently resumed
   * activity).
   */
  public static Matcher<Root> isDialog() {
    return new TypeSafeMatcher<Root>() {
      @Override
      public void describeTo(Description description) {
        description.appendText("is dialog");
      }

      @Override
      public boolean matchesSafely(Root root) {
        int windowType = root.getWindowLayoutParams().get().type;
        // Activity windows carry TYPE_BASE_APPLICATION; anything at or above
        // LAST_APPLICATION_WINDOW is out of the application-window range.
        if (windowType == WindowManager.LayoutParams.TYPE_BASE_APPLICATION
            || windowType >= WindowManager.LayoutParams.LAST_APPLICATION_WINDOW) {
          return false;
        }
        IBinder windowToken = root.getDecorView().getWindowToken();
        IBinder appToken = root.getDecorView().getApplicationWindowToken();
        // Equal tokens mean this window isn't contained by any other window; since it is
        // not an activity base window, it must be a dialog box.
        return windowToken == appToken;
      }
    };
  }

  /**
   * Matches {@link Root}s with decor views that match the given view matcher.
   */
  public static Matcher<Root> withDecorView(final Matcher<View> decorViewMatcher) {
    checkNotNull(decorViewMatcher);
    return new TypeSafeMatcher<Root>() {
      @Override
      public void describeTo(Description description) {
        description.appendText("with decor view ");
        decorViewMatcher.describeTo(description);
      }

      @Override
      public boolean matchesSafely(Root root) {
        return decorViewMatcher.matches(root.getDecorView());
      }
    };
  }

  /** Matches views whose hosting window currently has focus. */
  private static Matcher<View> hasWindowFocus() {
    return new TypeSafeMatcher<View>() {
      @Override
      public void describeTo(Description description) {
        description.appendText("has window focus");
      }

      @Override
      public boolean matchesSafely(View candidate) {
        return candidate.hasWindowFocus();
      }
    };
  }

  /** Matches roots that expose their {@link WindowManager.LayoutParams}. */
  private static Matcher<Root> hasWindowLayoutParams() {
    return new TypeSafeMatcher<Root>() {
      @Override
      public void describeTo(Description description) {
        description.appendText("has window layout params");
      }

      @Override
      public boolean matchesSafely(Root root) {
        return root.getWindowLayoutParams().isPresent();
      }
    };
  }

  /** Matches roots whose application window token belongs to a RESUMED activity. */
  private static Matcher<Root> isSubwindowOfCurrentActivity() {
    return new TypeSafeMatcher<Root>() {
      @Override
      public void describeTo(Description description) {
        description.appendText("is subwindow of current activity");
      }

      @Override
      public boolean matchesSafely(Root root) {
        return getResumedActivityTokens()
            .contains(root.getDecorView().getApplicationWindowToken());
      }
    };
  }

  /**
   * Returns the application window tokens of all activities currently in the RESUMED stage.
   *
   * @throws NoActivityResumedException when no activity is resumed
   */
  private static List<IBinder> getResumedActivityTokens() {
    ActivityLifecycleMonitor monitor = ActivityLifecycleMonitorRegistry.getInstance();
    Collection<Activity> resumedActivities = monitor.getActivitiesInStage(Stage.RESUMED);
    if (resumedActivities.isEmpty()) {
      throw new NoActivityResumedException("At least one activity should be in RESUMED stage.");
    }
    List<IBinder> applicationTokens = Lists.newArrayList();
    for (Activity resumed : resumedActivities) {
      applicationTokens.add(resumed.getWindow().getDecorView().getApplicationWindowToken());
    }
    return applicationTokens;
  }
}
package com.communote.server.persistence.tag;

import com.communote.server.model.tag.GlobalTagCategoryConstants;

/**
 * <p>
 * Base Spring DAO Class: is able to create, update, remove, load, and find objects of type
 * <code>com.communote.server.persistence.tag.GlobalTagCategory</code>.
 * </p>
 * NOTE(review): this class follows a code-generator template (AndroMDA/Spring-Hibernate style);
 * raw types and fully-qualified names are generator artifacts and are kept intentionally.
 *
 * @see com.communote.server.model.tag.GlobalTagCategory
 * @author Communote GmbH - <a href="http://www.communote.com/">http://www.communote.com/</a>
 */
public abstract class GlobalTagCategoryDaoBase extends
        org.springframework.orm.hibernate3.support.HibernateDaoSupport implements
        com.communote.server.persistence.tag.GlobalTagCategoryDao {

    /**
     * This anonymous transformer is designed to transform entities or report query results (which
     * result in an array of objects) to
     * {@link com.communote.server.core.vo.tag.GlobalTagCategoryVO} using the Jakarta
     * Commons-Collections Transformation API.
     */
    private final org.apache.commons.collections.Transformer GLOBALTAGCATEGORYVO_TRANSFORMER = new org.apache.commons.collections.Transformer() {
        @Override
        public Object transform(Object input) {
            Object result = null;
            if (input instanceof com.communote.server.model.tag.GlobalTagCategory) {
                result = toGlobalTagCategoryVO((com.communote.server.model.tag.GlobalTagCategory) input);
            } else if (input instanceof Object[]) {
                // report queries yield one Object[] per result row
                result = toGlobalTagCategoryVO((Object[]) input);
            }
            return result;
        }
    };

    /** Inverse transformer: converts a value object into an entity via the subclass hook. */
    private final org.apache.commons.collections.Transformer GlobalTagCategoryVOToEntityTransformer = new org.apache.commons.collections.Transformer() {
        @Override
        public Object transform(Object input) {
            return globalTagCategoryVOToEntity((com.communote.server.core.vo.tag.GlobalTagCategoryVO) input);
        }
    };

    /**
     * @see com.communote.server.persistence.tag.GlobalTagCategoryDao#create(com.communote.server.model.tag.GlobalTagCategory)
     */
    @Override
    public com.communote.server.model.tag.GlobalTagCategory create(
            com.communote.server.model.tag.GlobalTagCategory globalTagCategory) {
        return (com.communote.server.model.tag.GlobalTagCategory) this.create(TRANSFORM_NONE,
                globalTagCategory);
    }

    /**
     * Persists the entity and returns it, optionally transformed.
     *
     * @see com.communote.server.persistence.tag.GlobalTagCategoryDao#create(int,
     *      com.communote.server.model.tag.GlobalTagCategory)
     */
    @Override
    public Object create(final int transform,
            final com.communote.server.model.tag.GlobalTagCategory globalTagCategory) {
        if (globalTagCategory == null) {
            throw new IllegalArgumentException(
                    "GlobalTagCategory.create - 'globalTagCategory' can not be null");
        }
        this.getHibernateTemplate().save(globalTagCategory);
        return this.transformEntity(transform, globalTagCategory);
    }

    /**
     * Persists every entity of the collection inside one Hibernate session; the collection
     * itself is returned unchanged (entities are transformed individually but discarded).
     *
     * @see com.communote.server.persistence.tag.GlobalTagCategoryDao#create(int,
     *      java.util.Collection)
     */
    @Override
    public java.util.Collection<com.communote.server.model.tag.GlobalTagCategory> create(
            final int transform,
            final java.util.Collection<com.communote.server.model.tag.GlobalTagCategory> entities) {
        if (entities == null) {
            throw new IllegalArgumentException(
                    "GlobalTagCategory.create - 'entities' can not be null");
        }
        this.getHibernateTemplate()
                .executeWithNativeSession(
                        new org.springframework.orm.hibernate3.HibernateCallback<com.communote.server.model.tag.GlobalTagCategory>() {
                            @Override
                            public com.communote.server.model.tag.GlobalTagCategory doInHibernate(
                                    org.hibernate.Session session)
                                    throws org.hibernate.HibernateException {
                                for (java.util.Iterator<com.communote.server.model.tag.GlobalTagCategory> entityIterator = entities
                                        .iterator(); entityIterator.hasNext();) {
                                    create(transform, entityIterator.next());
                                }
                                return null;
                            }
                        });
        return entities;
    }

    /**
     * @see com.communote.server.persistence.tag.GlobalTagCategoryDao#create(java.util.Collection)
     */
    @Override
    @SuppressWarnings({ "unchecked" })
    public java.util.Collection<com.communote.server.model.tag.GlobalTagCategory> create(
            final java.util.Collection<com.communote.server.model.tag.GlobalTagCategory> entities) {
        return create(TRANSFORM_NONE, entities);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void evict(com.communote.server.model.tag.GlobalTagCategory entity) {
        this.getHibernateTemplate().evict(entity);
    }

    /**
     * Finds the single category with the given name (delegates with the generated HQL query).
     *
     * @see com.communote.server.persistence.tag.GlobalTagCategoryDao#findByName(int, String)
     */
    @Override
    public Object findByName(final int transform, final String name) {
        return this.findByName(transform, "from " + GlobalTagCategoryConstants.CLASS_NAME
                + " as globalTagCategory where globalTagCategory.name = :name", name);
    }

    /**
     * Executes the given HQL query bound to {@code :name} and expects at most one result.
     *
     * @see com.communote.server.persistence.tag.GlobalTagCategoryDao#findByName(int, String,
     *      String)
     */
    @Override
    @SuppressWarnings("unchecked")
    public Object findByName(final int transform, final String queryString, final String name) {
        try {
            org.hibernate.Query queryObject = super.getSession(false).createQuery(queryString);
            queryObject.setParameter("name", name);
            java.util.Set results = new java.util.LinkedHashSet(queryObject.list());
            Object result = null;
            // NOTE(review): results is never null here (freshly constructed); the null
            // check is a generator artifact kept for fidelity.
            if (results != null) {
                if (results.size() > 1) {
                    throw new org.springframework.dao.InvalidDataAccessResourceUsageException(
                            "More than one instance of 'GlobalTagCategory"
                                    + "' was found when executing query --> '" + queryString + "'");
                } else if (results.size() == 1) {
                    result = results.iterator().next();
                }
            }
            result = transformEntity(transform,
                    (com.communote.server.model.tag.GlobalTagCategory) result);
            return result;
        } catch (org.hibernate.HibernateException ex) {
            throw super.convertHibernateAccessException(ex);
        }
    }

    /**
     * @see com.communote.server.persistence.tag.GlobalTagCategoryDao#findByName(String)
     */
    @Override
    public com.communote.server.model.tag.GlobalTagCategory findByName(String name) {
        return (com.communote.server.model.tag.GlobalTagCategory) this.findByName(TRANSFORM_NONE,
                name);
    }

    /**
     * @see com.communote.server.persistence.tag.GlobalTagCategoryDao#findByName(String, String)
     */
    @Override
    public com.communote.server.model.tag.GlobalTagCategory findByName(final String queryString,
            final String name) {
        return (com.communote.server.model.tag.GlobalTagCategory) this.findByName(TRANSFORM_NONE,
                queryString, name);
    }

    /**
     * Finds the single category with the given prefix (delegates with the generated HQL query).
     *
     * @see com.communote.server.persistence.tag.GlobalTagCategoryDao#findByPrefix(int, String)
     */
    @Override
    public Object findByPrefix(final int transform, final String prefix) {
        return this.findByPrefix(transform, "from " + GlobalTagCategoryConstants.CLASS_NAME
                + " as globalTagCategory where globalTagCategory.prefix = :prefix", prefix);
    }

    /**
     * Executes the given HQL query bound to {@code :prefix} and expects at most one result.
     *
     * @see com.communote.server.persistence.tag.GlobalTagCategoryDao#findByPrefix(int, String,
     *      String)
     */
    @Override
    @SuppressWarnings("unchecked")
    public Object findByPrefix(final int transform, final String queryString, final String prefix) {
        try {
            org.hibernate.Query queryObject = super.getSession(false).createQuery(queryString);
            queryObject.setParameter("prefix", prefix);
            java.util.Set results = new java.util.LinkedHashSet(queryObject.list());
            Object result = null;
            // NOTE(review): same generator artifact as findByName — results cannot be null.
            if (results != null) {
                if (results.size() > 1) {
                    throw new org.springframework.dao.InvalidDataAccessResourceUsageException(
                            "More than one instance of 'GlobalTagCategory"
                                    + "' was found when executing query --> '" + queryString + "'");
                } else if (results.size() == 1) {
                    result = results.iterator().next();
                }
            }
            result = transformEntity(transform,
                    (com.communote.server.model.tag.GlobalTagCategory) result);
            return result;
        } catch (org.hibernate.HibernateException ex) {
            throw super.convertHibernateAccessException(ex);
        }
    }

    /**
     * @see com.communote.server.persistence.tag.GlobalTagCategoryDao#findByPrefix(String)
     */
    @Override
    public com.communote.server.model.tag.GlobalTagCategory findByPrefix(String prefix) {
        return (com.communote.server.model.tag.GlobalTagCategory) this.findByPrefix(TRANSFORM_NONE,
                prefix);
    }

    /**
     * @see com.communote.server.persistence.tag.GlobalTagCategoryDao#findByPrefix(String, String)
     */
    @Override
    public com.communote.server.model.tag.GlobalTagCategory findByPrefix(final String queryString,
            final String prefix) {
        return (com.communote.server.model.tag.GlobalTagCategory) this.findByPrefix(TRANSFORM_NONE,
                queryString, prefix);
    }

    /**
     * Copies VO fields onto the entity. When {@code copyIfNull} is false, null (or default)
     * source values leave the corresponding target field untouched.
     *
     * @see com.communote.server.persistence.tag.GlobalTagCategoryDao#globalTagCategoryVOToEntity(com.communote.server.core.vo.tag.GlobalTagCategoryVO,
     *      com.communote.server.model.tag.GlobalTagCategory)
     */
    @Override
    public void globalTagCategoryVOToEntity(
            com.communote.server.core.vo.tag.GlobalTagCategoryVO source,
            com.communote.server.model.tag.GlobalTagCategory target, boolean copyIfNull) {
        if (copyIfNull || source.getName() != null) {
            target.setName(source.getName());
        }
        if (copyIfNull || source.getPrefix() != null) {
            target.setPrefix(source.getPrefix());
        }
        if (copyIfNull || source.getDescription() != null) {
            target.setDescription(source.getDescription());
        }
        if (copyIfNull || source.isMultipleTags() != false) {
            target.setMultipleTags(source.isMultipleTags());
        }
    }

    /**
     * In-place conversion: drops elements that are null or not VO instances, then
     * transforms the remaining VOs to entities.
     *
     * @see com.communote.server.persistence.tag.GlobalTagCategoryDao#globalTagCategoryVOToEntityCollection(java.util.Collection)
     */
    @Override
    public final void globalTagCategoryVOToEntityCollection(java.util.Collection instances) {
        if (instances != null) {
            for (final java.util.Iterator iterator = instances.iterator(); iterator.hasNext();) {
                // remove objects that are null or not of the correct instance
                if (!(iterator.next() instanceof com.communote.server.core.vo.tag.GlobalTagCategoryVO)) {
                    iterator.remove();
                }
            }
            org.apache.commons.collections.CollectionUtils.transform(instances,
                    GlobalTagCategoryVOToEntityTransformer);
        }
    }

    /**
     * Loads the entity by primary key; returns null (possibly transformed) when not found.
     *
     * @see com.communote.server.persistence.tag.GlobalTagCategoryDao#load(int, Long)
     */
    @Override
    public Object load(final int transform, final Long id) {
        if (id == null) {
            throw new IllegalArgumentException("GlobalTagCategory.load - 'id' can not be null");
        }
        final Object entity = this.getHibernateTemplate().get(
                com.communote.server.model.tag.GlobalTagCategoryImpl.class, id);
        return transformEntity(transform,
                (com.communote.server.model.tag.GlobalTagCategory) entity);
    }

    /**
     * @see com.communote.server.persistence.tag.GlobalTagCategoryDao#load(Long)
     */
    @Override
    public com.communote.server.model.tag.GlobalTagCategory load(Long id) {
        return (com.communote.server.model.tag.GlobalTagCategory) this.load(TRANSFORM_NONE, id);
    }

    /**
     * @see com.communote.server.persistence.tag.GlobalTagCategoryDao#loadAll()
     */
    @Override
    @SuppressWarnings({ "unchecked" })
    public java.util.Collection<com.communote.server.model.tag.GlobalTagCategory> loadAll() {
        return (java.util.Collection<com.communote.server.model.tag.GlobalTagCategory>) this
                .loadAll(TRANSFORM_NONE);
    }

    /**
     * Loads all entities and transforms them in place according to {@code transform}.
     *
     * @see com.communote.server.persistence.tag.GlobalTagCategoryDao#loadAll(int)
     */
    @Override
    public java.util.Collection<?> loadAll(final int transform) {
        final java.util.Collection<?> results = this.getHibernateTemplate().loadAll(
                com.communote.server.model.tag.GlobalTagCategoryImpl.class);
        this.transformEntities(transform, results);
        return results;
    }

    /**
     * @see com.communote.server.persistence.tag.GlobalTagCategoryDao#remove(com.communote.server.model.tag.GlobalTagCategory)
     */
    @Override
    public void remove(com.communote.server.model.tag.GlobalTagCategory globalTagCategory) {
        if (globalTagCategory == null) {
            throw new IllegalArgumentException(
                    "GlobalTagCategory.remove - 'globalTagCategory' can not be null");
        }
        this.getHibernateTemplate().delete(globalTagCategory);
    }

    /**
     * @see com.communote.server.persistence.tag.GlobalTagCategoryDao#remove(java.util.Collection)
     */
    @Override
    public void remove(
            java.util.Collection<com.communote.server.model.tag.GlobalTagCategory> entities) {
        if (entities == null) {
            throw new IllegalArgumentException(
                    "GlobalTagCategory.remove - 'entities' can not be null");
        }
        this.getHibernateTemplate().deleteAll(entities);
    }

    /**
     * Removes the entity with the given id; a no-op when no such entity exists.
     *
     * @see com.communote.server.persistence.tag.GlobalTagCategoryDao#remove(Long)
     */
    @Override
    public void remove(Long id) {
        if (id == null) {
            throw new IllegalArgumentException("GlobalTagCategory.remove - 'id' can not be null");
        }
        com.communote.server.model.tag.GlobalTagCategory entity = this.load(id);
        if (entity != null) {
            this.remove(entity);
        }
    }

    /**
     * @see com.communote.server.persistence.tag.GlobalTagCategoryDao#toGlobalTagCategoryVO(com.communote.server.model.tag.GlobalTagCategory)
     */
    @Override
    public com.communote.server.core.vo.tag.GlobalTagCategoryVO toGlobalTagCategoryVO(
            final com.communote.server.model.tag.GlobalTagCategory entity) {
        final com.communote.server.core.vo.tag.GlobalTagCategoryVO target = new com.communote.server.core.vo.tag.GlobalTagCategoryVO();
        this.toGlobalTagCategoryVO(entity, target);
        return target;
    }

    /**
     * Copies all entity fields onto the VO.
     *
     * @see com.communote.server.persistence.tag.GlobalTagCategoryDao#toGlobalTagCategoryVO(com.communote.server.model.tag.GlobalTagCategory,
     *      com.communote.server.core.vo.tag.GlobalTagCategoryVO)
     */
    @Override
    public void toGlobalTagCategoryVO(com.communote.server.model.tag.GlobalTagCategory source,
            com.communote.server.core.vo.tag.GlobalTagCategoryVO target) {
        target.setName(source.getName());
        target.setPrefix(source.getPrefix());
        target.setDescription(source.getDescription());
        target.setMultipleTags(source.isMultipleTags());
    }

    /**
     * Default implementation for transforming the results of a report query into a value object.
     * This implementation exists for convenience reasons only. It needs only be overridden in the
     * {@link GlobalTagCategoryDaoImpl} class if you intend to use reporting queries.
     * Picks the first {@code GlobalTagCategory} found in the row and converts it; returns
     * null when the row contains none.
     *
     * @see com.communote.server.persistence.tag.GlobalTagCategoryDao#toGlobalTagCategoryVO(com.communote.server.model.tag.GlobalTagCategory)
     */
    protected com.communote.server.core.vo.tag.GlobalTagCategoryVO toGlobalTagCategoryVO(
            Object[] row) {
        com.communote.server.core.vo.tag.GlobalTagCategoryVO target = null;
        if (row != null) {
            final int numberOfObjects = row.length;
            for (int ctr = 0; ctr < numberOfObjects; ctr++) {
                final Object object = row[ctr];
                if (object instanceof com.communote.server.model.tag.GlobalTagCategory) {
                    target = this
                            .toGlobalTagCategoryVO((com.communote.server.model.tag.GlobalTagCategory) object);
                    break;
                }
            }
        }
        return target;
    }

    /**
     * @see com.communote.server.persistence.tag.GlobalTagCategoryDao#toGlobalTagCategoryVOCollection(java.util.Collection)
     */
    @Override
    public final void toGlobalTagCategoryVOCollection(java.util.Collection entities) {
        if (entities != null) {
            org.apache.commons.collections.CollectionUtils.transform(entities,
                    GLOBALTAGCATEGORYVO_TRANSFORMER);
        }
    }

    /**
     * Transforms a collection of entities using the
     * {@link #transformEntity(int,com.communote.server.model.tag.GlobalTagCategory)} method. This
     * method does not instantiate a new collection.
     * <p/>
     * This method is to be used internally only.
     *
     * @param transform
     *            one of the constants declared in
     *            <code>com.communote.server.persistence.tag.GlobalTagCategoryDao</code>
     * @param entities
     *            the collection of entities to transform
     * @see #transformEntity(int,com.communote.server.model.tag.GlobalTagCategory)
     */
    protected void transformEntities(final int transform, final java.util.Collection<?> entities) {
        switch (transform) {
        case TRANSFORM_GLOBALTAGCATEGORYVO:
            toGlobalTagCategoryVOCollection(entities);
            break;
        case TRANSFORM_NONE: // fall-through
        default:
            // do nothing;
        }
    }

    /**
     * Allows transformation of entities into value objects (or something else for that matter),
     * when the <code>transform</code> flag is set to one of the constants defined in
     * <code>com.communote.server.persistence.tag.GlobalTagCategoryDao</code>, please note that the
     * {@link #TRANSFORM_NONE} constant denotes no transformation, so the entity itself will be
     * returned.
     * <p/>
     * This method will return instances of these types:
     * <ul>
     * <li>{@link com.communote.server.model.tag.GlobalTagCategory} - {@link #TRANSFORM_NONE}</li>
     * <li>{@link com.communote.server.core.vo.tag.GlobalTagCategoryVO} -
     * {@link TRANSFORM_GLOBALTAGCATEGORYVO}</li>
     * </ul>
     *
     * If the integer argument value is unknown {@link #TRANSFORM_NONE} is assumed.
     *
     * @param transform
     *            one of the constants declared in
     *            {@link com.communote.server.persistence.tag.GlobalTagCategoryDao}
     * @param entity
     *            an entity that was found
     * @return the transformed entity (i.e. new value object, etc)
     * @see #transformEntities(int,java.util.Collection)
     */
    protected Object transformEntity(final int transform,
            final com.communote.server.model.tag.GlobalTagCategory entity) {
        Object target = null;
        if (entity != null) {
            switch (transform) {
            case TRANSFORM_GLOBALTAGCATEGORYVO:
                target = toGlobalTagCategoryVO(entity);
                break;
            case TRANSFORM_NONE: // fall-through
            default:
                target = entity;
            }
        }
        return target;
    }

    /**
     * @see com.communote.server.persistence.tag.GlobalTagCategoryDao#update(com.communote.server.model.tag.GlobalTagCategory)
     */
    @Override
    public void update(com.communote.server.model.tag.GlobalTagCategory globalTagCategory) {
        if (globalTagCategory == null) {
            throw new IllegalArgumentException(
                    "GlobalTagCategory.update - 'globalTagCategory' can not be null");
        }
        this.getHibernateTemplate().update(globalTagCategory);
    }

    /**
     * Updates every entity of the collection inside one Hibernate session.
     *
     * @see com.communote.server.persistence.tag.GlobalTagCategoryDao#update(java.util.Collection)
     */
    @Override
    public void update(
            final java.util.Collection<com.communote.server.model.tag.GlobalTagCategory> entities) {
        if (entities == null) {
            throw new IllegalArgumentException(
                    "GlobalTagCategory.update - 'entities' can not be null");
        }
        this.getHibernateTemplate()
                .executeWithNativeSession(
                        new org.springframework.orm.hibernate3.HibernateCallback<com.communote.server.model.tag.GlobalTagCategory>() {
                            @Override
                            public com.communote.server.model.tag.GlobalTagCategory doInHibernate(
                                    org.hibernate.Session session)
                                    throws org.hibernate.HibernateException {
                                for (java.util.Iterator<com.communote.server.model.tag.GlobalTagCategory> entityIterator = entities
                                        .iterator(); entityIterator.hasNext();) {
                                    update(entityIterator.next());
                                }
                                return null;
                            }
                        });
    }
}
package pt.promatik.moss; import java.net.Socket; import java.net.SocketException; import java.net.SocketTimeoutException; import java.nio.charset.StandardCharsets; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.Observable; import pt.promatik.moss.utils.Utils; import pt.promatik.moss.vo.UserVO; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; public class User extends Observable { public static final int PIPE = 124; public static final int CARDINAL = 35; public static final String MSG_DELIMITER = "&!"; public static final String MSG_USER_DELIMITER = "&;"; public static final int GET_USERS_FILTER_ALL = 0; public static final int GET_USERS_FILTER_ONLINE = 1; public static final int GET_USERS_FILTER_OFFLINE = 2; private static Pattern patternMessage = Pattern.compile("^#MOSS#<!(.+)!>#<!(.+)?!>#<!(.+)?!>#$"); private static Pattern patternPingPong = Pattern.compile("p[i|o]ng"); // Commands private static final String CONNECT = "_connect"; private static final String DISCONNECT = "_disconnect"; private static final String UPDATE_STATUS = "_updateStatus"; private static final String UPDATE_AVAILABILITY = "_updateAvailability"; private static final String GET_USER = "_getUser"; private static final String GET_USERS = "_getUsers"; private static final String GET_USERS_COUNT = "_getUsersCount"; private static final String SET_DATA = "_setData"; private static final String GET_ROOMS = "_getRooms"; private static final String UPDATE_ROOM = "_updateRoom"; private static final String RANDOM_PLAYER = "_randomPlayer"; private static final String INVOKE = "_invoke"; private static final String INVOKE_ON_ROOM = "_invokeOnRoom"; private static final String INVOKE_ON_ALL = "_invokeOnAll"; private static final String SET_TIME_OUT = "_setTimeOut"; private static final String LOG = "_log"; private static final String PING = "_ping"; private 
static final String PONG = "_pong"; // Response messages private static final String OK = "ok"; private static final String ERROR = "error"; private static final String AUTH_ERROR = "auth_error"; private static final String AUTH_REQUIRED = "AUTH_REQUIRED"; private static final String WAITING = "waiting"; private static final String DOUBLE_LOGIN = "doublelogin"; private static final String ALREADY_LOGIN = "alreadyLogin"; public static final String ON = "on"; public static final String OFF = "off"; public static final String AVAILABLE = "1"; public static final String UNAVAILABLE = "0"; private Moss MOSS; private Socket socket = null; private InputStream in; private OutputStream out; protected String id = null; protected String room = ""; protected String status = ""; private boolean connected = false; private boolean available = true; private boolean waiting = false; private HashMap<String, Object> data = new HashMap<String, Object>(); private boolean validConn = false; private boolean encodeMessages = false; private Matcher match; // Getters public String id(){ return id; } public String room(){ return room; } public String status(){ return status; } public boolean isWaiting(){ return waiting; } public boolean isAvailable(){ return available; } public boolean isConnected(){ return connected; } public HashMap<String, Object> data(){ return data; } public Object privateData; public User(Moss instance, Socket newSocket, boolean waiting_status) { MOSS = instance; socket = newSocket; waiting = waiting_status; encodeMessages = MOSS.charset_in != StandardCharsets.UTF_8; start(); } public User(Moss instance, Socket newSocket) { this(instance, newSocket, false); } public User() { } public User(String id) { this.id = id; } public User(String id, String room, String status) { this.id = id; this.room = room; this.status = status; } public void start(Moss instance, Socket newSocket) { MOSS = instance; socket = newSocket; start(); } public void start() { if(socket != null) { 
connected = true; new Inport().start(); } else { Utils.error("Socket isn't defined!"); } } public String toString() { String[] user = {id, room, status, (connected ? ON : OFF), (available ? AVAILABLE : UNAVAILABLE), Utils.JSONStringify(data)}; return String.join(MSG_USER_DELIMITER, user); } public UserVO getVO() { return new UserVO(id, room); } public boolean invoke(String command) { return invoke(null, command, "", ""); } public boolean invoke(User from, String command) { return invoke(from, command, "", ""); } public boolean invoke(String command, String message) { return invoke(null, command, message, ""); } public boolean invoke(String command, String message, String request) { return invoke(null, command, message, request); } public boolean invoke(User from, String command, String message) { return invoke(from, command, message, ""); } public boolean invoke(User from, String command, String message, String request) { logMessage(command, message, ">"); return sendMessage("#MOSS#<!" + command + "!>#<!" + (from != null ? from.toString() : "") + "!>#<!" + message + "!>#<!" 
+ request + "!>#|"); // NOTE(review): tail of a message-framing method whose start is outside this view; kept verbatim.
    }

    /**
     * Writes a raw message to the socket output stream using the configured
     * outbound charset. On IOException the connection is torn down.
     *
     * @param message raw protocol payload to send
     * @return true if the bytes were written while connected, false otherwise
     */
    private boolean sendMessage(String message) {
        boolean sent = false;
        try {
            if(connected) {
                out.write(message.getBytes(MOSS.charset_out));
                sent = true;
            }
        } catch (IOException e) {
            Utils.error(id + ", connection IOException");
            // A write failure means the peer is gone; drop the connection.
            disconnect();
        }
        return sent;
    }

    // GETS & SETS

    /** @return the user's free-form status string. */
    public String getStatus() {
        return status;
    }

    /** Sets the user's free-form status string (no notification is dispatched here). */
    public void setStatus(String status) {
        this.status = status;
    }

    /** @return whether the user is currently marked available. */
    public Boolean getAvailability() {
        return available;
    }

    /** Sets availability from a boolean. */
    public void setAvailability(Boolean available) {
        this.available = available;
    }

    /** Sets availability from its protocol string form (compared against AVAILABLE). */
    public void setAvailability(String available) {
        this.available = available.equals(AVAILABLE);
    }

    /**
     * Socket reader thread. Accumulates bytes until the PIPE frame terminator,
     * then hands complete frames to {@link #processMessage(String)}. Also answers
     * the legacy Flash cross-domain policy request before the first valid frame.
     * Any error or EOF ends the loop and disconnects the user.
     */
    private class Inport extends Thread {
        public void run() {
            try {
                in = socket.getInputStream();
                out = socket.getOutputStream();
            } catch(IOException e) {
                Utils.log(e);
                return;
            }
            try {
                // NOTE(review): frame accumulation via String += is O(n^2) per frame;
                // a StringBuilder would be cheaper for large messages.
                String result = "";
                int k = in.read();
                while( connected ) {
                    result += (char) k;
                    if(k == PIPE) {
                        // Frame complete: optionally re-decode bytes from the inbound charset.
                        if(encodeMessages)
                            result = new String(result.getBytes(MOSS.charset_in), StandardCharsets.UTF_8);
                        validConn = true;
                        if(result.charAt(0) == CARDINAL) // Easily pre-validates moss message
                            processMessage(result);
                        result = "";
                    }
                    // Flash privacy policy
                    if(!validConn && result.equals("<policy-file-request/>")) {
                        sendMessage("<?xml version=\"1.0\"?><cross-domain-policy><allow-access-from domain=\"*\" to-ports=\"" + MOSS.server_port + "\" /></cross-domain-policy>\0");
                    }
                    // read() < 0 means EOF: peer closed the stream.
                    if((k = in.read()) < 0)
                        break;
                }
            } catch (SocketTimeoutException e) {
                if(id != null)
                    Utils.error(id + ", connection reset SocketTimeoutException");
            } catch (SocketException e) {
                Utils.error(id + ", connection reset SocketException");
            } catch (Exception e) {
                Utils.log(id + ", connection reset Exception: " + e.toString(), e);
            } finally {
                disconnect();
            }
        }
    }

    /**
     * Logs an inbound/outbound command at LOG_FULL level, skipping ping/pong
     * chatter (filtered by patternPingPong). The leading underscore of reserved
     * commands is stripped for readability.
     *
     * @param command   protocol command name
     * @param message   delimited argument string
     * @param separator direction marker (e.g. "&lt;" for inbound)
     */
    private void logMessage(String command, String message, String separator) {
        if(Utils.log_level >= Utils.LOG_FULL) {
            match = patternPingPong.matcher(command + message);
            if (!match.find())
                Utils.log(this.id + separator + " " + command.replaceFirst("^_", "") + "(" +
                        message.replaceAll(MSG_DELIMITER, ", ") + ")");
        }
    }

    /**
     * Parses one complete protocol frame and dispatches it. Reserved commands
     * (leading underscore) are handled here; anything else is forwarded to
     * {@link #dispatchMessage(String, String, String)} for application code.
     *
     * @param msg complete frame text, terminated by the pipe character
     */
    public void processMessage(String msg) {
        // Protocol: #MOSS#<!(command)!>#<!(messages)!>#<!(request)!>#|
        // #MOSS#<!_connect !>#<!(id)&!(room)&!(status)?&!(login)?&!(password)? !>#<!(request)!>#|
        // #MOSS#<!_disconnect !>#<! !>#<!(request)!>#|
        // #MOSS#<!_updateStatus !>#<!(status) !>#<!(request)!>#|
        // #MOSS#<!_updateAvailability !>#<!(available) !>#<!(request)!>#|
        // #MOSS#<!_getUser !>#<!(id)&!(room) !>#<!(request)!>#|
        // #MOSS#<!_getRooms !>#<! !>#<!(request)!>#|
        // #MOSS#<!_updateRoom !>#<!(room) !>#<!(request)!>#|
        // #MOSS#<!_getUsers !>#<!(room)&!(limit)?&!(page)?&!(available)?&!(search)? !>#<!(request)!>#|
        // #MOSS#<!_getUsersCount !>#<!(room) !>#<!(request)!>#|
        // #MOSS#<!_invoke !>#<!(id)&!(room)&!(command)&!(message) !>#<!(request)!>#|
        // #MOSS#<!_invokeOnRoom !>#<!(room)&!(command)&!(message) !>#<!(request)!>#|
        // #MOSS#<!_invokeOnAll !>#<!(command)&!(message) !>#<!(request)!>#|
        // #MOSS#<!_setTimeOut !>#<!(milliseconds) !>#<!(request)!>#|
        // #MOSS#<!_randomPlayer !>#<!(room) !>#<!(request)!>#|
        // #MOSS#<!_setData !>#<!(attribute)&!(data) !>#<!(request)!>#|
        // #MOSS#<!_log !>#<!(data) !>#<!(request)!>#|
        msg = msg.replaceAll("\\|", "");
        match = patternMessage.matcher(msg);
        if (match.matches()) {
            // "+ \"\"" coerces a possible null group to the string "null".
            String command = match.group(1) + "";
            String message = match.group(2) + "";
            String request = match.group(3) + "";
            String[] messages = null;
            if(!message.equals(""))
                messages = message.split(MSG_DELIMITER);
            // Only CONNECT is allowed before the user has an id.
            if(!command.equals(CONNECT) && id == null)
                return;
            String result = "";
            boolean opStatus = false;
            boolean first = true;
            // Log
            logMessage(command, message, "<");
            // Reserved commands start with underscore
            if(command.charAt(0) == '_') {
                switch(command) {
                    case CONNECT:
                        // Validate user login
                        // NOTE(review): when MOSS.validateLogin is true this guard only
                        // requires 4 fields but messages[4] and messages[5] are read below,
                        // so a 4- or 5-field login throws ArrayIndexOutOfBoundsException —
                        // guard should likely be >= 6; confirm against client protocol.
                        if((MOSS.validateLogin && messages.length >= 4)
                                || (!MOSS.validateLogin && messages.length >= 2)) {
                            this.id = MOSS.validateLogin ?
                                    MOSS.validateLogin(messages[3], messages[4], messages[5]) : messages[0];
                            if(this.id != null) {
                                this.room = messages[1];
                                if (messages.length >= 3)
                                    this.status = messages[2];
                                // Check double login
                                boolean found = MOSS.server.checkDoubleLogin(this.id) != null;
                                if(MOSS.autoLogoutOnDoubleLogin || !found) {
                                    MOSS.server.getRoom(this.room).add(this.id, this);
                                    // id "0" is presumably an anonymous/system user; verify.
                                    if(!this.id.equals("0"))
                                        MOSS.userConnected(this);
                                    approveLogin(!waiting, request);
                                } else {
                                    invoke(CONNECT, ALREADY_LOGIN, request);
                                }
                            } else {
                                invoke(CONNECT, AUTH_ERROR, request);
                            }
                        } else {
                            invoke(CONNECT, AUTH_REQUIRED, request);
                        }
                        break;
                    case DISCONNECT:
                        invoke(DISCONNECT, OK, request);
                        disconnect();
                        break;
                    case UPDATE_STATUS:
                        if (messages.length == 1) {
                            status = messages[0];
                            invoke(UPDATE_STATUS, messages[0], request);
                            MOSS.userUpdatedStatus(this, status);
                        }
                        break;
                    case UPDATE_AVAILABILITY:
                        if (messages.length == 1) {
                            available = messages[0].equals(AVAILABLE);
                            invoke(UPDATE_AVAILABILITY, messages[0], request);
                            MOSS.userUpdatedAvailability(this, this.available);
                        }
                        break;
                    case GET_ROOMS:
                        // Reply with a delimited list of all room names.
                        Collection<Room> rooms = MOSS.server.getRooms();
                        if(rooms != null) {
                            for (Room room : rooms) {
                                result += (!first ? MSG_DELIMITER : "") + room.name;
                                first = false;
                            }
                        }
                        invoke(GET_ROOMS, result, request);
                        break;
                    case UPDATE_ROOM:
                        // Move this user from their current room to the requested one.
                        if (messages.length == 1 && messages[0] != null) {
                            MOSS.server.getRoom(this.room).remove(this.id);
                            MOSS.server.getRoom(messages[0]).add(this.id, this);
                            MOSS.userUpdatedRoom(this, messages[0]);
                        }
                        invoke(UPDATE_ROOM, OK, request);
                        break;
                    case GET_USER:
                        if (messages.length == 2) {
                            UserVO uvo = new UserVO(messages[0], messages[1]);
                            User user = MOSS.getUserByID(uvo);
                            // Fall back to the bare value object when the user is offline.
                            if(user != null)
                                result = user.toString();
                            else
                                result = uvo.toString();
                        }
                        invoke(GET_USER, result, request);
                        break;
                    case GET_USERS:
                        // Paged user listing; all arguments optional with defaults.
                        // NOTE(review): local 'available' (int) shadows the boolean field
                        // of the same name on this class.
                        List<User> users = null;
                        String room = "";
                        int limit = 20, page = 0, available = 0;
                        HashMap<String, Object> search = null;
                        if(messages.length > 0) room = messages[0];
                        if(messages.length > 1) limit = Integer.parseInt(messages[1]);
                        if(messages.length > 2) page = Integer.parseInt(messages[2]);
                        if(messages.length > 3) available = Integer.parseInt(messages[3]);
                        if(messages.length > 4) search = (HashMap<String, Object>) Utils.map(messages[4].split(","));
                        users = MOSS.getUsers(room, limit, page, available, search);
                        if(users != null) {
                            for (User user : users) {
                                result += (!first ? MSG_DELIMITER : "") + user.toString();
                                first = false;
                            }
                        }
                        invoke(GET_USERS, result, request);
                        break;
                    case GET_USERS_COUNT:
                        if (messages.length == 1) {
                            int total = MOSS.getUsersCount(messages[0]);
                            invoke(GET_USERS_COUNT, String.valueOf(total), request);
                        }
                        break;
                    case SET_DATA:
                        // Two args: set attribute; one arg: remove attribute.
                        if (messages.length == 2) {
                            data.put(messages[0], messages[1]);
                        } else if (messages.length == 1) {
                            data.remove(messages[0]);
                        }
                        invoke(SET_DATA, OK, request);
                        break;
                    case RANDOM_PLAYER:
                        if (messages.length == 1) {
                            User player = MOSS.pickRandomPlayer(id, messages[0]);
                            invoke(RANDOM_PLAYER, (player != null ? player.toString() : "null"), request);
                        }
                        break;
                    case INVOKE:
                        // Relay a command to a single user; fourth field is optional.
                        String optionalMessage = (messages.length == 4 ? messages[3] : "");
                        if (messages.length >= 3)
                            opStatus = MOSS.invoke(this, messages[0], messages[1], messages[2], optionalMessage);
                        invoke(INVOKE, opStatus ? OK : ERROR, request);
                        break;
                    case INVOKE_ON_ROOM:
                        if (messages.length == 3) {
                            MOSS.invokeOnRoom(this, messages[0], messages[1], messages[2]);
                            opStatus = true;
                        }
                        invoke(INVOKE_ON_ROOM, opStatus ? OK : ERROR, request);
                        break;
                    case INVOKE_ON_ALL:
                        if (messages.length >= 1) {
                            MOSS.invokeOnAll(this, messages[0], messages[1]);
                            opStatus = true;
                        }
                        invoke(INVOKE_ON_ALL, opStatus ? OK : ERROR, request);
                        break;
                    case SET_TIME_OUT:
                        // NOTE(review): Integer.valueOf can throw NumberFormatException,
                        // which is not caught here and propagates to the reader thread's
                        // generic catch, resetting the connection.
                        try {
                            int timeout = Integer.valueOf(message);
                            if(timeout >= 0 && timeout <= 600000) { // 10 minutes max
                                socket.setSoTimeout(timeout);
                                opStatus = true;
                            }
                        } catch (SocketException e) {
                            e.printStackTrace();
                        }
                        invoke(SET_TIME_OUT, opStatus ? OK : ERROR, request);
                        break;
                    case LOG:
                        MOSS.filelog.add(id(), message);
                        invoke(LOG, OK, request);
                        break;
                    case PING:
                        invoke(PONG, OK, request);
                        break;
                    case PONG:
                    case "":
                        // Pong replies and empty commands are intentionally ignored.
                        break;
                    default:
                        // Unknown reserved command: echo it back with an error status.
                        invoke(command, ERROR, request);
                        break;
                }
            } else {
                // Non-reserved command: hand off to application-level handling.
                dispatchMessage(command, message, request);
            }
        }
    }

    /** Dispatches an application command with no payload and no request id. */
    protected void dispatchMessage(String command) {
        dispatchMessage(command, "", "");
    }

    /** Dispatches an application command with a payload but no request id. */
    protected void dispatchMessage(String command, String message) {
        dispatchMessage(command, message, "");
    }

    /**
     * Notifies observers and the MOSS application layer of a non-reserved
     * client command.
     *
     * @param command application command name
     * @param message delimited payload
     * @param request client-side request correlation id (may be empty)
     */
    protected void dispatchMessage(String command, String message, String request) {
        UserNotification u = new UserNotification(UserNotification.MESSAGE, this, command, message, request);
        dispatchNotification(u);
        MOSS.userMessage(this, command, message, request);
    }

    /**
     * Sends the CONNECT response: OK when approved, WAITING otherwise.
     * No-ops when the user never obtained an id.
     */
    private void approveLogin(boolean approved, String request) {
        if(id != null)
            invoke(CONNECT, approved ? OK : WAITING, request);
    }

    /** Convenience overload: approve the login with no request id. */
    protected void approveLogin() {
        approveLogin(true, "");
    }

    /** Marks this observable changed and pushes the notification to observers. */
    private void dispatchNotification(UserNotification notification) {
        setChanged();
        notifyObservers(notification);
    }

    /** Informs the client it was logged in elsewhere, then drops this connection. */
    protected void doubleLogin() {
        invoke(CONNECT, DOUBLE_LOGIN);
        disconnect();
    }

    /** Sends a keep-alive ping to the client. */
    public void ping() {
        invoke(User.PING);
    }

    /**
     * Idempotently tears down the connection: flags disconnected, notifies
     * observers, removes the user from server/room registries, and closes
     * the streams and socket.
     */
    public void disconnect() {
        // Guard makes repeated calls (reader thread + command handler) harmless.
        if(!connected)
            return;
        connected = false;
        dispatchNotification(new UserNotification(UserNotification.DISCONNECTED, this));
        Utils.log(this.id + ", " + socket + " has disconnected.");
        try {
            MOSS.server.removeUser(this);
            MOSS.server.getRoom(this.room).remove(this.id);
            if(this.id != null)
                MOSS.userDisconnected(this);
            in.close();
            out.close();
            socket.close();
            socket = null;
        } catch(IOException e) {
            Utils.log("Could not purge " + socket + ".", e);
        }
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

package org.elasticsearch.search.aggregations.metrics;

import org.elasticsearch.Version;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.script.MockScriptEngine;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptEngine;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.aggregations.Aggregation.CommonFields;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.PipelineTree;
import org.elasticsearch.test.InternalAggregationTestCase;
import org.elasticsearch.test.VersionUtils;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.function.Predicate;
import java.util.function.Supplier;

import static java.util.Collections.singletonList;
import static org.hamcrest.Matchers.equalTo;

/**
 * Round-trip and reduction tests for {@link InternalScriptedMetric}, built on
 * the generic {@link InternalAggregationTestCase} machinery. Random aggregation
 * states are generated as nested maps/lists bottoming out in leaf values, and a
 * mock reduce script simply returns the number of shard states it receives.
 */
public class InternalScriptedMetricTests extends InternalAggregationTestCase<InternalScriptedMetric> {

    private static final String REDUCE_SCRIPT_NAME = "reduceScript";
    // Randomized per test in setUp(): whether instances carry a reduce script.
    private boolean hasReduceScript;
    // Per-test chain of value suppliers: nested containers first, leaf last.
    private Supplier<Object>[] valueTypes;
    // Suppliers for the leaf (non-container) values a scripted metric may hold.
    private final Supplier<Object>[] leafValueSuppliers = new Supplier[] {
        () -> randomInt(),
        () -> randomLong(),
        () -> randomDouble(),
        () -> randomFloat(),
        () -> randomBoolean(),
        () -> randomAlphaOfLength(5),
        () -> new GeoPoint(randomDouble(), randomDouble()),
        () -> null };
    // Suppliers for container values that can nest further levels.
    private final Supplier<Object>[] nestedValueSuppliers = new Supplier[] {
        () -> new HashMap<String, Object>(),
        () -> new ArrayList<>() };

    @Override
    public void setUp() throws Exception {
        super.setUp();
        hasReduceScript = randomBoolean();
        // we want the same value types (also for nested lists, maps) for all random aggregations
        int levels = randomIntBetween(1, 3);
        valueTypes = new Supplier[levels];
        for (int i = 0; i < levels; i++) {
            if (i < levels - 1) {
                valueTypes[i] = randomFrom(nestedValueSuppliers);
            } else {
                // the last one needs to be a leaf value, not map or list
                valueTypes[i] = randomFrom(leafValueSuppliers);
            }
        }
    }

    /**
     * Builds a random test instance, optionally attaching an inline mock
     * reduce script (decided once per test in {@link #setUp()}).
     */
    @Override
    protected InternalScriptedMetric createTestInstance(String name, Map<String, Object> metadata) {
        Map<String, Object> params = new HashMap<>();
        if (randomBoolean()) {
            params.put(randomAlphaOfLength(5), randomAlphaOfLength(5));
        }
        Script reduceScript = null;
        if (hasReduceScript) {
            reduceScript = new Script(ScriptType.INLINE, MockScriptEngine.NAME, REDUCE_SCRIPT_NAME, params);
        }
        return new InternalScriptedMetric(name, randomAggregations(), reduceScript, metadata);
    }

    /** Produces a random list of per-shard aggregation states (1 or up to 5 entries). */
    private List<Object> randomAggregations() {
        return randomList(randomBoolean() ? 1 : 5, this::randomAggregation);
    }

    /** Builds one random aggregation state: nested containers ending in a leaf value. */
    @SuppressWarnings({ "rawtypes", "unchecked" })
    private Object randomAggregation() {
        int levels = randomIntBetween(1, 3);
        Supplier[] valueTypes = new Supplier[levels];
        for (int l = 0; l < levels; l++) {
            if (l < levels - 1) {
                valueTypes[l] = randomFrom(nestedValueSuppliers);
            } else {
                // the last one needs to be a leaf value, not map or
                // list
                valueTypes[l] = randomFrom(leafValueSuppliers);
            }
        }
        return randomValue(valueTypes, 0);
    }

    /**
     * Recursively fills a value from the supplier chain: maps and lists get
     * 1-5 child elements drawn from the next level, leaves are returned as-is.
     */
    @SuppressWarnings("unchecked")
    private static Object randomValue(Supplier<Object>[] valueTypes, int level) {
        Object value = valueTypes[level].get();
        if (value instanceof Map) {
            int elements = randomIntBetween(1, 5);
            Map<String, Object> map = (Map<String, Object>) value;
            for (int i = 0; i < elements; i++) {
                map.put(randomAlphaOfLength(5), randomValue(valueTypes, level + 1));
            }
        } else if (value instanceof List) {
            int elements = randomIntBetween(1,5);
            List<Object> list = (List<Object>) value;
            for (int i = 0; i < elements; i++) {
                list.add(randomValue(valueTypes, level + 1));
            }
        }
        return value;
    }

    /**
     * Mock of the script service. The script that is run looks at the
     * "states" context variable visible when executing the script and simply returns the count.
     * This should be equal to the number of input InternalScriptedMetrics that are reduced
     * in total.
     */
    @Override
    protected ScriptService mockScriptService() {
        // mock script always returns the size of the input aggs list as result
        @SuppressWarnings("unchecked")
        MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME,
                Collections.singletonMap(REDUCE_SCRIPT_NAME, script -> ((List<Object>) script.get("states")).size()),
                Collections.emptyMap());
        Map<String, ScriptEngine> engines = Collections.singletonMap(scriptEngine.getType(), scriptEngine);
        return new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS);
    }

    @Override
    protected void assertReduced(InternalScriptedMetric reduced, List<InternalScriptedMetric> inputs) {
        InternalScriptedMetric firstAgg = inputs.get(0);
        assertEquals(firstAgg.getName(), reduced.getName());
        assertEquals(firstAgg.getMetadata(), reduced.getMetadata());
        // Total shard-state count across all inputs.
        int size = (int) inputs.stream().mapToLong(i -> i.aggregationsList().size()).sum();
        if (hasReduceScript) {
            // The mock reduce script returns the state count itself.
            assertEquals(size, reduced.aggregation());
        } else {
            // Without a reduce script the states are simply concatenated.
            assertEquals(size, ((List<?>) reduced.aggregation()).size());
        }
    }

    /** xContent round-trips require a fully reduced instance, so reduce first. */
    @Override
    public InternalScriptedMetric createTestInstanceForXContent() {
        InternalScriptedMetric aggregation = createTestInstance();
        return (InternalScriptedMetric) aggregation.reduce(
            singletonList(aggregation),
            ReduceContext.forFinalReduction(null, mockScriptService(), null, PipelineTree.EMPTY)
        );
    }

    @Override
    protected void assertFromXContent(InternalScriptedMetric aggregation, ParsedAggregation parsedAggregation) {
        assertTrue(parsedAggregation instanceof ParsedScriptedMetric);
        ParsedScriptedMetric parsed = (ParsedScriptedMetric) parsedAggregation;
        assertValues(aggregation.aggregation(), parsed.aggregation());
    }

    /**
     * Deep-compares an original aggregation value to its xContent-parsed
     * counterpart, tolerating the numeric/geo representation changes the
     * xContent round trip introduces (long->int, float->double, GeoPoint->map).
     */
    private static void assertValues(Object expected, Object actual) {
        if (expected instanceof Long) {
            // longs that fit into the integer range are parsed back as integer
            if (actual instanceof Integer) {
                assertEquals(((Long) expected).intValue(), actual);
            } else {
                assertEquals(expected, actual);
            }
        } else if (expected instanceof Float) {
            // based on the xContent type, floats are sometimes parsed back as doubles
            if (actual instanceof Double) {
                assertEquals(expected, ((Double) actual).floatValue());
            } else {
                assertEquals(expected, actual);
            }
        } else if (expected instanceof GeoPoint) {
            assertTrue(actual instanceof Map);
            GeoPoint point = (GeoPoint) expected;
            Map<String, Object> pointMap = (Map<String, Object>) actual;
            assertEquals(point.getLat(), pointMap.get("lat"));
            assertEquals(point.getLon(), pointMap.get("lon"));
        } else if (expected instanceof Map) {
            Map<String, Object> expectedMap = (Map<String, Object>) expected;
            Map<String, Object> actualMap = (Map<String, Object>) actual;
            assertEquals(expectedMap.size(), actualMap.size());
            for (String key : expectedMap.keySet()) {
                assertValues(expectedMap.get(key), actualMap.get(key));
            }
        } else if (expected instanceof List) {
            List<Object> expectedList = (List<Object>) expected;
            List<Object> actualList = (List<Object>) actual;
            assertEquals(expectedList.size(), actualList.size());
            Iterator<Object> actualIterator = actualList.iterator();
            for (Object element : expectedList) {
                assertValues(element, actualIterator.next());
            }
        } else {
            assertEquals(expected, actual);
        }
    }

    /** Skip random-insertion shuffling under "value" paths: values are opaque. */
    @Override
    protected Predicate<String> excludePathsFromXContentInsertion() {
        return path -> path.contains(CommonFields.VALUE.getPreferredName());
    }

    /** Mutates exactly one property so equals/hashCode distinguish instances. */
    @Override
    protected InternalScriptedMetric mutateInstance(InternalScriptedMetric instance) throws IOException {
        String name = instance.getName();
        List<Object> aggregationsList = instance.aggregationsList();
        Script reduceScript = instance.reduceScript;
        Map<String, Object> metadata = instance.getMetadata();
        switch (between(0, 3)) {
            case 0:
                name += randomAlphaOfLength(5);
                break;
            case 1:
                aggregationsList = randomValueOtherThan(aggregationsList, this::randomAggregations);
                break;
            case 2:
                reduceScript = new Script(ScriptType.INLINE, MockScriptEngine.NAME, REDUCE_SCRIPT_NAME + "-mutated",
                        Collections.emptyMap());
                break;
            case 3:
                if (metadata == null) {
                    metadata = new HashMap<>(1);
                } else {
                    metadata = new HashMap<>(instance.getMetadata());
                }
                metadata.put(randomAlphaOfLength(15), randomInt());
                break;
            default:
                throw new AssertionError("Illegal randomisation branch");
        }
        return new InternalScriptedMetric(name, aggregationsList, reduceScript, metadata);
    }

    /**
     * Pre-7.8 wire format only supported fully reduced (single element) aggs;
     * verify the round trip for that case and the explicit failure for the
     * unreduced multi-element case.
     */
    public void testOldSerialization() throws IOException {
        // A single element list looks like a fully reduced agg
        InternalScriptedMetric original = new InternalScriptedMetric("test", List.of("foo"), new Script("test"), null);
        InternalScriptedMetric roundTripped = (InternalScriptedMetric) copyNamedWriteable(
            original,
            getNamedWriteableRegistry(),
            InternalAggregation.class,
            VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, VersionUtils.getPreviousVersion(Version.V_7_8_0))
        );
        assertThat(roundTripped, equalTo(original));

        // A multi-element list looks like a non-reduced agg
        InternalScriptedMetric unreduced = new InternalScriptedMetric("test", List.of("foo", "bar"), new Script("test"), null);
        Exception e = expectThrows(IllegalArgumentException.class, () -> copyNamedWriteable(
            unreduced,
            getNamedWriteableRegistry(),
            InternalAggregation.class,
            VersionUtils.randomVersionBetween(random(), Version.V_7_0_0, VersionUtils.getPreviousVersion(Version.V_7_8_0))
        ));
        assertThat(e.getMessage(), equalTo("scripted_metric doesn't support cross cluster search until 7.8.0"));
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.beam.sdk.nexmark.queries;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import javax.annotation.Nullable;
import org.apache.beam.sdk.nexmark.NexmarkConfiguration;
import org.apache.beam.sdk.nexmark.NexmarkUtils;
import org.apache.beam.sdk.nexmark.model.Auction;
import org.apache.beam.sdk.nexmark.model.AuctionBid;
import org.apache.beam.sdk.nexmark.model.Bid;
import org.apache.beam.sdk.nexmark.model.Event;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.values.TimestampedValue;
import org.joda.time.Instant;

/**
 * A simulator of the {@code WinningBids} query.
 */
public class WinningBidsSimulator extends AbstractSimulator<Event, AuctionBid> {
  /** Auctions currently still open, indexed by auction id. */
  private final Map<Long, Auction> openAuctions;

  /** The ids of auctions known to be closed. */
  private final Set<Long> closedAuctions;

  /** Current best valid bids for open auctions, indexed by auction id. */
  private final Map<Long, Bid> bestBids;

  /** Bids for auctions we haven't seen yet. */
  private final List<Bid> bidsWithoutAuctions;

  /**
   * Timestamp of last new auction or bid event (ms since epoch).
   */
  private long lastTimestamp;

  public WinningBidsSimulator(NexmarkConfiguration configuration) {
    super(NexmarkUtils.standardEventIterator(configuration));
    openAuctions = new TreeMap<>();
    closedAuctions = new TreeSet<>();
    bestBids = new TreeMap<>();
    bidsWithoutAuctions = new ArrayList<>();
    // Sentinel: no event seen yet.
    lastTimestamp = BoundedWindow.TIMESTAMP_MIN_VALUE.getMillis();
  }

  /**
   * Try to account for {@code bid} in state. Return true if bid has now been
   * accounted for by {@code bestBids}.
   */
  private boolean captureBestBid(Bid bid, boolean shouldLog) {
    if (closedAuctions.contains(bid.auction)) {
      // Ignore bids for known, closed auctions.
      if (shouldLog) {
        NexmarkUtils.info("closed auction: %s", bid);
      }
      return true;
    }
    Auction auction = openAuctions.get(bid.auction);
    if (auction == null) {
      // We don't have an auction for this bid yet, so can't determine if it is
      // winning or not.
      if (shouldLog) {
        NexmarkUtils.info("pending auction: %s", bid);
      }
      return false;
    }
    if (bid.price < auction.reserve) {
      // Bid price is too low.
      if (shouldLog) {
        NexmarkUtils.info("below reserve: %s", bid);
      }
      return true;
    }
    Bid existingBid = bestBids.get(bid.auction);
    if (existingBid == null || Bid.PRICE_THEN_DESCENDING_TIME.compare(existingBid, bid) < 0) {
      // We've found a (new) best bid for a known auction.
      bestBids.put(bid.auction, bid);
      if (shouldLog) {
        NexmarkUtils.info("new winning bid: %s", bid);
      }
    } else {
      if (shouldLog) {
        NexmarkUtils.info("ignoring low bid: %s", bid);
      }
    }
    return true;
  }

  /**
   * Try to match bids without auctions to auctions.
   */
  private void flushBidsWithoutAuctions() {
    Iterator<Bid> itr = bidsWithoutAuctions.iterator();
    while (itr.hasNext()) {
      Bid bid = itr.next();
      if (captureBestBid(bid, false)) {
        NexmarkUtils.info("bid now accounted for: %s", bid);
        // Iterator.remove avoids ConcurrentModificationException while pruning.
        itr.remove();
      }
    }
  }

  /**
   * Return the next winning bid for an expired auction relative to {@code timestamp}.
   * Return null if no more winning bids, in which case all expired auctions will
   * have been removed from our state. Retire auctions in order of expire time.
   */
  @Nullable
  private TimestampedValue<AuctionBid> nextWinningBid(long timestamp) {
    // Group expired auction ids by expiry time; TreeMap gives ascending order.
    Map<Long, List<Long>> toBeRetired = new TreeMap<>();
    for (Map.Entry<Long, Auction> entry : openAuctions.entrySet()) {
      if (entry.getValue().expires <= timestamp) {
        List<Long> idsAtTime = toBeRetired.get(entry.getValue().expires);
        if (idsAtTime == null) {
          idsAtTime = new ArrayList<>();
          toBeRetired.put(entry.getValue().expires, idsAtTime);
        }
        idsAtTime.add(entry.getKey());
      }
    }
    for (Map.Entry<Long, List<Long>> entry : toBeRetired.entrySet()) {
      for (long id : entry.getValue()) {
        Auction auction = openAuctions.get(id);
        NexmarkUtils.info("retiring auction: %s", auction);
        openAuctions.remove(id);
        Bid bestBid = bestBids.get(id);
        if (bestBid != null) {
          // Return at most one winner per call; later expirations are handled
          // by subsequent calls (their auctions remain in openAuctions).
          TimestampedValue<AuctionBid> result =
              TimestampedValue.of(new AuctionBid(auction, bestBid), new Instant(auction.expires));
          NexmarkUtils.info("winning: %s", result);
          return result;
        }
      }
    }
    return null;
  }

  @Override
  protected void run() {
    if (lastTimestamp > BoundedWindow.TIMESTAMP_MIN_VALUE.getMillis()) {
      // We may have finally seen the auction a bid was intended for.
      flushBidsWithoutAuctions();
      TimestampedValue<AuctionBid> result = nextWinningBid(lastTimestamp);
      if (result != null) {
        addResult(result);
        return;
      }
    }
    TimestampedValue<Event> timestampedEvent = nextInput();
    if (timestampedEvent == null) {
      // No more events. Flush any still open auctions.
      TimestampedValue<AuctionBid> result =
          nextWinningBid(BoundedWindow.TIMESTAMP_MAX_VALUE.getMillis());
      if (result == null) {
        // We are done.
        allDone();
        return;
      }
      addResult(result);
      return;
    }
    Event event = timestampedEvent.getValue();
    if (event.newPerson != null) {
      // Ignore new person events.
      return;
    }
    lastTimestamp = timestampedEvent.getTimestamp().getMillis();
    if (event.newAuction != null) {
      // Add this new open auction to our state.
      openAuctions.put(event.newAuction.id, event.newAuction);
    } else {
      if (!captureBestBid(event.bid, true)) {
        // We don't know what to do with this bid yet.
        NexmarkUtils.info("bid not yet accounted for: %s", event.bid);
        bidsWithoutAuctions.add(event.bid);
      }
    }
    // Keep looking for winning bids.
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.cache.verify;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.function.Consumer;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.compute.ComputeJob;
import org.apache.ignite.compute.ComputeJobResult;
import org.apache.ignite.compute.ComputeJobResultPolicy;
import org.apache.ignite.compute.ComputeTaskAdapter;
import org.apache.ignite.internal.processors.task.GridInternal;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.SB;
import org.apache.ignite.internal.visor.verify.VisorIdleVerifyDumpTaskArg;
import org.apache.ignite.internal.visor.verify.VisorIdleVerifyTaskArg;
import org.apache.ignite.resources.IgniteInstanceResource;
import org.apache.ignite.resources.LoggerResource;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

/**
 * Task for collection checksums primary and backup partitions of specified caches. <br> Argument: Set of cache names,
 * 'null' will trigger verification for all caches. <br> Result: {@link IdleVerifyDumpResult} with all found partitions.
 * <br> Works properly only on idle cluster - there may be false positive conflict reports if data in cluster is being
 * concurrently updated.
 */
@GridInternal
public class VerifyBackupPartitionsDumpTask extends ComputeTaskAdapter<VisorIdleVerifyTaskArg, String> {
    /** */
    private static final long serialVersionUID = 0L;

    /** Time formatter for dump file name. */
    private static final DateTimeFormatter TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH-mm-ss_SSS");

    /** Visible for testing. */
    public static final String IDLE_DUMP_FILE_PREFIX = "idle-dump-";

    /** Delegate for map execution */
    private final VerifyBackupPartitionsTaskV2 delegate = new VerifyBackupPartitionsTaskV2();

    /** */
    private VisorIdleVerifyDumpTaskArg taskArg;

    /** */
    @IgniteInstanceResource
    private Ignite ignite;

    /** Injected logger. */
    @LoggerResource
    private IgniteLogger log;

    /** {@inheritDoc} */
    @NotNull @Override public Map<? extends ComputeJob, ClusterNode> map(
        List<ClusterNode> subgrid,
        VisorIdleVerifyTaskArg arg
    ) throws IgniteException {
        // Remember dump-specific options (e.g. skipZeros) when available.
        if (arg instanceof VisorIdleVerifyDumpTaskArg)
            taskArg = (VisorIdleVerifyDumpTaskArg)arg;

        // Job mapping itself is identical to the non-dump verification task.
        return delegate.map(subgrid, arg);
    }

    /** {@inheritDoc} */
    @Override public @Nullable String reduce(List<ComputeJobResult> results) throws IgniteException {
        // Merge per-node hashes into per-partition lists, ordered by (groupId, partitionId).
        Map<PartitionKeyV2, List<PartitionHashRecordV2>> clusterHashes = new TreeMap<>(buildPartitionKeyComparator());

        for (ComputeJobResult res : results) {
            // Failed nodes are reported separately via the conflict result.
            if (res.getException() != null)
                continue;

            Map<PartitionKeyV2, PartitionHashRecordV2> nodeHashes = res.getData();

            for (Map.Entry<PartitionKeyV2, PartitionHashRecordV2> e : nodeHashes.entrySet()) {
                clusterHashes
                    .computeIfAbsent(e.getKey(), k -> new ArrayList<>())
                    .add(e.getValue());
            }
        }

        // Primary replicas first, then by consistent id.
        Comparator<PartitionHashRecordV2> recordComp = buildRecordComparator().reversed();

        Map<PartitionKeyV2, List<PartitionHashRecordV2>> partitions = new LinkedHashMap<>();

        int skippedRecords = 0;

        for (Map.Entry<PartitionKeyV2, List<PartitionHashRecordV2>> entry : clusterHashes.entrySet()) {
            if (needToAdd(entry.getValue())) {
                entry.getValue().sort(recordComp);

                partitions.put(entry.getKey(), entry.getValue());
            }
            else
                skippedRecords++;
        }

        return writeHashes(partitions, delegate.reduce(results), skippedRecords);
    }

    /** {@inheritDoc} */
    @Override public ComputeJobResultPolicy result(
        ComputeJobResult res,
        List<ComputeJobResult> rcvd
    ) throws IgniteException {
        return delegate.result(res, rcvd);
    }

    /**
     * Checking conditions for adding given record to result.
     *
     * @param records records to check.
     * @return {@code true} if this records should be add to result and {@code false} otherwise.
     */
    private boolean needToAdd(List<PartitionHashRecordV2> records) {
        // Keep everything unless the user asked to skip zero-sized, hash-consistent partitions.
        if (records.isEmpty() || (taskArg != null && !taskArg.skipZeros()))
            return true;

        PartitionHashRecordV2 record = records.get(0);

        if (record.size() != 0)
            return true;

        int firstHash = record.partitionHash();

        // Skip only when every copy is empty and hashes to the same value.
        for (int i = 1; i < records.size(); i++) {
            record = records.get(i);

            if (record.partitionHash() != firstHash || record.size() != 0)
                return true;
        }

        return false;
    }

    /**
     * @param partitions Dump result.
     * @param conflictRes Conflict results.
     * @param skippedRecords Number of empty partitions.
     * @return Path where results are written.
     * @throws IgniteException If failed to write the file.
     */
    private String writeHashes(
        Map<PartitionKeyV2, List<PartitionHashRecordV2>> partitions,
        IdleVerifyResultV2 conflictRes,
        int skippedRecords
    ) throws IgniteException {
        String wd = ignite.configuration().getWorkDirectory();

        // Fall back to /tmp when no work directory is configured.
        File workDir = wd == null ? new File("/tmp") : new File(wd);

        File out = new File(workDir, IDLE_DUMP_FILE_PREFIX + LocalDateTime.now().format(TIME_FORMATTER) + ".txt");

        if (ignite.log().isInfoEnabled())
            ignite.log().info("IdleVerifyDumpTask will write output to " + out.getAbsolutePath());

        try (PrintWriter writer = new PrintWriter(new FileWriter(out))) {
            writeResult(partitions, conflictRes, skippedRecords, writer);

            writer.flush();

            if (ignite.log().isInfoEnabled())
                ignite.log().info("IdleVerifyDumpTask successfully written dump to '" + out.getAbsolutePath() + "'");
        }
        catch (IOException | IgniteException e) {
            ignite.log().error("Failed to write dump file: " + out.getAbsolutePath(), e);

            throw new IgniteException(e);
        }

        return out.getAbsolutePath();
    }

    /** */
    private void writeResult(
        Map<PartitionKeyV2, List<PartitionHashRecordV2>> partitions,
        IdleVerifyResultV2 conflictRes,
        int skippedRecords,
        PrintWriter writer
    ) {
        Map<ClusterNode, Exception> exceptions = conflictRes.exceptions();

        if (!F.isEmpty(exceptions)) {
            // Distinguish the "no caches matched the filter" failure from real node errors.
            boolean noMatchingCaches = false;

            for (Exception e : exceptions.values()) {
                if (e instanceof NoMatchingCachesException) {
                    noMatchingCaches = true;

                    break;
                }
            }

            int size = exceptions.size();

            writer.write("idle_verify failed on " + size + " node" + (size == 1 ? "" : "s") + ".\n");

            if (noMatchingCaches)
                writer.write("There are no caches matching given filter options.");
        }

        if (!partitions.isEmpty())
            writer.write("idle_verify check has finished, found " + partitions.size() + " partitions\n");

        logParsedArgs(taskArg, writer::write);

        if (skippedRecords > 0)
            writer.write(skippedRecords + " partitions was skipped\n");

        if (!F.isEmpty(partitions)) {
            writer.write("Cluster partitions:\n");

            for (Map.Entry<PartitionKeyV2, List<PartitionHashRecordV2>> entry : partitions.entrySet()) {
                writer.write("Partition: " + entry.getKey() + "\n");

                writer.write("Partition instances: " + entry.getValue() + "\n");
            }

            writer.write("\n\n-----------------------------------\n\n");

            conflictRes.print(writer::write);
        }
    }

    /**
     * @return Comparator for {@link PartitionHashRecordV2}.
     */
    private Comparator<PartitionHashRecordV2> buildRecordComparator() {
        return (o1, o2) -> {
            int compare = Boolean.compare(o1.isPrimary(), o2.isPrimary());

            if (compare != 0)
                return compare;

            return o1.consistentId().toString().compareTo(o2.consistentId().toString());
        };
    }

    /**
     * @return Comparator for {@link PartitionKeyV2}.
     */
    private Comparator<PartitionKeyV2> buildPartitionKeyComparator() {
        return (o1, o2) -> {
            int compare = Integer.compare(o1.groupId(), o2.groupId());

            if (compare != 0)
                return compare;

            return Integer.compare(o1.partitionId(), o2.partitionId());
        };
    }

    /**
     * Passes idle_verify parsed arguments to given log consumer.
     *
     * @param args idle_verify arguments.
     * @param logConsumer Logger.
     */
    public static void logParsedArgs(VisorIdleVerifyTaskArg args, Consumer<String> logConsumer) {
        SB options = new SB("idle_verify task was executed with the following args: ");

        options
            .a("caches=[")
            .a(args.caches() == null ? "" : String.join(", ", args.caches()))
            .a("], excluded=[")
            .a(args.excludeCaches() == null ? "" : String.join(", ", args.excludeCaches()))
            .a("]")
            .a(", cacheFilter=[")
            .a(args.cacheFilterEnum().toString())
            .a("]\n");

        logConsumer.accept(options.toString());
    }
}
/*
 * Copyright 2009-2013 by The Regents of the University of California
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * you may obtain a copy of the License from
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package edu.uci.ics.hyracks.storage.am.lsm.invertedindex.util;

import java.io.File;
import java.util.List;

import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
import edu.uci.ics.hyracks.api.io.FileReference;
import edu.uci.ics.hyracks.storage.am.bloomfilter.impls.BloomFilterFactory;
import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeException;
import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.util.BTreeUtils;
import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManagerFactory;
import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTracker;
import edu.uci.ics.hyracks.storage.am.lsm.common.api.IVirtualBufferCache;
import edu.uci.ics.hyracks.storage.am.lsm.common.frames.LSMComponentFilterFrameFactory;
import edu.uci.ics.hyracks.storage.am.lsm.common.impls.BTreeFactory;
import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMComponentFilterFactory;
import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMComponentFilterManager;
import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedListBuilder;
import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedListBuilderFactory;
import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.impls.LSMInvertedIndex;
import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.impls.LSMInvertedIndexFileManager;
import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.impls.PartitionedLSMInvertedIndex;
import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.inmemory.InMemoryInvertedIndex;
import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.inmemory.PartitionedInMemoryInvertedIndex;
import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk.FixedSizeElementInvertedListBuilder;
import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk.FixedSizeElementInvertedListBuilderFactory;
import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk.OnDiskInvertedIndex;
import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk.OnDiskInvertedIndexFactory;
import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk.PartitionedOnDiskInvertedIndex;
import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.ondisk.PartitionedOnDiskInvertedIndexFactory;
import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizerFactory;
import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;

/**
 * Static factory methods for constructing in-memory, on-disk, and LSM variants
 * of the inverted index, wiring together buffer caches, frame factories,
 * comparators, and file references.
 */
public class InvertedIndexUtils {

    /**
     * Creates a mutable in-memory inverted index backed by a BTree.
     *
     * @throws BTreeException if the underlying BTree cannot be created
     */
    public static InMemoryInvertedIndex createInMemoryBTreeInvertedindex(IBufferCache memBufferCache,
            IFreePageManager virtualFreePageManager, ITypeTraits[] invListTypeTraits,
            IBinaryComparatorFactory[] invListCmpFactories, ITypeTraits[] tokenTypeTraits,
            IBinaryComparatorFactory[] tokenCmpFactories, IBinaryTokenizerFactory tokenizerFactory,
            FileReference btreeFileRef) throws BTreeException {
        return new InMemoryInvertedIndex(memBufferCache, virtualFreePageManager, invListTypeTraits,
                invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory, btreeFileRef);
    }

    /**
     * Creates the partitioned (length-partitioned) variant of the in-memory
     * BTree-backed inverted index.
     *
     * @throws BTreeException if the underlying BTree cannot be created
     */
    public static InMemoryInvertedIndex createPartitionedInMemoryBTreeInvertedindex(IBufferCache memBufferCache,
            IFreePageManager virtualFreePageManager, ITypeTraits[] invListTypeTraits,
            IBinaryComparatorFactory[] invListCmpFactories, ITypeTraits[] tokenTypeTraits,
            IBinaryComparatorFactory[] tokenCmpFactories, IBinaryTokenizerFactory tokenizerFactory,
            FileReference btreeFileRef) throws BTreeException {
        return new PartitionedInMemoryInvertedIndex(memBufferCache, virtualFreePageManager, invListTypeTraits,
                invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory, btreeFileRef);
    }

    /**
     * Creates an immutable on-disk inverted index; its token BTree file is
     * derived from the inverted-lists file via {@link #getBTreeFile(FileReference)}.
     *
     * @throws IndexException if index creation fails
     */
    public static OnDiskInvertedIndex createOnDiskInvertedIndex(IBufferCache bufferCache,
            IFileMapProvider fileMapProvider, ITypeTraits[] invListTypeTraits,
            IBinaryComparatorFactory[] invListCmpFactories, ITypeTraits[] tokenTypeTraits,
            IBinaryComparatorFactory[] tokenCmpFactories, FileReference invListsFile) throws IndexException {
        IInvertedListBuilder builder = new FixedSizeElementInvertedListBuilder(invListTypeTraits);
        FileReference btreeFile = getBTreeFile(invListsFile);
        return new OnDiskInvertedIndex(bufferCache, fileMapProvider, builder, invListTypeTraits, invListCmpFactories,
                tokenTypeTraits, tokenCmpFactories, btreeFile, invListsFile);
    }

    /**
     * Creates the partitioned variant of the on-disk inverted index.
     *
     * @throws IndexException if index creation fails
     */
    public static PartitionedOnDiskInvertedIndex createPartitionedOnDiskInvertedIndex(IBufferCache bufferCache,
            IFileMapProvider fileMapProvider, ITypeTraits[] invListTypeTraits,
            IBinaryComparatorFactory[] invListCmpFactories, ITypeTraits[] tokenTypeTraits,
            IBinaryComparatorFactory[] tokenCmpFactories, FileReference invListsFile) throws IndexException {
        IInvertedListBuilder builder = new FixedSizeElementInvertedListBuilder(invListTypeTraits);
        FileReference btreeFile = getBTreeFile(invListsFile);
        return new PartitionedOnDiskInvertedIndex(bufferCache, fileMapProvider, builder, invListTypeTraits,
                invListCmpFactories, tokenTypeTraits, tokenCmpFactories, btreeFile, invListsFile);
    }

    /** Derives the companion token-BTree file path by appending "_btree" to the inverted-lists file path. */
    public static FileReference getBTreeFile(FileReference invListsFile) {
        return new FileReference(new File(invListsFile.getFile().getPath() + "_btree"));
    }

    /**
     * Builds a BTree factory used for the deleted-keys BTrees of LSM inverted
     * index components, keyed and compared by the inverted-list fields.
     *
     * @throws BTreeException if the frame factories cannot be created
     */
    public static BTreeFactory createDeletedKeysBTreeFactory(IFileMapProvider diskFileMapProvider,
            ITypeTraits[] invListTypeTraits, IBinaryComparatorFactory[] invListCmpFactories,
            IBufferCache diskBufferCache) throws BTreeException {
        TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(invListTypeTraits);
        ITreeIndexFrameFactory leafFrameFactory = BTreeUtils.getLeafFrameFactory(tupleWriterFactory,
                BTreeLeafFrameType.REGULAR_NSM);
        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
        LinkedListFreePageManagerFactory freePageManagerFactory = new LinkedListFreePageManagerFactory(
                diskBufferCache, metaFrameFactory);
        BTreeFactory deletedKeysBTreeFactory = new BTreeFactory(diskBufferCache, diskFileMapProvider,
                freePageManagerFactory, interiorFrameFactory, leafFrameFactory, invListCmpFactories,
                invListCmpFactories.length);
        return deletedKeysBTreeFactory;
    }

    // NOTE(review): the method below is truncated at the end of this view;
    // its signature and body continue outside this chunk and are kept verbatim.
    public static LSMInvertedIndex createLSMInvertedIndex(List<IVirtualBufferCache> virtualBufferCaches,
            IFileMapProvider diskFileMapProvider, ITypeTraits[] invListTypeTraits,
            IBinaryComparatorFactory[] invListCmpFactories, ITypeTraits[] tokenTypeTraits,
            IBinaryComparatorFactory[] tokenCmpFactories, IBinaryTokenizerFactory
tokenizerFactory, IBufferCache diskBufferCache, String onDiskDir, double bloomFilterFalsePositiveRate, ILSMMergePolicy mergePolicy, ILSMOperationTracker opTracker, ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallback ioOpCallback, int[] invertedIndexFields, ITypeTraits[] filterTypeTraits, IBinaryComparatorFactory[] filterCmpFactories, int[] filterFields, int[] filterFieldsForNonBulkLoadOps, int[] invertedIndexFieldsForNonBulkLoadOps) throws IndexException { BTreeFactory deletedKeysBTreeFactory = createDeletedKeysBTreeFactory(diskFileMapProvider, invListTypeTraits, invListCmpFactories, diskBufferCache); int[] bloomFilterKeyFields = new int[invListCmpFactories.length]; for (int i = 0; i < invListCmpFactories.length; i++) { bloomFilterKeyFields[i] = i; } BloomFilterFactory bloomFilterFactory = new BloomFilterFactory(diskBufferCache, diskFileMapProvider, bloomFilterKeyFields); FileReference onDiskDirFileRef = new FileReference(new File(onDiskDir)); LSMInvertedIndexFileManager fileManager = new LSMInvertedIndexFileManager(diskFileMapProvider, onDiskDirFileRef, deletedKeysBTreeFactory); IInvertedListBuilderFactory invListBuilderFactory = new FixedSizeElementInvertedListBuilderFactory( invListTypeTraits); OnDiskInvertedIndexFactory invIndexFactory = new OnDiskInvertedIndexFactory(diskBufferCache, diskFileMapProvider, invListBuilderFactory, invListTypeTraits, invListCmpFactories, tokenTypeTraits, tokenCmpFactories, fileManager); LSMComponentFilterFactory filterFactory = null; LSMComponentFilterFrameFactory filterFrameFactory = null; LSMComponentFilterManager filterManager = null; if (filterCmpFactories != null) { TypeAwareTupleWriterFactory filterTupleWriterFactory = new TypeAwareTupleWriterFactory(filterTypeTraits); filterFactory = new LSMComponentFilterFactory(filterTupleWriterFactory, filterCmpFactories); filterFrameFactory = new LSMComponentFilterFrameFactory(filterTupleWriterFactory, diskBufferCache.getPageSize()); filterManager = new 
LSMComponentFilterManager(diskBufferCache, filterFrameFactory); } LSMInvertedIndex invIndex = new LSMInvertedIndex(virtualBufferCaches, invIndexFactory, deletedKeysBTreeFactory, bloomFilterFactory, filterFactory, filterFrameFactory, filterManager, bloomFilterFalsePositiveRate, fileManager, diskFileMapProvider, invListTypeTraits, invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory, mergePolicy, opTracker, ioScheduler, ioOpCallback, invertedIndexFields, filterFields, filterFieldsForNonBulkLoadOps, invertedIndexFieldsForNonBulkLoadOps); return invIndex; } public static PartitionedLSMInvertedIndex createPartitionedLSMInvertedIndex( List<IVirtualBufferCache> virtualBufferCaches, IFileMapProvider diskFileMapProvider, ITypeTraits[] invListTypeTraits, IBinaryComparatorFactory[] invListCmpFactories, ITypeTraits[] tokenTypeTraits, IBinaryComparatorFactory[] tokenCmpFactories, IBinaryTokenizerFactory tokenizerFactory, IBufferCache diskBufferCache, String onDiskDir, double bloomFilterFalsePositiveRate, ILSMMergePolicy mergePolicy, ILSMOperationTracker opTracker, ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallback ioOpCallback, int[] invertedIndexFields, ITypeTraits[] filterTypeTraits, IBinaryComparatorFactory[] filterCmpFactories, int[] filterFields, int[] filterFieldsForNonBulkLoadOps, int[] invertedIndexFieldsForNonBulkLoadOps) throws IndexException { BTreeFactory deletedKeysBTreeFactory = createDeletedKeysBTreeFactory(diskFileMapProvider, invListTypeTraits, invListCmpFactories, diskBufferCache); int[] bloomFilterKeyFields = new int[invListCmpFactories.length]; for (int i = 0; i < invListCmpFactories.length; i++) { bloomFilterKeyFields[i] = i; } BloomFilterFactory bloomFilterFactory = new BloomFilterFactory(diskBufferCache, diskFileMapProvider, bloomFilterKeyFields); FileReference onDiskDirFileRef = new FileReference(new File(onDiskDir)); LSMInvertedIndexFileManager fileManager = new LSMInvertedIndexFileManager(diskFileMapProvider, 
onDiskDirFileRef, deletedKeysBTreeFactory); IInvertedListBuilderFactory invListBuilderFactory = new FixedSizeElementInvertedListBuilderFactory( invListTypeTraits); PartitionedOnDiskInvertedIndexFactory invIndexFactory = new PartitionedOnDiskInvertedIndexFactory( diskBufferCache, diskFileMapProvider, invListBuilderFactory, invListTypeTraits, invListCmpFactories, tokenTypeTraits, tokenCmpFactories, fileManager); LSMComponentFilterFactory filterFactory = null; LSMComponentFilterFrameFactory filterFrameFactory = null; LSMComponentFilterManager filterManager = null; if (filterCmpFactories != null) { TypeAwareTupleWriterFactory filterTupleWriterFactory = new TypeAwareTupleWriterFactory(filterTypeTraits); filterFactory = new LSMComponentFilterFactory(filterTupleWriterFactory, filterCmpFactories); filterFrameFactory = new LSMComponentFilterFrameFactory(filterTupleWriterFactory, diskBufferCache.getPageSize()); filterManager = new LSMComponentFilterManager(diskBufferCache, filterFrameFactory); } PartitionedLSMInvertedIndex invIndex = new PartitionedLSMInvertedIndex(virtualBufferCaches, invIndexFactory, deletedKeysBTreeFactory, bloomFilterFactory, filterFactory, filterFrameFactory, filterManager, bloomFilterFalsePositiveRate, fileManager, diskFileMapProvider, invListTypeTraits, invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory, mergePolicy, opTracker, ioScheduler, ioOpCallback, invertedIndexFields, filterFields, filterFieldsForNonBulkLoadOps, invertedIndexFieldsForNonBulkLoadOps); return invIndex; } }
/*
 * IzPack - Copyright 2001-2013 Julien Ponge, All Rights Reserved.
 *
 * http://izpack.org/
 * http://izpack.codehaus.org/
 *
 * Copyright 2002 Jan Blok
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.izforge.izpack.panels.userinput;

import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Properties;
import java.util.Set;

import com.izforge.izpack.api.adaptator.IXMLElement;
import com.izforge.izpack.api.data.InstallData;
import com.izforge.izpack.api.data.Panel;
import com.izforge.izpack.api.factory.ObjectFactory;
import com.izforge.izpack.api.handler.Prompt;
import com.izforge.izpack.api.resource.Resources;
import com.izforge.izpack.api.rules.Condition;
import com.izforge.izpack.api.rules.RulesEngine;
import com.izforge.izpack.installer.console.AbstractConsolePanel;
import com.izforge.izpack.installer.console.ConsolePanel;
import com.izforge.izpack.installer.panel.PanelView;
import com.izforge.izpack.panels.userinput.console.ConsoleField;
import com.izforge.izpack.panels.userinput.console.ConsoleFieldFactory;
import com.izforge.izpack.panels.userinput.field.ElementReader;
import com.izforge.izpack.panels.userinput.field.Field;
import com.izforge.izpack.panels.userinput.field.FieldHelper;
import com.izforge.izpack.panels.userinput.field.UserInputPanelSpec;
import com.izforge.izpack.util.Console;
import com.izforge.izpack.util.PlatformModelMatcher;

/**
 * The user input panel console implementation.
 *
 * @author Mounir El Hajj
 */
public class UserInputConsolePanel extends AbstractConsolePanel
{
    // Panel-spec attribute names read from the XML specification.
    private static final String DISPLAY_HIDDEN = "displayHidden";
    private static final String DISPLAY_HIDDEN_CONDITION = "displayHiddenCondition";
    private static final String READONLY = "readonly";
    private static final String READONLY_CONDITION = "readonlyCondition";

    /**
     * The resources.
     */
    private final Resources resources;

    /**
     * The factory for creating field validators.
     */
    private final ObjectFactory factory;

    /**
     * The rules.
     */
    private final RulesEngine rules;

    /**
     * The platform-model matcher.
     */
    private final PlatformModelMatcher matcher;

    /**
     * The console.
     */
    private final Console console;

    /**
     * The prompt.
     */
    private final Prompt prompt;

    // The panel metadata this console panel renders.
    private final Panel panel;

    /**
     * The fields.
     */
    private List<ConsoleField> fields = new ArrayList<ConsoleField>();

    // Installation data captured at construction time; also used by
    // createInstallationRecord() and handlePanelValidationResult().
    private final InstallData installData;

    /**
     * Constructs an {@code UserInputConsolePanel}.
     *
     * Parses the panel specification: display-hidden and readonly flags
     * (plus their condition attributes), registers any global panel
     * condition with the rules engine, then builds the console fields.
     *
     * @param resources   the resources
     * @param factory     the object factory
     * @param rules       the rules
     * @param matcher     the platform-model matcher
     * @param console     the console
     * @param prompt      the prompt
     * @param panelView   the parent panel/view
     * @param installData the install data
     */
    public UserInputConsolePanel(Resources resources, ObjectFactory factory, RulesEngine rules,
                                 PlatformModelMatcher matcher, Console console, Prompt prompt,
                                 PanelView<ConsolePanel> panelView, InstallData installData)
    {
        super(panelView);
        this.installData = installData;
        this.resources = resources;
        this.factory = factory;
        this.rules = rules;
        this.matcher = matcher;
        this.console = console;
        this.prompt = prompt;

        UserInputPanelSpec model = new UserInputPanelSpec(resources, installData, factory, matcher);
        this.panel = getPanel();
        IXMLElement spec = model.getPanelSpec(panel);

        boolean isDisplayingHidden = false;
        try
        {
            // NOTE(review): Boolean.parseBoolean itself never throws; the
            // catch guards against failures from getAttribute (e.g. nulls).
            isDisplayingHidden = Boolean.parseBoolean(spec.getAttribute(DISPLAY_HIDDEN));
        }
        catch (Exception ignore)
        {
            isDisplayingHidden = false;
        }
        panel.setDisplayHidden(isDisplayingHidden);

        String condition = spec.getAttribute(DISPLAY_HIDDEN_CONDITION);
        if (condition != null && !condition.isEmpty())
        {
            panel.setDisplayHiddenCondition(condition);
        }

        // Prevent activating on certain global conditions
        ElementReader reader = new ElementReader(model.getConfig());
        Condition globalConstraint = reader.getComplexPanelCondition(spec, matcher, installData, rules);
        if (globalConstraint != null)
        {
            rules.addPanelCondition(panel, globalConstraint);
        }

        boolean readonly = false;
        try
        {
            readonly = Boolean.parseBoolean(spec.getAttribute(READONLY));
        }
        catch (Exception ignore)
        {
            readonly = false;
        }
        panel.setReadonly(readonly);

        condition = spec.getAttribute(READONLY_CONDITION);
        if (condition != null && !condition.isEmpty())
        {
            panel.setReadonlyCondition(condition);
        }

        collectInputs(installData);
    }

    /**
     * Copies each field's variable from {@code properties} into the
     * installation data, skipping fields without a variable and properties
     * that are absent. Always reports success.
     */
    @Override
    public boolean run(InstallData installData, Properties properties)
    {
        for (ConsoleField field : fields)
        {
            String name = field.getVariable();
            if (name != null)
            {
                String value = properties.getProperty(name);
                if (value != null)
                {
                    installData.setVariable(name, value);
                }
            }
        }
        return true;
    }

    /**
     * Writes a "name=" line (empty value) for every field variable, as a
     * template for unattended installations.
     */
    @Override
    public boolean generateProperties(InstallData installData, PrintWriter printWriter)
    {
        for (ConsoleField field : fields)
        {
            String name = field.getVariable();
            if (name != null)
            {
                printWriter.println(name + "=");
            }
        }
        return true;
    }

    /**
     * Runs the panel using the specified console.
     *
     * For each field: required fields whose condition holds are displayed
     * (readonly resolved from panel/field state); required fields that are
     * "display hidden" are shown readonly; all others are skipped. An
     * invalid field aborts the pass and offers a rerun.
     *
     * @param installData the installation data
     * @param console     the console
     * @return {@code true} if the panel ran successfully, otherwise {@code false}
     */
    @Override
    public boolean run(InstallData installData, Console console)
    {
        printHeadLine(installData, console);

        boolean result = true;
        if (fields != null && !fields.isEmpty())
        {
            boolean rerun = false;
            Set<String> variables = new HashSet<String>();
            for (ConsoleField field : fields)
            {
                Field fieldDefinition = field.getField();
                boolean readonly = false;
                boolean addToPanel = false;
                boolean required = FieldHelper.isRequired(fieldDefinition, installData, matcher);
                if (required && fieldDefinition.isConditionTrue())
                {
                    // Field is live: readonly comes from the panel flag, the
                    // panel's readonly condition, or the field's own setting.
                    readonly = fieldDefinition.isEffectiveReadonly(
                            panel.isReadonly()
                                    || (panel.getReadonlyCondition() != null
                                            && rules.isConditionTrue(panel.getReadonlyCondition())), rules);
                    addToPanel = true;
                }
                else if (required && (
                        fieldDefinition.isEffectiveDisplayHidden(
                                panel.isDisplayHidden()
                                        || (panel.getDisplayHiddenCondition() != null
                                                && rules.isConditionTrue(panel.getDisplayHiddenCondition())),
                                rules)))
                {
                    // Hidden-but-displayed fields are shown readonly.
                    readonly = true;
                    addToPanel = true;
                }
                else
                {
                    readonly = true;
                    addToPanel = false;
                }
                if (addToPanel)
                {
                    field.setReadonly(readonly);
                    if (!field.display())
                    {
                        // field is invalid
                        rerun = true;
                        break;
                    }
                    String var = fieldDefinition.getVariable();
                    if (var != null)
                    {
                        variables.add(var);
                    }
                }
            }
            panel.setAffectedVariableNames(variables);
            if (rerun)
            {
                // prompt to rerun the panel or quit
                result = promptRerunPanel(installData, console);
            }
            else
            {
                result = promptEndPanel(installData, console);
            }
        }
        return result;
    }

    /**
     * (Re)builds the console fields from the panel specification.
     * Clears any previously collected fields first.
     */
    private void collectInputs(InstallData installData)
    {
        UserInputPanelSpec model = new UserInputPanelSpec(resources, installData, factory, matcher);
        Panel panel = getPanel();
        IXMLElement spec = model.getPanelSpec(panel);
        fields.clear();
        // NOTE(review): this local shadows the ObjectFactory field 'factory'
        // (and the local 'panel' shadows the panel field) — consider renaming.
        ConsoleFieldFactory factory = new ConsoleFieldFactory(console, prompt);
        for (Field fieldDefinition : model.createFields(spec))
        {
            ConsoleField consoleField = factory.create(fieldDefinition, model, spec);
            fields.add(consoleField);
        }
    }

    /**
     * Creates an installation record for unattended installations on {@link UserInputPanel},
     * created during GUI installations.
     */
    @Override
    public void createInstallationRecord(IXMLElement rootElement)
    {
        new UserInputPanelAutomationHelper(fields).createInstallationRecord(installData, rootElement);
    }

    /**
     * On validation failure, offers to rerun the panel; success passes
     * straight through.
     */
    @Override
    public boolean handlePanelValidationResult(boolean valid)
    {
        if (!valid)
        {
            return promptRerunPanel(installData, console);
        }
        return true;
    }
}
package io.github.xwz.base.views;

import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.Point;
import android.os.Handler;
import android.support.v17.leanback.widget.BaseCardView;
import android.support.v17.leanback.widget.ImageCardView;
import android.util.Log;
import android.view.KeyEvent;
import android.view.MotionEvent;
import android.view.Surface;
import android.view.SurfaceView;
import android.view.View;
import android.view.accessibility.CaptioningManager;
import android.widget.MediaController;
import android.widget.TextView;

import com.google.android.exoplayer.AspectRatioFrameLayout;
import com.google.android.exoplayer.ExoPlayer;
import com.google.android.exoplayer.text.CaptionStyleCompat;
import com.google.android.exoplayer.text.Cue;
import com.google.android.exoplayer.text.SubtitleLayout;
import com.google.android.exoplayer.util.DebugTextViewHelper;
import com.google.android.exoplayer.util.Util;

import java.util.Arrays;
import java.util.List;

import io.github.xwz.base.R;
import io.github.xwz.base.api.EpisodeBaseModel;
import io.github.xwz.base.player.VideoPlayer;

/**
 * View layer for the video player screen. Wires up the video surface,
 * shutter, subtitle layout, status/debug text views, episode detail labels
 * and the "next episode" card, and routes touch/key events to the playback
 * controls.
 *
 * Fixes vs. previous revision:
 *  - {@link #stopDebugView()} no longer throws a NullPointerException when
 *    called without a preceding {@link #startDebugView(VideoPlayer)} or twice.
 *  - {@link #startDebugView(VideoPlayer)} stops a previously started helper
 *    before replacing it, so repeated calls do not leak a running helper.
 */
public class VideoPlayerView {
    private static final String TAG = "VideoPlayerView";

    private final View shutterView;
    private final AspectRatioFrameLayout videoFrame;
    private final SurfaceView surfaceView;
    private final TextView debugTextView;
    private final TextView playerStateTextView;
    private final View debugView;
    private final TextView statusTextView;
    private final SubtitleLayout subtitleLayout;
    private final EpisodeCardView nextEpisode;
    private final View nextEpisodeCard;
    private final View episodeDetails;
    private final TextView episodeTitle;
    private final TextView seriesTitle;
    private final TextView duration;

    // Non-null only between startDebugView() and stopDebugView().
    private DebugTextViewHelper debugViewHelper;
    private final Context mContext;
    private final PlaybackControls mediaController;
    private MediaController.MediaPlayerControl mPlayer;

    // Key codes that toggle play/pause directly.
    private static final List<Integer> PLAY_PAUSE_EVENTS = Arrays.asList(
            KeyEvent.KEYCODE_MEDIA_PLAY_PAUSE,
            KeyEvent.KEYCODE_MEDIA_PLAY,
            KeyEvent.KEYCODE_MEDIA_STOP,
            KeyEvent.KEYCODE_DPAD_CENTER
    );

    // Key codes forwarded to the playback controls.
    private static final List<Integer> PLAYER_EVENTS = Arrays.asList(
            KeyEvent.KEYCODE_VOLUME_DOWN,
            KeyEvent.KEYCODE_VOLUME_UP,
            KeyEvent.KEYCODE_VOLUME_MUTE,
            KeyEvent.KEYCODE_CAMERA
    );

    /**
     * Binds all child views from {@code root}, configures the "next episode"
     * card, hides the debug overlay, and installs touch/key listeners.
     *
     * @param context    context used for resources and system services
     * @param controller the playback controls this view drives
     * @param root       the inflated player layout
     */
    public VideoPlayerView(Context context, PlaybackControls controller, View root) {
        mContext = context;
        mediaController = controller;
        shutterView = root.findViewById(R.id.shutter);
        videoFrame = (AspectRatioFrameLayout) root.findViewById(R.id.video_frame);
        surfaceView = (SurfaceView) root.findViewById(R.id.surface_view);
        debugTextView = (TextView) root.findViewById(R.id.debug_text_view);
        debugView = root.findViewById(R.id.debug_view);
        statusTextView = (TextView) root.findViewById(R.id.status);
        playerStateTextView = (TextView) root.findViewById(R.id.player_state_view);
        subtitleLayout = (SubtitleLayout) root.findViewById(R.id.subtitles);
        nextEpisodeCard = root.findViewById(R.id.next_episode_card);
        episodeDetails = root.findViewById(R.id.episode_details);
        episodeTitle = (TextView) root.findViewById(R.id.episode_title);
        seriesTitle = (TextView) root.findViewById(R.id.series_title);
        duration = (TextView) root.findViewById(R.id.duration);

        ImageCardView card = (ImageCardView) root.findViewById(R.id.next_episode);
        card.setFocusable(true);
        card.setFocusableInTouchMode(true);
        card.setInfoVisibility(View.VISIBLE);
        card.setExtraVisibility(View.VISIBLE);
        card.setInfoAreaBackgroundColor(context.getResources().getColor(R.color.black_900));
        Point size = new Point(context.getResources().getDimensionPixelSize(R.dimen.card_width),
                context.getResources().getDimensionPixelSize(R.dimen.card_height));
        nextEpisode = new EpisodeCardView(context, card, size, false);
        nextEpisode.getImageCardView().setCardType(BaseCardView.CARD_TYPE_INFO_OVER);

        debugView.setVisibility(View.GONE);

        root.setOnTouchListener(new View.OnTouchListener() {
            @Override
            public boolean onTouch(View view, MotionEvent motionEvent) {
                return handleTouchEvents(view, motionEvent);
            }
        });
        root.setOnKeyListener(new View.OnKeyListener() {
            @Override
            public boolean onKey(View v, int keyCode, KeyEvent event) {
                return handleKeyEvents(v, keyCode, event);
            }
        });
    }

    /**
     * Handles key events: play/pause keys toggle playback on a fresh
     * key-down; player keys (volume, etc.) are forwarded to the controls.
     */
    private boolean handleKeyEvents(View v, int keyCode, KeyEvent event) {
        // Only react once per physical press, not on auto-repeat.
        final boolean uniqueDown = event.getRepeatCount() == 0
                && event.getAction() == KeyEvent.ACTION_DOWN;
        if (uniqueDown) {
            Log.d(TAG, "keyCode:" + keyCode + ", event:" + event);
        }
        if (PLAY_PAUSE_EVENTS.contains(keyCode) && uniqueDown) {
            doPauseResume();
            return true;
        } else if (PLAYER_EVENTS.contains(keyCode)) {
            return mediaController.dispatchKeyEvent(event, 0);
        }
        return false;
    }

    /**
     * Toggles the controls overlay on touch-down; forwards a click on
     * touch-up for accessibility.
     */
    private boolean handleTouchEvents(View view, MotionEvent motionEvent) {
        if (motionEvent.getAction() == MotionEvent.ACTION_DOWN) {
            toggleControlsVisibility();
        } else if (motionEvent.getAction() == MotionEvent.ACTION_UP) {
            view.performClick();
        }
        return true;
    }

    /**
     * Pauses a playing player (and shows the controls) or resumes a paused
     * one (and schedules the controls to hide). No-op without a player.
     */
    private void doPauseResume() {
        Log.d(TAG, "doPauseResume:" + mPlayer);
        if (mPlayer != null) {
            if (mPlayer.isPlaying()) {
                mPlayer.pause();
                showControls();
            } else {
                mPlayer.start();
                hideControlsDelayed();
            }
        }
    }

    /**
     * Populates the episode detail labels, shows the shutter and the
     * episode details overlay.
     */
    public void setEpisode(EpisodeBaseModel episode) {
        episodeTitle.setText(episode.getTitle());
        seriesTitle.setText(episode.getSeriesTitle());
        duration.setText(episode.getDurationText());
        showShutter(true);
        showEpisodeDetails();
    }

    /** Sets the player the view controls; may be null to detach. */
    public void setMediaPlayer(MediaController.MediaPlayerControl player) {
        mPlayer = player;
    }

    /** Hides the "next episode" card. */
    public void resetView() {
        nextEpisodeCard.setVisibility(View.GONE);
    }

    /** Applies the video's aspect ratio to the frame. */
    public void setVideoFrameAspectRatio(float ratio) {
        videoFrame.setAspectRatio(ratio);
    }

    /** Shows or hides the shutter that covers the video surface. */
    public void showShutter(boolean show) {
        shutterView.setVisibility(show ? View.VISIBLE : View.GONE);
    }

    /** Forwards subtitle cues to the subtitle layout. */
    public void setCues(List<Cue> cues) {
        subtitleLayout.setCues(cues);
    }

    /**
     * Starts the debug overlay for the given player. Any previously running
     * helper is stopped first so repeated calls do not leak a helper that
     * keeps writing to the debug text view.
     */
    public void startDebugView(VideoPlayer player) {
        if (debugViewHelper != null) {
            debugViewHelper.stop();
        }
        debugViewHelper = new DebugTextViewHelper(player, debugTextView);
        debugViewHelper.start();
    }

    /**
     * Stops the debug overlay. Safe to call when no overlay is running or
     * after a previous stop (previously this dereferenced a null helper).
     */
    public void stopDebugView() {
        if (debugViewHelper != null) {
            debugViewHelper.stop();
            debugViewHelper = null;
        }
    }

    /** Toggles the playback controls overlay. */
    private void toggleControlsVisibility() {
        if (mediaController.isShowing()) {
            hideControls();
        } else {
            showControls();
        }
    }

    /** Shows the playback controls and the episode details overlay. */
    public void showControls() {
        Log.d(TAG, "Show controls");
        mediaController.show(0);
        showEpisodeDetails();
    }

    private void showEpisodeDetails() {
        episodeDetails.setVisibility(View.VISIBLE);
    }

    /** Hides the playback controls and the episode details overlay. */
    private void hideControls() {
        Log.d(TAG, "Hide controls");
        mediaController.hide();
        episodeDetails.setVisibility(View.GONE);
    }

    /**
     * Applies the user's caption style/scale (API 19+) or the ExoPlayer
     * defaults to the subtitle layout.
     */
    public void configureSubtitleView() {
        CaptionStyleCompat captionStyle;
        float captionFontScale;
        if (Util.SDK_INT >= 19) {
            captionStyle = getUserCaptionStyleV19();
            captionFontScale = getUserCaptionFontScaleV19();
        } else {
            captionStyle = CaptionStyleCompat.DEFAULT;
            captionFontScale = 1.0f;
        }
        subtitleLayout.setStyle(captionStyle);
        subtitleLayout.setFontScale(captionFontScale);
    }

    /** Returns the surface the player should render into. */
    public Surface getVideoSurface() {
        return surfaceView.getHolder().getSurface();
    }

    /**
     * Reflects player state in the UI: status text while buffering/preparing,
     * shutter + controls handling when ready/ended, and a debug state line.
     */
    public void onStateChanged(boolean playWhenReady, int playbackState) {
        if (playbackState == ExoPlayer.STATE_ENDED) {
            showControls();
        }
        String text = "playWhenReady=" + playWhenReady + ", playbackState=";
        switch (playbackState) {
            case ExoPlayer.STATE_BUFFERING:
                text += "buffering";
                showStatusText(R.string.buffering);
                break;
            case ExoPlayer.STATE_ENDED:
                text += "ended";
                break;
            case ExoPlayer.STATE_IDLE:
                text += "idle";
                break;
            case ExoPlayer.STATE_PREPARING:
                text += "preparing";
                showStatusText(R.string.loading);
                break;
            case ExoPlayer.STATE_READY:
                text += "ready";
                hideStatusText();
                showShutter(false);
                if (mPlayer != null && mPlayer.isPlaying()) {
                    hideControlsDelayed();
                }
                break;
            default:
                text += "unknown";
                break;
        }
        playerStateTextView.setText(text);
    }

    /**
     * Hides the controls after a 3 s delay.
     * NOTE(review): each call posts an independent delayed runnable; a later
     * showControls() will not cancel an already-posted hide.
     */
    private void hideControlsDelayed() {
        final Handler handler = new Handler();
        Log.d(TAG, "hideControlsDelayed");
        handler.postDelayed(new Runnable() {
            @Override
            public void run() {
                hideControls();
            }
        }, 3000);
    }

    /** Populates and shows the "next episode" suggestion card. */
    public void suggestNextEpisode(EpisodeBaseModel episode) {
        nextEpisode.setEpisode(episode);
        nextEpisodeCard.setVisibility(View.VISIBLE);
    }

    private void showStatusText(int resId) {
        showStatusText(mContext.getResources().getString(resId));
    }

    private void hideStatusText() {
        statusTextView.setVisibility(View.GONE);
    }

    /** Shows a status message over the video (shutter is dropped first). */
    public void showStatusText(String text) {
        showShutter(false);
        statusTextView.setVisibility(View.VISIBLE);
        statusTextView.setText(text);
    }

    @TargetApi(19)
    private float getUserCaptionFontScaleV19() {
        CaptioningManager captioningManager =
                (CaptioningManager) mContext.getSystemService(Context.CAPTIONING_SERVICE);
        return captioningManager.getFontScale();
    }

    @TargetApi(19)
    private CaptionStyleCompat getUserCaptionStyleV19() {
        CaptioningManager captioningManager =
                (CaptioningManager) mContext.getSystemService(Context.CAPTIONING_SERVICE);
        return CaptionStyleCompat.createFromCaptionStyle(captioningManager.getUserStyle());
    }
}
/* * Copyright (c) 2007-2018 Siemens AG * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
 *
 */
package com.siemens.ct.exi.main.datatype;

import java.io.IOException;

import com.siemens.ct.exi.core.io.channel.DecoderChannel;
import com.siemens.ct.exi.core.io.channel.EncoderChannel;
import com.siemens.ct.exi.core.values.IntegerValue;
import com.siemens.ct.exi.core.values.IntegerValueType;
import com.siemens.ct.exi.core.values.ValueType;

/**
 * Round-trip tests for EXI integer encoding: each test encodes a value on
 * both a bit-aligned and a byte-aligned channel and asserts the decoded
 * {@link IntegerValue} matches.
 */
public class IntegerTest extends AbstractTestCase {

    public IntegerTest(String testName) {
        super(testName);
    }

    /** Round-trips 0; the decoded value must report the INT value type. */
    public void testInteger0() throws IOException {
        // Bit
        EncoderChannel bitEC = getBitEncoder();
        bitEC.encodeInteger(0);
        bitEC.flush();
        IntegerValue dec1 = getBitDecoder().decodeIntegerValue();
        assertTrue(dec1.getIntegerValueType() == IntegerValueType.INT);
        assertTrue(dec1.intValue() == 0);
        // Byte
        getByteEncoder().encodeInteger(0);
        IntegerValue dec2 = getByteDecoder().decodeIntegerValue();
        assertTrue(dec2.getIntegerValueType() == IntegerValueType.INT);
        assertTrue(dec2.intValue() == 0);
    }

    /** Round-trips 1 on both channel types. */
    public void testInteger1() throws IOException {
        // Bit
        EncoderChannel bitEC = getBitEncoder();
        bitEC.encodeInteger(1);
        bitEC.flush();
        IntegerValue dec1 = getBitDecoder().decodeIntegerValue();
        assertTrue(dec1.getIntegerValueType() == IntegerValueType.INT);
        assertTrue(dec1.intValue() == 1);
        // Byte
        getByteEncoder().encodeInteger(1);
        IntegerValue dec2 = getByteDecoder().decodeIntegerValue();
        assertTrue(dec2.getIntegerValueType() == IntegerValueType.INT);
        assertTrue(dec2.intValue() == 1);
    }

    /**
     * Round-trips Integer.MIN_VALUE. Unlike the tests above, this one checks
     * the generic ValueType.INTEGER rather than IntegerValueType.INT.
     */
    public void testIntegerMaxNegativeInteger() throws IOException {
        // Bit
        EncoderChannel bitEC = getBitEncoder();
        bitEC.encodeInteger(Integer.MIN_VALUE);
        bitEC.flush();
        IntegerValue dec1 = getBitDecoder().decodeIntegerValue();
        assertTrue(dec1.getValueType() == ValueType.INTEGER);
        assertTrue(dec1.intValue() == Integer.MIN_VALUE);
        // Byte
        getByteEncoder().encodeInteger(Integer.MIN_VALUE);
        IntegerValue dec2 = getByteDecoder().decodeIntegerValue();
        assertTrue(dec2.getValueType() == ValueType.INTEGER);
        assertTrue(dec2.intValue() == Integer.MIN_VALUE);
    }
public void testInteger0S() throws IOException { String s = "0"; int xmlInteger = Integer.parseInt(s); // Bit EncoderChannel bitEC = getBitEncoder(); bitEC.encodeInteger(xmlInteger); bitEC.flush(); assertTrue(s.equals(getBitDecoder().decodeIntegerValue().toString())); // Byte getByteEncoder().encodeInteger(xmlInteger); assertTrue(s.equals(getByteDecoder().decodeIntegerValue().toString())); } public void testInteger1S() throws IOException { String s = "1"; int xmlInteger = Integer.parseInt(s); // Bit EncoderChannel bitEC = getBitEncoder(); bitEC.encodeInteger(xmlInteger); bitEC.flush(); assertTrue(s.equals(getBitDecoder().decodeIntegerValue().toString())); // Byte getByteEncoder().encodeInteger(xmlInteger); assertTrue(s.equals(getByteDecoder().decodeIntegerValue().toString())); } public void testIntegerM128S() throws IOException { String s = "-128"; int xmlInteger = Integer.parseInt(s); // Bit EncoderChannel bitEC = getBitEncoder(); bitEC.encodeInteger(xmlInteger); bitEC.flush(); assertTrue(s.equals(getBitDecoder().decodeIntegerValue().toString())); // Byte getByteEncoder().encodeInteger(xmlInteger); assertTrue(s.equals(getByteDecoder().decodeIntegerValue().toString())); } // public void testIntegerSpace35S() throws IOException // { // String s = "35 "; // String sDec = "35"; // // XMLInteger xmlInteger = XMLInteger.newInstance(); // xmlInteger.parse ( s ); // // // Bit // EncoderChannel bitEC = getBitEncoder(); // bitEC.encodeInteger( xmlInteger ); // bitEC.flush(); // assertTrue(getBitDecoder().decodeIntegerAsString().equals( sDec ) ); // // Byte // getByteEncoder().encodeInteger( xmlInteger ); // assertTrue(getByteDecoder().decodeIntegerAsString().equals( sDec ) ); // } public void testIntegerLong1() throws IOException { String s = "12131321321"; // long xmlInteger = Long.parseLong(s); IntegerValue iv = IntegerValue.parse(s); // Bit EncoderChannel bitEC = getBitEncoder(); bitEC.encodeIntegerValue(iv); bitEC.flush(); 
assertTrue(s.equals(getBitDecoder().decodeIntegerValue().toString())); // Byte getByteEncoder().encodeIntegerValue(iv); assertTrue(s.equals(getByteDecoder().decodeIntegerValue().toString())); } public void testIntegerBig2() throws IOException { String s = "2137000000000000000000000000001"; // BigInteger xmlInteger = new BigInteger(s); IntegerValue xmlInteger = IntegerValue.parse(s); // Bit EncoderChannel bitEC = getBitEncoder(); bitEC.encodeIntegerValue(xmlInteger); bitEC.flush(); assertTrue(s.equals(getBitDecoder().decodeIntegerValue().toString())); // Byte getByteEncoder().encodeIntegerValue(xmlInteger); assertTrue(s.equals(getByteDecoder().decodeIntegerValue().toString())); } public void testIntegerBig1() throws IOException { String s = "12678967543233"; // BigInteger xmlInteger = new BigInteger(s); IntegerValue xmlInteger = IntegerValue.parse(s); // Bit EncoderChannel bitEC = getBitEncoder(); bitEC.encodeIntegerValue(xmlInteger); bitEC.flush(); assertTrue(s.equals(getBitDecoder().decodeIntegerValue().toString())); // Byte getByteEncoder().encodeIntegerValue(xmlInteger); assertTrue(s.equals(getByteDecoder().decodeIntegerValue().toString())); } public void testIntegerBig3() throws IOException { String s = "-5153135115135135135135153153135135153"; // BigInteger xmlInteger = new BigInteger(s); IntegerValue xmlInteger = IntegerValue.parse(s); // Bit EncoderChannel bitEC = getBitEncoder(); bitEC.encodeIntegerValue(xmlInteger); bitEC.flush(); assertTrue(s.equals(getBitDecoder().decodeIntegerValue().toString())); // Byte getByteEncoder().encodeIntegerValue(xmlInteger); assertTrue(s.equals(getByteDecoder().decodeIntegerValue().toString())); } public void testIntegerSequence() throws IOException { // Bit / Byte EncoderChannel ecBit = getBitEncoder(); EncoderChannel ecByte = getByteEncoder(); for (int i = 0; i < 100000; i++) { ecBit.encodeInteger(i); ecByte.encodeInteger(i); } DecoderChannel dcBit = getBitDecoder(); DecoderChannel dcByte = getByteDecoder(); for (int i = 
0; i < 100000; i++) { IntegerValue dec1 = dcBit.decodeIntegerValue(); assertTrue(dec1.getIntegerValueType() == IntegerValueType.INT); assertEquals(dec1.intValue(), i); IntegerValue dec2 = dcByte.decodeIntegerValue(); assertTrue(dec2.getIntegerValueType() == IntegerValueType.INT); assertEquals(dec2.intValue(), i); } } }
/* * Copyright 2012 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.apps.iosched.ui.phone; import com.google.analytics.tracking.android.EasyTracker; import com.google.android.apps.iosched.provider.ScheduleContract; import com.google.android.apps.iosched.ui.BaseActivity; import com.google.android.apps.iosched.ui.SessionsFragment; import com.google.android.apps.iosched.ui.SocialStreamFragment; import com.google.android.apps.iosched.ui.TrackInfoHelperFragment; import com.google.android.apps.iosched.ui.VendorsFragment; import com.google.android.apps.iosched.util.UIUtils; import com.actionbarsherlock.app.ActionBar; import com.actionbarsherlock.view.Menu; import com.actionbarsherlock.view.MenuItem; import android.content.Intent; import android.net.Uri; import android.os.Bundle; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentManager; import android.support.v4.app.FragmentPagerAdapter; import android.support.v4.app.FragmentTransaction; import android.support.v4.view.ViewPager; import at.devfest.Setup; import at.devfest.app.R; import static com.google.android.apps.iosched.util.LogUtils.LOGD; /** * A single-pane activity that shows a {@link SessionsFragment} in one tab and a * {@link VendorsFragment} in another tab, representing the sessions and developer sandbox companies * for the given conference track (Android, Chrome, etc.). 
 */
public class TrackDetailActivity extends BaseActivity implements
        ActionBar.TabListener,
        ViewPager.OnPageChangeListener,
        SessionsFragment.Callbacks,
        VendorsFragment.Callbacks,
        TrackInfoHelperFragment.Callbacks {

    // Pager holding the sessions tab (page 0) and, when shown, the vendors tab (page 1).
    private ViewPager mViewPager;
    // Track id parsed from the launch intent's data URI.
    private String mTrackId;
    // Full track content URI from the launch intent.
    private Uri mTrackUri;
    // False for the codelabs / tech-talk tracks, which have no vendors tab.
    private boolean mShowVendors = true;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_track_detail);

        mTrackUri = getIntent().getData();
        mTrackId = ScheduleContract.Tracks.getTrackId(mTrackUri);

        mViewPager = (ViewPager) findViewById(R.id.pager);
        mViewPager.setAdapter(new TrackDetailPagerAdapter(getSupportFragmentManager()));
        mViewPager.setOnPageChangeListener(this);
        mViewPager.setPageMarginDrawable(R.drawable.grey_border_inset_lr);
        mViewPager.setPageMargin(getResources().getDimensionPixelSize(R.dimen.page_margin_width));

        // Codelabs and tech-talk tracks show only the sessions page.
        mShowVendors = !ScheduleContract.Tracks.CODELABS_TRACK_ID.equals(mTrackId)
                && !ScheduleContract.Tracks.TECH_TALK_TRACK_ID.equals(mTrackId);

        if (mShowVendors) {
            // Tab navigation is only set up when there are two pages to switch between.
            final ActionBar actionBar = getSupportActionBar();
            actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_TABS);
            actionBar.addTab(actionBar.newTab()
                    .setText(R.string.title_sessions)
                    .setTabListener(this));
            actionBar.addTab(actionBar.newTab()
                    .setText(R.string.title_vendors)
                    .setTabListener(this));
        }

        if (savedInstanceState == null) {
            // Headless helper fragment that loads track info and reports back via
            // onTrackInfoAvailable(); only added on first creation (survives rotation).
            getSupportFragmentManager().beginTransaction()
                    .add(TrackInfoHelperFragment.newFromTrackUri(mTrackUri), "track_info")
                    .commit();
        }
    }

    @Override
    public void onTabSelected(ActionBar.Tab tab, FragmentTransaction fragmentTransaction) {
        // Keep the pager in sync with action-bar tab selection.
        mViewPager.setCurrentItem(tab.getPosition());
    }

    @Override
    public void onTabUnselected(ActionBar.Tab tab, FragmentTransaction fragmentTransaction) {
    }

    @Override
    public void onTabReselected(ActionBar.Tab tab, FragmentTransaction fragmentTransaction) {
    }

    @Override
    public void onPageScrolled(int i, float v, int i1) {
    }

    @Override
    public void onPageSelected(int position) {
        // Keep the action-bar tab in sync with swipe navigation, then log a
        // per-page analytics view.
        getSupportActionBar().setSelectedNavigationItem(position);

        // NOTE(review): titleId stays -1 for positions other than 0/1, and
        // getString(-1) would throw — presumably the adapter never yields other
        // positions; confirm against TrackDetailPagerAdapter.getCount().
        int titleId = -1;
        switch (position) {
            case 0:
                titleId = R.string.title_sessions;
                break;
            case 1:
                titleId = R.string.title_vendors;
                break;
        }
        String title = getString(titleId);
        EasyTracker.getTracker().trackView(title + ": " + getTitle());
        LOGD("Tracker", title + ": " + getTitle());
    }

    @Override
    public void onPageScrollStateChanged(int i) {
    }

    /**
     * Adapter supplying the sessions fragment (page 0) and, when vendors are
     * shown, the vendors fragment (page 1) for the current track.
     */
    private class TrackDetailPagerAdapter extends FragmentPagerAdapter {

        public TrackDetailPagerAdapter(FragmentManager fm) {
            super(fm);
        }

        @Override
        public Fragment getItem(int position) {
            // The "all tracks" pseudo-track queries the unfiltered content URIs.
            boolean allTracks = (ScheduleContract.Tracks.ALL_TRACK_ID.equals(mTrackId));
            if (position == 0) {
                Fragment fragment = new SessionsFragment();
                fragment.setArguments(BaseActivity.intentToFragmentArguments(new Intent(
                        Intent.ACTION_VIEW,
                        allTracks
                                ? ScheduleContract.Sessions.CONTENT_URI
                                : ScheduleContract.Tracks.buildSessionsUri(mTrackId))));
                return fragment;
            } else {
                Fragment fragment = new VendorsFragment();
                fragment.setArguments(BaseActivity.intentToFragmentArguments(new Intent(
                        Intent.ACTION_VIEW,
                        allTracks
                                ? ScheduleContract.Vendors.CONTENT_URI
                                : ScheduleContract.Tracks.buildVendorsUri(mTrackId))));
                return fragment;
            }
        }

        @Override
        public int getCount() {
            return mShowVendors ? 2 : 1;
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        super.onCreateOptionsMenu(menu);
        getSupportMenuInflater().inflate(R.menu.track_detail, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case R.id.menu_social_stream:
                // Open the social stream filtered by this track's session hashtags.
                Intent intent = new Intent(this, Setup.SocialStreamActivityClass);
                intent.putExtra(SocialStreamFragment.EXTRA_QUERY,
                        UIUtils.getSessionHashtagsString(mTrackId));
                startActivity(intent);
                break;
        }
        return super.onOptionsItemSelected(item);
    }

    /**
     * Callback from TrackInfoHelperFragment once the track row has loaded:
     * applies the track name/color to the UI and logs the initial page view.
     */
    @Override
    public void onTrackInfoAvailable(String trackId, String trackName, int trackColor) {
        setTitle(trackName);
        setActionBarColor(trackColor);

        EasyTracker.getTracker().trackView(getString(R.string.title_sessions) + ": " + getTitle());
        LOGD("Tracker", getString(R.string.title_sessions) + ": " + getTitle());
    }

    @Override
    public boolean onSessionSelected(String sessionId) {
        startActivity(new Intent(Intent.ACTION_VIEW,
                ScheduleContract.Sessions.buildSessionUri(sessionId)));
        // false: this single-pane activity never shows the session in-place.
        return false;
    }

    @Override
    public boolean onVendorSelected(String vendorId) {
        startActivity(new Intent(Intent.ACTION_VIEW,
                ScheduleContract.Vendors.buildVendorUri(vendorId)));
        return false;
    }
}
/*
 * Copyright 2005 MH-Software-Entwicklung. All rights reserved.
 * Use is subject to license terms.
 */
package com.jtattoo.plaf.graphite;

import java.awt.*;
import javax.swing.*;
import javax.swing.border.*;
import javax.swing.plaf.*;
import com.jtattoo.plaf.*;
import java.awt.geom.AffineTransform;
import java.awt.geom.Rectangle2D;
import java.awt.image.BufferedImage;

/**
 * Border implementations for the Graphite look and feel. Each border is
 * created lazily and shared as a singleton via the static accessors below.
 *
 * @author Michael Hagen
 */
public class GraphiteBorders extends BaseBorders {

    // Lazily created, shared border singletons.
    private static Border buttonBorder;
    private static Border toggleButtonBorder;
    private static Border rolloverToolButtonBorder;
    private static Border menuItemBorder = null;
    private static Border popupMenuBorder = null;
    private static Border internalFrameBorder;

    //------------------------------------------------------------------------------------
    // Lazy access methods
    //------------------------------------------------------------------------------------
    public static Border getButtonBorder() {
        if (buttonBorder == null)
            buttonBorder = new ButtonBorder();
        return buttonBorder;
    }

    public static Border getToggleButtonBorder() {
        if (toggleButtonBorder == null)
            toggleButtonBorder = new ToggleButtonBorder();
        return toggleButtonBorder;
    }

    public static Border getRolloverToolButtonBorder() {
        if (rolloverToolButtonBorder == null)
            rolloverToolButtonBorder = new RolloverToolButtonBorder();
        return rolloverToolButtonBorder;
    }

    public static Border getMenuItemBorder() {
        if (menuItemBorder == null) {
            menuItemBorder = new MenuItemBorder();
        }
        return menuItemBorder;
    }

    public static Border getPopupMenuBorder() {
        if (popupMenuBorder == null) {
            // Opaque menus get the plain border; translucent ones get the
            // shadowed variant (inherited from BaseBorders).
            if (AbstractLookAndFeel.getTheme().isMenuOpaque()) {
                popupMenuBorder = new PopupMenuBorder();
            } else {
                popupMenuBorder = new PopupMenuShadowBorder();
            }
        }
        return popupMenuBorder;
    }

    public static Border getInternalFrameBorder() {
        if (internalFrameBorder == null)
            internalFrameBorder = new InternalFrameBorder();
        return internalFrameBorder;
    }

    //------------------------------------------------------------------------------------
    // Inner classes
    //------------------------------------------------------------------------------------

    /**
     * Rounded button border. Default buttons (root pane's default button) get
     * an extra highlight ring unless focused or rolled over.
     */
    public static class ButtonBorder implements Border, UIResource {

        // Highlight ring colors used for the default button.
        private static final Color defaultColorHi = new Color(220, 230, 245);
        private static final Color defaultColorMed = new Color(212, 224, 243);
        private static final Color defaultColorLo = new Color(200, 215, 240);
        private static final Insets insets = new Insets(3, 8, 3, 8);

        public void paintBorder(Component c, Graphics g, int x, int y, int w, int h) {
            Graphics2D g2D = (Graphics2D) g;
            AbstractButton b = (AbstractButton)c;
            Color frameColor = ColorHelper.brighter(AbstractLookAndFeel.getTheme().getFrameColor(), 30);
            if (AbstractLookAndFeel.getTheme().doShowFocusFrame() && b.hasFocus()) {
                frameColor = AbstractLookAndFeel.getTheme().getFocusFrameColor();
            }
            // Antialiasing is forced on for the round rects and restored afterwards.
            Object savedRederingHint = g2D.getRenderingHint(RenderingHints.KEY_ANTIALIASING);
            g2D.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);

            // Soft white outline at 50% alpha beneath the frame.
            Composite composite = g2D.getComposite();
            AlphaComposite alpha = AlphaComposite.getInstance(AlphaComposite.SRC_OVER, 0.5f);
            g2D.setComposite(alpha);
            g2D.setColor(Color.white);
            g2D.drawRoundRect(x, y, w - 1, h - 1, 6, 6);
            g2D.setComposite(composite);

            if (b.getRootPane() != null && b.equals(b.getRootPane().getDefaultButton()) && !b.hasFocus()) {
                // Default button: darker frame plus a layered highlight ring
                // (skipped while the mouse is over the button).
                g2D.setColor(ColorHelper.darker(frameColor, 20));
                g2D.drawRoundRect(x, y, w - 1, h - 2, 6, 6);
                if (!b.getModel().isRollover()) {
                    g2D.setColor(defaultColorHi);
                    g2D.drawRoundRect(x + 1, y + 1, w - 3, h - 4, 6, 6);
                    g2D.setColor(defaultColorMed);
                    g2D.drawRoundRect(x + 2, y + 2, w - 5, h - 6, 6, 6);
                    g2D.setColor(defaultColorLo);
                    g2D.drawLine(x + 3, h - 3, w - 3, h - 3);
                    g2D.drawLine(w - 2, y + 4, w - 2, h - 4);
                }
            } else {
                g2D.setColor(frameColor);
                g2D.drawRoundRect(x, y, w - 1, h - 2, 6, 6);
            }
            g2D.setRenderingHint(RenderingHints.KEY_ANTIALIASING, savedRederingHint);
        }

        public Insets getBorderInsets(Component c) {
            return new Insets(insets.top, insets.left, insets.bottom, insets.right);
        }

        public Insets getBorderInsets(Component c, Insets borderInsets) {
            borderInsets.left = insets.left;
            borderInsets.top = insets.top;
            borderInsets.right = insets.right;
            borderInsets.bottom = insets.bottom;
            return borderInsets;
        }

        public boolean isBorderOpaque() {
            return true;
        }
    } // class ButtonBorder

    /**
     * Rounded toggle-button border: a white halo under the theme frame color.
     */
    public static class ToggleButtonBorder implements Border, UIResource {

        private static final Insets insets = new Insets(3, 6, 4, 6);

        public void paintBorder(Component c, Graphics g, int x, int y, int w, int h) {
            Graphics2D g2D = (Graphics2D) g;
            Color frameColor = ColorHelper.brighter(AbstractLookAndFeel.getTheme().getFrameColor(), 30);
            Object savedRederingHint = g2D.getRenderingHint(RenderingHints.KEY_ANTIALIASING);
            g2D.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
            g2D.setColor(Color.white);
            g2D.drawRoundRect(x, y, w - 1, h - 1, 4, 4);
            g2D.setColor(frameColor);
            g2D.drawRoundRect(x, y, w - 1, h - 2, 4, 4);
            g2D.setRenderingHint(RenderingHints.KEY_ANTIALIASING, savedRederingHint);
        }

        public Insets getBorderInsets(Component c) {
            return new Insets(insets.top, insets.left, insets.bottom, insets.right);
        }

        public Insets getBorderInsets(Component c, Insets borderInsets) {
            borderInsets.left = insets.left;
            borderInsets.top = insets.top;
            borderInsets.right = insets.right;
            borderInsets.bottom = insets.bottom;
            return borderInsets;
        }

        public boolean isBorderOpaque() {
            return true;
        }
    } // class ToggleButtonBorder

    /**
     * Toolbar button border: paints only on rollover (translucent white fill)
     * or pressed/selected (translucent black fill); disabled buttons and the
     * idle state paint nothing.
     */
    public static class RolloverToolButtonBorder implements Border, UIResource {

        private static final Insets insets = new Insets(1, 1, 1, 1);

        public void paintBorder(Component c, Graphics g, int x, int y, int w, int h) {
            AbstractButton button = (AbstractButton)c;
            ButtonModel model = button.getModel();
            Color loColor = ColorHelper.brighter(AbstractLookAndFeel.getFrameColor(), 50);
            if (model.isEnabled()) {
                if ((model.isPressed() && model.isArmed()) || model.isSelected()) {
                    // Pressed/selected: frame plus a 15%-alpha black inlay.
                    Graphics2D g2D = (Graphics2D)g;
                    Composite composite = g2D.getComposite();
                    g.setColor(loColor);
                    g.drawRect(x, y, w - 1, h - 1);
                    AlphaComposite alpha = AlphaComposite.getInstance(AlphaComposite.SRC_OVER, 0.15f);
                    g2D.setComposite(alpha);
                    g.setColor(Color.black);
                    g.fillRect(x + 1, y + 1, w - 2, h - 2);
                    g2D.setComposite(composite);
                } else if (model.isRollover()) {
                    // Rollover: frame plus a 40%-alpha white inlay.
                    Graphics2D g2D = (Graphics2D)g;
                    Composite composite = g2D.getComposite();
                    g.setColor(loColor);
                    g.drawRect(x, y, w - 1, h - 1);
                    AlphaComposite alpha = AlphaComposite.getInstance(AlphaComposite.SRC_OVER, 0.4f);
                    g2D.setComposite(alpha);
                    g.setColor(Color.white);
                    g.fillRect(x + 1, y + 1, w - 2, h - 2);
                    g2D.setComposite(composite);
                }
            }
        }

        public Insets getBorderInsets(Component c) {
            return new Insets(insets.top, insets.left, insets.bottom, insets.right);
        }

        public Insets getBorderInsets(Component c, Insets borderInsets) {
            borderInsets.left = insets.left;
            borderInsets.top = insets.top;
            borderInsets.right = insets.right;
            borderInsets.bottom = insets.bottom;
            return borderInsets;
        }

        public boolean isBorderOpaque() {
            return true;
        }
    } // class RolloverToolButtonBorder

    /**
     * Menu/menu-item highlight border: top-level menu-bar entries get a
     * three-sided frame; popup items get top and bottom lines only.
     */
    public static class MenuItemBorder extends AbstractBorder implements UIResource {

        private static final Insets insets = new Insets(2, 2, 2, 2);

        public void paintBorder(Component c, Graphics g, int x, int y, int w, int h) {
            JMenuItem b = (JMenuItem) c;
            ButtonModel model = b.getModel();
            Color borderColor = ColorHelper.darker(AbstractLookAndFeel.getMenuSelectionBackgroundColor(), 20);
            g.setColor(borderColor);
            if (c.getParent() instanceof JMenuBar) {
                if (model.isArmed() || model.isSelected()) {
                    // Top-level menu: top, left and right edges.
                    g.drawLine(x, y, x + w - 1, y);
                    g.drawLine(x, y + 1, x, y + h - 1);
                    g.drawLine(x + w - 1, y + 1, x + w - 1, y + h - 1);
                }
            } else {
                if (model.isArmed() || (c instanceof JMenu && model.isSelected())) {
                    // Popup item: top and bottom edges only.
                    g.drawLine(x, y, x + w - 1, y);
                    g.drawLine(x, y + h - 1, x + w - 1, y + h - 1);
                }
            }
        }

        public Insets getBorderInsets(Component c) {
            return new Insets(insets.top, insets.left, insets.bottom, insets.right);
        }

        public Insets getBorderInsets(Component c, Insets borderInsets) {
            borderInsets.left = insets.left;
            borderInsets.top = insets.top;
            borderInsets.right = insets.right;
            borderInsets.bottom = insets.bottom;
            return borderInsets;
        }
    } // class MenuItemBorder

    /**
     * Popup-menu border with an optional vertical "logo" strip on the left
     * (gradient background plus the theme's logo string drawn rotated).
     */
    public static class PopupMenuBorder extends AbstractBorder implements UIResource {

        protected static final Font logoFont = new Font("Dialog", Font.BOLD, 12);
        // Wider left inset reserves room for the logo strip.
        protected Insets logoInsets = new Insets(2, 18, 1, 1);
        protected Insets insets = new Insets(2, 1, 1, 1);

        public boolean hasLogo() {
            return ((AbstractLookAndFeel.getTheme().getLogoString() != null)
                    && (AbstractLookAndFeel.getTheme().getLogoString().length() > 0));
        }

        /**
         * Renders the vertical logo strip into an offscreen image (gradient
         * plus rotated, clipped logo text) and tiles it via a TexturePaint.
         */
        public void paintLogo(Graphics2D g2D, int w, int h) {
            BufferedImage image = new BufferedImage(w, h, BufferedImage.TYPE_INT_ARGB);
            Graphics2D imageGraphics = image.createGraphics();
            Color logoColorHi = AbstractLookAndFeel.getTheme().getMenuSelectionBackgroundColorDark();
            Color logoColorLo = AbstractLookAndFeel.getTheme().getMenuSelectionBackgroundColor();
            Color colors[] = ColorHelper.createColorArr(logoColorHi, logoColorLo, 32);
            JTattooUtilities.fillHorGradient(imageGraphics, colors, 0, 0, w, h);
            imageGraphics.setFont(logoFont);
            FontMetrics fm = imageGraphics.getFontMetrics();
            // Rotate 270 degrees so the text runs bottom-to-top.
            AffineTransform at = new AffineTransform();
            at.setToRotation(Math.PI + (Math.PI / 2));
            imageGraphics.setTransform(at);
            int xs = -h + 4;
            int ys = fm.getAscent() + 2;
            // Dark offset copy first, then white text on top for an embossed look.
            imageGraphics.setColor(ColorHelper.darker(logoColorLo, 20));
            imageGraphics.drawString(JTattooUtilities.getClippedText(AbstractLookAndFeel.getTheme().getLogoString(), fm, h - 16), xs - 1, ys + 1);
            imageGraphics.setColor(Color.white);
            imageGraphics.drawString(JTattooUtilities.getClippedText(AbstractLookAndFeel.getTheme().getLogoString(), fm, h - 16), xs, ys);
            Rectangle2D r2D = new Rectangle2D.Double(0, 0, w, h);
            TexturePaint texturePaint = new TexturePaint(image, r2D);
            g2D.setPaint(texturePaint);
            g2D.fillRect(0, 0, w, h);
        }

        public void paintBorder(Component c, Graphics g, int x, int y, int w, int h) {
            // dx is the width of the left strip (logo or plain fill).
            int dx = getBorderInsets(c).left - 1;
            Color logoColor = AbstractLookAndFeel.getMenuSelectionBackgroundColor();
            Color borderColor = ColorHelper.darker(AbstractLookAndFeel.getMenuSelectionBackgroundColor(), 20);
            g.setColor(logoColor);
            g.fillRect(x, y, dx, h);
            if (hasLogo()) {
                paintLogo((Graphics2D) g, dx, h);
            }
            g.setColor(borderColor);
            // Popups invoked from a menu bar skip the top line over the strip
            // so the bar and popup appear connected.
            boolean menuBarPopup = false;
            JPopupMenu pm = (JPopupMenu)c;
            if (pm.getInvoker() != null) {
                menuBarPopup = (pm.getInvoker().getParent() instanceof JMenuBar);
            }
            if (menuBarPopup)
                g.drawLine(x + dx, y, x + w, y);
            else
                g.drawLine(x, y, x + w, y);
            g.drawLine(x, y, x, y + h);
            g.drawLine(x + w - 1, y, x + w - 1, y + h);
            g.drawLine(x, y + h - 1, x + w, y + h - 1);
            g.drawLine(x + dx, y, x + dx, y + h);
        }

        public Insets getBorderInsets(Component c) {
            if (hasLogo()) {
                return new Insets(logoInsets.top, logoInsets.left, logoInsets.bottom, logoInsets.right);
            } else {
                return new Insets(insets.top, insets.left, insets.bottom, insets.right);
            }
        }

        public Insets getBorderInsets(Component c, Insets borderInsets) {
            Insets ins = getBorderInsets(c);
            borderInsets.left = ins.left;
            borderInsets.top = ins.top;
            borderInsets.right = ins.right;
            borderInsets.bottom = ins.bottom;
            return borderInsets;
        }
    } // class PopupMenuBorder

    /**
     * Internal-frame border: gradient side rails and title strip, switching to
     * the inactive color set when the frame is not active. dw/insets/title
     * height come from BaseInternalFrameBorder.
     */
    public static class InternalFrameBorder extends BaseInternalFrameBorder {

        public InternalFrameBorder() {
        }

        public Insets getBorderInsets(Component c) {
            return new Insets(insets.top, insets.left, insets.bottom, insets.right);
        }

        public Insets getBorderInsets(Component c, Insets borderInsets) {
            borderInsets.left = insets.left;
            borderInsets.top = insets.top;
            borderInsets.right = insets.right;
            borderInsets.bottom = insets.bottom;
            return borderInsets;
        }

        public void paintBorder(Component c, Graphics g, int x, int y, int w, int h) {
            int th = getTitleHeight(c);
            Color titleColor = AbstractLookAndFeel.getWindowTitleColorLight();
            Color borderColor = AbstractLookAndFeel.getWindowTitleColorDark();
            Color frameColor = AbstractLookAndFeel.getWindowBorderColor();
            if (!isActive(c)) {
                titleColor = AbstractLookAndFeel.getWindowInactiveTitleColorLight();
                borderColor = AbstractLookAndFeel.getWindowInactiveTitleColorDark();
                frameColor = AbstractLookAndFeel.getWindowInactiveBorderColor();
            }

            // Flat fill along the top strip and bottom edge.
            g.setColor(titleColor);
            g.fillRect(x, y + 1, w, insets.top - 1);
            g.setColor(titleColor);
            g.fillRect(x + 1, y + h - dw, w - 2, dw - 1);

            if (isActive(c)) {
                // Gradient rails beside the title, inverse gradient down the sides.
                JTattooUtilities.fillHorGradient(g, AbstractLookAndFeel.getTheme().getWindowTitleColors(), 1, insets.top, dw, th + 1);
                JTattooUtilities.fillHorGradient(g, AbstractLookAndFeel.getTheme().getWindowTitleColors(), w - dw - 1, insets.top, dw, th + 1);
                g.setColor(borderColor);
                JTattooUtilities.fillInverseHorGradient(g, AbstractLookAndFeel.getTheme().getWindowTitleColors(), 1, insets.top + th + 1, dw - 1, h - th - dw);
                JTattooUtilities.fillInverseHorGradient(g, AbstractLookAndFeel.getTheme().getWindowTitleColors(), w - dw, insets.top + th + 1, dw - 1, h - th - dw);
            } else {
                JTattooUtilities.fillHorGradient(g, AbstractLookAndFeel.getTheme().getWindowInactiveTitleColors(), 1, insets.top, dw, th + 1);
                JTattooUtilities.fillHorGradient(g, AbstractLookAndFeel.getTheme().getWindowInactiveTitleColors(), w - dw - 1, insets.top, dw, th + 1);
                g.setColor(borderColor);
                JTattooUtilities.fillInverseHorGradient(g, AbstractLookAndFeel.getTheme().getWindowInactiveTitleColors(), 1, insets.top + th + 1, dw - 1, h - th - dw);
                JTattooUtilities.fillInverseHorGradient(g, AbstractLookAndFeel.getTheme().getWindowInactiveTitleColors(), w - dw, insets.top + th + 1, dw - 1, h - th - dw);
            }

            // Outer frame and inner separator lines.
            g.setColor(frameColor);
            g.drawRect(x, y, w - 1, h - 1);
            g.drawLine(x + dw - 1, y + insets.top + th, x + dw - 1, y + h - dw);
            g.drawLine(x + w - dw, y + insets.top + th, x + w - dw, y + h - dw);
            g.drawLine(x + dw - 1, y + h - dw, x + w - dw, y + h - dw);
        }
    } // class InternalFrameBorder

} // class GraphiteBorders
package jama2;

import java.io.Serializable;

import static jama2.util.Maths.eps;
import static jama2.util.Maths.hypot;
import static java.lang.Math.max;
import static java.lang.Math.min;
import static java.lang.Math.abs;
import static java.lang.Math.sqrt;

/**
 * Singular Value Decomposition.
 * <P>
 * For an m-by-n matrix A with m &gt;= n, the singular value decomposition is an
 * m-by-n orthogonal matrix U, an n-by-n diagonal matrix S, and an n-by-n
 * orthogonal matrix V so that A = U*S*V'.
 * </P>
 * <P>
 * The singular values, sigma[k] = S[k][k], are ordered so that sigma[0] &gt;=
 * sigma[1] &gt;= ... &gt;= sigma[n-1].
 * </P>
 * <P>
 * The singular value decompostion always exists, so the constructor will never
 * fail. The matrix condition number and the effective numerical rank can be
 * computed from this decomposition.
 * </P>
 *
 * @author The MathWorks, Inc. and the National Institute of Standards and
 *         Technology.
 * @version 2.0
 * @see <a href="http://tweimer.github.io/java-matrix/">java-matrix</a>
 */
public class SingularValueDecomposition implements Serializable {
    /**
     * For the Serializeable interface
     */
    private static final long serialVersionUID = 1;

    // Underflow guard used in the negligibility tests of the main iteration.
    final static double tiny = Math.pow(2.0, -966.0);

    /**
     * Arrays for internal storage of U and V.
     *
     * @serial internal storage of U.
     * @serial internal storage of V.
     */
    private final double[][] U, V;

    /**
     * Array for internal storage of singular values.
     *
     * @serial internal storage of singular values.
     */
    private final double[] s;

    /**
     * Row and column dimensions.
     *
     * @serial row dimension.
     * @serial column dimension.
     */
    private final int m, n;

    /**
     * Construct the singular value decomposition Structure to access U, S and V.
     *
     * <p>
     * This is a package-private constructor. Use {@link Matrix#svd()} to create a
     * singular value decomposition of a given matrix.
     * </p>
     *
     * @param Arg Rectangular matrix
     * @see Matrix#svd()
     */
    SingularValueDecomposition(final Matrix Arg) {
        // Derived from LINPACK code.
        // Initialize.
        final var A = Arg.getArrayCopy();
        m = Arg.getRowDimension();
        n = Arg.getColumnDimension();
        final var nu = min(m, n);

        /*
         * Apparently the failing cases are only a proper subset of (m<n), so let's not
         * throw error. Correct fix to come later?
         */
        // if (m<n)
        // {
        // throw new IllegalArgumentException("Jama SVD only works for m >= n");
        // }
        s = new double[min(m + 1, n)];
        U = new double[m][nu];
        V = new double[n][n];
        final var e = new double[n];
        final var work = new double[m];
        final var wantu = true;
        final var wantv = true;

        // Reduce A to bidiagonal form, storing the diagonal elements
        // in s and the super-diagonal elements in e.
        final var nct = min(m - 1, n);
        final var nrt = max(0, min(n - 2, m));
        for (var k = 0; k < max(nct, nrt); k++) {
            if (k < nct) {
                // Compute 2-norm of k-th column without under/overflow.
                s[k] = 0.0;
                for (var i = k; i < m; i++) {
                    s[k] = hypot(s[k], A[i][k]);
                }
                // Compute the transformation for the k-th column
                if (s[k] != 0.0) {
                    if (A[k][k] < 0.0) {
                        s[k] = -s[k];
                    }
                    for (int i = k; i < m; i++) {
                        A[i][k] /= s[k];
                    }
                    A[k][k]++;
                }
                // place the k-th diagonal in s[k].
                s[k] = -s[k];
            }
            for (int j = k + 1; j < n; j++) {
                if (k < nct && s[k] != 0.0) {
                    // Apply the transformation.
                    var t = 0.0;
                    for (var i = k; i < m; i++) {
                        t += A[i][k] * A[i][j];
                    }
                    t /= -A[k][k];
                    for (var i = k; i < m; i++) {
                        A[i][j] += t * A[i][k];
                    }
                }
                // Place the k-th row of A into e for the
                // subsequent calculation of the row transformation.
                e[j] = A[k][j];
            }
            if (wantu && k < nct) {
                // Place the transformation in U for subsequent back multiplication.
                for (var i = k; i < m; i++) {
                    U[i][k] = A[i][k];
                }
            }
            if (k < nrt) {
                // Compute the k-th row transformation and place the
                // k-th super-diagonal in e[k].
                // Compute 2-norm without under/overflow.
                e[k] = 0.0;
                for (var i = k + 1; i < n; i++) {
                    e[k] = hypot(e[k], e[i]);
                }
                if (e[k] != 0.0) {
                    if (e[k + 1] < 0.0) {
                        e[k] = -e[k];
                    }
                    for (var i = k + 1; i < n; i++) {
                        e[i] /= e[k];
                    }
                    e[k + 1]++;
                }
                e[k] = -e[k];
                if (k + 1 < m && e[k] != 0.0) {
                    // Apply the transformation.
                    for (var i = k + 1; i < m; i++) {
                        work[i] = 0.0;
                    }
                    for (var j = k + 1; j < n; j++) {
                        for (var i = k + 1; i < m; i++) {
                            work[i] += e[j] * A[i][j];
                        }
                    }
                    for (var j = k + 1; j < n; j++) {
                        final var t = -e[j] / e[k + 1];
                        for (var i = k + 1; i < m; i++) {
                            A[i][j] += t * work[i];
                        }
                    }
                }
                if (wantv) {
                    // Place the transformation in V for subsequent back multiplication.
                    for (var i = k + 1; i < n; i++) {
                        V[i][k] = e[i];
                    }
                }
            }
        }

        // Set up the final bidiagonal matrix or order p.
        var p = min(n, m + 1);
        if (nct < n) {
            s[nct] = A[nct][nct];
        }
        if (m < p) {
            s[p - 1] = 0.0;
        }
        if (nrt + 1 < p) {
            e[nrt] = A[nrt][p - 1];
        }
        e[p - 1] = 0.0;

        // If required, generate U.
        if (wantu) {
            for (var j = nct; j < nu; j++) {
                for (final var rowU : U) {
                    rowU[j] = 0.0;
                }
                U[j][j] = 1.0;
            }
            // Accumulate the stored Householder column transformations in
            // reverse order.
            for (int k = nct - 1; k >= 0; k--) {
                if (s[k] != 0.0) {
                    for (int j = k + 1; j < nu; j++) {
                        var t = 0.0;
                        for (var i = k; i < m; i++) {
                            t += U[i][k] * U[i][j];
                        }
                        t /= -U[k][k];
                        for (var i = k; i < m; i++) {
                            U[i][j] += t * U[i][k];
                        }
                    }
                    for (var i = k; i < m; i++) {
                        U[i][k] = -U[i][k];
                    }
                    U[k][k]++;
                    for (var i = 0; i < k - 1; i++) {
                        U[i][k] = 0.0;
                    }
                } else {
                    // Zero column: contribute an identity column instead.
                    for (final var rowU : U) {
                        rowU[k] = 0.0;
                    }
                    U[k][k] = 1.0;
                }
            }
        }

        // If required, generate V.
        if (wantv) {
            for (int k = n - 1; k >= 0; k--) {
                if (k < nrt && e[k] != 0.0) {
                    for (int j = k + 1; j < nu; j++) {
                        var t = 0.0;
                        for (var i = k + 1; i < n; i++) {
                            t += V[i][k] * V[i][j];
                        }
                        t /= -V[k + 1][k];
                        for (var i = k + 1; i < n; i++) {
                            V[i][j] += t * V[i][k];
                        }
                    }
                }
                for (final double[] rowV : V) {
                    rowV[k] = 0.0;
                }
                V[k][k] = 1.0;
            }
        }

        // Main iteration loop for the singular values.
        final var pp = p - 1;
        while (p > 0) {
            // Find the largest k with negligible e[k] (or k == -1).
            var k = p - 2;
            while (k >= -1) {
                if (k == -1) {
                    break;
                }
                if (abs(e[k]) <= tiny + eps * (abs(s[k]) + abs(s[k + 1]))) {
                    e[k] = 0.0;
                    break;
                }
                k--;
            }
            // Here is where a test for too many iterations would go.
            //
            // This section of the program inspects for negligible elements in
            // the s and e arrays.
            //
            // On completion the variables kase and k are set as follows.
            //
            // kase = 1 if s(p) and e[k-1] are negligible and k<p
            // kase = 2 if s(k) is negligible and k<p
            // kase = 3 if e[k-1] is negligible, k<p, and s(k), ..., s(p) are
            // not negligible (qr step).
            // kase = 4 if e(p-1) is negligible (convergence).
            final byte kase;
            if (k == p - 2) {
                // e(p-1) is negligible (convergence).
                kase = 4;
            } else {
                var ks = p - 1;
                while (ks >= k) {
                    if (ks == k) {
                        break;
                    }
                    final var t = (ks != p ? abs(e[ks]) : 0.0) + (ks != k + 1 ? abs(e[ks - 1]) : 0.0);
                    if (abs(s[ks]) <= tiny + eps * t) {
                        s[ks] = 0.0;
                        break;
                    }
                    ks--;
                }
                if (ks == k) {
                    // e[k-1] is negligible, k<p, and s(k), ..., s(p) are not
                    // negligible (qr step).
                    kase = 3;
                } else if (ks == p - 1) {
                    // s(p) and e[k-1] are negligible and k<p
                    kase = 1;
                } else {
                    // s(k) is negligible and k<p
                    kase = 2;
                    k = ks;
                }
            }
            k++;

            // Perform the task indicated by kase.
            switch (kase) {
            // Deflate negligible s(p).
            case 1: {
                // Remember e[p - 2] in f and reset it
                var f = e[p - 2];
                e[p - 2] = 0.0;
                for (var j = p - 2; j >= k; j--) {
                    final var h = hypot(s[j], f);
                    final var cs = s[j] / h;
                    final var sn = f / h;
                    s[j] = h;
                    if (j != k) {
                        f = -sn * e[j - 1];
                        e[j - 1] *= cs;
                    }
                    if (wantv) {
                        // Apply the Givens rotation to columns j and p-1 of V.
                        for (var i = 0; i < n; i++) {
                            final var t = cs*V[i][j] + sn*V[i][p-1];
                            V[i][p-1] = -sn*V[i][j] + cs*V[i][p-1];
                            V[i][j] = t;
                        }
                    }
                }
            }
                break;
            // Split at negligible s(k).
            case 2: {
                // Remember e[k - 1] in f and reset it
                double f = e[k - 1];
                e[k - 1] = 0.0;
                for (int j = k; j < p; j++) {
                    var t = hypot(s[j], f);
                    final double cs = s[j] / t, sn = f / t;
                    s[j] = t;
                    f = -sn * e[j];
                    e[j] *= cs;
                    if (wantu) {
                        for (final var rowU : U) {
                            // Update U[i][k - 1] and U[i][j]
                            t = rowU[k - 1]; // remember U[i][k - 1]
                            rowU[k - 1] = cs * t - sn * rowU[j];
                            rowU[j] = cs * rowU[j] + sn * t;
                        }
                    }
                }
            }
                break;
            // Perform one qr step.
            case 3: {
                // Scale by the largest magnitude entry to avoid over/underflow
                // in the shift computation.
                final var scale = max(
                        max(max(max(abs(s[p - 1]), abs(s[p - 2])), abs(e[p - 2])), abs(s[k])),
                        abs(e[k]));
                final var sp = s[p - 1] / scale;
                final var spm1 = s[p - 2] / scale;
                final var epm1 = e[p - 2] / scale;
                final var sk = s[k] / scale;
                final var ek = e[k] / scale;
                final var b = ((spm1 + sp) * (spm1 - sp) + epm1 * epm1) / 2.0;
                final var c = (sp * epm1) * (sp * epm1);

                // Calculate the shift.
                double shift;
                if (b != 0.0 || c != 0.0) {
                    shift = sqrt(b * b + c);
                    if (b < 0.0) {
                        shift = -shift;
                    }
                    shift = c / (b + shift);
                } else {
                    shift = 0.0;
                }

                // Chase zeros.
                var f = (sk + sp) * (sk - sp) + shift;
                var g = sk * ek;
                for (var j = k; j < p - 1; j++) {
                    var h = hypot(f, g);
                    var cs = f / h;
                    var sn = g / h;
                    if (j != k) {
                        e[j - 1] = h;
                    }
                    f = cs * s[j] + sn * e[j];
                    e[j] = cs * e[j] - sn * s[j];
                    g = sn * s[j + 1];
                    s[j + 1] *= cs;
                    if (wantv) {
                        for (var i = 0; i < n; i++) {
                            final var t = cs*V[i][j] + sn*V[i][j+1];
                            V[i][j+1] = -sn*V[i][j] + cs*V[i][j+1];
                            V[i][j] = t;
                        }
                    }
                    h = hypot(f, g);
                    cs = f / h;
                    sn = g / h;
                    s[j] = h;
                    f = cs * e[j] + sn * s[j + 1];
                    s[j + 1] = cs * s[j + 1] - sn * e[j];
                    g = sn * e[j + 1];
                    e[j + 1] *= cs;
                    if (wantu && j < m - 1) {
                        for (var i = 0; i < m; i++) {
                            final var t = cs*U[i][j] + sn*U[i][j+1];
                            U[i][j+1] = -sn*U[i][j] + cs*U[i][j+1];
                            U[i][j] = t;
                        }
                    }
                }
                e[p - 2] = f;
            }
                break;
            // Convergence.
            case 4: {
                // Make the singular values positive.
                if (s[k] <= 0.0) {
                    s[k] = -s[k];
                    if (wantv) {
                        for (var i = 0; i <= pp; i++) {
                            V[i][k] = -V[i][k];
                        }
                    }
                }
                // Order the singular values.
                while (k < pp) {
                    if (s[k] < s[k + 1]) {
                        // swap s[k] and s[k + 1]
                        double t = s[k];
                        s[k] = s[k + 1];
                        s[k + 1] = t;
                        if (wantv && k < n - 1) {
                            for (final double[] rowV : V) {
                                // swap rowV[k + 1] and rowV[k]
                                t = rowV[k + 1];
                                rowV[k + 1] = rowV[k];
                                rowV[k] = t;
                            }
                        }
                        if (wantu && k < m - 1) {
                            for (final double[] rowU : U) {
                                // swap rowU[k + 1] and rowU[k]
                                t = rowU[k + 1];
                                rowU[k + 1] = rowU[k];
                                rowU[k] = t;
                            }
                        }
                        k++;
                    } else {
                        break;
                    }
                }
                p--;
            }
                break;
            }
        }
    }

    /**
     * Two norm condition number.
     *
     * @return max(S)/min(S)
     */
    public double cond() {
        return s[0] / s[min(m, n) - 1];
    }

    /**
     * Return the diagonal matrix of singular values.
     *
     * @return S
     */
    public Matrix getS() {
        return Matrix.diag(s);
    }

    /**
     * Return the one-dimensional array of singular values.
     *
     * @return diagonal of S.
     */
    public double[] getSingularValues() {
        // NOTE(review): returns the internal array without a defensive copy;
        // callers can mutate this decomposition's state.
        return s;
    }

    /**
     * Return the left singular vectors.
     *
     * @return U
     */
    public Matrix getU() {
        return new Matrix(m, min(m, n), U);
    }

    /**
     * Return the right singular vectors.
     *
     * @return V
     */
    public Matrix getV() {
        return new Matrix(n, V);
    }

    /**
     * Two norm.
     *
     * @return max(S)
     */
    public double norm2() {
        return s[0];
    }

    /**
     * Effective numerical matrix rank.
     *
     * @return Number of nonnegligible singular values.
     */
    public int rank() {
        // Tolerance scales with the matrix size and the largest singular value.
        final double tol = max(m, n) * s[0] * eps;
        int r = 0;
        for (final double element : s) {
            if (element > tol) {
                r++;
            }
        }
        return r;
    }
}
/*
 * Copyright (c) 2015-present, Parse, LLC.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 */
package com.parse;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.junit.After;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

import bolts.Task;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Unit tests for {@code ParseObject}: JSON-payload decoding, {@code revert()}
 * semantics (with and without an in-flight save), and the typed getters'
 * behavior for both matching and mismatched value types.
 */
public class ParseObjectTest {

  @Rule
  public ExpectedException thrown = ExpectedException.none();

  @After
  public void tearDown() {
    // Drop any controllers registered by individual tests so they don't
    // leak into the next test.
    ParseCorePlugins.getInstance().reset();
  }

  /**
   * Decoding a REST payload should populate identity fields (className,
   * objectId, createdAt, updatedAt) on the object's state and queue the
   * remaining fields ("score" increment, "age") as pending operations.
   */
  @Test
  public void testFromJSONPayload() throws JSONException {
    JSONObject json = new JSONObject(
        "{" +
        "\"className\":\"GameScore\"," +
        "\"createdAt\":\"2015-06-22T21:23:41.733Z\"," +
        "\"objectId\":\"TT1ZskATqS\"," +
        "\"updatedAt\":\"2015-06-22T22:06:18.104Z\"," +
        "\"score\":{" +
        "\"__op\":\"Increment\"," +
        "\"amount\":1" +
        "}," +
        "\"age\":33" +
        "}");
    ParseFieldOperations.registerDefaultDecoders();

    ParseObject parseObject = ParseObject.fromJSONPayload(json, ParseDecoder.get());
    assertEquals("GameScore", parseObject.getClassName());
    assertEquals("TT1ZskATqS", parseObject.getObjectId());
    ParseDateFormat format = ParseDateFormat.getInstance();
    assertTrue(parseObject.getCreatedAt().equals(format.parse("2015-06-22T21:23:41.733Z")));
    assertTrue(parseObject.getUpdatedAt().equals(format.parse("2015-06-22T22:06:18.104Z")));

    // Data fields must land in the pending operation set, not the state.
    Set<String> keys = parseObject.getState().keySet();
    assertEquals(0, keys.size());

    ParseOperationSet currentOperations = parseObject.operationSetQueue.getLast();
    assertEquals(2, currentOperations.size());
  }

  /** A payload without a className cannot be decoded and yields null. */
  @Test
  public void testFromJSONPayloadWithoutClassname() throws JSONException {
    JSONObject json = new JSONObject("{\"objectId\":\"TT1ZskATqS\"}");
    ParseObject parseObject = ParseObject.fromJSONPayload(json, ParseDecoder.get());
    assertNull(parseObject);
  }

  //region testRevert

  /**
   * revert() must discard only the operations made after the most recent
   * save() call; operations already handed to an in-flight save stay.
   */
  @Test
  public void testRevert() throws ParseException {
    List<Task<Void>> tasks = new ArrayList<>();

    // Mocked to let save work
    ParseCurrentUserController userController = mock(ParseCurrentUserController.class);
    when(userController.getAsync()).thenReturn(Task.<ParseUser>forResult(null));
    ParseCorePlugins.getInstance().registerCurrentUserController(userController);

    // Mocked to simulate in-flight save: the returned task stays pending
    // until tcs.setResult(...) at the end of the test.
    Task<ParseObject.State>.TaskCompletionSource tcs = Task.create();
    ParseObjectController objectController = mock(ParseObjectController.class);
    when(objectController.saveAsync(
        any(ParseObject.State.class),
        any(ParseOperationSet.class),
        anyString(),
        any(ParseDecoder.class)))
        .thenReturn(tcs.getTask());
    ParseCorePlugins.getInstance().registerObjectController(objectController);

    // New clean object: reverting is a no-op.
    ParseObject object = new ParseObject("TestObject");
    object.revert("foo");

    // Reverts changes on new object
    object.put("foo", "bar");
    object.put("name", "grantland");
    object.revert();
    assertNull(object.get("foo"));
    assertNull(object.get("name"));

    // Object from server
    ParseObject.State state = mock(ParseObject.State.class);
    when(state.className()).thenReturn("TestObject");
    when(state.objectId()).thenReturn("test_id");
    when(state.keySet()).thenReturn(Collections.singleton("foo"));
    when(state.get("foo")).thenReturn("bar");
    object = ParseObject.from(state);
    object.revert();
    assertFalse(object.isDirty());
    assertEquals("bar", object.get("foo"));

    // Reverts changes on existing object
    object.put("foo", "baz");
    object.put("name", "grantland");
    object.revert();
    assertFalse(object.isDirty());
    assertEquals("bar", object.get("foo"));
    assertFalse(object.isDataAvailable("name"));

    // Shouldn't revert changes done before last call to `save`
    object.put("foo", "baz");
    object.put("name", "nlutsenko");
    tasks.add(object.saveInBackground());
    object.revert();
    assertFalse(object.isDirty());
    assertEquals("baz", object.get("foo"));
    assertEquals("nlutsenko", object.get("name"));

    // Should revert changes done after last call to `save`
    object.put("foo", "qux");
    object.put("name", "grantland");
    object.revert();
    assertFalse(object.isDirty());
    assertEquals("baz", object.get("foo"));
    assertEquals("nlutsenko", object.get("name"));

    // Allow save to complete
    tcs.setResult(state);
    ParseTaskUtils.wait(Task.whenAll(tasks));
  }

  /**
   * Same contract as {@link #testRevert()} but for the single-key
   * revert(key) overload: only that key's pending operations are dropped.
   */
  @Test
  public void testRevertKey() throws ParseException {
    List<Task<Void>> tasks = new ArrayList<>();

    // Mocked to let save work
    ParseCurrentUserController userController = mock(ParseCurrentUserController.class);
    when(userController.getAsync()).thenReturn(Task.<ParseUser>forResult(null));
    ParseCorePlugins.getInstance().registerCurrentUserController(userController);

    // Mocked to simulate in-flight save
    Task<ParseObject.State>.TaskCompletionSource tcs = Task.create();
    ParseObjectController objectController = mock(ParseObjectController.class);
    when(objectController.saveAsync(
        any(ParseObject.State.class),
        any(ParseOperationSet.class),
        anyString(),
        any(ParseDecoder.class)))
        .thenReturn(tcs.getTask());
    ParseCorePlugins.getInstance().registerObjectController(objectController);

    // New clean object
    ParseObject object = new ParseObject("TestObject");
    object.revert("foo");

    // Reverts changes on new object
    object.put("foo", "bar");
    object.put("name", "grantland");
    object.revert("foo");
    assertNull(object.get("foo"));
    assertEquals("grantland", object.get("name"));

    // Object from server
    ParseObject.State state = mock(ParseObject.State.class);
    when(state.className()).thenReturn("TestObject");
    when(state.objectId()).thenReturn("test_id");
    when(state.keySet()).thenReturn(Collections.singleton("foo"));
    when(state.get("foo")).thenReturn("bar");
    object = ParseObject.from(state);
    object.revert("foo");
    assertFalse(object.isDirty());
    assertEquals("bar", object.get("foo"));

    // Reverts changes on existing object
    object.put("foo", "baz");
    object.put("name", "grantland");
    object.revert("foo");
    assertEquals("bar", object.get("foo"));
    assertEquals("grantland", object.get("name"));

    // Shouldn't revert changes done before last call to `save`
    object.put("foo", "baz");
    object.put("name", "nlutsenko");
    tasks.add(object.saveInBackground());
    object.revert("foo");
    assertEquals("baz", object.get("foo"));
    assertEquals("nlutsenko", object.get("name"));

    // Should revert changes done after last call to `save`
    object.put("foo", "qux");
    object.put("name", "grantland");
    object.revert("foo");
    assertEquals("baz", object.get("foo"));
    assertEquals("grantland", object.get("name"));

    // Allow save to complete
    tcs.setResult(state);
    ParseTaskUtils.wait(Task.whenAll(tasks));
  }

  //endregion

  //region testGetter

  /** Reading a key from an incomplete (not fully fetched) state throws. */
  @Test( expected = IllegalStateException.class )
  public void testGetUnavailable() {
    ParseObject.State state = mock(ParseObject.State.class);
    when(state.className()).thenReturn("TestObject");
    when(state.isComplete()).thenReturn(false);
    ParseObject object = ParseObject.from(state);
    object.get("foo");
  }

  @Test
  public void testGetList() throws Exception {
    ParseObject object = new ParseObject("Test");
    JSONArray array = new JSONArray();
    array.put("value");
    array.put("valueAgain");
    object.put("key", array);

    List list = object.getList("key");
    assertEquals(2, list.size());
    assertTrue(list.contains("value"));
    assertTrue(list.contains("valueAgain"));
  }

  // Each "...WithWrongValue" test below verifies the typed getter's fallback
  // (null, false, or 0) when the stored value has a different type.
  @Test
  public void testGetListWithWrongValue() throws Exception {
    ParseObject object = new ParseObject("Test");
    object.put("key", 1);
    assertNull(object.getList("key"));
  }

  @Test
  public void testGetJSONArray() throws Exception {
    ParseObject object = new ParseObject("Test");
    object.put("key", Arrays.asList("value", "valueAgain"));

    JSONArray array = object.getJSONArray("key");
    assertEquals(2, array.length());
    assertEquals("value", array.getString(0));
    assertEquals("valueAgain", array.getString(1));
  }

  @Test
  public void testGetJsonArrayWithWrongValue() throws Exception {
    ParseObject object = new ParseObject("Test");
    object.put("key", 1);
    assertNull(object.getJSONArray("key"));
  }

  @Test
  public void testGetJSONObject() throws Exception {
    ParseObject object = new ParseObject("Test");
    Map<String, String> map = new HashMap<>();
    map.put("key", "value");
    map.put("keyAgain", "valueAgain");
    object.put("key", map);

    JSONObject json = object.getJSONObject("key");
    assertEquals(2, json.length());
    assertEquals("value", json.getString("key"));
    assertEquals("valueAgain", json.getString("keyAgain"));
  }

  @Test
  public void testGetJsonObjectWithWrongValue() throws Exception {
    ParseObject object = new ParseObject("Test");
    object.put("key", 1);
    assertNull(object.getJSONObject("key"));
  }

  @Test
  public void testGetBoolean() throws Exception {
    ParseObject object = new ParseObject("Test");
    object.put("key", true);
    assertTrue(object.getBoolean("key"));
  }

  @Test
  public void testGetBooleanWithWrongValue() throws Exception {
    ParseObject object = new ParseObject("Test");
    object.put("key", 1);
    assertFalse(object.getBoolean("key"));
  }

  @Test
  public void testGetDate() throws Exception {
    ParseObject object = new ParseObject("Test");
    Date date = new Date();
    object.put("key", date);
    assertEquals(date, object.getDate("key"));
  }

  @Test
  public void testGetDateWithWrongValue() throws Exception {
    ParseObject object = new ParseObject("Test");
    object.put("key", 1);
    assertNull(object.getDate("key"));
  }

  @Test
  public void testGetParseGeoPoint() throws Exception {
    ParseObject object = new ParseObject("Test");
    ParseGeoPoint point = new ParseGeoPoint(10, 10);
    object.put("key", point);
    assertEquals(point, object.getParseGeoPoint("key"));
  }

  @Test
  public void testGetParseGeoPointWithWrongValue() throws Exception {
    ParseObject object = new ParseObject("Test");
    object.put("key", 1);
    assertNull(object.getParseGeoPoint("key"));
  }

  @Test
  public void testGetACL() throws Exception {
    ParseObject object = new ParseObject("Test");
    ParseACL acl = new ParseACL();
    object.put("ACL", acl);
    assertEquals(acl, object.getACL());
  }

  /** A shared ACL is copied on access but must keep its permissions. */
  @Test
  public void testGetACLWithSharedACL() throws Exception {
    ParseObject object = new ParseObject("Test");
    ParseACL acl = new ParseACL();
    acl.setShared(true);
    acl.setPublicReadAccess(true);
    object.put("ACL", acl);

    ParseACL aclAgain = object.getACL();
    assertTrue(aclAgain.getPublicReadAccess());
  }

  @Test
  public void testGetACLWithNullValue() throws Exception {
    ParseObject object = new ParseObject("Test");
    assertNull(object.getACL());
  }

  /** Unlike the other getters, a non-ACL value under "ACL" is an error. */
  @Test
  public void testGetACLWithWrongValue() throws Exception {
    ParseObject object = new ParseObject("Test");
    object.put("ACL", 1);

    thrown.expect(RuntimeException.class);
    thrown.expectMessage("only ACLs can be stored in the ACL key");
    object.getACL();
  }

  @Test
  public void testGetMap() throws Exception {
    ParseObject object = new ParseObject("Test");
    JSONObject json = new JSONObject();
    json.put("key", "value");
    json.put("keyAgain", "valueAgain");
    object.put("key", json);

    Map map = object.getMap("key");
    assertEquals(2, map.size());
    assertEquals("value", map.get("key"));
    assertEquals("valueAgain", map.get("keyAgain"));
  }

  @Test
  public void testGetMapWithWrongValue() throws Exception {
    ParseObject object = new ParseObject("Test");
    object.put("key", 1);
    assertNull(object.getMap("key"));
  }

  @Test
  public void testGetParseUser() throws Exception {
    ParseObject object = new ParseObject("Test");
    ParseUser user = mock(ParseUser.class);
    object.put("key", user);
    assertEquals(user, object.getParseUser("key"));
  }

  @Test
  public void testGetParseUserWithWrongValue() throws Exception {
    ParseObject object = new ParseObject("Test");
    object.put("key", 1);
    assertNull(object.getParseUser("key"));
  }

  @Test
  public void testGetParseFile() throws Exception {
    ParseObject object = new ParseObject("Test");
    ParseFile file = mock(ParseFile.class);
    object.put("key", file);
    assertEquals(file, object.getParseFile("key"));
  }

  @Test
  public void testGetParseFileWithWrongValue() throws Exception {
    ParseObject object = new ParseObject("Test");
    object.put("key", 1);
    assertNull(object.getParseFile("key"));
  }

  @Test
  public void testGetDouble() throws Exception {
    ParseObject object = new ParseObject("Test");
    object.put("key", 1.1);
    assertEquals(1.1, object.getDouble("key"), 0.00001);
  }

  @Test
  public void testGetDoubleWithWrongValue() throws Exception {
    ParseObject object = new ParseObject("Test");
    object.put("key", "str");
    assertEquals(0.0, object.getDouble("key"), 0.00001);
  }

  @Test
  public void testGetLong() throws Exception {
    ParseObject object = new ParseObject("Test");
    object.put("key", 10L);
    assertEquals(10L, object.getLong("key"));
  }

  @Test
  public void testGetLongWithWrongValue() throws Exception {
    ParseObject object = new ParseObject("Test");
    object.put("key", "str");
    assertEquals(0, object.getLong("key"));
  }

  //endregion
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.glue.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;

/**
 * The data structure used by the Data Catalog to encrypt the password as part of {@code CreateConnection} or
 * {@code UpdateConnection} and store it in the {@code ENCRYPTED_PASSWORD} field in the connection properties. You can
 * enable catalog encryption or only password encryption.
 * <p>
 * When a {@code CreationConnection} request arrives containing a password, the Data Catalog first encrypts the
 * password using your KMS key, then encrypts the whole connection object again if catalog encryption is also enabled.
 * Set KMS key permissions to enable or restrict access on the password key according to your security requirements —
 * for example, you might want only administrators to have decrypt permission on the password key.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/glue-2017-03-31/ConnectionPasswordEncryption" target="_top">AWS
 *      API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ConnectionPasswordEncryption implements Serializable, Cloneable, StructuredPojo {

    /**
     * When {@code true}, passwords remain encrypted in the responses of {@code GetConnection} and
     * {@code GetConnections}; this takes effect independently of catalog encryption.
     */
    private Boolean returnConnectionPasswordEncrypted;

    /**
     * The KMS key used to encrypt the connection password. If connection password protection is enabled, callers of
     * {@code CreateConnection} and {@code UpdateConnection} need at least {@code kms:Encrypt} permission on this key.
     */
    private String awsKmsKeyId;

    /**
     * Sets whether passwords remain encrypted in {@code GetConnection}/{@code GetConnections} responses.
     *
     * @param returnConnectionPasswordEncrypted
     *        {@code true} to keep passwords encrypted in responses.
     */
    public void setReturnConnectionPasswordEncrypted(Boolean returnConnectionPasswordEncrypted) {
        this.returnConnectionPasswordEncrypted = returnConnectionPasswordEncrypted;
    }

    /**
     * Returns whether passwords remain encrypted in {@code GetConnection}/{@code GetConnections} responses.
     *
     * @return {@code true} when passwords stay encrypted in responses.
     */
    public Boolean getReturnConnectionPasswordEncrypted() {
        return this.returnConnectionPasswordEncrypted;
    }

    /**
     * Fluent variant of {@link #setReturnConnectionPasswordEncrypted(Boolean)}.
     *
     * @param returnConnectionPasswordEncrypted
     *        {@code true} to keep passwords encrypted in responses.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ConnectionPasswordEncryption withReturnConnectionPasswordEncrypted(Boolean returnConnectionPasswordEncrypted) {
        setReturnConnectionPasswordEncrypted(returnConnectionPasswordEncrypted);
        return this;
    }

    /**
     * Boolean-style accessor; identical to {@link #getReturnConnectionPasswordEncrypted()}.
     *
     * @return {@code true} when passwords stay encrypted in responses.
     */
    public Boolean isReturnConnectionPasswordEncrypted() {
        return this.returnConnectionPasswordEncrypted;
    }

    /**
     * Sets the KMS key used to encrypt the connection password.
     *
     * @param awsKmsKeyId
     *        The KMS key identifier.
     */
    public void setAwsKmsKeyId(String awsKmsKeyId) {
        this.awsKmsKeyId = awsKmsKeyId;
    }

    /**
     * Returns the KMS key used to encrypt the connection password.
     *
     * @return The KMS key identifier.
     */
    public String getAwsKmsKeyId() {
        return this.awsKmsKeyId;
    }

    /**
     * Fluent variant of {@link #setAwsKmsKeyId(String)}.
     *
     * @param awsKmsKeyId
     *        The KMS key identifier.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ConnectionPasswordEncryption withAwsKmsKeyId(String awsKmsKeyId) {
        setAwsKmsKeyId(awsKmsKeyId);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // The output format must stay stable: "{Field: value,Field: value}",
        // skipping null fields.
        final StringBuilder sb = new StringBuilder("{");
        if (getReturnConnectionPasswordEncrypted() != null) {
            sb.append("ReturnConnectionPasswordEncrypted: ").append(getReturnConnectionPasswordEncrypted()).append(",");
        }
        if (getAwsKmsKeyId() != null) {
            sb.append("AwsKmsKeyId: ").append(getAwsKmsKeyId());
        }
        return sb.append("}").toString();
    }

    /** Null-safe field comparison used by {@link #equals(Object)}. */
    private static boolean fieldEquals(Object a, Object b) {
        return (a == null) ? (b == null) : a.equals(b);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ConnectionPasswordEncryption)) {
            return false;
        }
        final ConnectionPasswordEncryption other = (ConnectionPasswordEncryption) obj;
        return fieldEquals(getReturnConnectionPasswordEncrypted(), other.getReturnConnectionPasswordEncrypted())
                && fieldEquals(getAwsKmsKeyId(), other.getAwsKmsKeyId());
    }

    /** Null-safe hash of a single field (0 for null), used by {@link #hashCode()}. */
    private static int hashOf(Object o) {
        return (o == null) ? 0 : o.hashCode();
    }

    @Override
    public int hashCode() {
        // Same 31-based accumulation as the generated form:
        // 31 * (31 * 1 + h(field1)) + h(field2).
        int result = 31 + hashOf(getReturnConnectionPasswordEncrypted());
        return 31 * result + hashOf(getAwsKmsKeyId());
    }

    @Override
    public ConnectionPasswordEncryption clone() {
        try {
            return (ConnectionPasswordEncryption) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.glue.model.transform.ConnectionPasswordEncryptionMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package com.cloud.network; import static org.mockito.Mockito.mock; import com.cloud.dc.DataCenter; import com.cloud.vm.NicProfile; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.mockito.InjectMocks; import org.mockito.Mockito; import org.mockito.MockitoAnnotations; import com.cloud.exception.InsufficientAddressCapacityException; import com.cloud.exception.InvalidParameterValueException; import com.cloud.network.IpAddress.State; import com.cloud.network.Network.IpAddresses; import com.cloud.network.dao.IPAddressDaoImpl; import com.cloud.network.dao.IPAddressVO; import com.cloud.network.dao.UserIpv6AddressDaoImpl; import com.cloud.user.Account; import com.cloud.utils.net.NetUtils; import com.cloud.vm.dao.NicSecondaryIpDaoImpl; import com.cloud.vm.dao.NicSecondaryIpVO; public class Ipv6AddressManagerTest { @InjectMocks Ipv6AddressManagerImpl ip6Manager = Mockito.spy(new Ipv6AddressManagerImpl()); @InjectMocks NicSecondaryIpDaoImpl nicSecondaryIpDao = Mockito.spy(new NicSecondaryIpDaoImpl()); @InjectMocks UserIpv6AddressDaoImpl ipv6Dao = Mockito.spy(new UserIpv6AddressDaoImpl()); @InjectMocks IpAddressManagerImpl ipAddressManager = Mockito.spy(new 
IpAddressManagerImpl()); @InjectMocks NetworkModelImpl networkModel = Mockito.mock(NetworkModelImpl.class);// = Mockito.spy(new NetworkModelImpl()); @InjectMocks IPAddressDaoImpl ipAddressDao = Mockito.spy(new IPAddressDaoImpl()); private Network network = mockNetwork(); @Before public void setup() { MockitoAnnotations.initMocks(this); } @Test public void isIp6TakenTestNoNull() { setIsIp6TakenTest(new UserIpv6AddressVO(), new NicSecondaryIpVO(0l, "ipaddr", 0l, 0l, 0l, 0l)); boolean result = ip6Manager.isIp6Taken(network, "requestedIpv6"); assertAndVerifyIsIp6Taken(true, result); } @Test public void isIp6TakenTestSecIpNull() { setIsIp6TakenTest(new UserIpv6AddressVO(), null); boolean result = ip6Manager.isIp6Taken(network, "requestedIpv6"); assertAndVerifyIsIp6Taken(true, result); } @Test public void isIp6TakenTestUserIpv6AddressNull() { setIsIp6TakenTest(null, new NicSecondaryIpVO(0l, "ipaddr", 0l, 0l, 0l, 0l)); boolean result = ip6Manager.isIp6Taken(network, "requestedIpv6"); assertAndVerifyIsIp6Taken(true, result); } @Test public void isIp6TakenTestAllNull() { setIsIp6TakenTest(null, null); boolean result = ip6Manager.isIp6Taken(network, "requestedIpv6"); assertAndVerifyIsIp6Taken(false, result); } private void assertAndVerifyIsIp6Taken(boolean expected, boolean result) { Assert.assertEquals(expected, result); Mockito.verify(ipv6Dao).findByNetworkIdAndIp(Mockito.anyLong(), Mockito.anyString()); Mockito.verify(nicSecondaryIpDao).findByIp6AddressAndNetworkId(Mockito.anyString(), Mockito.anyLong()); } private void setIsIp6TakenTest(UserIpv6AddressVO userIpv6, NicSecondaryIpVO nicSecondaryIp) { Mockito.doReturn(userIpv6).when(ipv6Dao).findByNetworkIdAndIp(Mockito.anyLong(), Mockito.anyString()); Mockito.doReturn(nicSecondaryIp).when(nicSecondaryIpDao).findByIp6AddressAndNetworkId(Mockito.anyString(), Mockito.anyLong()); } private Network mockNetwork() { Network network = mock(Network.class); Mockito.when(network.getId()).thenReturn(0l); 
Mockito.when(network.getIp6Cidr()).thenReturn("2001:db8::/32"); return network; } @Test public void allocatePublicIp6ForGuestNicTestNoException() throws InsufficientAddressCapacityException { Account owner = Mockito.mock(Account.class); String requestedIpv6 = setCheckIfCanAllocateIpv6AddresscTest("2001:db8::10", false, false); String returnedIp = ip6Manager.allocatePublicIp6ForGuestNic(network, 0l, owner, requestedIpv6); Mockito.verify(ip6Manager).checkIfCanAllocateIpv6Address(network, requestedIpv6); Assert.assertEquals(requestedIpv6, returnedIp); } @Test(expected = InsufficientAddressCapacityException.class) public void checkIfCanAllocateIpv6AddressTestIp6IsTaken() throws InsufficientAddressCapacityException { String requestedIpv6 = setCheckIfCanAllocateIpv6AddresscTest("2001:db8::10", true, false); ip6Manager.checkIfCanAllocateIpv6Address(network, requestedIpv6); verifyCheckIfCanAllocateIpv6AddressTest(network, requestedIpv6, 1, 0); } @Test(expected = InvalidParameterValueException.class) public void checkIfCanAllocateIpv6AddressTestIpIsIpEqualsGatewayOrNetworkOfferingsEmpty() throws InsufficientAddressCapacityException { String requestedIpv6 = setCheckIfCanAllocateIpv6AddresscTest("2001:db8::10", false, true); ip6Manager.checkIfCanAllocateIpv6Address(network, requestedIpv6); verifyCheckIfCanAllocateIpv6AddressTest(network, requestedIpv6, 1, 1); } @Test(expected = InvalidParameterValueException.class) public void checkIfCanAllocateIpv6AddressTestIpINotInTheNetwork() throws InsufficientAddressCapacityException { String requestedIpv6 = "2002:db8::10"; setCheckIfCanAllocateIpv6AddresscTest(requestedIpv6, false, false); ip6Manager.checkIfCanAllocateIpv6Address(network, requestedIpv6); verifyCheckIfCanAllocateIpv6AddressTest(network, requestedIpv6, 1, 1); } private void verifyCheckIfCanAllocateIpv6AddressTest(Network network, String requestedIpv6, int isIp6TakenTimes, int isIpEqualsGatewayTimes) { Mockito.verify(ip6Manager, 
Mockito.times(isIp6TakenTimes)).isIp6Taken(network, requestedIpv6); Mockito.verify(ipAddressManager, Mockito.times(isIpEqualsGatewayTimes)).isIpEqualsGatewayOrNetworkOfferingsEmpty(network, requestedIpv6); } private String setCheckIfCanAllocateIpv6AddresscTest(String requestedIpv6, boolean isIp6Taken, boolean isIpEqualsGatewayOrNetworkOfferingsEmpty) { Mockito.doReturn(isIp6Taken).when(ip6Manager).isIp6Taken(Mockito.eq(network), Mockito.anyString()); Mockito.doReturn(isIpEqualsGatewayOrNetworkOfferingsEmpty).when(ipAddressManager).isIpEqualsGatewayOrNetworkOfferingsEmpty(network, requestedIpv6); NetUtils.isIp6InNetwork(requestedIpv6, network.getIp6Cidr()); return requestedIpv6; } @Test public void acquireGuestIpv6AddressTest() throws InsufficientAddressCapacityException { setAcquireGuestIpv6AddressTest(true, State.Free); String requestedIpv6 = setCheckIfCanAllocateIpv6AddresscTest("2001:db8::10", false, false); ip6Manager.acquireGuestIpv6Address(network, requestedIpv6); verifyAcquireGuestIpv6AddressTest(); } private void verifyAcquireGuestIpv6AddressTest() { Mockito.verify(networkModel).areThereIPv6AddressAvailableInNetwork(Mockito.anyLong()); Mockito.verify(networkModel).checkRequestedIpAddresses(Mockito.anyLong(), Mockito.any(IpAddresses.class)); Mockito.verify(ipAddressDao).findByIpAndSourceNetworkId(Mockito.anyLong(), Mockito.anyString()); } @Test(expected = InsufficientAddressCapacityException.class) public void acquireGuestIpv6AddressTestUnavailableIp() throws InsufficientAddressCapacityException { setAcquireGuestIpv6AddressTest(false, State.Free); String requestedIpv6 = setCheckIfCanAllocateIpv6AddresscTest("2001:db8::10", false, false); ip6Manager.acquireGuestIpv6Address(network, requestedIpv6); verifyAcquireGuestIpv6AddressTest(); } @Test(expected = InsufficientAddressCapacityException.class) public void acquireGuestIpv6AddressTestStateAllocating() throws InsufficientAddressCapacityException { setAcquireGuestIpv6AddressTest(false, State.Allocating); String 
requestedIpv6 = setCheckIfCanAllocateIpv6AddresscTest("2001:db8::10", false, false); ip6Manager.acquireGuestIpv6Address(network, requestedIpv6); verifyAcquireGuestIpv6AddressTest(); } @Test(expected = InsufficientAddressCapacityException.class) public void acquireGuestIpv6AddressTestStateAllocated() throws InsufficientAddressCapacityException { setAcquireGuestIpv6AddressTest(false, State.Allocated); String requestedIpv6 = setCheckIfCanAllocateIpv6AddresscTest("2001:db8::10", false, false); ip6Manager.acquireGuestIpv6Address(network, requestedIpv6); verifyAcquireGuestIpv6AddressTest(); } @Test(expected = InsufficientAddressCapacityException.class) public void acquireGuestIpv6AddressTestStateReleasing() throws InsufficientAddressCapacityException { setAcquireGuestIpv6AddressTest(false, State.Releasing); String requestedIpv6 = setCheckIfCanAllocateIpv6AddresscTest("2001:db8::10", false, false); ip6Manager.acquireGuestIpv6Address(network, requestedIpv6); verifyAcquireGuestIpv6AddressTest(); } private void setAcquireGuestIpv6AddressTest(boolean isIPAvailable, State state) { mockNetwork(); IPAddressVO ipVo = Mockito.mock(IPAddressVO.class); Mockito.doReturn(isIPAvailable).when(networkModel).areThereIPv6AddressAvailableInNetwork(Mockito.anyLong()); Mockito.doReturn(ipVo).when(ipAddressDao).findByIpAndSourceNetworkId(Mockito.anyLong(), Mockito.anyString()); Mockito.when(ipVo.getState()).thenReturn(state); } @Test public void setNICIPv6AddressTest() { NicProfile nic = new NicProfile(); Network network = mock(Network.class); DataCenter dc = mock(DataCenter.class); nic.setMacAddress("1e:00:b1:00:0a:f6"); Mockito.when(network.getIp6Cidr()).thenReturn("2001:db8:100::/64"); Mockito.when(network.getIp6Gateway()).thenReturn("2001:db8:100::1"); Mockito.when(dc.getIp6Dns1()).thenReturn("2001:db8::53:1"); Mockito.when(dc.getIp6Dns1()).thenReturn("2001:db8::53:2"); String expected = "2001:db8:100:0:1c00:b1ff:fe00:af6"; ip6Manager.setNicIp6Address(nic, dc, network); 
Assert.assertEquals(expected, nic.getIPv6Address()); } @Test(expected = InsufficientAddressCapacityException.class) public void acquireGuestIpv6AddressEUI64Test() throws InsufficientAddressCapacityException { setAcquireGuestIpv6AddressTest(true, State.Free); String requestedIpv6 = setCheckIfCanAllocateIpv6AddresscTest("2001:db8:13f::1c00:4aff:fe00:fe", false, false); ip6Manager.acquireGuestIpv6Address(network, requestedIpv6); } }
/*
 * Written by Josh Bloch of Google Inc. and released to the public domain,
 * as explained at http://creativecommons.org/publicdomain/zero/1.0/.
 *
 * Adapted from https://android.googlesource.com/platform/libcore/+
 * android-4.2.2_r1/luni/src/main/java/java/util/ArrayDeque.java
 */

package dagger.internal;

import java.lang.reflect.Array;
import java.util.AbstractCollection;
import java.util.Collection;
import java.util.ConcurrentModificationException;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Queue;

/**
 * Resizable-array implementation of the {@link Queue} interface. Array
 * queues have no capacity restrictions; they grow as necessary to support
 * usage. They are not thread-safe; in the absence of external
 * synchronization, they do not support concurrent access by multiple threads.
 * Null elements are prohibited. This class is likely to be faster than
 * {@link LinkedList} when used as a queue.
 *
 * <p>Most <tt>ArrayBackedQueue</tt> operations run in amortized constant time.
 * Exceptions include {@link #remove(Object) remove}, {@link
 * #removeFirstOccurrence removeFirstOccurrence}, {@link #contains contains},
 * {@link #iterator iterator.remove()}, and the bulk operations, all of which
 * run in linear time.
 *
 * <p>The iterators returned by this class's <tt>iterator</tt> method are
 * <i>fail-fast</i>: If the queue is modified at any time after the iterator
 * is created, in any way except through the iterator's own <tt>remove</tt>
 * method, the iterator will generally throw a {@link
 * ConcurrentModificationException}. Thus, in the face of concurrent
 * modification, the iterator fails quickly and cleanly, rather than risking
 * arbitrary, non-deterministic behavior at an undetermined time in the
 * future.
 *
 * <p>Note that the fail-fast behavior of an iterator cannot be guaranteed
 * as it is, generally speaking, impossible to make any hard guarantees in the
 * presence of unsynchronized concurrent modification. Fail-fast iterators
 * throw <tt>ConcurrentModificationException</tt> on a best-effort basis.
 * Therefore, it would be wrong to write a program that depended on this
 * exception for its correctness: <i>the fail-fast behavior of iterators
 * should be used only to detect bugs.</i>
 *
 * <p>This class and its iterator implement all of the
 * <em>optional</em> methods of the {@link Collection} and {@link
 * Iterator} interfaces.
 *
 * @author Josh Bloch and Doug Lea
 * @param <E> the type of elements held in this collection
 */
public class ArrayQueue<E> extends AbstractCollection<E> implements Queue<E>, Cloneable, java.io.Serializable {
    /**
     * The array in which the elements of the queue are stored.
     * The capacity of the queue is the length of this array, which is
     * always a power of two. The array is never allowed to become
     * full, except transiently within an addX method where it is
     * resized (see doubleCapacity) immediately upon becoming full,
     * thus avoiding head and tail wrapping around to equal each
     * other. We also guarantee that all array cells not holding
     * queue elements are always null.
     */
    private transient Object[] elements;

    /**
     * The index of the element at the head of the queue (which is the
     * element that would be removed by remove() or pop()); or an
     * arbitrary number equal to tail if the queue is empty.
     */
    private transient int head;

    /**
     * The index at which the next element would be added to the tail
     * of the queue (via addLast(E), add(E), or push(E)).
     */
    private transient int tail;

    /**
     * The minimum capacity that we'll use for a newly created queue.
     * Must be a power of 2.
     */
    private static final int MIN_INITIAL_CAPACITY = 8;

    // ****** Array allocation and resizing utilities ******

    /**
     * Allocate empty array to hold the given number of elements.
     *
     * @param numElements the number of elements to hold
     */
    private void allocateElements(int numElements) {
        int initialCapacity = MIN_INITIAL_CAPACITY;
        // Find the best power of two to hold elements.
        // Tests "<=" because arrays aren't kept full.
        if (numElements >= initialCapacity) {
            initialCapacity = numElements;
            // Smear the highest set bit into every lower position, then add 1
            // to round up to the next power of two.
            initialCapacity |= (initialCapacity >>> 1);
            initialCapacity |= (initialCapacity >>> 2);
            initialCapacity |= (initialCapacity >>> 4);
            initialCapacity |= (initialCapacity >>> 8);
            initialCapacity |= (initialCapacity >>> 16);
            initialCapacity++;

            if (initialCapacity < 0)   // Too many elements, must back off
                initialCapacity >>>= 1; // Good luck allocating 2 ^ 30 elements
        }
        elements = new Object[initialCapacity];
    }

    /**
     * Double the capacity of this queue. Call only when full, i.e.,
     * when head and tail have wrapped around to become equal.
     */
    private void doubleCapacity() {
        // assert head == tail;
        int p = head;
        int n = elements.length;
        int r = n - p; // number of elements to the right of p
        int newCapacity = n << 1;
        if (newCapacity < 0)
            throw new IllegalStateException("Sorry, queue too big");
        Object[] a = new Object[newCapacity];
        // Unwrap the circular buffer: copy [head..end) first, then [0..head).
        System.arraycopy(elements, p, a, 0, r);
        System.arraycopy(elements, 0, a, r, p);
        elements = a;
        head = 0;
        tail = n;
    }

    /**
     * Constructs an empty array queue with an initial capacity
     * sufficient to hold 16 elements.
     */
    public ArrayQueue() {
        elements = new Object[16];
    }

    /**
     * Constructs an empty array queue with an initial capacity
     * sufficient to hold the specified number of elements.
     *
     * @param numElements lower bound on initial capacity of the queue
     */
    public ArrayQueue(int numElements) {
        allocateElements(numElements);
    }

    /**
     * Constructs a queue containing the elements of the specified
     * collection, in the order they are returned by the collection's
     * iterator. (The first element returned by the collection's
     * iterator becomes the first element, or <i>front</i> of the
     * queue.)
     *
     * @param c the collection whose elements are to be placed into the queue
     * @throws NullPointerException if the specified collection is null
     */
    public ArrayQueue(Collection<? extends E> c) {
        allocateElements(c.size());
        addAll(c);
    }

    /**
     * Inserts the specified element at the end of this queue.
     *
     * <p>This method is equivalent to {@link #offer}.
     *
     * @param e the element to add
     * @return <tt>true</tt> (as specified by {@link Collection#add})
     * @throws NullPointerException if the specified element is null
     */
    @Override
    public boolean add(E e) {
        if (e == null)
            throw new NullPointerException("e == null");
        elements[tail] = e;
        // The mask works because the capacity is always a power of two.
        // Resize immediately if the buffer just became full.
        if ((tail = (tail + 1) & (elements.length - 1)) == head)
            doubleCapacity();
        return true;
    }

    /**
     * Inserts the specified element at the end of this queue.
     *
     * @param e the element to add
     * @return <tt>true</tt> (as specified by {@link Queue#offer})
     * @throws NullPointerException if the specified element is null
     */
    @Override
    public boolean offer(E e) {
        return add(e);
    }

    /**
     * Retrieves and removes the head of the queue represented by this queue.
     *
     * This method differs from {@link #poll poll} only in that it throws an
     * exception if this queue is empty.
     *
     * @return the head of the queue represented by this queue
     * @throws NoSuchElementException {@inheritDoc}
     */
    @Override
    public E remove() {
        E x = poll();
        if (x == null)
            throw new NoSuchElementException();
        return x;
    }

    /**
     * Retrieves and removes the head of the queue represented by this queue
     * (in other words, the first element of this queue), or returns
     * <tt>null</tt> if this queue is empty.
     *
     * @return the head of the queue represented by this queue, or
     *         <tt>null</tt> if this queue is empty
     */
    @Override
    public E poll() {
        int h = head;
        @SuppressWarnings("unchecked")
        E result = (E) elements[h]; // Element is null if queue empty
        if (result == null)
            return null;
        elements[h] = null; // Must null out slot
        head = (h + 1) & (elements.length - 1);
        return result;
    }

    /**
     * Retrieves, but does not remove, the head of the queue represented by
     * this queue. This method differs from {@link #peek peek} only in
     * that it throws an exception if this queue is empty.
     *
     * @return the head of the queue represented by this queue
     * @throws NoSuchElementException {@inheritDoc}
     */
    @Override
    public E element() {
        @SuppressWarnings("unchecked")
        E result = (E) elements[head];
        if (result == null)
            throw new NoSuchElementException();
        return result;
    }

    /**
     * Retrieves, but does not remove, the head of the queue represented by
     * this queue, or returns <tt>null</tt> if this queue is empty.
     *
     * @return the head of the queue represented by this queue, or
     *         <tt>null</tt> if this queue is empty
     */
    @Override
    public E peek() {
        @SuppressWarnings("unchecked")
        E result = (E) elements[head]; // elements[head] is null if queue empty
        return result;
    }

    /**
     * Removes the element at the specified position in the elements array,
     * adjusting head and tail as necessary. This can result in motion of
     * elements backwards or forwards in the array.
     *
     * <p>This method is called delete rather than remove to emphasize
     * that its semantics differ from those of {@link List#remove(int)}.
     *
     * @return true if elements moved backwards
     */
    private boolean delete(int i) {
        //checkInvariants();
        final Object[] elements = this.elements;
        final int mask = elements.length - 1;
        final int h = head;
        final int t = tail;
        // Distances from head to i and from i to tail, measured circularly.
        final int front = (i - h) & mask;
        final int back = (t - i) & mask;

        // Invariant: head <= i < tail mod circularity
        if (front >= ((t - h) & mask))
            throw new ConcurrentModificationException();

        // Optimize for least element motion: shift whichever side is shorter.
        if (front < back) {
            // Shift the head-side elements forward by one.
            if (h <= i) {
                System.arraycopy(elements, h, elements, h + 1, front);
            } else { // Wrap around
                System.arraycopy(elements, 0, elements, 1, i);
                elements[0] = elements[mask];
                System.arraycopy(elements, h, elements, h + 1, mask - h);
            }
            elements[h] = null;
            head = (h + 1) & mask;
            return false;
        } else {
            // Shift the tail-side elements backward by one.
            if (i < t) { // Copy the null tail as well
                System.arraycopy(elements, i + 1, elements, i, back);
                tail = t - 1;
            } else { // Wrap around
                System.arraycopy(elements, i + 1, elements, i, mask - i);
                elements[mask] = elements[0];
                System.arraycopy(elements, 1, elements, 0, t);
                tail = (t - 1) & mask;
            }
            return true;
        }
    }

    // *** Collection Methods ***

    /**
     * Returns the number of elements in this queue.
     *
     * @return the number of elements in this queue
     */
    @Override
    public int size() {
        // Correct even when tail has wrapped below head, because the
        // capacity is a power of two and the result is masked.
        return (tail - head) & (elements.length - 1);
    }

    /**
     * Returns <tt>true</tt> if this queue contains no elements.
     *
     * @return <tt>true</tt> if this queue contains no elements
     */
    @Override
    public boolean isEmpty() {
        return head == tail;
    }

    /**
     * Returns an iterator over the elements in this queue. The elements
     * will be ordered from first (head) to last (tail). This is the same
     * order that elements would be queueued (via successive calls to
     * {@link #remove} or popped (via successive calls to {@link #pop}).
     *
     * @return an iterator over the elements in this queue
     */
    @Override
    public Iterator<E> iterator() {
        return new QueueIterator();
    }

    private class QueueIterator implements Iterator<E> {
        /**
         * Index of element to be returned by subsequent call to next.
         */
        private int cursor = head;

        /**
         * Tail recorded at construction (also in remove), to stop
         * iterator and also to check for comodification.
         */
        private int fence = tail;

        /**
         * Index of element returned by most recent call to next.
         * Reset to -1 if element is deleted by a call to remove.
         */
        private int lastRet = -1;

        @Override
        public boolean hasNext() {
            return cursor != fence;
        }

        @Override
        public E next() {
            if (cursor == fence)
                throw new NoSuchElementException();
            @SuppressWarnings("unchecked")
            E result = (E) elements[cursor];
            // This check doesn't catch all possible comodifications,
            // but does catch the ones that corrupt traversal
            if (tail != fence || result == null)
                throw new ConcurrentModificationException();
            lastRet = cursor;
            cursor = (cursor + 1) & (elements.length - 1);
            return result;
        }

        @Override
        public void remove() {
            if (lastRet < 0)
                throw new IllegalStateException();
            if (delete(lastRet)) { // if left-shifted, undo increment in next()
                cursor = (cursor - 1) & (elements.length - 1);
                fence = tail;
            }
            lastRet = -1;
        }
    }

    /**
     * Returns <tt>true</tt> if this queue contains the specified element.
     * More formally, returns <tt>true</tt> if and only if this queue contains
     * at least one element <tt>e</tt> such that <tt>o.equals(e)</tt>.
     *
     * @param o object to be checked for containment in this queue
     * @return <tt>true</tt> if this queue contains the specified element
     */
    @Override
    public boolean contains(Object o) {
        if (o == null)
            return false;
        int mask = elements.length - 1;
        int i = head;
        Object x;
        // A null slot marks the end of the live elements, so stop there.
        while ((x = elements[i]) != null) {
            if (o.equals(x))
                return true;
            i = (i + 1) & mask;
        }
        return false;
    }

    /**
     * Removes a single instance of the specified element from this queue.
     * If the queue does not contain the element, it is unchanged.
     * More formally, removes the first element <tt>e</tt> such that
     * <tt>o.equals(e)</tt> (if such an element exists).
     * Returns <tt>true</tt> if this queue contained the specified element
     * (or equivalently, if this queue changed as a result of the call).
     *
     * @param o element to be removed from this queue, if present
     * @return <tt>true</tt> if this queue contained the specified element
     */
    @Override
    public boolean remove(Object o) {
        if (o == null)
            return false;
        int mask = elements.length - 1;
        int i = head;
        Object x;
        while ((x = elements[i]) != null) {
            if (o.equals(x)) {
                delete(i);
                return true;
            }
            i = (i + 1) & mask;
        }
        return false;
    }

    /**
     * Removes all of the elements from this queue.
     * The queue will be empty after this call returns.
     */
    @Override
    public void clear() {
        int h = head;
        int t = tail;
        if (h != t) { // clear all cells
            head = tail = 0;
            int i = h;
            int mask = elements.length - 1;
            do {
                elements[i] = null;
                i = (i + 1) & mask;
            } while (i != t);
        }
    }

    /**
     * Returns an array containing all of the elements in this queue
     * in proper sequence (from first to last element).
     *
     * <p>The returned array will be "safe" in that no references to it are
     * maintained by this queue. (In other words, this method must allocate
     * a new array). The caller is thus free to modify the returned array.
     *
     * <p>This method acts as bridge between array-based and collection-based
     * APIs.
     *
     * @return an array containing all of the elements in this queue
     */
    @Override
    public Object[] toArray() {
        return toArray(new Object[size()]);
    }

    /**
     * Returns an array containing all of the elements in this queue in
     * proper sequence (from first to last element); the runtime type of the
     * returned array is that of the specified array. If the queue fits in
     * the specified array, it is returned therein. Otherwise, a new array
     * is allocated with the runtime type of the specified array and the
     * size of this queue.
     *
     * <p>If this queue fits in the specified array with room to spare
     * (i.e., the array has more elements than this queue), the element in
     * the array immediately following the end of the queue is set to
     * <tt>null</tt>.
     *
     * <p>Like the {@link #toArray()} method, this method acts as bridge between
     * array-based and collection-based APIs. Further, this method allows
     * precise control over the runtime type of the output array, and may,
     * under certain circumstances, be used to save allocation costs.
     *
     * <p>Suppose <tt>x</tt> is a queue known to contain only strings.
     * The following code can be used to dump the queue into a newly
     * allocated array of <tt>String</tt>:
     *
     * <pre> {@code String[] y = x.toArray(new String[0]);}</pre>
     *
     * Note that <tt>toArray(new Object[0])</tt> is identical in function to
     * <tt>toArray()</tt>.
     *
     * @param a the array into which the elements of the queue are to
     *          be stored, if it is big enough; otherwise, a new array of the
     *          same runtime type is allocated for this purpose
     * @return an array containing all of the elements in this queue
     * @throws ArrayStoreException if the runtime type of the specified array
     *         is not a supertype of the runtime type of every element in
     *         this queue
     * @throws NullPointerException if the specified array is null
     */
    @Override
    public <T> T[] toArray(T[] a) {
        int size = size();
        if (a.length < size)
            a = (T[]) java.lang.reflect.Array.newInstance(
                    a.getClass().getComponentType(), size);
        if (head < tail) {
            // Contiguous: one copy suffices.
            System.arraycopy(elements, head, a, 0, size());
        } else if (head > tail) {
            // Wrapped: copy the head-to-end run, then the start-to-tail run.
            int headPortionLen = elements.length - head;
            System.arraycopy(elements, head, a, 0, headPortionLen);
            System.arraycopy(elements, 0, a, headPortionLen, tail);
        }
        // head == tail means empty: nothing to copy.
        if (a.length > size)
            a[size] = null;
        return a;
    }

    // *** Object methods ***

    /**
     * Returns a copy of this queue.
     *
     * @return a copy of this queue
     */
    @Override
    public ArrayQueue<E> clone() {
        try {
            ArrayQueue<E> result = (ArrayQueue<E>) super.clone();
            // Deep-copy the backing array so the clone is independent.
            E[] newElements = (E[]) Array.newInstance(elements.getClass().getComponentType(), elements.length);
            System.arraycopy(elements, 0, newElements, 0, elements.length);
            result.elements = newElements;
            return result;
        } catch (CloneNotSupportedException e) {
            // Cannot happen: this class implements Cloneable.
            throw new AssertionError();
        }
    }

    /**
     * Appease the serialization gods.
     */
    private static final long serialVersionUID = 2340985798034038923L;

    /**
     * Serialize this queue.
     *
     * @serialData The current size (<tt>int</tt>) of the queue,
     * followed by all of its elements (each an object reference) in
     * first-to-last order.
     */
    private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException {
        s.defaultWriteObject();

        // Write out size
        s.writeInt(size());

        // Write out elements in order.
        int mask = elements.length - 1;
        for (int i = head; i != tail; i = (i + 1) & mask)
            s.writeObject(elements[i]);
    }

    /**
     * Deserialize this queue.
     */
    private void readObject(java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException {
        s.defaultReadObject();

        // Read in size and allocate array
        int size = s.readInt();
        allocateElements(size);
        head = 0;
        tail = size;

        // Read in all elements in the proper order.
        for (int i = 0; i < size; i++)
            elements[i] = s.readObject();
    }
}
/* * Copyright 2011-2013 Tyler Blair. All rights reserved. * Ported to Minecraft Forge by Mike Primm * 1.7.x update by Dries007 * * Redistribution and use in source and binary forms, with or without modification, are * permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this list of * conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, this list * of conditions and the following disclaimer in the documentation and/or other materials * provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ''AS IS'' AND ANY EXPRESS OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * * The views and conclusions contained in the software and documentation are those of the * authors and contributors and should not be interpreted as representing official policies, * either expressed or implied, of anybody else. 
*/ package io.github.dawncraft.util; import java.io.BufferedReader; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.UnsupportedEncodingException; import java.net.Proxy; import java.net.URL; import java.net.URLConnection; import java.net.URLEncoder; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.Set; import java.util.UUID; import java.util.zip.GZIPOutputStream; import net.minecraft.server.MinecraftServer; import net.minecraftforge.common.MinecraftForge; import net.minecraftforge.common.config.Configuration; import net.minecraftforge.fml.common.FMLCommonHandler; import net.minecraftforge.fml.common.FMLLog; import net.minecraftforge.fml.common.Loader; import net.minecraftforge.fml.common.eventhandler.SubscribeEvent; import net.minecraftforge.fml.common.gameevent.TickEvent; public class Metrics { /** * The current revision number */ private final static int REVISION = 7; /** * The base url of the metrics domain */ private static final String BASE_URL = "http://report.mcstats.org"; /** * The url used to report a server's status */ private static final String REPORT_URL = "/plugin/%s"; /** * Interval of time to ping (in minutes) */ private static final int PING_INTERVAL = 15; /** * The server which the mod is running on */ private static MinecraftServer server; /** * The mod this metrics submits for */ private final String modName; private final String modVersion; /** * All of the custom graphs to submit to metrics */ private final Set<Graph> graphs = Collections.synchronizedSet(new HashSet<Graph>()); /** * The metrics configuration file */ private final Configuration configuration; /** * The metrics configuration file */ private final File configurationFile; /** * Unique server id */ private final String guid; /** * Debug mode */ private final boolean debug; private 
Thread thread = null; private boolean firstPost = true; int tickCount; public Metrics(final String modName, final String modVersion) throws IOException { if (modName == null || modVersion == null) { throw new IllegalArgumentException("modName and modVersion cannot be null"); } this.modName = modName; this.modVersion = modVersion; // load the config this.configurationFile = this.getConfigFile(); this.configuration = new Configuration(this.configurationFile); // Get values, and add some defaults, if needed this.configuration.get(Configuration.CATEGORY_GENERAL, "opt-out", false, "Set to true to disable all reporting"); this.guid = this.configuration.get(Configuration.CATEGORY_GENERAL, "guid", UUID.randomUUID().toString(), "Server unique ID").getString(); this.debug = this.configuration.get(Configuration.CATEGORY_GENERAL, "debug", false, "Set to true for verbose debug").getBoolean(false); this.configuration.save(); } /** * Construct and create a Graph that can be used to separate specific plotters to their own graphs on the metrics * website. Plotters can be added to the graph object returned. * * @param name The name of the graph * @return Graph object created. Will never return NULL under normal circumstances unless bad parameters are given */ public Graph createGraph(final String name) { if (name == null) { throw new IllegalArgumentException("Graph name cannot be null"); } final Graph graph = new Graph(name); this.graphs.add(graph); return graph; } /** * Add a Graph object to BukkitMetrics that represents data for the plugin that should be sent to the backend * * @param graph The name of the graph */ public void addGraph(final Graph graph) { if (graph == null) { throw new IllegalArgumentException("Graph cannot be null"); } this.graphs.add(graph); } /** * Start measuring statistics. 
This will immediately create an async * repeating task as the plugin and send the initial data to the metrics * backend, and then after that it will post in increments of PING_INTERVAL * * 1200 ticks. * * @return True if statistics measuring is running, otherwise false. */ public boolean start() { // Did we opt out? if (this.isOptOut()) { return false; } MinecraftForge.EVENT_BUS.register(this); return true; } @SubscribeEvent public void tick(TickEvent.ServerTickEvent tick) { if (tick.phase != TickEvent.Phase.END) return; if (this.tickCount++ % (PING_INTERVAL * 1200) != 0) return; if (this.thread == null) { this.thread = new Thread(new Runnable() { @Override public void run() { try { // Disable Task, if it is running and the server owner decided // to opt-out if (Metrics.this.isOptOut()) { MinecraftForge.EVENT_BUS.unregister(Metrics.this); return; } // We use the inverse of firstPost because if it // is the first time we are posting, // it is not a interval ping, so it evaluates to // FALSE // Each time thereafter it will evaluate to // TRUE, i.e PING! Metrics.this.postPlugin(!Metrics.this.firstPost); // After the first post we set firstPost to // false // Each post thereafter will be a ping Metrics.this.firstPost = false; } catch (IOException e) { if (Metrics.this.debug) { FMLLog.info("[Metrics] Exception - %s", e.getMessage()); } } finally { Metrics.this.thread = null; } } }); this.thread.start(); } } /** * Stop processing */ public void stop() { } /** * Has the server owner denied plugin metrics? * * @return true if metrics should be opted out of it */ public boolean isOptOut() { // Reload the metrics file this.configuration.load(); return this.configuration.get(Configuration.CATEGORY_GENERAL, "opt-out", false).getBoolean(false); } /** * Enables metrics for the server by setting "opt-out" to false in the * config file and starting the metrics task. 
* * @throws java.io.IOException */ public void enable() throws IOException { // Check if the server owner has already set opt-out, if not, set it. if (this.isOptOut()) { this.configuration.getCategory(Configuration.CATEGORY_GENERAL).get("opt-out").set("false"); this.configuration.save(); } // Enable Task, if it is not running FMLCommonHandler.instance().bus().register(this); } /** * Disables metrics for the server by setting "opt-out" to true in the * config file and canceling the metrics task. * * @throws java.io.IOException */ public void disable() throws IOException { // Check if the server owner has already set opt-out, if not, set it. if (!this.isOptOut()) { this.configuration.getCategory(Configuration.CATEGORY_GENERAL).get("opt-out").set("true"); this.configuration.save(); } FMLCommonHandler.instance().bus().unregister(this); } /** * Gets the File object of the config file that should be used to store data * such as the GUID and opt-out status * * @return the File object for the config file */ public File getConfigFile() { return new File(Loader.instance().getConfigDir(), "PluginMetrics.cfg"); } /** * Generic method that posts a plugin to the metrics website */ private void postPlugin(final boolean isPing) throws IOException { // Server software specific section String pluginName = this.modName; boolean onlineMode = server.isServerInOnlineMode(); String pluginVersion = this.modVersion; String serverVersion; if (server.isDedicatedServer()) { serverVersion = "MinecraftForge (MC: " + server.getMinecraftVersion() + ")"; } else { serverVersion = "MinecraftForgeSSP (MC: " + server.getMinecraftVersion() + ")"; } int playersOnline = server.getCurrentPlayerCount(); // END server software specific section -- all code below does not use any code outside of this class / Java // Construct the post data StringBuilder json = new StringBuilder(1024); json.append('{'); // The plugin's description file containg all of the plugin data such as name, version, author, etc 
appendJSONPair(json, "guid", this.guid); appendJSONPair(json, "plugin_version", pluginVersion); appendJSONPair(json, "server_version", serverVersion); appendJSONPair(json, "players_online", Integer.toString(playersOnline)); // New data as of R6 String osname = System.getProperty("os.name"); String osarch = System.getProperty("os.arch"); String osversion = System.getProperty("os.version"); String java_version = System.getProperty("java.version"); int coreCount = Runtime.getRuntime().availableProcessors(); // normalize os arch .. amd64 -> x86_64 if (osarch.equals("amd64")) { osarch = "x86_64"; } appendJSONPair(json, "osname", osname); appendJSONPair(json, "osarch", osarch); appendJSONPair(json, "osversion", osversion); appendJSONPair(json, "cores", Integer.toString(coreCount)); appendJSONPair(json, "auth_mode", onlineMode ? "1" : "0"); appendJSONPair(json, "java_version", java_version); // If we're pinging, append it if (isPing) { appendJSONPair(json, "ping", "1"); } if (this.graphs.size() > 0) { synchronized (this.graphs) { json.append(','); json.append('"'); json.append("graphs"); json.append('"'); json.append(':'); json.append('{'); boolean firstGraph = true; final Iterator<Graph> iter = this.graphs.iterator(); while (iter.hasNext()) { Graph graph = iter.next(); StringBuilder graphJson = new StringBuilder(); graphJson.append('{'); for (Plotter plotter : graph.getPlotters()) { appendJSONPair(graphJson, plotter.getColumnName(), Integer.toString(plotter.getValue())); } graphJson.append('}'); if (!firstGraph) { json.append(','); } json.append(escapeJSON(graph.getName())); json.append(':'); json.append(graphJson); firstGraph = false; } json.append('}'); } } // close json json.append('}'); // Create the url URL url = new URL(BASE_URL + String.format(REPORT_URL, urlEncode(pluginName))); // Connect to the website URLConnection connection; // Mineshafter creates a socks proxy, so we can safely bypass it // It does not reroute POST requests so we need to go around it if 
(this.isMineshafterPresent()) { connection = url.openConnection(Proxy.NO_PROXY); } else { connection = url.openConnection(); } byte[] uncompressed = json.toString().getBytes(); byte[] compressed = gzip(json.toString()); // Headers connection.addRequestProperty("User-Agent", "MCStats/" + REVISION); connection.addRequestProperty("Content-Type", "application/json"); connection.addRequestProperty("Content-Encoding", "gzip"); connection.addRequestProperty("Content-Length", Integer.toString(compressed.length)); connection.addRequestProperty("Accept", "application/json"); connection.addRequestProperty("Connection", "close"); connection.setDoOutput(true); if (this.debug) { System.out.println("[Metrics] Prepared request for " + pluginName + " uncompressed=" + uncompressed.length + " compressed=" + compressed.length); } // Write the data OutputStream os = connection.getOutputStream(); os.write(compressed); os.flush(); // Now read the response final BufferedReader reader = new BufferedReader(new InputStreamReader(connection.getInputStream())); String response = reader.readLine(); // close resources os.close(); reader.close(); if (response == null || response.startsWith("ERR") || response.startsWith("7")) { if (response == null) { response = "null"; } else if (response.startsWith("7")) { response = response.substring(response.startsWith("7,") ? 2 : 1); } throw new IOException(response); } } /** * GZip compress a string of bytes * * @param input * @return */ public static byte[] gzip(String input) { ByteArrayOutputStream baos = new ByteArrayOutputStream(); GZIPOutputStream gzos = null; try { gzos = new GZIPOutputStream(baos); gzos.write(input.getBytes("UTF-8")); } catch (IOException e) { e.printStackTrace(); } finally { if (gzos != null) try { gzos.close(); } catch (IOException ignore) { } } return baos.toByteArray(); } /** * Check if mineshafter is present. 
* If it is, we need to bypass it to send POST requests
 *
 * @return true if mineshafter is installed on the server
 */
private boolean isMineshafterPresent() {
    try {
        // Presence of this class indicates the Mineshafter proxy is installed.
        Class.forName("mineshafter.MineServer");
        return true;
    } catch (Exception e) {
        return false;
    }
}

/**
 * Appends a json encoded key/value pair to the given string builder.
 *
 * @param json  builder holding the JSON object under construction; must currently end inside an object
 * @param key   the key; always quoted and escaped
 * @param value the value; appended unquoted when it parses as a number, quoted otherwise
 * @throws java.io.UnsupportedEncodingException
 */
private static void appendJSONPair(StringBuilder json, String key, String value) throws UnsupportedEncodingException {
    boolean isValueNumeric = false;

    try {
        // Values ending in '0' (other than "0" itself) are deliberately kept as
        // strings — presumably so version-like values such as "1.10" are not
        // re-emitted as 1.1; TODO confirm against the reporting backend.
        if (value.equals("0") || !value.endsWith("0")) {
            Double.parseDouble(value);
            isValueNumeric = true;
        }
    } catch (NumberFormatException e) {
        isValueNumeric = false;
    }

    // Only prepend a comma when this is not the first pair of the object.
    if (json.charAt(json.length() - 1) != '{') {
        json.append(',');
    }

    json.append(escapeJSON(key));
    json.append(':');

    if (isValueNumeric) {
        json.append(value);
    } else {
        json.append(escapeJSON(value));
    }
}

/**
 * Escape a string to create a valid JSON string
 *
 * @param text the raw text
 * @return the text as a quoted, escaped JSON string literal
 */
private static String escapeJSON(String text) {
    StringBuilder builder = new StringBuilder();

    builder.append('"');
    for (int index = 0; index < text.length(); index++) {
        char chr = text.charAt(index);

        switch (chr) {
            case '"':
            case '\\':
                builder.append('\\');
                builder.append(chr);
                break;
            case '\b':
                builder.append("\\b");
                break;
            case '\t':
                builder.append("\\t");
                break;
            case '\n':
                builder.append("\\n");
                break;
            case '\r':
                builder.append("\\r");
                break;
            default:
                if (chr < ' ') {
                    // Remaining control characters become 4-digit unicode escapes.
                    String t = "000" + Integer.toHexString(chr);
                    builder.append("\\u" + t.substring(t.length() - 4));
                } else {
                    builder.append(chr);
                }
                break;
        }
    }
    builder.append('"');

    return builder.toString();
}

/**
 * Encode text as UTF-8
 *
 * @param text the text to encode
 * @return the encoded text, as UTF-8
 */
private static String urlEncode(final String text) throws UnsupportedEncodingException {
    return URLEncoder.encode(text, "UTF-8");
}

/**
 * Set the server which the metrics is running on.
 *
 * @param server the server instance this metrics reporter runs on
 */
public static void setServer(MinecraftServer server) {
    Metrics.server = server;
}

/**
 * Represents a custom graph on the website
 */
public static class Graph {

    /**
     * The graph's name, alphanumeric and spaces only :) If it does not comply to the above when submitted, it is
     * rejected
     */
    private final String name;

    /**
     * The set of plotters that are contained within this graph
     */
    private final Set<Plotter> plotters = new LinkedHashSet<>();

    private Graph(final String name) {
        this.name = name;
    }

    /**
     * Gets the graph's name
     *
     * @return the Graph's name
     */
    public String getName() {
        return this.name;
    }

    /**
     * Add a plotter to the graph, which will be used to plot entries
     *
     * @param plotter the plotter to add to the graph
     */
    public void addPlotter(final Plotter plotter) {
        this.plotters.add(plotter);
    }

    /**
     * Remove a plotter from the graph
     *
     * @param plotter the plotter to remove from the graph
     */
    public void removePlotter(final Plotter plotter) {
        this.plotters.remove(plotter);
    }

    /**
     * Gets an <b>unmodifiable</b> set of the plotter objects in the graph
     *
     * @return an unmodifiable {@link java.util.Set} of the plotter objects
     */
    public Set<Plotter> getPlotters() {
        return Collections.unmodifiableSet(this.plotters);
    }

    // Identity is the graph name only; the plotter set is ignored, consistent with equals().
    @Override
    public int hashCode() {
        return this.name.hashCode();
    }

    @Override
    public boolean equals(final Object object) {
        if (!(object instanceof Graph)) {
            return false;
        }

        final Graph graph = (Graph) object;
        return graph.name.equals(this.name);
    }

    /**
     * Called when the server owner decides to opt-out of BukkitMetrics while the server is running.
*/
    protected void onOptOut() {
    }
}

/**
 * Interface used to collect custom data for a plugin
 */
public static abstract class Plotter {

    /**
     * The plot's name
     */
    private final String name;

    /**
     * Construct a plotter with the default plot name
     */
    public Plotter() {
        this("Default");
    }

    /**
     * Construct a plotter with a specific plot name
     *
     * @param name the name of the plotter to use, which will show up on the website
     */
    public Plotter(final String name) {
        this.name = name;
    }

    /**
     * Get the current value for the plotted point. Since this function defers to an external function it may or may
     * not return immediately thus cannot be guaranteed to be thread friendly or safe. This function can be called
     * from any thread so care should be taken when accessing resources that need to be synchronized.
     *
     * @return the current value for the point to be plotted.
     */
    public abstract int getValue();

    /**
     * Get the column name for the plotted point
     *
     * @return the plotted point's column name
     */
    public String getColumnName() {
        return this.name;
    }

    /**
     * Called after the website graphs have been updated
     */
    public void reset() {
    }

    // Hash uses only the name; see the NOTE on equals() below.
    @Override
    public int hashCode() {
        return this.getColumnName().hashCode();
    }

    // NOTE(review): equals() compares the *live* getValue() result, so equality
    // between two plotters can change over time while hashCode() stays fixed on the
    // name. Equal objects still share a hash code, but set membership is
    // time-dependent.
    @Override
    public boolean equals(final Object object) {
        if (!(object instanceof Plotter)) {
            return false;
        }

        final Plotter plotter = (Plotter) object;
        return plotter.name.equals(this.name) && plotter.getValue() == this.getValue();
    }
}
}
//======================================================================== //Copyright 2007-2009 David Yu dyuproject@gmail.com //------------------------------------------------------------------------ //Licensed under the Apache License, Version 2.0 (the "License"); //you may not use this file except in compliance with the License. //You may obtain a copy of the License at //http://www.apache.org/licenses/LICENSE-2.0 //Unless required by applicable law or agreed to in writing, software //distributed under the License is distributed on an "AS IS" BASIS, //WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. //See the License for the specific language governing permissions and //limitations under the License. //======================================================================== package com.dyuproject.protostuff; import static com.dyuproject.protostuff.SerializableObjects.bar; import static com.dyuproject.protostuff.SerializableObjects.baz; import static com.dyuproject.protostuff.SerializableObjects.foo; import static com.dyuproject.protostuff.SerializableObjects.negativeBar; import static com.dyuproject.protostuff.SerializableObjects.negativeBaz; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import junit.framework.TestCase; /** * Testing for json ser/deser against messages. 
 *
 * @author David Yu
 * @created Nov 20, 2009
 */
public class JsonCoreSerDeserTest extends TestCase
{

    // Round-trips the reference Foo through JSON bytes and compares field-by-field.
    public void testFoo() throws Exception
    {
        Foo fooCompare = foo;
        Foo dfoo = new Foo();

        byte[] data = JsonIOUtil.toByteArray(fooCompare, fooCompare.cachedSchema(), false);
        JsonIOUtil.mergeFrom(data, dfoo, dfoo.cachedSchema(), false);
        SerializableObjects.assertEquals(fooCompare, dfoo);
    }

    // Round-trips both the positive and negative reference Bar instances.
    public void testBar() throws Exception
    {
        for(Bar barCompare : new Bar[]{bar, negativeBar})
        {
            Bar dbar = new Bar();

            byte[] data = JsonIOUtil.toByteArray(barCompare, barCompare.cachedSchema(), false);
            JsonIOUtil.mergeFrom(data, dbar, dbar.cachedSchema(), false);
            SerializableObjects.assertEquals(barCompare, dbar);
        }
    }

    // Round-trips both the positive and negative reference Baz instances.
    public void testBaz() throws Exception
    {
        for(Baz bazCompare : new Baz[]{baz, negativeBaz})
        {
            Baz dbaz = new Baz();

            byte[] data = JsonIOUtil.toByteArray(bazCompare, bazCompare.cachedSchema(), false);
            JsonIOUtil.mergeFrom(data, dbaz, dbaz.cachedSchema(), false);
            SerializableObjects.assertEquals(bazCompare, dbaz);
        }
    }

    // Unknown scalar properties must be skipped while the known "id" field is still
    // read, regardless of its position. The second loop uses numeric (field-number)
    // property names via the "numeric" flag on mergeFrom.
    public void testUnknownScalarFields() throws Exception
    {
        String[] regularMessages = new String[]{
                "{\"int\":1,\"string\":\"string\",\"double\":555.444,\"id\":1}",
                "{\"int\":1,\"string\":\"string\",\"id\":2,\"double\":555.444}",
                "{\"id\":3,\"int\":1,\"string\":\"string\",\"double\":555.444}"
        };

        for(int i=0; i<regularMessages.length; i++)
        {
            Baz b = new Baz();
            JsonIOUtil.mergeFrom(JsonIOUtil.DEFAULT_JSON_FACTORY.createJsonParser(regularMessages[i]),
                    b, b.cachedSchema(), false);
            assertTrue(i+1 == b.getId());
        }

        String[] numericMessages = new String[]{
                "{\"4\":1,\"5\":\"string\",\"6\":555.444,\"1\":1}",
                "{\"4\":1,\"5\":\"string\",\"1\":2,\"6\":555.444}",
                "{\"1\":3,\"4\":1,\"5\":\"string\",\"6\":555.444}"
        };

        for(int i=0; i<numericMessages.length; i++)
        {
            Baz b = new Baz();
            JsonIOUtil.mergeFrom(JsonIOUtil.DEFAULT_JSON_FACTORY.createJsonParser(numericMessages[i]),
                    b, b.cachedSchema(), true);
            assertTrue(i+1 == b.getId());
        }
    }

    // Same as testUnknownScalarFields, but the unknown values are wrapped in arrays.
    public void testUnknownScalarFieldsWithArray
throws Exception
    {
        String[] regularMessages = new String[]{
                "{\"int\":[1],\"string\":\"string\",\"double\":[555.444],\"id\":1}",
                "{\"int\":1,\"string\":[\"string\"],\"id\":2,\"double\":[555.444]}",
                "{\"id\":3,\"int\":[1],\"string\":[\"string\"],\"double\":555.444}"
        };

        for(int i=0; i<regularMessages.length; i++)
        {
            Baz b = new Baz();
            JsonIOUtil.mergeFrom(JsonIOUtil.DEFAULT_JSON_FACTORY.createJsonParser(regularMessages[i]),
                    b, b.cachedSchema(), false);
            assertTrue(i+1 == b.getId());
        }

        String[] numericMessages = new String[]{
                "{\"4\":[1],\"5\":\"string\",\"6\":[555.444],\"1\":1}",
                "{\"4\":1,\"5\":[\"string\"],\"1\":2,\"6\":[555.444]}",
                "{\"1\":3,\"4\":[1],\"5\":[\"string\"],\"6\":555.444}"
        };

        for(int i=0; i<numericMessages.length; i++)
        {
            Baz b = new Baz();
            JsonIOUtil.mergeFrom(JsonIOUtil.DEFAULT_JSON_FACTORY.createJsonParser(numericMessages[i]),
                    b, b.cachedSchema(), true);
            assertTrue(i+1 == b.getId());
        }
    }

    // Serializes a two-element list and parses it back, comparing element by element.
    public void testListIO() throws Exception
    {
        ArrayList<Bar> bars = new ArrayList<Bar>();
        bars.add(SerializableObjects.bar);
        bars.add(SerializableObjects.negativeBar);

        ByteArrayOutputStream out = new ByteArrayOutputStream();
        JsonIOUtil.writeListTo(out, bars, SerializableObjects.bar.cachedSchema(), false);
        byte[] data = out.toByteArray();

        ByteArrayInputStream in = new ByteArrayInputStream(data);
        List<Bar> parsedBars = JsonIOUtil.parseListFrom(in, SerializableObjects.bar.cachedSchema(), false);

        assertTrue(parsedBars.size() == bars.size());
        int i=0;
        for(Bar b : parsedBars)
            SerializableObjects.assertEquals(bars.get(i++), b);
    }

    // An empty list must serialize to the literal "[]" and parse back to empty.
    public void testListEmpty() throws Exception
    {
        ArrayList<Bar> bars = new ArrayList<Bar>();

        ByteArrayOutputStream out = new ByteArrayOutputStream();
        JsonIOUtil.writeListTo(out, bars, SerializableObjects.bar.cachedSchema(), false);
        byte[] data = out.toByteArray();

        assertEquals(new String(data, "UTF-8"), "[]");

        ByteArrayInputStream in = new ByteArrayInputStream(data);
        List<Bar> parsedBars = JsonIOUtil.parseListFrom(in,
SerializableObjects.bar.cachedSchema(), false);

        assertTrue(parsedBars.size() == bars.size());
        int i=0;
        for(Bar b : parsedBars)
            SerializableObjects.assertEquals(bars.get(i++), b);
    }

    // List round-trip where the elements themselves contain repeated (array) fields.
    public void testListIOWithArrays() throws Exception
    {
        ArrayList<Foo> foos = new ArrayList<Foo>();
        foos.add(SerializableObjects.foo);
        foos.add(SerializableObjects.foo);

        ByteArrayOutputStream out = new ByteArrayOutputStream();
        JsonIOUtil.writeListTo(out, foos, SerializableObjects.foo.cachedSchema(), false);
        byte[] data = out.toByteArray();

        ByteArrayInputStream in = new ByteArrayInputStream(data);
        List<Foo> parsed = JsonIOUtil.parseListFrom(in, SerializableObjects.foo.cachedSchema(), false);

        assertTrue(parsed.size() == foos.size());
        int i=0;
        for(Foo f : parsed)
            SerializableObjects.assertEquals(foos.get(i++), f);
    }

    // A message with no set fields serializes to "{}" and round-trips.
    public void testEmptyMessage() throws Exception
    {
        Bar bar = new Bar();

        byte[] data = JsonIOUtil.toByteArray(bar, bar.cachedSchema(), false);
        assertEquals(new String(data, "UTF-8"), "{}");

        Bar parsedBar = new Bar();
        JsonIOUtil.mergeFrom(data, parsedBar, parsedBar.cachedSchema(), false);
        SerializableObjects.assertEquals(bar, parsedBar);
    }

    // An empty inner message serializes as an empty JSON object under its property name.
    public void testEmptyMessageInner() throws Exception
    {
        Baz baz = new Baz();
        Bar bar = new Bar();
        // method name is setSomeBaz, should have been someBaz!
bar.setSomeBaz(baz);

        byte[] data = JsonIOUtil.toByteArray(bar, bar.cachedSchema(), false);
        assertEquals(new String(data, "UTF-8"), "{\"someBaz\":{}}");

        Bar parsedBar = new Bar();
        JsonIOUtil.mergeFrom(data, parsedBar, parsedBar.cachedSchema(), false);
        SerializableObjects.assertEquals(bar, parsedBar);
    }

    // A set primitive field alongside an empty inner message.
    public void testPartialEmptyMessage() throws Exception
    {
        Baz baz = new Baz();
        Bar bar = new Bar();
        bar.setSomeInt(1);
        bar.setSomeBaz(baz);

        byte[] data = JsonIOUtil.toByteArray(bar, bar.cachedSchema(), false);
        assertEquals(new String(data, "UTF-8"), "{\"someInt\":1,\"someBaz\":{}}");

        Bar parsedBar = new Bar();
        JsonIOUtil.mergeFrom(data, parsedBar, parsedBar.cachedSchema(), false);
        SerializableObjects.assertEquals(bar, parsedBar);
    }

    // A set string field alongside an empty inner message.
    public void testPartialEmptyMessageWithString() throws Exception
    {
        Baz baz = new Baz();
        Bar bar = new Bar();
        bar.setSomeString("someString");
        bar.setSomeBaz(baz);

        byte[] data = JsonIOUtil.toByteArray(bar, bar.cachedSchema(), false);
        assertEquals(new String(data, "UTF-8"), "{\"someString\":\"someString\",\"someBaz\":{}}");

        Bar parsedBar = new Bar();
        JsonIOUtil.mergeFrom(data, parsedBar, parsedBar.cachedSchema(), false);
        SerializableObjects.assertEquals(bar, parsedBar);
    }

    // An empty string value must be written out (not treated as unset).
    public void testPartialEmptyMessageWithEmptyString() throws Exception
    {
        Baz baz = new Baz();
        Bar bar = new Bar();
        bar.setSomeString("");
        bar.setSomeBaz(baz);

        byte[] data = JsonIOUtil.toByteArray(bar, bar.cachedSchema(), false);
        assertEquals(new String(data, "UTF-8"), "{\"someString\":\"\",\"someBaz\":{}}");

        Bar parsedBar = new Bar();
        JsonIOUtil.mergeFrom(data, parsedBar, parsedBar.cachedSchema(), false);
        SerializableObjects.assertEquals(bar, parsedBar);
    }

    // An inner message with one primitive field set.
    public void testPartialEmptyMessageInner() throws Exception
    {
        Baz baz = new Baz();
        Bar bar = new Bar();
        baz.setId(2);
        bar.setSomeBaz(baz);

        byte[] data = JsonIOUtil.toByteArray(bar, bar.cachedSchema(), false);
        assertEquals(new String(data, "UTF-8"), "{\"someBaz\":{\"id\":2}}");

        Bar parsedBar = new Bar();
JsonIOUtil.mergeFrom(data, parsedBar, parsedBar.cachedSchema(), false);
        SerializableObjects.assertEquals(bar, parsedBar);
    }

    // Inner message whose only set field is a string.
    public void testPartialEmptyMessageInnerWithString() throws Exception
    {
        Baz baz = new Baz();
        Bar bar = new Bar();
        baz.setName("asdfsf");
        bar.setSomeBaz(baz);

        byte[] data = JsonIOUtil.toByteArray(bar, bar.cachedSchema(), false);
        assertEquals(new String(data, "UTF-8"), "{\"someBaz\":{\"name\":\"asdfsf\"}}");

        Bar parsedBar = new Bar();
        JsonIOUtil.mergeFrom(data, parsedBar, parsedBar.cachedSchema(), false);
        SerializableObjects.assertEquals(bar, parsedBar);
    }

    // Inner message whose only set field is an empty string.
    public void testPartialEmptyMessageInnerWithEmptyString() throws Exception
    {
        Baz baz = new Baz();
        Bar bar = new Bar();
        baz.setName("");
        bar.setSomeBaz(baz);

        byte[] data = JsonIOUtil.toByteArray(bar, bar.cachedSchema(), false);
        assertEquals(new String(data, "UTF-8"), "{\"someBaz\":{\"name\":\"\"}}");

        Bar parsedBar = new Bar();
        JsonIOUtil.mergeFrom(data, parsedBar, parsedBar.cachedSchema(), false);
        SerializableObjects.assertEquals(bar, parsedBar);
    }

    // An all-default Foo serializes to "{}" and round-trips.
    public void testEmptyFoo() throws Exception
    {
        Foo foo = new Foo();

        byte[] data = JsonIOUtil.toByteArray(foo, foo.cachedSchema(), false);
        assertEquals(new String(data, "UTF-8"), "{}");

        Foo parsedFoo = new Foo();
        JsonIOUtil.mergeFrom(data, parsedFoo, parsedFoo.cachedSchema(), false);
        SerializableObjects.assertEquals(foo, parsedFoo);
    }

    // Repeated field containing one empty message -> "[{}]".
    public void testEmptyFooInner() throws Exception
    {
        Foo foo = new Foo();
        ArrayList<Bar> bars = new ArrayList<Bar>();
        bars.add(new Bar());
        foo.setSomeBar(bars);

        byte[] data = JsonIOUtil.toByteArray(foo, foo.cachedSchema(), false);
        assertEquals(new String(data, "UTF-8"), "{\"someBar\":[{}]}");

        Foo parsedFoo = new Foo();
        JsonIOUtil.mergeFrom(data, parsedFoo, parsedFoo.cachedSchema(), false);
        SerializableObjects.assertEquals(foo, parsedFoo);
    }

    // Two levels of nesting, all messages empty.
    public void testEmptyFooDeeper() throws Exception
    {
        Foo foo = new Foo();
        ArrayList<Bar> bars = new ArrayList<Bar>();
        Bar bar = new Bar();
        bar.setSomeBaz(new Baz());
        bars.add(bar);
foo.setSomeBar(bars);

        byte[] data = JsonIOUtil.toByteArray(foo, foo.cachedSchema(), false);
        assertEquals(new String(data, "UTF-8"), "{\"someBar\":[{\"someBaz\":{}}]}");

        Foo parsedFoo = new Foo();
        JsonIOUtil.mergeFrom(data, parsedFoo, parsedFoo.cachedSchema(), false);
        SerializableObjects.assertEquals(foo, parsedFoo);
    }

    // Arrays holding only nulls must merge to unset (null) repeated fields.
    public void testFooNullFields() throws Exception
    {
        Foo b = new Foo();
        JsonIOUtil.mergeFrom(JsonIOUtil.DEFAULT_JSON_FACTORY.createJsonParser(
                "{\"someInt\":[null]" +
                ",\"someString\":[null]" +
                ",\"someBar\":[null]" +
                ",\"someEnum\":[null]" +
                ",\"someBytes\":[null]" +
                ",\"someBoolean\":[null]" +
                ",\"someFloat\":[null]" +
                ",\"someDouble\":[null]" +
                ",\"someLong\":[null]}"),
                b, b.cachedSchema(), false);

        assertNull(b.getSomeInt());
        assertNull(b.getSomeString());
        assertNull(b.getSomeBar());
        assertNull(b.getSomeEnum());
        assertNull(b.getSomeBytes());
        assertNull(b.getSomeBoolean());
        assertNull(b.getSomeFloat());
        assertNull(b.getSomeDouble());
        assertNull(b.getSomeLong());
    }

    // Trailing nulls after a real first element are ignored; the element survives.
    public void testFooNullFieldsButFirst() throws Exception
    {
        Foo b = new Foo();
        JsonIOUtil.mergeFrom(JsonIOUtil.DEFAULT_JSON_FACTORY.createJsonParser(
                "{\"someInt\":[1,null]" +
                ",\"someString\":[\"string\",null]" +
                ",\"someBar\":[{},null]" +
                ",\"someEnum\":[1,null]" +
                ",\"someBytes\":[\"fw==\",null]" + // 0x7f
                ",\"someBoolean\":[true,null]" +
                ",\"someFloat\":[10.01,null]" +
                ",\"someDouble\":[100.001,null]" +
                ",\"someLong\":[1000,null]}"),
                b, b.cachedSchema(), false);

        assertEquals(b.getSomeInt(), Arrays.asList(new Integer(1)));
        assertEquals(b.getSomeString(), Arrays.asList("string"));
        assertEquals(b.getSomeBar(), Arrays.asList(new Bar()));
        assertEquals(b.getSomeEnum(), Arrays.asList(Foo.EnumSample.TYPE1));
        assertEquals(b.getSomeBytes(), Arrays.asList(ByteString.copyFrom(new byte[]{0x7f})));
        assertEquals(b.getSomeBoolean(), Arrays.asList(Boolean.TRUE));
        assertEquals(b.getSomeFloat(), Arrays.asList(new Float(10.01f)));
        assertEquals(b.getSomeDouble(), Arrays.asList(new Double(100.001d)));
assertEquals(b.getSomeLong(), Arrays.asList(new Long(1000l)));
    }

    // Nulls surrounding a single real element in the middle of each array.
    public void testFooNullFieldsButMid() throws Exception
    {
        Foo b = new Foo();
        JsonIOUtil.mergeFrom(JsonIOUtil.DEFAULT_JSON_FACTORY.createJsonParser(
                "{\"someInt\":[null,1,null]" +
                ",\"someString\":[null,\"string\",null]" +
                ",\"someBar\":[null,{},null]" +
                ",\"someEnum\":[null,1,null]" +
                ",\"someBytes\":[null,\"fw==\",null]" + // 0x7f
                ",\"someBoolean\":[null,true,null]" +
                ",\"someFloat\":[null,10.01,null]" +
                ",\"someDouble\":[null,100.001,null]" +
                ",\"someLong\":[null,1000,null]}"),
                b, b.cachedSchema(), false);

        assertEquals(b.getSomeInt(), Arrays.asList(new Integer(1)));
        assertEquals(b.getSomeString(), Arrays.asList("string"));
        assertEquals(b.getSomeBar(), Arrays.asList(new Bar()));
        assertEquals(b.getSomeEnum(), Arrays.asList(Foo.EnumSample.TYPE1));
        assertEquals(b.getSomeBytes(), Arrays.asList(ByteString.copyFrom(new byte[]{0x7f})));
        assertEquals(b.getSomeBoolean(), Arrays.asList(Boolean.TRUE));
        assertEquals(b.getSomeFloat(), Arrays.asList(new Float(10.01f)));
        assertEquals(b.getSomeDouble(), Arrays.asList(new Double(100.001d)));
        assertEquals(b.getSomeLong(), Arrays.asList(new Long(1000l)));
    }

    // A leading null before the single real element.
    public void testFooNullFieldsButLast() throws Exception
    {
        Foo b = new Foo();
        JsonIOUtil.mergeFrom(JsonIOUtil.DEFAULT_JSON_FACTORY.createJsonParser(
                "{\"someInt\":[null,1]" +
                ",\"someString\":[null,\"string\"]" +
                ",\"someBar\":[null,{}]" +
                ",\"someEnum\":[null,1]" +
                ",\"someBytes\":[null,\"fw==\"]" + // 0x7f
                ",\"someBoolean\":[null,true]" +
                ",\"someFloat\":[null,10.01]" +
                ",\"someDouble\":[null,100.001]" +
                ",\"someLong\":[null,1000]}"),
                b, b.cachedSchema(), false);

        assertEquals(b.getSomeInt(), Arrays.asList(new Integer(1)));
        assertEquals(b.getSomeString(), Arrays.asList("string"));
        assertEquals(b.getSomeBar(), Arrays.asList(new Bar()));
        assertEquals(b.getSomeEnum(), Arrays.asList(Foo.EnumSample.TYPE1));
        assertEquals(b.getSomeBytes(), Arrays.asList(ByteString.copyFrom(new byte[]{0x7f})));
assertEquals(b.getSomeBoolean(), Arrays.asList(Boolean.TRUE));
        assertEquals(b.getSomeFloat(), Arrays.asList(new Float(10.01f)));
        assertEquals(b.getSomeDouble(), Arrays.asList(new Double(100.001d)));
        assertEquals(b.getSomeLong(), Arrays.asList(new Long(1000l)));
    }

    // Two leading nulls before the single real element.
    public void testFooNullFieldsButLast2() throws Exception
    {
        Foo b = new Foo();
        JsonIOUtil.mergeFrom(JsonIOUtil.DEFAULT_JSON_FACTORY.createJsonParser(
                "{\"someInt\":[null,null,1]" +
                ",\"someString\":[null,null,\"string\"]" +
                ",\"someBar\":[null,null,{}]" +
                ",\"someEnum\":[null,null,1]" +
                ",\"someBytes\":[null,null,\"fw==\"]" + // 0x7f
                ",\"someBoolean\":[null,null,true]" +
                ",\"someFloat\":[null,null,10.01]" +
                ",\"someDouble\":[null,null,100.001]" +
                ",\"someLong\":[null,null,1000]}"),
                b, b.cachedSchema(), false);

        assertEquals(b.getSomeInt(), Arrays.asList(new Integer(1)));
        assertEquals(b.getSomeString(), Arrays.asList("string"));
        assertEquals(b.getSomeBar(), Arrays.asList(new Bar()));
        assertEquals(b.getSomeEnum(), Arrays.asList(Foo.EnumSample.TYPE1));
        assertEquals(b.getSomeBytes(), Arrays.asList(ByteString.copyFrom(new byte[]{0x7f})));
        assertEquals(b.getSomeBoolean(), Arrays.asList(Boolean.TRUE));
        assertEquals(b.getSomeFloat(), Arrays.asList(new Float(10.01f)));
        assertEquals(b.getSomeDouble(), Arrays.asList(new Double(100.001d)));
        assertEquals(b.getSomeLong(), Arrays.asList(new Long(1000l)));
    }

    // For the non-repeated Bar fields, explicit JSON nulls leave primitives at their
    // defaults and object fields null.
    public void testBarNullFields() throws Exception
    {
        Bar b = new Bar();
        JsonIOUtil.mergeFrom(JsonIOUtil.DEFAULT_JSON_FACTORY.createJsonParser(
                "{\"someInt\":null" +
                ",\"someString\":null" +
                ",\"someBaz\":null" +
                ",\"someEnum\":null" +
                ",\"someBytes\":null" +
                ",\"someBoolean\":null" +
                ",\"someFloat\":null" +
                ",\"someDouble\":null" +
                ",\"someLong\":null}"),
                b, b.cachedSchema(), false);

        assertEquals(0, b.getSomeInt());
        assertNull(b.getSomeString());
        assertNull(b.getSomeBaz());
        assertNull(b.getSomeEnum());
        assertNull(b.getSomeBytes());
        assertFalse(b.getSomeBoolean());
        assertEquals(0f, b.getSomeFloat());
        assertEquals(0d,
b.getSomeDouble());
        assertEquals(0l, b.getSomeLong());
    }

    // Same null-field handling for the Baz message type.
    public void testBazNullFields() throws Exception
    {
        Baz b = new Baz();
        JsonIOUtil.mergeFrom(JsonIOUtil.DEFAULT_JSON_FACTORY.createJsonParser(
                "{\"id\":null,\"name\":null,\"timestamp\":null}"),
                b, b.cachedSchema(), false);

        assertEquals(0, b.getId());
        assertNull(b.getName());
        assertEquals(0l, b.getTimestamp());
    }

}
/* * Copyright 2018 NAVER Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.navercorp.pinpoint.rpc.server; import com.navercorp.pinpoint.common.util.Assert; import com.navercorp.pinpoint.common.util.StringUtils; import com.navercorp.pinpoint.rpc.ChannelWriteFailListenableFuture; import com.navercorp.pinpoint.rpc.Future; import com.navercorp.pinpoint.rpc.ResponseMessage; import com.navercorp.pinpoint.rpc.client.RequestManager; import com.navercorp.pinpoint.rpc.client.WriteFailFutureListener; import com.navercorp.pinpoint.rpc.cluster.ClusterOption; import com.navercorp.pinpoint.rpc.cluster.Role; import com.navercorp.pinpoint.rpc.common.CyclicStateChecker; import com.navercorp.pinpoint.rpc.common.SocketStateChangeResult; import com.navercorp.pinpoint.rpc.common.SocketStateCode; import com.navercorp.pinpoint.rpc.control.ProtocolException; import com.navercorp.pinpoint.rpc.packet.ControlHandshakePacket; import com.navercorp.pinpoint.rpc.packet.ControlHandshakeResponsePacket; import com.navercorp.pinpoint.rpc.packet.HandshakeResponseCode; import com.navercorp.pinpoint.rpc.packet.Packet; import com.navercorp.pinpoint.rpc.packet.PacketType; import com.navercorp.pinpoint.rpc.packet.PingPacket; import com.navercorp.pinpoint.rpc.packet.PingPayloadPacket; import com.navercorp.pinpoint.rpc.packet.PongPacket; import com.navercorp.pinpoint.rpc.packet.RequestPacket; import com.navercorp.pinpoint.rpc.packet.ResponsePacket; import com.navercorp.pinpoint.rpc.packet.SendPacket; 
import com.navercorp.pinpoint.rpc.packet.ServerClosePacket; import com.navercorp.pinpoint.rpc.packet.stream.StreamPacket; import com.navercorp.pinpoint.rpc.server.handler.DoNothingChannelStateEventHandler; import com.navercorp.pinpoint.rpc.server.handler.ServerStateChangeEventHandler; import com.navercorp.pinpoint.rpc.stream.ClientStreamChannel; import com.navercorp.pinpoint.rpc.stream.ClientStreamChannelContext; import com.navercorp.pinpoint.rpc.stream.ClientStreamChannelMessageListener; import com.navercorp.pinpoint.rpc.stream.StreamChannelContext; import com.navercorp.pinpoint.rpc.stream.StreamChannelManager; import com.navercorp.pinpoint.rpc.stream.StreamChannelStateChangeEventHandler; import com.navercorp.pinpoint.rpc.util.ClassUtils; import com.navercorp.pinpoint.rpc.util.ControlMessageEncodingUtils; import com.navercorp.pinpoint.rpc.util.IDGenerator; import com.navercorp.pinpoint.rpc.util.ListUtils; import com.navercorp.pinpoint.rpc.util.MapUtils; import org.jboss.netty.channel.Channel; import org.jboss.netty.channel.ChannelFuture; import org.jboss.netty.channel.ChannelFutureListener; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.net.SocketAddress; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicReference; /** * @author Taejin Koo */ public class DefaultPinpointServer implements PinpointServer { private final Logger logger = LoggerFactory.getLogger(this.getClass()); private final long startTimestamp = System.currentTimeMillis(); private final Channel channel; private final RequestManager requestManager; private final DefaultPinpointServerState state; private final CyclicStateChecker stateChecker; private HealthCheckStateContext healthCheckStateContext = new HealthCheckStateContext(); private final ServerMessageListener messageListener; private final List<ServerStateChangeEventHandler> stateChangeEventListeners; 
private final StreamChannelManager streamChannelManager;

    // Set exactly once (CAS from null) with the handshake data; see setChannelProperties().
    private final AtomicReference<Map<Object, Object>> properties = new AtomicReference<Map<Object, Object>>();

    // "SimpleName@hashCode" tag used as a prefix in every log message.
    private final String objectUniqName;

    private final ClusterOption localClusterOption;
    private ClusterOption remoteClusterOption;

    private final ChannelFutureListener serverCloseWriteListener;
    private final ChannelFutureListener responseWriteFailListener;

    private final WriteFailFutureListener pongWriteFutureListener = new WriteFailFutureListener(logger, "pong write fail.", "pong write success.");

    public DefaultPinpointServer(Channel channel, PinpointServerConfig serverConfig) {
        this(channel, serverConfig, null);
    }

    public DefaultPinpointServer(Channel channel, PinpointServerConfig serverConfig, ServerStateChangeEventHandler... stateChangeEventListeners) {
        this.channel = channel;

        this.messageListener = serverConfig.getMessageListener();

        StreamChannelManager streamChannelManager = new StreamChannelManager(channel, IDGenerator.createEvenIdGenerator(), serverConfig.getStreamMessageListener());
        this.streamChannelManager = streamChannelManager;

        // Collect non-null handlers from the config plus the varargs; fall back to a
        // no-op handler so the list is never empty.
        this.stateChangeEventListeners = new ArrayList<ServerStateChangeEventHandler>();
        List<ServerStateChangeEventHandler> configuredStateChangeEventHandlers = serverConfig.getStateChangeEventHandlers();
        if (configuredStateChangeEventHandlers != null) {
            for (ServerStateChangeEventHandler configuredStateChangeEventHandler : configuredStateChangeEventHandlers) {
                ListUtils.addIfValueNotNull(this.stateChangeEventListeners, configuredStateChangeEventHandler);
            }
        }
        ListUtils.addAllExceptNullValue(this.stateChangeEventListeners, stateChangeEventListeners);
        if (this.stateChangeEventListeners.isEmpty()) {
            this.stateChangeEventListeners.add(DoNothingChannelStateEventHandler.INSTANCE);
        }

        RequestManager requestManager = new RequestManager(serverConfig.getRequestManagerTimer(), serverConfig.getDefaultRequestTimeout());
        this.requestManager = requestManager;

        this.objectUniqName = ClassUtils.simpleClassNameAndHashCodeString(this);

        this.serverCloseWriteListener = new WriteFailFutureListener(logger, objectUniqName + " sendClosePacket() write fail.", "serverClosePacket write success");
        this.responseWriteFailListener = new WriteFailFutureListener(logger, objectUniqName + " response() write fail.");

        this.state = new DefaultPinpointServerState(this, this.stateChangeEventListeners);
        this.stateChecker = new CyclicStateChecker(5);

        this.localClusterOption = serverConfig.getClusterOption();
    }

    public void start() {
        logger.info("{} start() started. channel:{}.", objectUniqName, channel);

        state.toConnected();
        state.toRunWithoutHandshake();

        logger.info("{} start() completed.", objectUniqName);
    }

    public void stop() {
        logger.info("{} stop() started. channel:{}.", objectUniqName, channel);

        stop(false);

        logger.info("{} stop() completed.", objectUniqName);
    }

    // Transitions to the terminal state matching who initiated the close, then closes
    // the channel; the stream manager is closed even if a transition throws.
    public void stop(boolean serverStop) {
        try {
            SocketStateCode currentStateCode = getCurrentStateCode();
            if (SocketStateCode.BEING_CLOSE_BY_SERVER == currentStateCode) {
                state.toClosed();
            } else if (SocketStateCode.BEING_CLOSE_BY_CLIENT == currentStateCode) {
                state.toClosedByPeer();
            } else if (SocketStateCode.isRun(currentStateCode) && serverStop) {
                state.toUnexpectedClosed();
            } else if (SocketStateCode.isRun(currentStateCode)) {
                state.toUnexpectedClosedByPeer();
            } else if (SocketStateCode.isClosed(currentStateCode)) {
                logger.warn("{} stop(). Socket has closed state({}).", objectUniqName, currentStateCode);
            } else {
                state.toErrorUnknown();
                logger.warn("{} stop(). Socket has unexpected state.", objectUniqName, currentStateCode);
            }

            if (this.channel.isConnected()) {
                channel.close();
            }
        } finally {
            streamChannelManager.close();
        }
    }

    // One-way message to the agent; requires a duplex session (post-handshake).
    @Override
    public void send(byte[] payload) {
        Assert.requireNonNull(payload, "payload must not be null.");
        if (!isEnableDuplexCommunication()) {
            throw new IllegalStateException("Send fail. Error: Illegal State. 
pinpointServer:" + toString());
        }

        SendPacket send = new SendPacket(payload);
        write0(send);
    }

    // Request/response round trip: registers a future under a fresh request id and
    // completes it when the matching ResponsePacket arrives (see handleResponse).
    @Override
    public Future<ResponseMessage> request(byte[] payload) {
        Assert.requireNonNull(payload, "payload must not be null.");
        if (!isEnableDuplexCommunication()) {
            throw new IllegalStateException("Request fail. Error: Illegal State. pinpointServer:" + toString());
        }

        final int requestId = this.requestManager.nextRequestId();
        RequestPacket requestPacket = new RequestPacket(requestId, payload);
        ChannelWriteFailListenableFuture<ResponseMessage> responseFuture = this.requestManager.register(requestPacket.getRequestId());
        write0(requestPacket, responseFuture);
        return responseFuture;
    }

    // Sends the response for a previously received request id.
    @Override
    public void response(int requestId, byte[] payload) {
        Assert.requireNonNull(payload, "payload must not be null.");
        if (!isEnableCommunication()) {
            throw new IllegalStateException("Response fail. Error: Illegal State. pinpointServer:" + toString());
        }

        ResponsePacket responsePacket = new ResponsePacket(requestId, payload);
        write0(responsePacket, responseWriteFailListener);
    }

    private ChannelFuture write0(Object message) {
        return write0(message, null);
    }

    // Single write path: writes to the netty channel, attaching the optional listener.
    private ChannelFuture write0(Object message, ChannelFutureListener futureListener) {
        ChannelFuture future = channel.write(message);
        if (futureListener != null) {
            future.addListener(futureListener);
        }
        return future;
    }

    public StreamChannelContext getStreamChannel(int channelId) {
        return streamChannelManager.findStreamChannel(channelId);
    }

    @Override
    public ClientStreamChannelContext openStream(byte[] payload, ClientStreamChannelMessageListener messageListener) {
        return openStream(payload, messageListener, null);
    }

    @Override
    public ClientStreamChannelContext openStream(byte[] payload, ClientStreamChannelMessageListener messageListener, StreamChannelStateChangeEventHandler<ClientStreamChannel> stateChangeListener) {
        logger.info("{} createStream() started.", objectUniqName);

        ClientStreamChannelContext streamChannel =
streamChannelManager.openStream(payload, messageListener, stateChangeListener);

        logger.info("{} createStream() completed.", objectUniqName);
        return streamChannel;
    }

    public void closeAllStreamChannel() {
        logger.info("{} closeAllStreamChannel() started.", objectUniqName);

        streamChannelManager.close();

        logger.info("{} closeAllStreamChannel() completed.", objectUniqName);
    }

    // Returns the handshake properties, or an empty map before the handshake.
    @Override
    public Map<Object, Object> getChannelProperties() {
        Map<Object, Object> properties = this.properties.get();
        return properties == null ? Collections.emptyMap() : properties;
    }

    // Stores the properties exactly once; returns true only for the first (winning) call.
    public boolean setChannelProperties(Map<Object, Object> value) {
        if (value == null) {
            return false;
        }

        return this.properties.compareAndSet(null, Collections.unmodifiableMap(value));
    }

    @Override
    public SocketAddress getRemoteAddress() {
        return channel.getRemoteAddress();
    }

    // Initiates a server-side close; returns null if the state transition was refused.
    public ChannelFuture sendClosePacket() {
        logger.info("{} sendClosePacket() started.", objectUniqName);

        SocketStateChangeResult stateChangeResult = state.toBeingClose();
        if (stateChangeResult.isChange()) {
            ChannelFuture writeFuture = write0(ServerClosePacket.DEFAULT_SERVER_CLOSE_PACKET, serverCloseWriteListener);
            logger.info("{} sendClosePacket() completed.", objectUniqName);
            return writeFuture;
        } else {
            logger.info("{} sendClosePacket() failed. Error:{}.", objectUniqName, stateChangeResult);
            return null;
        }
    }

    // Entry point for every inbound packet; dispatches on the packet type.
    @Override
    public void messageReceived(Object message) {
        if (!isEnableCommunication()) {
            // FIXME need change rules.
            // as-is : do nothing when state is not run.
            // candidate : close channel when state is not run.
            logger.warn("{} messageReceived() failed. 
Error: Illegal state this message({}) will be ignore.", objectUniqName, message); return; } final short packetType = getPacketType(message); switch (packetType) { case PacketType.APPLICATION_SEND: { handleSend((SendPacket) message); return; } case PacketType.APPLICATION_REQUEST: { handleRequest((RequestPacket) message); return; } case PacketType.APPLICATION_RESPONSE: { handleResponse((ResponsePacket) message); return; } case PacketType.APPLICATION_STREAM_CREATE: case PacketType.APPLICATION_STREAM_CLOSE: case PacketType.APPLICATION_STREAM_CREATE_SUCCESS: case PacketType.APPLICATION_STREAM_CREATE_FAIL: case PacketType.APPLICATION_STREAM_RESPONSE: case PacketType.APPLICATION_STREAM_PING: case PacketType.APPLICATION_STREAM_PONG: handleStreamEvent((StreamPacket) message); return; case PacketType.CONTROL_HANDSHAKE: handleHandshake((ControlHandshakePacket) message); return; case PacketType.CONTROL_CLIENT_CLOSE: { handleClosePacket(channel); return; } case PacketType.CONTROL_PING_PAYLOAD: { handlePingPacket(channel, (PingPayloadPacket) message); return; } case PacketType.CONTROL_PING: { handlePingPacket(channel, (PingPacket) message); return; } default: { logger.warn("invalid messageReceived msg:{}, connection:{}", message, channel); } } } private short getPacketType(Object packet) { if (packet == null) { return PacketType.UNKNOWN; } if (packet instanceof Packet) { return ((Packet) packet).getPacketType(); } return PacketType.UNKNOWN; } private void handleSend(SendPacket sendPacket) { messageListener.handleSend(sendPacket, this); } private void handleRequest(RequestPacket requestPacket) { messageListener.handleRequest(requestPacket, this); } private void handleResponse(ResponsePacket responsePacket) { this.requestManager.messageReceived(responsePacket, this); } private void handleStreamEvent(StreamPacket streamPacket) { streamChannelManager.messageReceived(streamPacket); } private void handleHandshake(ControlHandshakePacket handshakePacket) { int requestId = 
handshakePacket.getRequestId(); Map<Object, Object> handshakeData = decodeHandshakePacket(handshakePacket); logger.info("{} handleHandshake() started. requestId:{}, data:{}", objectUniqName, requestId, handshakeData); HandshakeResponseCode responseCode = messageListener.handleHandshake(handshakeData); if (responseCode != null) { boolean isFirst = setChannelProperties(handshakeData); if (isFirst) { if (HandshakeResponseCode.DUPLEX_COMMUNICATION == responseCode) { this.remoteClusterOption = getClusterOption(handshakeData); state.toRunDuplex(); } else if (HandshakeResponseCode.SIMPLEX_COMMUNICATION == responseCode || HandshakeResponseCode.SUCCESS == responseCode) { state.toRunSimplex(); } } Map<String, Object> responseData = createHandshakeResponse(responseCode, isFirst); sendHandshakeResponse0(requestId, responseData); logger.info("{} handleHandshake() completed(isFirst:{}). requestId:{}, responseCode:{}", objectUniqName, isFirst, requestId, responseCode); } else { logger.info("{} to execute handleHandshake() is not ready", objectUniqName); } } private ClusterOption getClusterOption(Map handshakeResponse) { if (handshakeResponse == Collections.EMPTY_MAP) { return ClusterOption.DISABLE_CLUSTER_OPTION; } Map cluster = (Map) handshakeResponse.get(ControlHandshakeResponsePacket.CLUSTER); if (cluster == null) { return ClusterOption.DISABLE_CLUSTER_OPTION; } String id = MapUtils.getString(cluster, "id", ""); List<Role> roles = getRoles((List) cluster.get("roles")); if (StringUtils.isEmpty(id)) { return ClusterOption.DISABLE_CLUSTER_OPTION; } else { return new ClusterOption(true, id, roles); } } private List<Role> getRoles(List roleNames) { List<Role> roles = new ArrayList<Role>(); for (Object roleName : roleNames) { if (roleName instanceof String && StringUtils.hasLength((String) roleName)) { roles.add(Role.getValue((String) roleName)); } } return roles; } private void handleClosePacket(Channel channel) { logger.info("{} handleClosePacket() started.", objectUniqName); 
SocketStateChangeResult stateChangeResult = state.toBeingCloseByPeer(); if (!stateChangeResult.isChange()) { logger.info("{} handleClosePacket() failed. Error: {}", objectUniqName, stateChangeResult); } else { logger.info("{} handleClosePacket() completed.", objectUniqName); } } private void handlePingPacket(Channel channel, PingPacket packet) { logger.debug("{} handleLegacyPingPacket() started. packet:{}", objectUniqName, packet); if (healthCheckStateContext.getState() == HealthCheckState.WAIT) { healthCheckStateContext.toReceivedLegacy(); } // packet without status value if (packet == PingPacket.PING_PACKET) { writePong(channel); return; } PingPayloadPacket pingPayloadPacket = new PingPayloadPacket(packet.getPingId(), packet.getStateVersion(), packet.getStateCode()); handlePingPacket0(channel, pingPayloadPacket); } private void handlePingPacket(Channel channel, PingPayloadPacket packet) { logger.debug("{} handlePingPacket() started. packet:{}", objectUniqName, packet); if (healthCheckStateContext.getState() == HealthCheckState.WAIT) { healthCheckStateContext.toReceived(); } handlePingPacket0(channel, packet); } private void handlePingPacket0(Channel channel, PingPayloadPacket packet) { SocketStateCode statusCode = state.getCurrentStateCode(); if (statusCode.getId() == packet.getStateCode()) { stateChecker.unmark(); messageListener.handlePing(packet, this); writePong(channel); } else { logger.warn("Session state sync failed. 
channel:{}, packet:{}, server-state:{}", channel, packet, statusCode); if (stateChecker.markAndCheckCondition()) { state.toErrorSyncStateSession(); stop(); } else { writePong(channel); } } } private void writePong(Channel channel) { write0(PongPacket.PONG_PACKET, pongWriteFutureListener); } private Map<String, Object> createHandshakeResponse(HandshakeResponseCode responseCode, boolean isFirst) { final HandshakeResponseCode createdCode = getHandshakeResponseCode(responseCode, isFirst); Map<String, Object> result = new HashMap<String, Object>(); result.put(ControlHandshakeResponsePacket.CODE, createdCode.getCode()); result.put(ControlHandshakeResponsePacket.SUB_CODE, createdCode.getSubCode()); if (localClusterOption.isEnable()) { Map<String, Object> clusterOption = localClusterOption.toMap(); result.put(ControlHandshakeResponsePacket.CLUSTER, clusterOption); } return result; } private HandshakeResponseCode getHandshakeResponseCode(HandshakeResponseCode responseCode, boolean isFirst) { if (isFirst) { return responseCode; } if (HandshakeResponseCode.DUPLEX_COMMUNICATION == responseCode) { return HandshakeResponseCode.ALREADY_DUPLEX_COMMUNICATION; } else if (HandshakeResponseCode.SIMPLEX_COMMUNICATION == responseCode) { return HandshakeResponseCode.ALREADY_SIMPLEX_COMMUNICATION; } return responseCode; } private void sendHandshakeResponse0(int requestId, Map<String, Object> data) { try { byte[] resultPayload = ControlMessageEncodingUtils.encode(data); ControlHandshakeResponsePacket packet = new ControlHandshakeResponsePacket(requestId, resultPayload); write0(packet); } catch (ProtocolException e) { logger.warn(e.getMessage(), e); } } private Map<Object, Object> decodeHandshakePacket(ControlHandshakePacket message) { try { byte[] payload = message.getPayload(); Map<Object, Object> properties = (Map) ControlMessageEncodingUtils.decode(payload); return properties; } catch (ProtocolException e) { logger.warn(e.getMessage(), e); } return Collections.EMPTY_MAP; } public 
boolean isEnableCommunication() { return state.isEnableCommunication(); } public boolean isEnableDuplexCommunication() { return state.isEnableDuplexCommunication(); } String getObjectUniqName() { return objectUniqName; } @Override public ClusterOption getLocalClusterOption() { return localClusterOption; } @Override public ClusterOption getRemoteClusterOption() { return remoteClusterOption; } @Override public long getStartTimestamp() { return startTimestamp; } @Override public HealthCheckState getHealthCheckState() { return healthCheckStateContext.getState(); } @Override public SocketStateCode getCurrentStateCode() { return state.getCurrentStateCode(); } @Override public void close() { stop(); } @Override public String toString() { StringBuilder log = new StringBuilder(32); log.append(objectUniqName); log.append("("); log.append("remote:"); log.append(getRemoteAddress()); log.append(", state:"); log.append(getCurrentStateCode()); log.append(", healthCheckState:"); log.append(getHealthCheckState()); log.append(")"); return log.toString(); } }
package persistency;

import com.fasterxml.jackson.databind.ObjectMapper;
import de.tudarmstadt.informatik.tk.assistanceplatform.modules.Capability;
import models.ActiveAssistanceModule;
import org.apache.commons.dbutils.QueryRunner;
import org.apache.commons.dbutils.handlers.ArrayListHandler;
import play.db.DB;
import play.libs.Json;

import java.sql.PreparedStatement;
import java.sql.ResultSet;

/**
 * Persistence gateway for {@link ActiveAssistanceModule} rows.
 *
 * <p>Modules live in the {@code active_modules} table; per-language overrides of the
 * display fields (name, logo, descriptions) live in {@code active_module_localization}
 * and are folded in via {@code COALESCE} when listing.</p>
 *
 * <p>All methods are static and run inside {@code DB.withConnection(...)}; JDBC
 * resources are managed with try-with-resources so statements and result sets are
 * closed even when an exception is thrown mid-operation.</p>
 */
public class ActiveAssistanceModulePersistency {

	/** Main module table. Public because other queries reference it by name. */
	public static String TABLE_NAME = "active_modules";

	/** Table holding per-language overrides of the display fields. */
	private static String LOCALIZATION_TABLE_NAME = "active_module_localization";

	/** Column list shared by INSERT, UPDATE and SELECT; order matters for the parameter indices below. */
	private static String allFields = "id, name, logo_url, description_short, description_long, required_capabilities, optional_capabilities, copyright, administrator_email, support_email, rest_contact_address";

	// ObjectMapper is thread-safe after configuration; share one instance instead of
	// constructing a new mapper on every create()/update() call.
	private static final ObjectMapper MAPPER = new ObjectMapper();

	/** Static utility class — not instantiable. */
	private ActiveAssistanceModulePersistency() {
	}

	/**
	 * Inserts a new module row.
	 *
	 * @param module the module to persist; {@code module.id} is the primary key
	 * @return {@code true} if a row was inserted, {@code false} if a module with the
	 *         same id already exists (or the insert affected no rows)
	 */
	public static boolean create(ActiveAssistanceModule module) {
		if (doesModuleWithIdExist(module.id)) {
			return false;
		}

		return DB.withConnection(conn -> {
			try (PreparedStatement s = conn.prepareStatement(
					"INSERT INTO " + TABLE_NAME + " (" + allFields + ") VALUES "
							+ "(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)")) {
				s.setString(1, module.id);
				s.setString(2, module.name);
				s.setString(3, module.logoUrl);
				s.setString(4, module.descriptionShort);
				s.setString(5, module.descriptionLong);

				// Capabilities are stored as JSON text; the literal string "null" is kept
				// for absent arrays to preserve the existing on-disk convention.
				String requiredCapString = module.requiredCapabilities == null
						? "null"
						: MAPPER.valueToTree(module.requiredCapabilities).toString();
				s.setString(6, requiredCapString);

				String optCapString = module.optionalCapabilites == null
						? "null"
						: MAPPER.valueToTree(module.optionalCapabilites).toString();
				s.setString(7, optCapString);

				s.setString(8, module.copyright);
				s.setString(9, module.administratorEmail);
				s.setString(10, module.supportEmail);
				s.setString(11, module.restContactAddress);

				return s.executeUpdate() != 0;
			}
		});
	}

	/**
	 * Updates every column of an existing module row (except the id).
	 *
	 * @param module the module whose row should be rewritten; matched by {@code module.id}
	 * @return {@code true} if a row was updated, {@code false} if no module with that id exists
	 */
	public static boolean update(ActiveAssistanceModule module) {
		if (!doesModuleWithIdExist(module.id)) {
			return false;
		}

		return DB.withConnection(conn -> {
			// Derive "SET name = ?, logo_url = ?, ..." from the shared column list, minus id.
			// NOTE(review): this is string surgery on allFields; if the column list changes,
			// verify that "id," is still the exact prefix being stripped.
			String allFieldsForUpdate = "SET " + allFields.replace("id,", "").replaceAll(",", " = ?, ") + " = ?";

			try (PreparedStatement s = conn.prepareStatement(
					"UPDATE " + TABLE_NAME + " " + allFieldsForUpdate + " WHERE id = ?")) {
				s.setString(1, module.name);
				s.setString(2, module.logoUrl);
				s.setString(3, module.descriptionShort);
				s.setString(4, module.descriptionLong);
				s.setString(5, MAPPER.valueToTree(module.requiredCapabilities).toString());
				s.setString(6, MAPPER.valueToTree(module.optionalCapabilites).toString());
				s.setString(7, module.copyright);
				s.setString(8, module.administratorEmail);
				s.setString(9, module.supportEmail);
				s.setString(10, module.restContactAddress);
				s.setString(11, module.id);

				return s.executeUpdate() != 0;
			}
		});
	}

	/**
	 * Marks a module as alive and stamps {@code last_alive_message} with the DB's current time.
	 *
	 * @param moduleId id of the module that reported in
	 * @return {@code true} if the row was updated, {@code false} if the module is unknown
	 */
	public static boolean setIsAlive(String moduleId) {
		if (!doesModuleWithIdExist(moduleId)) {
			return false;
		}

		return DB.withConnection(conn -> {
			try (PreparedStatement s = conn.prepareStatement(
					"UPDATE " + TABLE_NAME
							+ " SET is_alive = TRUE, last_alive_message = CURRENT_TIMESTAMP WHERE id = ?")) {
				s.setString(1, moduleId);
				return s.executeUpdate() != 0;
			}
		});
	}

	/**
	 * Stores (or replaces) the localized display fields of a module for one language.
	 *
	 * <p>Implemented as delete-then-insert so repeated localization calls for the same
	 * (module, language) pair overwrite the previous values.</p>
	 *
	 * @param languageCode ISO language code the values belong to (e.g. "de")
	 * @param module carries the localized name / logo / descriptions and the target module id
	 * @return {@code true} if the localization row was inserted, {@code false} if the module is unknown
	 */
	public static boolean localize(String languageCode, ActiveAssistanceModule module) {
		if (!doesModuleWithIdExist(module.id)) {
			return false;
		}

		return DB.withConnection(conn -> {
			// Remove any previous localization for this (module, language) pair.
			try (PreparedStatement tmpDelete = conn.prepareStatement(
					"DELETE FROM " + LOCALIZATION_TABLE_NAME + " WHERE module_id = ? AND language_code = ?")) {
				tmpDelete.setString(1, module.id);
				tmpDelete.setString(2, languageCode);
				tmpDelete.executeUpdate();
			}

			try (PreparedStatement s = conn.prepareStatement(
					"INSERT INTO " + LOCALIZATION_TABLE_NAME
							+ " (module_id, language_code, name, logo_url, description_short, description_long) VALUES "
							+ "(?, ?, ?, ?, ?, ?)")) {
				s.setString(1, module.id);
				s.setString(2, languageCode);
				s.setString(3, module.name);
				s.setString(4, module.logoUrl);
				s.setString(5, module.descriptionShort);
				s.setString(6, module.descriptionLong);

				return s.executeUpdate() != 0;
			}
		});
	}

	/**
	 * @param id module id to probe
	 * @return {@code true} iff a module row with this id exists
	 */
	public static boolean doesModuleWithIdExist(String id) {
		return DB.withConnection(conn -> {
			try (PreparedStatement s = conn.prepareStatement(
					"SELECT id FROM " + TABLE_NAME + " WHERE id = ?")) {
				s.setString(1, id);
				try (ResultSet result = s.executeQuery()) {
					return result != null && result.next();
				}
			}
		});
	}

	/** Lists all modules with English ("en") localization fallback. */
	public static ActiveAssistanceModule[] list() {
		return list("en");
	}

	/**
	 * Lists all modules, preferring localized display fields for the given language and
	 * falling back to the module's default values where no localization row exists.
	 *
	 * @param language ISO language code used for the LEFT JOIN on the localization table
	 * @return all modules, localized where possible
	 */
	public static ActiveAssistanceModule[] list(String language) {
		return DB.withConnection(conn -> {
			// Rewrite the shared column list so localized values win when present.
			String fields = ActiveAssistanceModulePersistency.allFields
					.replace("name", "COALESCE(l.name, m.name)")
					.replace("logo_url", "COALESCE(l.logo_url, m.logo_url)")
					.replace("description_short", "COALESCE(l.description_short, m.description_short)")
					.replace("description_long", "COALESCE(l.description_long, m.description_long)");

			ActiveAssistanceModule[] modules = new QueryRunner()
					.query(conn,
							"SELECT " + fields + " FROM " + TABLE_NAME + " m LEFT JOIN " + LOCALIZATION_TABLE_NAME
									+ " AS l ON l.module_id = m.id AND l.language_code = ?",
							new ArrayListHandler(), language)
					.stream()
					.map(array -> {
						// Column order matches allFields above.
						String id = (String) array[0];
						String name = (String) array[1];
						String logoUrl = (String) array[2];
						String description_short = (String) array[3];
						String description_long = (String) array[4];

						String requiredCapsRaw = (String) array[5];
						Capability[] requiredCapabilities = Json.fromJson(Json.parse(requiredCapsRaw), Capability[].class);

						String optionalCapsRaw = (String) array[6];
						Capability[] optionalCapabilities = Json.fromJson(Json.parse(optionalCapsRaw), Capability[].class);

						String copyright = (String) array[7];
						String administratorEmail = (String) array[8];
						String supportEmail = (String) array[9];
						String restAddress = (String) array[10];

						return new ActiveAssistanceModule(name, id, logoUrl, description_short, description_long,
								requiredCapabilities, optionalCapabilities, copyright, administratorEmail,
								supportEmail, restAddress);
					}).toArray(ActiveAssistanceModule[]::new);

			return modules;
		});
	}
}
/*
 * Copyright (c) 2012-2015, Peter Abeles. All Rights Reserved.
 *
 * This file is part of DDogleg (http://ddogleg.org).
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.ddogleg.combinatorics;

import java.util.ArrayList;
import java.util.List;

/**
 * <p>
 * Exhaustively computes all the permutations of a set, without recursion. Designed to be memory and speed efficient.
 * </p>
 *
 * <p>
 * Example for the set "0123".
 * </p>
 * <pre>
 * {@code
 * 0123
 * 0132
 * 0213
 * 0231
 * 0321
 * 0312
 * 1023
 * 1032
 * 1203
 * 1230
 * 1320
 * 1302
 * 2103
 * 2130
 * 2013
 * 2031
 * 2301
 * 2310
 * 3120
 * 3102
 * 3210
 * 3201
 * 3021
 * 3012
 * }
 * </pre>
 *
 * @param <T> element type of the list being permuted
 */
public class Permute< T > {

	// The list being permuted. The list itself is never modified; the current
	// permutation is expressed through 'indexes'.
	protected List<T> list;

	// indexes[i] = index into 'list' of the element at position i of the current permutation.
	private int indexes[];
	// Per-position counters driving the iterative (non-recursive) stepping in
	// next()/previous(). NOTE(review): the exact invariant maintained here is
	// subtle; behavior is pinned by the example sequence in the class javadoc.
	private int counters[];

	// total = list.size()! — the number of distinct permutations.
	// NOTE: stored as int, so it overflows for lists longer than 12 elements.
	private int total;
	// Zero-based index of the current permutation in the enumeration order.
	private int permutation;

	/** Permute the elements in the list provided */
	public Permute(List<T> list) {
		init( list );
	}

	/** No-arg constructor; call {@code init} with a list before use. */
	public Permute() {
	}

	/**
	 * Initializes the permutation for a new list
	 *
	 * @param list List which is to be permuted.
	 */
	private void init( List<T> list ) {
		this.list = list;

		indexes = new int[ list.size() ];
		counters = new int[ list.size() ];

		// Identity permutation: position i holds element i; counters start at i as well.
		for( int i = 0; i < indexes.length ; i++ ) {
			counters[i] = indexes[i] = i;
		}

		// total = n! computed iteratively (overflows int for n > 12).
		total = 1;
		for( int i = 2; i <= indexes.length ; i++ ) {
			total *= i;
		}
		permutation = 0;
	}

	/**
	 * Returns the total number of permutations
	 */
	public int getTotalPermutations() {
		return total;
	}

	/**
	 * This will permute the list once. Advances to the next permutation in the
	 * enumeration order shown in the class javadoc.
	 *
	 * @return true if it was permuted, false if the last permutation had already been reached
	 */
	public boolean next() {
		// Nothing to do for 0/1-element lists or once the final permutation is reached.
		if( indexes.length <= 1 || permutation >= total-1 )
			return false;

		// Start at the second-to-last position and cascade left while counters saturate.
		int N = indexes.length-2;
		int k = N;
		swap(k, counters[k]++);
		while( counters[k] == indexes.length ) {
			k -= 1;
			swap(k, counters[k]++);
		}
		swap(counters[k], k);

		// Reset the counters of every position to the right of k.
		while (k < indexes.length - 1) {
			k++;
			counters[k] = k;
		}
		permutation++;
		return true;
	}

	/**
	 * This will undo a permutation. Steps back to the previous permutation in the
	 * enumeration order; exact inverse of {@link #next()}.
	 *
	 * @return true if it stepped back, false if already at the first permutation
	 */
	public boolean previous() {
		if( indexes.length <= 1 || permutation <= 0 )
			return false;

		// Find the right-most position whose counter has advanced past its resting value.
		int N = indexes.length-2;
		int k = N;
		while( counters[k] <= k ) {
			k--;
		}
		// Undo the swaps performed by next() at position k, in reverse order.
		swap(counters[k], k);
		counters[k]--;
		swap(k, counters[k]);

		// 'foo' remembers where counter restoration must begin below.
		int foo = k+1;
		while( counters[k+1] == k+1 && k < indexes.length-2) {
			k++;
			swap(k, indexes.length-1);
		}
		// Restore the counters to the right of the pivot to their saturated state.
		for (int i = foo; i < indexes.length - 1; i++) {
			counters[i] = indexes.length - 1;
		}
		permutation--;
		return true;
	}

	// Swaps the permutation indices at positions i and j.
	private void swap( int i , int j ) {
		int val = indexes[i];
		indexes[i] = indexes[j];
		indexes[j] = val;
	}

	/**
	 * Returns the size of the list being permuted
	 *
	 * @return list size
	 */
	public int size() {
		return list.size();
	}

	/**
	 * Returns element 'i' in the current permutation
	 *
	 * @param i index
	 * @return element in permuted list
	 */
	public T get( int i ) {
		return list.get(indexes[i]);
	}

	/**
	 * Returns a list containing the current permutation.
	 *
	 * @param storage Optional storage. If null a new list will be declared.
	 * @return Current permutation
	 */
	public List<T> getPermutation(List<T> storage) {
		if( storage == null )
			storage = new ArrayList<T>();
		else
			storage.clear();

		for( int i = 0; i < list.size(); i++ ) {
			storage.add(get(i));
		}

		return storage;
	}

	// Demo: prints all permutations of {0,1,2,3} forwards, then walks back in reverse.
	public static void main(String[] args) {
		List<Integer> list = new ArrayList<Integer>();
		for (int i = 0; i < 4; i++) {
			list.add(i);
		}

		Permute permute = new Permute(list);

		print(permute);
		while( permute.next() ) {
			print(permute);
		}

		System.out.println();
		System.out.println("Reverse");
		print(permute);
		while( permute.previous() ) {
			print(permute);
		}
	}

	// Prints the current permutation of 'permute' on a single line.
	private static void print( Permute permute ) {
		System.out.print(" * ");
		for (int i = 0; i < permute.size(); i++) {
			System.out.print(permute.get(i));
		}
		System.out.println();
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.serde2.avro; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.nio.ByteBuffer; import java.rmi.server.UID; import java.time.ZoneId; import java.util.ArrayList; import java.util.HashMap; import java.util.Hashtable; import java.util.List; import java.util.Map; import org.apache.avro.Schema; import org.apache.avro.generic.GenericData; import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.StandardListObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.StructField; import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaStringObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector; import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.VoidObjectInspector; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.junit.Assert; import org.junit.Test; public class TestAvroDeserializer { private final GenericData GENERIC_DATA = GenericData.get(); @Test public void canDeserializeVoidType() throws IOException, SerDeException { String schemaString = "{\n" + " \"type\": \"record\", \n" + " \"name\": \"nullTest\",\n" + " \"fields\" : [\n" + " {\"name\": \"isANull\", \"type\": \"null\"}\n" + " ]\n" + "}"; Schema s = AvroSerdeUtils.getSchemaFor(schemaString); GenericData.Record record = new GenericData.Record(s); record.put("isANull", null); assertTrue(GENERIC_DATA.validate(s, record)); AvroGenericRecordWritable garw = Utils.serializeAndDeserializeRecord(record); AvroObjectInspectorGenerator aoig = new AvroObjectInspectorGenerator(s); AvroDeserializer de = new AvroDeserializer(); ArrayList<Object> row = (ArrayList<Object>)de.deserialize(aoig.getColumnNames(), aoig.getColumnTypes(), garw, s); assertEquals(1, row.size()); Object theVoidObject = row.get(0); assertNull(theVoidObject); StandardStructObjectInspector oi = (StandardStructObjectInspector)aoig.getObjectInspector(); StructField fieldRef = oi.getStructFieldRef("isANull"); Object shouldBeNull = oi.getStructFieldData(row, fieldRef); assertNull(shouldBeNull); assertTrue(fieldRef.getFieldObjectInspector() instanceof VoidObjectInspector); } @Test public void canDeserializeMapsWithPrimitiveKeys() throws SerDeException, IOException { Schema s = AvroSerdeUtils.getSchemaFor(TestAvroObjectInspectorGenerator.MAP_WITH_PRIMITIVE_VALUE_TYPE); GenericData.Record record = new GenericData.Record(s); Map<String, Long> m = new Hashtable<String, Long>(); m.put("one", 1l); m.put("two", 2l); m.put("three", 3l); record.put("aMap", m); assertTrue(GENERIC_DATA.validate(s, record)); System.out.println("record = " + record); AvroGenericRecordWritable 
garw = Utils.serializeAndDeserializeRecord(record); AvroObjectInspectorGenerator aoig = new AvroObjectInspectorGenerator(s); AvroDeserializer de = new AvroDeserializer(); ArrayList<Object> row = (ArrayList<Object>)de.deserialize(aoig.getColumnNames(), aoig.getColumnTypes(), garw, s); assertEquals(1, row.size()); Object theMapObject = row.get(0); assertTrue(theMapObject instanceof Map); Map theMap = (Map)theMapObject; // Verify the raw object that's been created assertEquals(1l, theMap.get("one")); assertEquals(2l, theMap.get("two")); assertEquals(3l, theMap.get("three")); // Verify that the provided object inspector can pull out these same values StandardStructObjectInspector oi = (StandardStructObjectInspector)aoig.getObjectInspector(); List<Object> z = oi.getStructFieldsDataAsList(row); assertEquals(1, z.size()); StructField fieldRef = oi.getStructFieldRef("amap"); Map theMap2 = (Map)oi.getStructFieldData(row, fieldRef); assertEquals(1l, theMap2.get("one")); assertEquals(2l, theMap2.get("two")); assertEquals(3l, theMap2.get("three")); } @Test public void canDeserializeArrays() throws SerDeException, IOException { Schema s = AvroSerdeUtils.getSchemaFor(TestAvroObjectInspectorGenerator.ARRAY_WITH_PRIMITIVE_ELEMENT_TYPE); GenericData.Record record = new GenericData.Record(s); List<String> list = new ArrayList<String>(); list.add("Eccleston"); list.add("Tennant"); list.add("Smith"); record.put("anArray", list); assertTrue(GENERIC_DATA.validate(s, record)); System.out.println("Array-backed record = " + record); AvroGenericRecordWritable garw = Utils.serializeAndDeserializeRecord(record); AvroObjectInspectorGenerator aoig = new AvroObjectInspectorGenerator(s); AvroDeserializer de = new AvroDeserializer(); ArrayList<Object> row = (ArrayList<Object>)de.deserialize(aoig.getColumnNames(), aoig.getColumnTypes(), garw, s); assertEquals(1, row.size()); Object theArrayObject = row.get(0); assertTrue(theArrayObject instanceof List); List theList = (List)theArrayObject; // 
Verify the raw object that's been created assertEquals("Eccleston", theList.get(0)); assertEquals("Tennant", theList.get(1)); assertEquals("Smith", theList.get(2)); // Now go the correct way, through objectinspectors StandardStructObjectInspector oi = (StandardStructObjectInspector)aoig.getObjectInspector(); StructField fieldRefToArray = oi.getStructFieldRef("anArray"); Object anArrayData = oi.getStructFieldData(row, fieldRefToArray); StandardListObjectInspector anArrayOI = (StandardListObjectInspector)fieldRefToArray.getFieldObjectInspector(); assertEquals(3, anArrayOI.getListLength(anArrayData)); JavaStringObjectInspector elementOI = (JavaStringObjectInspector)anArrayOI.getListElementObjectInspector(); Object firstElement = anArrayOI.getListElement(anArrayData, 0); assertEquals("Eccleston", elementOI.getPrimitiveJavaObject(firstElement)); assertTrue(firstElement instanceof String); Object secondElement = anArrayOI.getListElement(anArrayData, 1); assertEquals("Tennant", elementOI.getPrimitiveJavaObject(secondElement)); assertTrue(secondElement instanceof String); Object thirdElement = anArrayOI.getListElement(anArrayData, 2); assertEquals("Smith", elementOI.getPrimitiveJavaObject(thirdElement)); assertTrue(thirdElement instanceof String); } public void canDeserializeRecordsInternal(Schema s, Schema fileSchema) throws SerDeException, IOException { GenericData.Record record = new GenericData.Record(s); GenericData.Record innerRecord = new GenericData.Record(s.getField("aRecord").schema()); innerRecord.put("int1", 42); innerRecord.put("boolean1", true); innerRecord.put("long1", 42432234234l); record.put("aRecord", innerRecord); assertTrue(GENERIC_DATA.validate(s, record)); AvroGenericRecordWritable garw = Utils.serializeAndDeserializeRecord(record, fileSchema); AvroObjectInspectorGenerator aoig = new AvroObjectInspectorGenerator(s); AvroDeserializer de = new AvroDeserializer(); ArrayList<Object> row = (ArrayList<Object>)de.deserialize(aoig.getColumnNames(), 
aoig.getColumnTypes(), garw, s); assertEquals(1, row.size()); Object theRecordObject = row.get(0); System.out.println("theRecordObject = " + theRecordObject.getClass().getCanonicalName()); // The original record was lost in the deserialization, so just go the // correct way, through objectinspectors StandardStructObjectInspector oi = (StandardStructObjectInspector)aoig.getObjectInspector(); List<? extends StructField> allStructFieldRefs = oi.getAllStructFieldRefs(); assertEquals(1, allStructFieldRefs.size()); StructField fieldRefForaRecord = allStructFieldRefs.get(0); assertEquals("arecord", fieldRefForaRecord.getFieldName()); Object innerRecord2 = oi.getStructFieldData(row, fieldRefForaRecord); // Extract innerRecord field refs StandardStructObjectInspector innerRecord2OI = (StandardStructObjectInspector) fieldRefForaRecord.getFieldObjectInspector(); List<? extends StructField> allStructFieldRefs1 = innerRecord2OI.getAllStructFieldRefs(); assertEquals(3, allStructFieldRefs1.size()); assertEquals("int1", allStructFieldRefs1.get(0).getFieldName()); assertEquals("boolean1", allStructFieldRefs1.get(1).getFieldName()); assertEquals("long1", allStructFieldRefs1.get(2).getFieldName()); innerRecord2OI.getStructFieldsDataAsList(innerRecord2); assertEquals(42, innerRecord2OI.getStructFieldData(innerRecord2, allStructFieldRefs1.get(0))); assertEquals(true, innerRecord2OI.getStructFieldData(innerRecord2, allStructFieldRefs1.get(1))); assertEquals(42432234234l, innerRecord2OI.getStructFieldData(innerRecord2, allStructFieldRefs1.get(2))); } @Test public void canDeserializeRecords() throws SerDeException, IOException { Schema s = AvroSerdeUtils.getSchemaFor(TestAvroObjectInspectorGenerator.RECORD_SCHEMA); canDeserializeRecordsInternal(s, s); } @Test public void canDeserializeNullableRecords() throws SerDeException, IOException { Schema s = AvroSerdeUtils.getSchemaFor(TestAvroObjectInspectorGenerator.RECORD_SCHEMA); Schema fileSchema = 
AvroSerdeUtils.getSchemaFor(TestAvroObjectInspectorGenerator.NULLABLE_RECORD_SCHEMA); canDeserializeRecordsInternal(s, fileSchema); } private class ResultPair { // Because Pairs give Java the vapors. public final ObjectInspector oi; public final Object value; public final Object unionObject; private ResultPair(ObjectInspector oi, Object value, Object unionObject) { this.oi = oi; this.value = value; this.unionObject = unionObject; } } @Test public void canDeserializeSingleItemUnions() throws SerDeException, IOException { Schema s = AvroSerdeUtils.getSchemaFor(TestAvroObjectInspectorGenerator.SINGLE_ITEM_UNION_SCHEMA); GenericData.Record record = new GenericData.Record(s); record.put("aUnion", "this is a string"); ResultPair result = unionTester(s, record); assertTrue(result.value instanceof String); assertEquals("this is a string", result.value); UnionObjectInspector uoi = (UnionObjectInspector)result.oi; assertEquals(0, uoi.getTag(result.unionObject)); } /** * Test whether Avro timestamps can be deserialized according to new behavior (storage in UTC but * LocalDateTime semantics as timestamps are converted back to the writer time zone) as well as * old behavior (Instant semantics). 
*/
  /**
   * Deserializes an Avro long-encoded timestamp twice: once with an explicit writer
   * time zone (America/New_York) and once without one (legacy records), verifying the
   * two different interpretation semantics.
   */
  @Test
  public void canDeserializeTimestamps() throws SerDeException, IOException {
    List<String> columnNames = new ArrayList<>();
    columnNames.add("timestampField");
    List<TypeInfo> columnTypes = new ArrayList<>();
    columnTypes.add(TypeInfoFactory.getPrimitiveTypeInfo("timestamp"));
    Schema readerSchema = AvroSerdeUtils.getSchemaFor(TestAvroObjectInspectorGenerator.TIMESTAMP_SCHEMA);
    // 2019-01-02 00:00:00 GMT is 1546387200000 ms after epoch; the value below adds 999 ms
    // so the millisecond component survives deserialization and is asserted explicitly.
    GenericData.Record record = new GenericData.Record(readerSchema);
    record.put("timestampField", 1546387200999L);
    assertTrue(GENERIC_DATA.validate(readerSchema, record));
    // Writer time zone supplied explicitly. NOTE(review): the semantics of the two
    // boolean constructor flags are not visible from this file — confirm against
    // AvroGenericRecordWritable before relying on them.
    AvroGenericRecordWritable agrw = new AvroGenericRecordWritable(ZoneId.of("America/New_York"), false, false);
    agrw.setRecord(record);
    agrw.setFileSchema(readerSchema);
    agrw.setRecordReaderID(new UID());
    AvroDeserializer deserializer = new AvroDeserializer();
    ArrayList<Object> row =
        (ArrayList<Object>) deserializer.deserialize(columnNames, columnTypes, agrw, readerSchema);
    Timestamp resultTimestamp = (Timestamp) row.get(0);
    // 2019-01-02 00:00:00 GMT is 2019-01-01 19:00:00 GMT-0500 (America/New_York / EST)
    assertEquals(Timestamp.valueOf("2019-01-01 19:00:00.999"), resultTimestamp);

    // Do the same without specifying writer time zone. This tests deserialization of older
    // records, which should be interpreted in Instant semantics.
    AvroGenericRecordWritable agrw2 = new AvroGenericRecordWritable();
    agrw2.setRecord(record);
    agrw2.setFileSchema(readerSchema);
    agrw2.setRecordReaderID(new UID());
    row = (ArrayList<Object>) deserializer.deserialize(columnNames, columnTypes, agrw2, readerSchema);
    resultTimestamp = (Timestamp) row.get(0);
    // 2019-01-02 00:00:00 GMT is 2019-01-01 16:00:00 in zone GMT-0800 (PST).
    // This is the time zone for VM in test.
    assertEquals(Timestamp.valueOf("2019-01-01 16:00:00.999"), resultTimestamp);
  }

  /**
   * A union [int, string] round-trips both branches; the tag reported by the
   * UnionObjectInspector matches the branch that was written.
   */
  @Test
  public void canDeserializeUnions() throws SerDeException, IOException {
    Schema s = AvroSerdeUtils.getSchemaFor(TestAvroObjectInspectorGenerator.UNION_SCHEMA);
    GenericData.Record record = new GenericData.Record(s);
    record.put("aUnion", "this is a string");
    ResultPair result = unionTester(s, record);
    assertTrue(result.value instanceof String);
    assertEquals("this is a string", result.value);
    UnionObjectInspector uoi = (UnionObjectInspector) result.oi;
    assertEquals(1, uoi.getTag(result.unionObject));

    // Now the other enum possibility
    record = new GenericData.Record(s);
    record.put("aUnion", 99);
    result = unionTester(s, record);
    assertTrue(result.value instanceof Integer);
    assertEquals(99, result.value);
    uoi = (UnionObjectInspector) result.oi;
    assertEquals(0, uoi.getTag(result.unionObject));
  }

  /**
   * Schema evolution: records written with UNION_SCHEMA are read with UNION_SCHEMA_2;
   * the tags stay stable because the null branch is removed from the reader's union.
   */
  @Test
  public void canDeserializeEvolvedUnions1() throws SerDeException, IOException {
    Schema ws = AvroSerdeUtils.getSchemaFor(TestAvroObjectInspectorGenerator.UNION_SCHEMA);
    Schema rs = AvroSerdeUtils.getSchemaFor(TestAvroObjectInspectorGenerator.UNION_SCHEMA_2);
    GenericData.Record record = new GenericData.Record(ws);
    record.put("aUnion", "this is a string");
    ResultPair result = unionTester(ws, rs, record);
    assertTrue(result.value instanceof String);
    assertEquals("this is a string", result.value);
    UnionObjectInspector uoi = (UnionObjectInspector) result.oi;
    // The null in union type should be removed
    assertEquals(1, uoi.getTag(result.unionObject));

    // Now the other enum possibility
    record = new GenericData.Record(ws);
    record.put("aUnion", 99);
    result = unionTester(ws, rs, record);
    assertTrue(result.value instanceof Integer);
    assertEquals(99, result.value);
    uoi = (UnionObjectInspector) result.oi;
    // The null in union type should be removed
    assertEquals(0, uoi.getTag(result.unionObject));
  }

  /**
   * Schema evolution between two non-null unions (UNION_SCHEMA_3 written,
   * UNION_SCHEMA_4 read): int and float branches keep their values and tags.
   */
  @Test
  public void canDeserializeEvolvedUnions2() throws SerDeException, IOException {
    Schema ws = AvroSerdeUtils.getSchemaFor(TestAvroObjectInspectorGenerator.UNION_SCHEMA_3);
    Schema rs = AvroSerdeUtils.getSchemaFor(TestAvroObjectInspectorGenerator.UNION_SCHEMA_4);
    GenericData.Record record = new GenericData.Record(ws);
    record.put("aUnion", 90);
    ResultPair result = unionTester(ws, rs, record);
    assertTrue(result.value instanceof Integer);
    assertEquals(90, result.value);
    UnionObjectInspector uoi = (UnionObjectInspector) result.oi;
    assertEquals(0, uoi.getTag(result.unionObject));

    // Now the other enum possibility
    record = new GenericData.Record(ws);
    record.put("aUnion", 99.9f);
    result = unionTester(ws, rs, record);
    assertTrue(result.value instanceof Float);
    assertEquals(99.9f, result.value);
    uoi = (UnionObjectInspector) result.oi;
    assertEquals(1, uoi.getTag(result.unionObject));
  }

  /** Convenience overload: writer and reader schema are the same. */
  private ResultPair unionTester(Schema ws, GenericData.Record record)
      throws SerDeException, IOException {
    return unionTester(ws, ws, record);
  }

  /**
   * Serializes {@code record} with writer schema {@code ws}, deserializes with reader
   * schema {@code rs}, and returns the single union field's inspector, value, and raw
   * union object for the caller to assert on. Also asserts the row has exactly one field
   * named "aunion" (the inspector reports the lower-cased field name).
   */
  private ResultPair unionTester(Schema ws, Schema rs, GenericData.Record record)
      throws SerDeException, IOException {
    assertTrue(GENERIC_DATA.validate(ws, record));
    AvroGenericRecordWritable garw = Utils.serializeAndDeserializeRecord(record);
    AvroObjectInspectorGenerator aoig = new AvroObjectInspectorGenerator(rs);
    AvroDeserializer de = new AvroDeserializer();
    ArrayList<Object> row =
        (ArrayList<Object>) de.deserialize(aoig.getColumnNames(), aoig.getColumnTypes(), garw, rs);
    assertEquals(1, row.size());
    StandardStructObjectInspector oi = (StandardStructObjectInspector) aoig.getObjectInspector();
    List<? extends StructField> fieldRefs = oi.getAllStructFieldRefs();
    assertEquals(1, fieldRefs.size());
    StructField fieldRef = fieldRefs.get(0);
    assertEquals("aunion", fieldRef.getFieldName());
    Object theUnion = oi.getStructFieldData(row, fieldRef);
    assertTrue(fieldRef.getFieldObjectInspector() instanceof UnionObjectInspector);
    UnionObjectInspector fieldObjectInspector =
        (UnionObjectInspector) fieldRef.getFieldObjectInspector();
    Object value = fieldObjectInspector.getField(theUnion);
    return new ResultPair(fieldObjectInspector, value, theUnion);
  }

  /**
   * Reader schema adds a new nullable decimal column (with default null) that the file
   * schema lacks; the existing int column is read and the new column comes back null.
   */
  @Test
  public void primitiveSchemaEvolution() throws Exception {
    // File (writer) schema: a single int column.
    Schema fileSchema = AvroSerdeUtils.getSchemaFor(
        "{\n" +
        " \"type\": \"record\",\n" +
        " \"name\": \"r1\",\n" +
        " \"fields\": [\n" +
        " {\n" +
        " \"name\": \"int_field\",\n" +
        " \"type\": \"int\"\n" +
        " }\n" +
        " ]\n" +
        "}"
    );
    // Reader schema: same int column plus a nullable decimal with default null.
    Schema readerSchema = AvroSerdeUtils.getSchemaFor(
        "{\n" +
        " \"type\": \"record\",\n" +
        " \"name\": \"r1\",\n" +
        " \"fields\": [\n" +
        " {\n" +
        " \"name\": \"int_field\",\n" +
        " \"type\": \"int\"\n" +
        " },\n" +
        " {\n" +
        " \"name\": \"dec_field\",\n" +
        " \"type\": [\n" +
        " \"null\",\n" +
        " {\n" +
        " \"type\": \"bytes\",\n" +
        " \"logicalType\": \"decimal\",\n" +
        " \"precision\": 5,\n" +
        " \"scale\": 4\n" +
        " }\n" +
        " ],\n" +
        " \"default\": null\n" +
        " }\n" +
        " ]\n" +
        "}"
    );
    GenericData.Record record = new GenericData.Record(fileSchema);
    record.put("int_field", 1);
    assertTrue(GENERIC_DATA.validate(fileSchema, record));
    AvroGenericRecordWritable garw = Utils.serializeAndDeserializeRecord(record);
    AvroObjectInspectorGenerator aoig = new AvroObjectInspectorGenerator(readerSchema);
    AvroDeserializer de = new AvroDeserializer();
    List<Object> row =
        (List<Object>) de.deserialize(aoig.getColumnNames(), aoig.getColumnTypes(), garw, readerSchema);
    Assert.assertEquals(1, row.get(0));
    Assert.assertNull(row.get(1));
  }

  @Test
  // Enums are one of two types we fudge for Hive. Enums go in, Strings come out.
  public void canDeserializeEnums() throws SerDeException, IOException {
    Schema s = AvroSerdeUtils.getSchemaFor(TestAvroObjectInspectorGenerator.ENUM_SCHEMA);
    GenericData.Record record = new GenericData.Record(s);
    record.put("baddies", new GenericData.EnumSymbol(s.getField("baddies").schema(),"DALEKS"));
    assertTrue(GENERIC_DATA.validate(s, record));
    AvroGenericRecordWritable garw = Utils.serializeAndDeserializeRecord(record);
    AvroObjectInspectorGenerator aoig = new AvroObjectInspectorGenerator(s);
    AvroDeserializer de = new AvroDeserializer();
    ArrayList<Object> row =
        (ArrayList<Object>) de.deserialize(aoig.getColumnNames(), aoig.getColumnTypes(), garw, s);
    assertEquals(1, row.size());
    StandardStructObjectInspector oi = (StandardStructObjectInspector) aoig.getObjectInspector();
    List<? extends StructField> fieldRefs = oi.getAllStructFieldRefs();
    assertEquals(1, fieldRefs.size());
    StructField fieldRef = fieldRefs.get(0);
    assertEquals("baddies", fieldRef.getFieldName());
    Object theStringObject = oi.getStructFieldData(row, fieldRef);
    // The enum surfaces through a *String* object inspector on the Hive side.
    assertTrue(fieldRef.getFieldObjectInspector() instanceof StringObjectInspector);
    StringObjectInspector soi = (StringObjectInspector) fieldRef.getFieldObjectInspector();
    String finalValue = soi.getPrimitiveJavaObject(theStringObject);
    assertEquals("DALEKS", finalValue);
  }

  @Test
  // Fixed doesn't exist in Hive. Fixed go in, lists of bytes go out.
  public void canDeserializeFixed() throws SerDeException, IOException {
    Schema s = AvroSerdeUtils.getSchemaFor(TestAvroObjectInspectorGenerator.FIXED_SCHEMA);
    GenericData.Record record = new GenericData.Record(s);
    byte [] bytes = "ANANCIENTBLUEBOX".getBytes();
    record.put("hash", new GenericData.Fixed(s, bytes));
    assertTrue(GENERIC_DATA.validate(s, record));
    AvroGenericRecordWritable garw = Utils.serializeAndDeserializeRecord(record);
    AvroObjectInspectorGenerator aoig = new AvroObjectInspectorGenerator(s);
    AvroDeserializer de = new AvroDeserializer();
    ArrayList<Object> row =
        (ArrayList<Object>) de.deserialize(aoig.getColumnNames(), aoig.getColumnTypes(), garw, s);
    assertEquals(1, row.size());
    Object byteObject = row.get(0);
    assertTrue(byteObject instanceof byte[]);
    byte[] outBytes = (byte[]) byteObject;
    // Verify the raw object that's been created
    for(int i = 0; i < bytes.length; i++) {
      assertEquals(bytes[i], outBytes[i]);
    }
    // Now go the correct way, through objectinspectors
    StandardStructObjectInspector oi = (StandardStructObjectInspector) aoig.getObjectInspector();
    List<Object> fieldsDataAsList = oi.getStructFieldsDataAsList(row);
    assertEquals(1, fieldsDataAsList.size());
    StructField fieldRef = oi.getStructFieldRef("hash");
    outBytes = (byte[]) oi.getStructFieldData(row, fieldRef);
    for(int i = 0; i < outBytes.length; i++) {
      assertEquals(bytes[i], outBytes[i]);
    }
  }

  /** Avro bytes fields come out as Java byte[], both raw and via the object inspector. */
  @Test
  public void canDeserializeBytes() throws SerDeException, IOException {
    Schema s = AvroSerdeUtils.getSchemaFor(TestAvroObjectInspectorGenerator.BYTES_SCHEMA);
    GenericData.Record record = new GenericData.Record(s);
    byte [] bytes = "ANANCIENTBLUEBOX".getBytes();
    ByteBuffer bb = ByteBuffer.wrap(bytes);
    bb.rewind();
    record.put("bytesField", bb);
    assertTrue(GENERIC_DATA.validate(s, record));
    AvroGenericRecordWritable garw = Utils.serializeAndDeserializeRecord(record);
    AvroObjectInspectorGenerator aoig = new AvroObjectInspectorGenerator(s);
    AvroDeserializer de = new AvroDeserializer();
    ArrayList<Object> row =
        (ArrayList<Object>) de.deserialize(aoig.getColumnNames(), aoig.getColumnTypes(), garw, s);
    assertEquals(1, row.size());
    Object byteObject = row.get(0);
    assertTrue(byteObject instanceof byte[]);
    byte[] outBytes = (byte[]) byteObject;
    // Verify the raw object that's been created
    for(int i = 0; i < bytes.length; i++) {
      assertEquals(bytes[i], outBytes[i]);
    }
    // Now go the correct way, through objectinspectors
    StandardStructObjectInspector oi = (StandardStructObjectInspector) aoig.getObjectInspector();
    List<Object> fieldsDataAsList = oi.getStructFieldsDataAsList(row);
    assertEquals(1, fieldsDataAsList.size());
    StructField fieldRef = oi.getStructFieldRef("bytesField");
    outBytes = (byte[]) oi.getStructFieldData(row, fieldRef);
    for(int i = 0; i < outBytes.length; i++) {
      assertEquals(bytes[i], outBytes[i]);
    }
  }

  /** Nullable [null, string] union: both a present value and null round-trip. */
  @Test
  public void canDeserializeNullableTypes() throws IOException, SerDeException {
    Schema s = AvroSerdeUtils.getSchemaFor(TestAvroObjectInspectorGenerator.NULLABLE_STRING_SCHEMA);
    GenericData.Record record = new GenericData.Record(s);
    record.put("nullableString", "this is a string");
    verifyNullableType(record, s, "nullableString", "this is a string");

    record = new GenericData.Record(s);
    record.put("nullableString", null);
    verifyNullableType(record, s, "nullableString", null);
  }

  /** Nullable enum: a present symbol comes out as its String name; null stays null. */
  @Test
  public void canDeserializeNullableEnums() throws IOException, SerDeException {
    Schema s = AvroSerdeUtils.getSchemaFor(TestAvroObjectInspectorGenerator.NULLABLE_ENUM_SCHEMA);
    GenericData.Record record = new GenericData.Record(s);
    // The enum symbol must be built against the non-null branch of the nullable union.
    record.put("nullableEnum", new GenericData.EnumSymbol(AvroSerdeUtils.getOtherTypeFromNullableType(s.getField("nullableEnum").schema()), "CYBERMEN"));
    verifyNullableType(record, s, "nullableEnum", "CYBERMEN");

    record = new GenericData.Record(s);
    record.put("nullableEnum", null);
    verifyNullableType(record, s, "nullableEnum", null);
  }

  /**
   * A map whose value type is a nullable primitive keeps its null entries: the key is
   * present ("mu") and maps to null, both raw and through the object inspector.
   */
  @Test
  public void canDeserializeMapWithNullablePrimitiveValues() throws SerDeException, IOException {
    Schema s = AvroSerdeUtils.getSchemaFor(TestAvroObjectInspectorGenerator
        .MAP_WITH_NULLABLE_PRIMITIVE_VALUE_TYPE_SCHEMA);
    GenericData.Record record = new GenericData.Record(s);
    Map<String, Long> m = new HashMap<String, Long>();
    m.put("one", 1l);
    m.put("two", 2l);
    m.put("three", 3l);
    m.put("mu", null);
    record.put("aMap", m);
    assertTrue(GENERIC_DATA.validate(s, record));
    System.out.println("record = " + record);
    AvroGenericRecordWritable garw = Utils.serializeAndDeserializeRecord(record);
    AvroObjectInspectorGenerator aoig = new AvroObjectInspectorGenerator(s);
    AvroDeserializer de = new AvroDeserializer();
    ArrayList<Object> row =
        (ArrayList<Object>) de.deserialize(aoig.getColumnNames(), aoig.getColumnTypes(), garw, s);
    assertEquals(1, row.size());
    Object theMapObject = row.get(0);
    assertTrue(theMapObject instanceof Map);
    Map theMap = (Map) theMapObject;
    // Verify the raw object that's been created
    assertEquals(1l, theMap.get("one"));
    assertEquals(2l, theMap.get("two"));
    assertEquals(3l, theMap.get("three"));
    assertTrue(theMap.containsKey("mu"));
    assertEquals(null, theMap.get("mu"));
    // Verify that the provided object inspector can pull out these same values
    StandardStructObjectInspector oi = (StandardStructObjectInspector) aoig.getObjectInspector();
    List<Object> z = oi.getStructFieldsDataAsList(row);
    assertEquals(1, z.size());
    StructField fieldRef = oi.getStructFieldRef("amap");
    Map theMap2 = (Map) oi.getStructFieldData(row, fieldRef);
    assertEquals(1l, theMap2.get("one"));
    assertEquals(2l, theMap2.get("two"));
    assertEquals(3l, theMap2.get("three"));
    assertTrue(theMap2.containsKey("mu"));
    assertEquals(null, theMap2.get("mu"));
  }

  @Test
  public void canDeserializeMapsWithJavaLangStringKeys() throws IOException, SerDeException {
    // Ensures maps can be deserialized when avro.java.string=String.
    // See http://stackoverflow.com/a/19868919/312944 for why that might be used.
    String schemaString = "{\n" +
        " \"namespace\": \"testing\",\n" +
        " \"name\": \"oneMap\",\n" +
        " \"type\": \"record\",\n" +
        " \"fields\": [\n" +
        " {\n" +
        " \"name\":\"aMap\",\n" +
        " \"type\":{\"type\":\"map\",\n" +
        " \"avro.java.string\":\"String\",\n" +
        " \"values\":\"long\"}\n" +
        "\t}\n" +
        " ]\n" +
        "}";
    Schema s = AvroSerdeUtils.getSchemaFor(schemaString);
    GenericData.Record record = new GenericData.Record(s);
    Map<String, Long> m = new Hashtable<String, Long>();
    m.put("one", 1l);
    m.put("two", 2l);
    m.put("three", 3l);
    record.put("aMap", m);
    assertTrue(GENERIC_DATA.validate(s, record));
    System.out.println("record = " + record);
    AvroGenericRecordWritable garw = Utils.serializeAndDeserializeRecord(record);
    AvroObjectInspectorGenerator aoig = new AvroObjectInspectorGenerator(s);
    AvroDeserializer de = new AvroDeserializer();
    ArrayList<Object> row =
        (ArrayList<Object>) de.deserialize(aoig.getColumnNames(), aoig.getColumnTypes(), garw, s);
    assertEquals(1, row.size());
    Object theMapObject = row.get(0);
    assertTrue(theMapObject instanceof Map);
    Map theMap = (Map) theMapObject;
    // Verify the raw object that's been created
    assertEquals(1l, theMap.get("one"));
    assertEquals(2l, theMap.get("two"));
    assertEquals(3l, theMap.get("three"));
    // Verify that the provided object inspector can pull out these same values
    StandardStructObjectInspector oi = (StandardStructObjectInspector) aoig.getObjectInspector();
    List<Object> z = oi.getStructFieldsDataAsList(row);
    assertEquals(1, z.size());
    StructField fieldRef = oi.getStructFieldRef("amap");
    Map theMap2 = (Map) oi.getStructFieldData(row, fieldRef);
    assertEquals(1l, theMap2.get("one"));
    assertEquals(2l, theMap2.get("two"));
    assertEquals(3l, theMap2.get("three"));
  }

  /**
   * Shared helper for the nullable-type tests: round-trips {@code record} through the
   * deserializer and asserts the single field {@code fieldName} equals {@code expected}
   * (or is null when {@code expected} is null) via a StringObjectInspector.
   */
  private void verifyNullableType(GenericData.Record record, Schema s,
                                  String fieldName, String expected)
      throws SerDeException, IOException {
    assertTrue(GENERIC_DATA.validate(s, record));
    AvroGenericRecordWritable garw = Utils.serializeAndDeserializeRecord(record);
    AvroObjectInspectorGenerator aoig = new AvroObjectInspectorGenerator(s);
    AvroDeserializer de = new AvroDeserializer();
    ArrayList<Object> row =
        (ArrayList<Object>) de.deserialize(aoig.getColumnNames(), aoig.getColumnTypes(), garw, s);
    assertEquals(1, row.size());
    Object rowElement = row.get(0);
    StandardStructObjectInspector oi = (StandardStructObjectInspector) aoig.getObjectInspector();
    List<Object> fieldsDataAsList = oi.getStructFieldsDataAsList(row);
    assertEquals(1, fieldsDataAsList.size());
    StructField fieldRef = oi.getStructFieldRef(fieldName);
    ObjectInspector fieldObjectInspector = fieldRef.getFieldObjectInspector();
    StringObjectInspector soi = (StringObjectInspector) fieldObjectInspector;
    if(expected == null) {
      assertNull(soi.getPrimitiveJavaObject(rowElement));
    } else {
      assertEquals(expected, soi.getPrimitiveJavaObject(rowElement));
    }
  }

  /**
   * Verifies the deserializer's internal caches: the no-encoding-needed set grows only
   * per distinct record reader ID, and the re-encoder cache is populated only when the
   * reader schema differs from the file schema ("evolved" schema).
   */
  @Test
  public void verifyCaching() throws SerDeException, IOException {
    Schema s = AvroSerdeUtils.getSchemaFor(TestAvroObjectInspectorGenerator.RECORD_SCHEMA);
    GenericData.Record record = new GenericData.Record(s);
    GenericData.Record innerRecord = new GenericData.Record(s.getField("aRecord").schema());
    innerRecord.put("int1", 42);
    innerRecord.put("boolean1", true);
    innerRecord.put("long1", 42432234234l);
    record.put("aRecord", innerRecord);
    assertTrue(GENERIC_DATA.validate(s, record));
    AvroGenericRecordWritable garw = Utils.serializeAndDeserializeRecord(record);
    UID recordReaderID = new UID();
    garw.setRecordReaderID(recordReaderID);
    AvroObjectInspectorGenerator aoig = new AvroObjectInspectorGenerator(s);
    AvroDeserializer de = new AvroDeserializer();
    ArrayList<Object> row =
        (ArrayList<Object>) de.deserialize(aoig.getColumnNames(), aoig.getColumnTypes(), garw, s);
    assertEquals(1, de.getNoEncodingNeeded().size());
    assertEquals(0, de.getReEncoderCache().size());

    // Read the record with the same record reader ID
    row = (ArrayList<Object>) de.deserialize(aoig.getColumnNames(), aoig.getColumnTypes(), garw, s);
    // Expecting not to change the size of internal structures
    assertEquals(1, de.getNoEncodingNeeded().size());
    assertEquals(0, de.getReEncoderCache().size());

    // Read the record with **different** record reader ID
    garw.setRecordReaderID(new UID()); //New record reader ID
    row = (ArrayList<Object>) de.deserialize(aoig.getColumnNames(), aoig.getColumnTypes(), garw, s);
    //Expecting to change the size of internal structures
    assertEquals(2, de.getNoEncodingNeeded().size());
    assertEquals(0, de.getReEncoderCache().size());

    //Read the record with **different** record reader ID and **evolved** schema
    Schema evolvedSchema = AvroSerdeUtils.getSchemaFor(s.toString());
    evolvedSchema.getField("aRecord").schema().addProp("Testing", "meaningless");
    garw.setRecordReaderID(recordReaderID = new UID()); //New record reader ID
    row = (ArrayList<Object>) de.deserialize(aoig.getColumnNames(), aoig.getColumnTypes(), garw, evolvedSchema);
    //Expecting to change the size of internal structures
    assertEquals(2, de.getNoEncodingNeeded().size());
    assertEquals(1, de.getReEncoderCache().size());

    //Read the record with existing record reader ID and same **evolved** schema
    garw.setRecordReaderID(recordReaderID); //Reuse record reader ID
    row = (ArrayList<Object>) de.deserialize(aoig.getColumnNames(), aoig.getColumnTypes(), garw, evolvedSchema);
    //Expecting NOT to change the size of internal structures
    assertEquals(2, de.getNoEncodingNeeded().size());
    assertEquals(1, de.getReEncoderCache().size());
  }
}
package com.github.kostyasha.yad.steps;

import com.github.kostyasha.yad.commons.cmds.DockerBuildImage;
import com.github.kostyasha.yad.connector.YADockerConnector;
import com.github.kostyasha.yad.utils.VariableUtils;
import com.github.kostyasha.yad_docker_java.com.github.dockerjava.api.DockerClient;
import com.github.kostyasha.yad_docker_java.com.github.dockerjava.api.command.BuildImageCmd;
import com.github.kostyasha.yad_docker_java.com.github.dockerjava.api.command.PushImageCmd;
import com.github.kostyasha.yad_docker_java.com.github.dockerjava.api.exception.ConflictException;
import com.github.kostyasha.yad_docker_java.com.github.dockerjava.api.exception.NotFoundException;
import com.github.kostyasha.yad_docker_java.com.github.dockerjava.api.model.AuthConfig;
import com.github.kostyasha.yad_docker_java.com.github.dockerjava.api.model.AuthConfigurations;
import com.github.kostyasha.yad_docker_java.com.github.dockerjava.api.model.BuildResponseItem;
import com.github.kostyasha.yad_docker_java.com.github.dockerjava.api.model.PushResponseItem;
import com.github.kostyasha.yad_docker_java.com.github.dockerjava.core.NameParser;
import com.github.kostyasha.yad_docker_java.com.github.dockerjava.core.command.BuildImageResultCallback;
import com.github.kostyasha.yad_docker_java.com.github.dockerjava.core.command.PushImageResultCallback;
import com.google.common.base.Throwables;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import hudson.AbortException;
import hudson.model.Run;
import hudson.model.TaskListener;
import hudson.remoting.VirtualChannel;
import jenkins.MasterToSlaveFileCallable;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.annotation.Nonnull;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import static com.github.kostyasha.yad.utils.DockerJavaUtils.getAuthConfig;
import static com.github.kostyasha.yad.utils.LogUtils.printResponseItemToListener;
import static java.util.Objects.isNull;
import static java.util.Objects.nonNull;
import static org.apache.commons.lang.StringUtils.isEmpty;
import static org.apache.commons.lang.StringUtils.isNotEmpty;

/**
 * All actions happening on remote.
 * Variables should be resolved during execution on remote.
 * Client should be instantiated in this remoting execution.
 * All configuration objects should be serializable.
 *
 * @author Kanstantsin Shautsou
 * @see DockerImageComboStep
 */
public class DockerImageComboStepFileCallable extends MasterToSlaveFileCallable<DockerImageComboStepResponse> {
    private static final Logger LOG = LoggerFactory.getLogger(DockerImageComboStepFileCallable.class);
    private static final long serialVersionUID = 1L;

    // Serialized to the agent; all of these must stay Serializable.
    private final YADockerConnector connector;
    private DockerBuildImage buildImage; // not final: tags are expanded in Builder#build()
    private final boolean cleanup;          // remove built/tagged images afterwards
    private final boolean cleanupDangling;  // remove intermediate layer images tracked during build
    private final boolean push;             // push all normalised tags after build
    private final TaskListener taskListener;

    public DockerImageComboStepFileCallable(final YADockerConnector connector,
                                            final DockerBuildImage buildImage,
                                            final boolean cleanup,
                                            final boolean cleanupDangling,
                                            final boolean push,
                                            final TaskListener taskListener) {
        this.connector = connector;
        this.buildImage = buildImage;
        this.cleanup = cleanup;
        this.cleanupDangling = cleanupDangling;
        this.push = push;
        this.taskListener = taskListener;
    }

    /**
     * Master-side builder. Resolves build variables and credentials before the callable
     * is shipped to the remote node (the Run itself is not serialized).
     */
    @SuppressFBWarnings(value = "UWF_FIELD_NOT_INITIALIZED_IN_CONSTRUCTOR")
    public static class Builder {
        private YADockerConnector connector;
        private DockerBuildImage buildImage;
        private boolean cleanup;
        private boolean cleanupDangling;
        private boolean push;
        private TaskListener taskListener;
        private Run run;

        public Builder() {
        }

        public Builder withConnector(@Nonnull YADockerConnector connector) {
            this.connector = connector;
            return this;
        }

        // Defensive copy: the original DockerBuildImage is left untouched.
        public Builder withBuildImage(@Nonnull DockerBuildImage that) {
            this.buildImage = new DockerBuildImage(that);
            return this;
        }

        public Builder withTaskListener(@Nonnull TaskListener taskListener) {
            this.taskListener = taskListener;
            return this;
        }

        public Builder withRun(@Nonnull Run run) {
            this.run = run;
            return this;
        }

        public Builder withCleanAll(boolean cleanup) {
            this.cleanup = cleanup;
            return this;
        }

        public Builder withCleanupDangling(boolean cleanupDangling) {
            this.cleanupDangling = cleanupDangling;
            return this;
        }

        public Builder withPushAll(boolean push) {
            this.push = push;
            return this;
        }

        /**
         * Validates required fields, expands tag variables against the Run, and resolves
         * registry credentials (only if none were set) — all on the master side.
         *
         * @throws IllegalStateException if run/taskListener/connector/buildImage is missing
         */
        public DockerImageComboStepFileCallable build() throws IOException, InterruptedException {
            if (isNull(run) || isNull(taskListener) || isNull(connector) || isNull(buildImage)) {
                throw new IllegalStateException("Specify vars!");
            }
            // if something should be resolved on master side do it here
            final List<String> tags = buildImage.getTags();
            final ArrayList<String> expandedTags = new ArrayList<>(tags.size());
            for (String tag : tags) {
                expandedTags.add(VariableUtils.resolveVar(tag, run, taskListener));
            }
            buildImage.setTags(expandedTags);

            if (isNull(buildImage.getAuthConfigurations())) {
                buildImage.resolveCreds();
            }

            return new DockerImageComboStepFileCallable(
                    connector,
                    buildImage,
                    cleanup,
                    cleanupDangling,
                    push,
                    taskListener
            );
        }
    }

    public static Builder newDockerImageComboStepFileCallableBuilder() {
        return new Builder();
    }

    /**
     * Remote entry point: opens a docker client from the connector and delegates.
     * Any exception is rethrown via Throwables.propagate, so the trailing
     * {@code return null} is unreachable in practice.
     */
    public DockerImageComboStepResponse invoke(File f, VirtualChannel channel) throws IOException {
        PrintStream llog = taskListener.getLogger();
        llog.println("Creating connection to docker daemon...");

        try (DockerClient client = connector.getClient()) {
            return invoke(client);
        } catch (Exception ex) {
            Throwables.propagate(ex);
        }

        return null;
    }

    /**
     * less indents
     *
     * Build -> re-tag -> (optionally) push. Never throws: all failures are captured in
     * the response (success flag, message, stack trace). Cleanup always runs in finally.
     */
    private DockerImageComboStepResponse invoke(DockerClient client) throws AbortException, InterruptedException {
        PrintStream llog = taskListener.getLogger();
        DockerImageComboStepResponse response = new DockerImageComboStepResponse();
        String imageId = null;
        MyBuildImageResultCallback imageResultCallback = new MyBuildImageResultCallback(llog);
        List<String> builtImages = new ArrayList<>();
        try {
            // build image
            BuildImageCmd buildImageCmd = client.buildImageCmd();
            buildImage.fillSettings(buildImageCmd);
            llog.println("Building image... ");
            imageId = buildImageCmd.exec(imageResultCallback).awaitImageId();
            llog.println("Build done.");
            if (isEmpty(imageId)) {
                throw new AbortException("Built image is empty or null!");
            }

            // re-tag according to buildImage config
            for (String tag : buildImage.getTagsNormalised()) {
                NameParser.ReposTag reposTag = NameParser.parseRepositoryTag(tag);
                llog.printf("Adding additional tag '%s:%s'...%n", reposTag.repos, reposTag.tag);
                // no need to remove before
                client.tagImageCmd(imageId, reposTag.repos, reposTag.tag)
                        .exec();
                llog.printf("Added additional tag '%s:%s'.%n", reposTag.repos, reposTag.tag);
                builtImages.add(String.format("%s:%s", reposTag.repos, reposTag.tag));
            }

            // push
            if (push) {
                llog.println("Pushing all tagged images...");
                for (String tag : buildImage.getTagsNormalised()) {
                    try {
                        llog.println("Pushing '" + tag + "'...");
                        PushImageCmd pushImageCmd = client.pushImageCmd(tag);
                        final AuthConfigurations autConfigs = buildImage.getAuthConfigurations();
                        if (nonNull(autConfigs)) {
                            AuthConfig authConfig = getAuthConfig(tag, autConfigs);
                            if (nonNull(authConfig)) {
                                pushImageCmd.withAuthConfig(authConfig);
                            }
                        }
                        pushImageCmd.exec(new MyPushImageResultCallback())
                                .awaitCompletion();
                        llog.println("Pushed '" + tag + "'.");
                    } catch (Exception ex) {
                        taskListener.error("Can't push " + tag + " " + ex.getMessage());
                        throw ex;
                    }
                }
            }
            response.setSuccess(true);
        } catch (Throwable t) {
            response.setSuccess(false);
            llog.println(t.getMessage());
            response.setErrorMessage(t.getMessage());
            response.setErrorTrace(ExceptionUtils.getFullStackTrace(t));
        } finally {
            // imageId may be null when the build failed early; invokeCleanup skips empty entries.
            builtImages.add(imageId);
            response.setImages(builtImages);
            response.setContainers(imageResultCallback.getContainers());
            invokeCleanup(client, builtImages, imageResultCallback.getContainers());
        }

        return response;
    }

    /**
     * Try to clean as much as we can without throwing errors.
     * First removes dangling layer images tracked during the build (if enabled), then
     * removes the built/tagged images themselves (if full cleanup is enabled).
     */
    private void invokeCleanup(DockerClient client, List<String> builtImages, @Nonnull Set<String> containers) {
        PrintStream llog = taskListener.getLogger();
        if (cleanupDangling) {
            for (String containerId : containers) {
                try {
                    client.removeImageCmd(containerId)
                            .exec();
                    llog.printf("Removed dangling layer image %s.%n", containerId);
                    LOG.debug("Removed dangling layer image '{}'", containerId);
                } catch (NotFoundException | ConflictException ex) {
                    // ignore: already gone or still referenced
                } catch (Throwable ex) {
                    //taskListener.error("Can't remove dangling layer image " + containerId + ".");
                    LOG.error("Can't remove dangling layer image " + containerId, ex);
                }
            }
        }

        if (!cleanup) {
            llog.println("Skipping cleanup.");
            return;
        } else {
            llog.println("Running cleanup...");
        }

        for (String image : builtImages) {
            // builtImages may contain a null/empty entry (failed build) — skip those.
            if (isNotEmpty(image)) {
                llog.println("Removing built image " + image);
                try {
                    client.removeImageCmd(image)
                            .exec();
                } catch (NotFoundException ex) {
                    LOG.trace("Image '{}' already doesn't exist.", image);
                } catch (Throwable ex) {
                    taskListener.error("Can't remove image" + ex.getMessage());
                    //ignore as it cleanup
                }
            }
        }
    }

    /**
     * Mirrors the docker build output to the job log and parses it to track
     * intermediate container/layer ids for later dangling-image cleanup.
     */
    private static class MyBuildImageResultCallback extends BuildImageResultCallback {
        private static final String RUNNING_IN = "---> Running in";
        private static final String IN = "--->";
        private static final String REMOVING = "Removing intermediate container";
        private static final String BUILT = "Successfully built";

        private final PrintStream llog;
        // Ids harvested from the build stream; entries are removed again when docker
        // reports them as removed or as the final built image.
        private final Set<String> containers = new HashSet<>();

        MyBuildImageResultCallback(PrintStream llog) {
            this.llog = llog;
        }

        public Set<String> getContainers() {
            return containers;
        }

        public void onNext(BuildResponseItem item) {
            String text = item.getStream();
            if (nonNull(text)) {
                llog.print(text);
                LOG.trace(text);
                String s = StringUtils.trimToNull(StringUtils.chomp(text));
                if (nonNull(s)) {
                    checkContainer(s);
                }
            }
            super.onNext(item);
        }

        /**
         * Docker can't cleanup dangling images https://github.com/moby/moby/issues/37311
         * Track all containers from input stream.
         */
        private void checkContainer(String text) {
            String trimmed = text.trim();
            if (trimmed.startsWith(RUNNING_IN)) {
                String container = trimmed.replace(RUNNING_IN, "").trim();
                containers.add(container);
            } else if (trimmed.startsWith(IN)) {
                // exclude ---> Package gcc-c++.x86_64 0:4.8.5-28.el7_5.1 will be installed
                // NOTE(review): length == 12 is checked on the whole trimmed line
                // (which still includes the 4-char "--->" prefix), not on the id after
                // stripping it — verify this matches the intended 12-char short-id check.
                if (trimmed.length() == 12 && !trimmed.contains(" ")) {
                    String container = trimmed.replace(IN, "").trim();
                    containers.add(container);
                }
            } else if (trimmed.contains(REMOVING)) {
                String container = trimmed.replace(REMOVING, "").trim();
                containers.remove(container);
            } else if (trimmed.contains(BUILT)) {
                String container = trimmed.replace(BUILT, "").trim();
                containers.remove(container);
            }
        }
    }

    /** Forwards push progress items to the job's TaskListener. */
    private class MyPushImageResultCallback extends PushImageResultCallback {
        @Override
        public void onNext(PushResponseItem item) {
            printResponseItemToListener(taskListener, item);
            super.onNext(item);
        }
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.prestosql.operator.scalar;

import com.google.common.collect.ImmutableList;
import io.airlift.slice.Slice;
import io.prestosql.metadata.FunctionKind;
import io.prestosql.metadata.MetadataManager;
import io.prestosql.metadata.Signature;
import io.prestosql.operator.DriverYieldSignal;
import io.prestosql.operator.project.PageProcessor;
import io.prestosql.spi.Page;
import io.prestosql.spi.block.ArrayBlock;
import io.prestosql.spi.block.Block;
import io.prestosql.spi.block.BlockBuilder;
import io.prestosql.spi.block.DictionaryBlock;
import io.prestosql.spi.type.ArrayType;
import io.prestosql.spi.type.Type;
import io.prestosql.sql.gen.ExpressionCompiler;
import io.prestosql.sql.gen.PageFunctionCompiler;
import io.prestosql.sql.relational.CallExpression;
import io.prestosql.sql.relational.RowExpression;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;
import org.openjdk.jmh.runner.options.VerboseMode;
import org.openjdk.jmh.runner.options.WarmupMode;

import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;

import static io.airlift.slice.Slices.utf8Slice;
import static io.prestosql.block.BlockAssertions.createSlicesBlock;
import static io.prestosql.memory.context.AggregatedMemoryContext.newSimpleAggregatedMemoryContext;
import static io.prestosql.spi.function.OperatorType.SUBSCRIPT;
import static io.prestosql.spi.type.BigintType.BIGINT;
import static io.prestosql.spi.type.DoubleType.DOUBLE;
import static io.prestosql.spi.type.VarcharType.createUnboundedVarcharType;
import static io.prestosql.sql.relational.Expressions.constant;
import static io.prestosql.sql.relational.Expressions.field;
import static io.prestosql.testing.TestingConnectorSession.SESSION;

/**
 * JMH benchmark for the array subscript operator ({@code array[index]}).
 *
 * <p>Each invocation compiles a {@link PageProcessor} that projects every element of
 * an array column via {@code arraySize} subscript call expressions, then measures
 * processing one page of {@code POSITIONS} rows. The {@code name} parameter selects
 * the element representation being subscripted (fixed-width doubles, variable-width
 * varchars, dictionary-encoded varchars, or nested arrays).
 */
@SuppressWarnings("MethodMayBeStatic")
@State(Scope.Thread)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@Fork(2)
@Warmup(iterations = 10, time = 500, timeUnit = TimeUnit.MILLISECONDS)
@Measurement(iterations = 10, time = 500, timeUnit = TimeUnit.MILLISECONDS)
@BenchmarkMode(Mode.AverageTime)
public class BenchmarkArraySubscript
{
    // Number of rows per benchmark page; also the per-invocation operation count.
    private static final int POSITIONS = 1024;

    /**
     * Runs the pre-compiled subscript projections over the benchmark page.
     *
     * @param data pre-built page and compiled page processor (see {@link BenchmarkData#setup()})
     * @return the processed output pages (materialized so the JIT cannot elide the work)
     */
    @Benchmark
    @OperationsPerInvocation(POSITIONS)
    public List<Optional<Page>> arraySubscript(BenchmarkData data)
    {
        return ImmutableList.copyOf(
                data.getPageProcessor().process(
                        SESSION,
                        new DriverYieldSignal(),
                        newSimpleAggregatedMemoryContext().newLocalMemoryContext(PageProcessor.class.getSimpleName()),
                        data.getPage()));
    }

    /**
     * Benchmark state: builds the input page and compiles the subscript projections
     * once per trial, parameterized on element representation and array size.
     */
    @SuppressWarnings("FieldMayBeFinal")
    @State(Scope.Thread)
    public static class BenchmarkData
    {
        // Element representation of the arrays being subscripted.
        @Param({"fix-width", "var-width", "dictionary", "array"})
        private String name = "dictionary";

        // Number of elements per array; also the number of subscript projections compiled.
        @Param({"1", "13"})
        private int arraySize = 13;

        private Page page;
        private PageProcessor pageProcessor;

        /**
         * Builds the element block for the chosen representation, wraps it into an
         * array block of {@code POSITIONS} rows, and compiles one subscript projection
         * per array position (1-based indices 1..arraySize).
         */
        @Setup
        public void setup()
        {
            MetadataManager metadata = MetadataManager.createTestMetadataManager();
            ExpressionCompiler compiler = new ExpressionCompiler(metadata, new PageFunctionCompiler(metadata, 0));
            ArrayType arrayType;
            Block elementsBlock;
            switch (name) {
                case "fix-width":
                    arrayType = new ArrayType(DOUBLE);
                    elementsBlock = createFixWidthValueBlock(POSITIONS, arraySize);
                    break;
                case "var-width":
                    arrayType = new ArrayType(createUnboundedVarcharType());
                    elementsBlock = createVarWidthValueBlock(POSITIONS, arraySize);
                    break;
                case "dictionary":
                    arrayType = new ArrayType(createUnboundedVarcharType());
                    elementsBlock = createDictionaryValueBlock(POSITIONS, arraySize);
                    break;
                case "array":
                    // Nested case: each element is itself an array of varchars.
                    arrayType = new ArrayType(new ArrayType(createUnboundedVarcharType()));
                    elementsBlock = createArrayBlock(POSITIONS * arraySize, createVarWidthValueBlock(POSITIONS, arraySize));
                    break;
                default:
                    throw new UnsupportedOperationException();
            }
            Block block = createArrayBlock(POSITIONS, elementsBlock);
            ImmutableList.Builder<RowExpression> projectionsBuilder = ImmutableList.builder();
            // Signature of the SQL subscript operator: element_type subscript(array_type, bigint).
            Signature signature = new Signature(
                    "$operator$" + SUBSCRIPT.name(),
                    FunctionKind.SCALAR,
                    arrayType.getElementType().getTypeSignature(),
                    arrayType.getTypeSignature(),
                    BIGINT.getTypeSignature());
            for (int i = 0; i < arraySize; i++) {
                // SQL array subscripts are 1-based, hence i + 1.
                projectionsBuilder.add(new CallExpression(
                        signature,
                        arrayType.getElementType(),
                        ImmutableList.of(field(0, arrayType), constant((long) i + 1, BIGINT))));
            }
            ImmutableList<RowExpression> projections = projectionsBuilder.build();
            pageProcessor = compiler.compilePageProcessor(Optional.empty(), projections).get();
            page = new Page(block);
        }

        public PageProcessor getPageProcessor()
        {
            return pageProcessor;
        }

        public Page getPage()
        {
            return page;
        }

        /**
         * Wraps {@code elementsBlock} into an array block with equal-sized,
         * contiguous arrays (no nulls); array size is inferred from the element count.
         */
        private static Block createArrayBlock(int positionCount, Block elementsBlock)
        {
            int[] offsets = new int[positionCount + 1];
            int arraySize = elementsBlock.getPositionCount() / positionCount;
            for (int i = 0; i < offsets.length; i++) {
                offsets[i] = arraySize * i;
            }
            return ArrayBlock.fromElementBlock(positionCount, Optional.empty(), offsets, elementsBlock);
        }

        // Builds positionCount * mapSize random DOUBLE values.
        // NOTE(review): the parameter is named mapSize but is used as the array size here.
        private static Block createFixWidthValueBlock(int positionCount, int mapSize)
        {
            BlockBuilder valueBlockBuilder = DOUBLE.createBlockBuilder(null, positionCount * mapSize);
            for (int i = 0; i < positionCount * mapSize; i++) {
                DOUBLE.writeDouble(valueBlockBuilder, ThreadLocalRandom.current().nextDouble());
            }
            return valueBlockBuilder.build();
        }

        // Builds positionCount * mapSize random varchar values of length 5..9.
        private static Block createVarWidthValueBlock(int positionCount, int mapSize)
        {
            Type valueType = createUnboundedVarcharType();
            BlockBuilder valueBlockBuilder = valueType.createBlockBuilder(null, positionCount * mapSize);
            for (int i = 0; i < positionCount * mapSize; i++) {
                int wordLength = ThreadLocalRandom.current().nextInt(5, 10);
                valueType.writeSlice(valueBlockBuilder, utf8Slice(randomString(wordLength)));
            }
            return valueBlockBuilder.build();
        }

        /**
         * Builds a dictionary-encoded varchar block: a dictionary covering ~82% of
         * the value count, with ids drawn uniformly at random.
         */
        private static Block createDictionaryValueBlock(int positionCount, int mapSize)
        {
            double distinctRatio = 0.82;
            int dictionarySize = (int) (positionCount * mapSize * distinctRatio);
            List<String> dictionaryStrings = new ArrayList<>(dictionarySize);
            for (int i = 0; i < dictionarySize; i++) {
                int wordLength = ThreadLocalRandom.current().nextInt(5, 10);
                dictionaryStrings.add(randomString(wordLength));
            }
            Block dictionaryBlock = createSliceArrayBlock(dictionaryStrings);
            int[] keyIds = new int[positionCount * mapSize];
            for (int i = 0; i < keyIds.length; i++) {
                keyIds[i] = ThreadLocalRandom.current().nextInt(0, dictionarySize);
            }
            return new DictionaryBlock(dictionaryBlock, keyIds);
        }

        // Random lowercase ASCII string of the given length.
        private static String randomString(int length)
        {
            String symbols = "abcdefghijklmnopqrstuvwxyz";
            char[] chars = new char[length];
            for (int i = 0; i < length; i++) {
                chars[i] = symbols.charAt(ThreadLocalRandom.current().nextInt(symbols.length()));
            }
            return new String(chars);
        }

        private static Block createSliceArrayBlock(List<String> keys)
        {
            // last position is reserved for null
            Slice[] sliceArray = new Slice[keys.size() + 1];
            for (int i = 0; i < keys.size(); i++) {
                sliceArray[i] = utf8Slice(keys.get(i));
            }
            return createSlicesBlock(sliceArray);
        }
    }

    /**
     * Entry point: sanity-runs the benchmark once outside JMH, then launches the
     * full JMH run with individual warmup mode.
     */
    public static void main(String[] args)
            throws Throwable
    {
        // assure the benchmarks are valid before running
        BenchmarkData data = new BenchmarkData();
        data.setup();
        new BenchmarkArraySubscript().arraySubscript(data);

        Options options = new OptionsBuilder()
                .verbosity(VerboseMode.NORMAL)
                .warmupMode(WarmupMode.INDI)
                .include(".*" + BenchmarkArraySubscript.class.getSimpleName() + ".*")
                .build();
        new Runner(options).run();
    }
}
package org.upennapo.app.activity; import android.app.ActionBar; import android.app.FragmentTransaction; import android.content.Intent; import android.content.SharedPreferences; import android.content.pm.PackageManager; import android.net.Uri; import android.os.Bundle; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentActivity; import android.support.v4.app.FragmentManager; import android.support.v4.app.FragmentPagerAdapter; import android.support.v4.view.ViewPager; import android.util.Log; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.widget.Toast; import org.upennapo.app.R; import org.upennapo.app.fragment.BrotherStatusFragment; import org.upennapo.app.fragment.DirectoryFragment; import org.upennapo.app.fragment.LinkListFragment; import org.upennapo.app.fragment.WebFragment; public class MainActivity extends FragmentActivity implements ActionBar.TabListener { public static final int NUM_TABS = 5; private static final int NUM_TAPS_ACTIVATE = 10; private static final String EASTER_EGG_UNLOCKED = "2048_UNLOCKED"; private static final String TAG = "MainActivity"; /** * The {@link android.support.v4.view.PagerAdapter} that will provide * fragments for each of the sections. We use a * {@link android.support.v4.app.FragmentPagerAdapter} derivative, which * will keep every loaded fragment in memory. If this becomes too memory * intensive, it may be best to switch to a * {@link android.support.v4.app.FragmentStatePagerAdapter}. */ SectionsPagerAdapter mSectionsPagerAdapter; /** * The {@link ViewPager} that will host the section contents. */ ViewPager mViewPager; /** * Used to count the number of times the Helpful Links tab has been selected. * When mNumTaps == NUM_TAPS_ACTIVATE, trigger the easter egg activity. */ private int mNumTaps = 0; /** * Get the corresponding unselected tab icon ID for a tab. 
* * @param position for which to get the tab icon * @return the unselected tab icon's resource ID for the tab at position */ protected static int getPageIcon(int position) { int iconID = 0; switch (position) { case 0: // Brother Status iconID = R.drawable.ic_tab_user; break; case 1: // Calendar iconID = R.drawable.ic_tab_calendar_day; break; case 2: // Brother Directory iconID = R.drawable.ic_tab_people; break; case 3: // Pledge Directory iconID = R.drawable.ic_action_grow; break; case 4: // Helpful Links iconID = R.drawable.ic_tab_bookmark; break; } return iconID; } /** * Get the corresponding selected tab icon ID for a tab. * * @param position for which to get the tab icon * @return the selected tab icon's resource ID for the tab at position */ protected static int getSelectedPageIcon(int position) { int iconID = 0; switch (position) { case 0: // Brother Status iconID = R.drawable.ic_tab_user_selected; break; case 1: // Calendar iconID = R.drawable.ic_tab_calendar_day_selected; break; case 2: // Brother Directory iconID = R.drawable.ic_tab_people_selected; break; case 3: // Pledge Directory iconID = R.drawable.ic_action_grow_selected; break; case 4: // Helpful Links iconID = R.drawable.ic_tab_bookmark_selected; break; } return iconID; } @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); // Set up the action bar. final ActionBar actionBar = getActionBar(); assert actionBar != null; actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_TABS); // Create the adapter that will return a fragment for each of the three // primary sections of the app. mSectionsPagerAdapter = new SectionsPagerAdapter(getSupportFragmentManager()); // Set up the ViewPager with the sections adapter. 
mViewPager = (ViewPager) findViewById(R.id.pager); mViewPager.setAdapter(mSectionsPagerAdapter); mViewPager.setOffscreenPageLimit(NUM_TABS - 1); // When swiping between different sections, select the corresponding // tab. We can also use ActionBar.Tab#select() to do this if we have // a reference to the Tab. mViewPager.setOnPageChangeListener(new ViewPager.SimpleOnPageChangeListener() { @Override public void onPageSelected(int position) { actionBar.setSelectedNavigationItem(position); } }); // For each of the sections in the app, add a tab to the action bar. for (int i = 0; i < mSectionsPagerAdapter.getCount(); i++) { // Create a tab with text corresponding to the page title defined by // the adapter. Also specify this Activity object, which implements // the TabListener interface, as the callback (listener) for when // this tab is selected. actionBar.addTab( actionBar.newTab() .setIcon(getPageIcon(i)) .setTabListener(this) ); } } @Override public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu; this adds items to the action bar if it is present. getMenuInflater().inflate(R.menu.main, menu); // Enable the 2048 option if unlocked. SharedPreferences prefs = getSharedPreferences(getString(R.string.app_global_storage_key), MODE_PRIVATE); if (prefs.getBoolean(EASTER_EGG_UNLOCKED, false)) { menu.findItem(R.id.menu_play_2048).setVisible(true); } // Disable switch mode feature for active brothers. 
try { final String appVersion = getPackageManager().getPackageInfo(getPackageName(), 0).versionName; boolean isTestingMode = appVersion.endsWith("-DEBUG") || appVersion.endsWith("-beta"); if (!(isTestingMode || LoginActivity.isAlumLoggedIn(this))) { menu.findItem(R.id.menu_switch_mode).setVisible(false); } } catch (PackageManager.NameNotFoundException e) { Log.v(TAG, e.getMessage()); } return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { // Handle item selection switch (item.getItemId()) { case R.id.menu_play_2048: Intent play2048 = new Intent(this, EasterEggActivity.class); startActivity(play2048); return true; case R.id.menu_about_app: final String githubPageUrl = getString(R.string.menu_about_app_url); Intent openGithubPage = new Intent(Intent.ACTION_VIEW, Uri.parse(githubPageUrl)); startActivity(openGithubPage); return true; case R.id.menu_switch_mode: Intent switchMode = new Intent(this, AlumModeActivity.class); startActivity(switchMode); finish(); return true; case R.id.menu_switch_user: Intent openLoginScreen = new Intent(this, LoginActivity.class); openLoginScreen.putExtra(LoginActivity.LOGOUT_INTENT, true); startActivity(openLoginScreen); finish(); return true; default: return super.onOptionsItemSelected(item); } } @Override public void onTabSelected(ActionBar.Tab tab, FragmentTransaction fragmentTransaction) { // When the given tab is selected, switch to the corresponding page in // the ViewPager. int position = tab.getPosition(); // Whenever the Helpful Links tab is selected, we edge closer to activating the Easter Egg! 
if (position == 4) { updateEasterEggStatus(); } setTitle(mSectionsPagerAdapter.getPageTitle(position)); tab.setIcon(getSelectedPageIcon(position)); mViewPager.setCurrentItem(position); } @Override public void onTabUnselected(ActionBar.Tab tab, FragmentTransaction fragmentTransaction) { int position = tab.getPosition(); tab.setIcon(getPageIcon(position)); mViewPager.setCurrentItem(position); } @Override public void onTabReselected(ActionBar.Tab tab, FragmentTransaction fragmentTransaction) { } /** * Increments the number of times we've entered the last tab. If it reaches the requisite number, * we unlock the easter egg. */ private void updateEasterEggStatus() { SharedPreferences prefs = getSharedPreferences(getString(R.string.app_global_storage_key), MODE_PRIVATE); if (++mNumTaps == NUM_TAPS_ACTIVATE && !prefs.contains(EASTER_EGG_UNLOCKED)) { // Remember that we've unlocked the easter egg. SharedPreferences.Editor editor = prefs.edit(); editor.putBoolean(EASTER_EGG_UNLOCKED, true); editor.apply(); invalidateOptionsMenu(); // Show user unlock message. Toast t = Toast.makeText(this, R.string.apo_2048_unlock_msg, Toast.LENGTH_LONG); t.show(); } } /** * A dummy fragment representing a section of the app, but that simply * displays dummy text. */ public static class DummySectionFragment extends Fragment { /** * The fragment argument representing the section number for this * fragment. */ public DummySectionFragment() { } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { return inflater.inflate(R.layout.work_in_progress_view, container, false); } } /** * A {@link FragmentPagerAdapter} that returns a fragment corresponding to * one of the sections/tabs/pages. 
*/ public class SectionsPagerAdapter extends FragmentPagerAdapter { FragmentManager mManager; public SectionsPagerAdapter(FragmentManager fm) { super(fm); mManager = fm; } @Override public Fragment getItem(int position) { // For Fragments with AsyncTasks, attempt to retrieve the retained Fragment. If this // isn't possible, use the FragmentManager to attach it to a tag. switch (position) { case 0: // Brother Status return BrotherStatusFragment.newInstance(MainActivity.this); case 1: // Calendar WebView return WebFragment.newCalendarInstance(MainActivity.this); case 2: // Brother Directory return DirectoryFragment.newBrotherDirectoryInstance(MainActivity.this); case 3: // Pledge Directory return DirectoryFragment.newPledgeDirectoryInstance(MainActivity.this); case 4: // Helpful Links return LinkListFragment.newBrotherLinksInstance(MainActivity.this); default: // getItem is called to instantiate the fragment for the given page. // Return a DummySectionFragment (defined as a static inner class // below) with the page number as its lone argument. return new DummySectionFragment(); } } @Override public int getCount() { // Show total number of pages. return NUM_TABS; } @Override public CharSequence getPageTitle(int position) { String sectionName = ""; switch (position) { case 0: // Brother Status sectionName = getString(R.string.title_section1); break; case 1: // Calendar sectionName = getString(R.string.title_section2); break; case 2: // Brother Directory sectionName = getString(R.string.title_section3); break; case 3: // Pledge Directory sectionName = getString(R.string.title_section4); break; case 4: // Helpful Links sectionName = getString(R.string.title_section5); break; } return sectionName; } } }
// Copyright 2009 Victor Iacoban
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under
// the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
package bazaar4idea.ui;

import com.intellij.openapi.options.Configurable;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vcs.FilePath;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.uiDesigner.core.GridConstraints;
import com.intellij.uiDesigner.core.GridLayoutManager;
import com.intellij.uiDesigner.core.Spacer;
import bazaar4idea.command.BzrTagBranchCommand;
import bazaar4idea.data.BzrTagBranch;
import org.jetbrains.annotations.Nls;

import javax.swing.*;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.ResourceBundle;

/**
 * "Integrate" dialog for the Bazaar VCS plugin: lets the user pick a repository
 * root and one merge source — a branch, a tag, or an explicit revision (the three
 * radio buttons form a mutually exclusive group). Implements {@link Configurable}
 * so the IDE hosts it as a settings-style panel; the form itself is built by the
 * IntelliJ GUI Designer in {@code $$$setupUI$$$()}.
 */
public class BzrIntegrateDialog implements Configurable {

  private final Project project;

  // Form fields bound by the GUI Designer in $$$setupUI$$$().
  private JRadioButton revisionOption;
  private JTextField revisionTxt;
  private JRadioButton branchOption;
  private JRadioButton tagOption;
  private JComboBox branchSelector;
  private JComboBox tagSelector;
  private JPanel contentPanel;
  private BzrRepositorySelectorComponent repositorySelectorComponent;

  /**
   * Builds the dialog for the given VCS roots, wires repository-change and
   * radio-button listeners, and loads branches/tags for the initial repository.
   *
   * @param project current project
   * @param roots   candidate repository roots shown in the repository selector
   */
  public BzrIntegrateDialog(Project project, Collection<FilePath> roots) {
    this.project = project;
    repositorySelectorComponent.setRoots(pathsToFiles(roots));
    repositorySelectorComponent.setTitle("Select repository to integrate");
    repositorySelectorComponent.addActionListener(new ActionListener() {
      public void actionPerformed(ActionEvent e) {
        // Repository changed: reload the branch and tag lists.
        updateRepository();
      }
    });
    // One listener toggles the enabled state of all three input widgets.
    ChangeListener changeListener = new ChangeListener() {
      public void stateChanged(ChangeEvent e) {
        updateOptions();
      }
    };
    branchOption.addChangeListener(changeListener);
    tagOption.addChangeListener(changeListener);
    revisionOption.addChangeListener(changeListener);
    updateRepository();
  }

  /** @return the repository root currently selected in the dialog */
  public VirtualFile getRepository() {
    return repositorySelectorComponent.getRepository();
  }

  /** @return the chosen branch, or null when the branch option is not selected */
  public BzrTagBranch getBranch() {
    return branchOption.isSelected() ? (BzrTagBranch)branchSelector.getSelectedItem() : null;
  }

  /** @return the chosen tag, or null when the tag option is not selected */
  public BzrTagBranch getTag() {
    return tagOption.isSelected() ? (BzrTagBranch)tagSelector.getSelectedItem() : null;
  }

  /** @return the revision text, or null when the revision option is not selected */
  public String getRevision() {
    return revisionOption.isSelected() ? revisionTxt.getText() : null;
  }

  @Nls
  public String getDisplayName() {
    return null;
  }

  public Icon getIcon() {
    return null;
  }

  public String getHelpTopic() {
    return null;
  }

  public JComponent createComponent() {
    return contentPanel;
  }

  // Always report modified so the host dialog keeps Apply enabled.
  public boolean isModified() {
    return true;
  }

  // State is read via the getters above; nothing to do on apply/reset.
  public void apply() throws ConfigurationException {
  }

  public void reset() {
  }

  /** Reloads branch and tag combo boxes for the currently selected repository. */
  private void updateRepository() {
    VirtualFile repo = getRepository();
    loadBranches(repo);
    loadTags(repo);
  }

  /** Enables exactly the input widget matching the selected radio button. */
  private void updateOptions() {
    revisionTxt.setEnabled(revisionOption.isSelected());
    branchSelector.setEnabled(branchOption.isSelected());
    tagSelector.setEnabled(tagOption.isSelected());
  }

  // NOTE(review): listBranches()/listTags() run a Bazaar command; presumably fast,
  // but this happens on the EDT via listeners — verify it does not block the UI.
  private void loadBranches(VirtualFile root) {
    List<BzrTagBranch> branches = new BzrTagBranchCommand(project, root).listBranches();
    branchSelector.setModel(new DefaultComboBoxModel(branches.toArray()));
  }

  private void loadTags(VirtualFile root) {
    List<BzrTagBranch> tags = new BzrTagBranchCommand(project, root).listTags();
    tagSelector.setModel(new DefaultComboBoxModel(tags.toArray()));
  }

  /** Maps file paths to their virtual files (entries may be null for dead paths). */
  private List<VirtualFile> pathsToFiles(Collection<FilePath> paths) {
    List<VirtualFile> files = new LinkedList<VirtualFile>();
    for (FilePath path : paths) {
      files.add(path.getVirtualFile());
    }
    return files;
  }

  public void disposeUIResources() {
  }

  {
// GUI initializer generated by IntelliJ IDEA GUI Designer
// >>> IMPORTANT!! <<<
// DO NOT EDIT OR ADD ANY CODE HERE!
    $$$setupUI$$$();
  }

  /** Method generated by IntelliJ IDEA GUI Designer
   * >>> IMPORTANT!! <<<
   * DO NOT edit this method OR call it in your code!
   * @noinspection ALL
   */
  private void $$$setupUI$$$() {
    contentPanel = new JPanel();
    contentPanel.setLayout(new GridLayoutManager(3, 1, new Insets(0, 0, 0, 0), -1, -1));
    final JPanel panel1 = new JPanel();
    panel1.setLayout(new GridLayoutManager(4, 2, new Insets(0, 0, 0, 0), -1, -1));
    contentPanel.add(panel1,
        new GridConstraints(1, 0, 1, 1, GridConstraints.ANCHOR_CENTER, GridConstraints.FILL_BOTH,
            GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW,
            GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, null, null,
            null, 0, false));
    panel1.setBorder(BorderFactory.createTitledBorder(
        ResourceBundle.getBundle("org/emergent/bzr4j/intellij/BzrVcsMessages")
            .getString("dialog.integrate.merge_from")));
    branchOption = new JRadioButton();
    branchOption.setSelected(true);
    branchOption.setText("Branch");
    branchOption.setMnemonic('B');
    branchOption.setDisplayedMnemonicIndex(0);
    panel1.add(branchOption,
        new GridConstraints(0, 0, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE,
            GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW,
            GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
    tagOption = new JRadioButton();
    tagOption.setText("Tag");
    tagOption.setMnemonic('T');
    tagOption.setDisplayedMnemonicIndex(0);
    panel1.add(tagOption,
        new GridConstraints(1, 0, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE,
            GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW,
            GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
    branchSelector = new JComboBox();
    branchSelector.setEditable(true);
    branchSelector.setEnabled(true);
    panel1.add(branchSelector,
        new GridConstraints(0, 1, 1, 1, GridConstraints.ANCHOR_WEST,
            GridConstraints.FILL_HORIZONTAL, GridConstraints.SIZEPOLICY_CAN_GROW,
            GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
    tagSelector = new JComboBox();
    tagSelector.setEnabled(false);
    panel1.add(tagSelector,
        new GridConstraints(1, 1, 1, 1, GridConstraints.ANCHOR_WEST,
            GridConstraints.FILL_HORIZONTAL, GridConstraints.SIZEPOLICY_CAN_GROW,
            GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
    final Spacer spacer1 = new Spacer();
    panel1.add(spacer1,
        new GridConstraints(3, 1, 1, 1, GridConstraints.ANCHOR_CENTER,
            GridConstraints.FILL_HORIZONTAL, GridConstraints.SIZEPOLICY_WANT_GROW, 1, null, null,
            null, 0, false));
    revisionTxt = new JTextField();
    revisionTxt.setEnabled(false);
    panel1.add(revisionTxt,
        new GridConstraints(2, 1, 1, 1, GridConstraints.ANCHOR_WEST,
            GridConstraints.FILL_HORIZONTAL, GridConstraints.SIZEPOLICY_WANT_GROW,
            GridConstraints.SIZEPOLICY_FIXED, null, new Dimension(150, -1), null, 0, false));
    revisionOption = new JRadioButton();
    revisionOption.setSelected(false);
    revisionOption.setText("Revision");
    revisionOption.setMnemonic('R');
    revisionOption.setDisplayedMnemonicIndex(0);
    panel1.add(revisionOption,
        new GridConstraints(2, 0, 1, 1, GridConstraints.ANCHOR_WEST, GridConstraints.FILL_NONE,
            GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW,
            GridConstraints.SIZEPOLICY_FIXED, null, null, null, 0, false));
    final Spacer spacer2 = new Spacer();
    contentPanel.add(spacer2,
        new GridConstraints(2, 0, 1, 1, GridConstraints.ANCHOR_CENTER,
            GridConstraints.FILL_VERTICAL, 1, GridConstraints.SIZEPOLICY_WANT_GROW, null, null,
            null, 0, false));
    repositorySelectorComponent = new BzrRepositorySelectorComponent();
    contentPanel.add(repositorySelectorComponent.$$$getRootComponent$$$(),
        new GridConstraints(0, 0, 1, 1, GridConstraints.ANCHOR_CENTER,
            GridConstraints.FILL_HORIZONTAL,
            GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_WANT_GROW,
            GridConstraints.SIZEPOLICY_CAN_SHRINK | GridConstraints.SIZEPOLICY_CAN_GROW, null, null,
            null, 0, false));
    ButtonGroup buttonGroup;
    buttonGroup = new ButtonGroup();
    buttonGroup.add(revisionOption);
    buttonGroup.add(branchOption);
    buttonGroup.add(tagOption);
  }

  /** @noinspection ALL */
  public JComponent $$$getRootComponent$$$() {
    return contentPanel;
  }
}
// Copyright 2000-2017 JetBrains s.r.o.
// Use of this source code is governed by the Apache 2.0 license that can be
// found in the LICENSE file.
package com.intellij.codeInspection.naming;

import com.intellij.codeInspection.LocalInspectionTool;
import com.intellij.codeInspection.LocalQuickFix;
import com.intellij.codeInspection.ProblemsHolder;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.InvalidDataException;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiNameIdentifierOwner;
import com.intellij.psi.SyntheticElement;
import com.intellij.ui.CheckBoxList;
import com.intellij.ui.CheckBoxListListener;
import com.intellij.ui.components.JBScrollPane;
import com.intellij.util.ObjectUtils;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.UIUtil;
import com.intellij.util.xmlb.XmlSerializationException;
import com.intellij.util.xmlb.XmlSerializer;
import org.jdom.Element;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.awt.*;
import java.util.*;
import java.util.List;
import java.util.function.Consumer;

/**
 * Abstract class for naming convention inspections. Base inspection expects {@link NamingConvention} extensions which are processed one by one,
 * the first which returns true from {@link NamingConvention#isApplicable(PsiNameIdentifierOwner)}, wins and provides bean to check the member name.
 *
 * Provide {@link #createRenameFix()} to register rename fix.
 * Register {@link AbstractNamingConventionMerger} to provide settings migration from multiple inspections to compound one
 */
public abstract class AbstractNamingConventionInspection<T extends PsiNameIdentifierOwner> extends LocalInspectionTool {
  private static final Logger LOG = Logger.getInstance(AbstractNamingConventionInspection.class);

  // Convention short name -> convention / its current settings bean. LinkedHashMap
  // preserves registration order, which drives the order in the options UI.
  private final Map<String, NamingConvention<T>> myNamingConventions = new LinkedHashMap<>();
  private final Map<String, NamingConventionBean> myNamingConventionBeans = new LinkedHashMap<>();
  // Settings elements whose convention extension is not (or no longer) loaded;
  // kept verbatim so writeSettings() can round-trip them without data loss.
  private final Map<String, Element> myUnloadedElements = new LinkedHashMap<>();
  // Short names of conventions currently switched off.
  private final Set<String> myDisabledShortNames = new HashSet<>();
  // Convention used when a bean opts into "inherit default settings"; may be null.
  @Nullable private final String myDefaultConventionShortName;

  /**
   * Registers all convention extensions and their default beans, then derives the
   * initial disabled set from each convention's enabled-by-default flag.
   * Duplicate short names are logged as errors (last registration wins in the map).
   */
  protected AbstractNamingConventionInspection(Iterable<NamingConvention<T>> extensions, @Nullable final String defaultConventionShortName) {
    for (NamingConvention<T> convention : extensions) {
      String shortName = convention.getShortName();
      NamingConvention<T> oldConvention = myNamingConventions.put(shortName, convention);
      if (oldConvention != null) {
        LOG.error("Duplicated short names: " + shortName + " first: " + oldConvention + "; second: " + convention);
      }
      myNamingConventionBeans.put(shortName, convention.createDefaultBean());
    }
    initDisabledState();
    myDefaultConventionShortName = defaultConventionShortName;
  }

  /** @return a rename quick fix to attach to reported problems, or null for none */
  @Nullable
  protected abstract LocalQuickFix createRenameFix();

  // Resets the disabled set to each convention's default enabled state.
  private void initDisabledState() {
    myDisabledShortNames.clear();
    for (NamingConvention<T> convention : myNamingConventions.values()) {
      if (!convention.isEnabledByDefault()) {
        myDisabledShortNames.add(convention.getShortName());
      }
    }
  }

  /** @return the current settings bean for the given convention short name */
  public NamingConventionBean getNamingConventionBean(String shortName) {
    return myNamingConventionBeans.get(shortName);
  }

  /** @return short names of all registered conventions (historical tool names) */
  public Set<String> getOldToolNames() {
    return myNamingConventions.keySet();
  }

  /** Builds the problem message for {@code name} using the convention's current bean. */
  @NotNull
  protected String createErrorMessage(String name, String shortName) {
    return myNamingConventions.get(shortName).createErrorMessage(name, myNamingConventionBeans.get(shortName));
  }

  /**
   * Loads per-convention settings from {@code <extension name=... enabled=...>} children.
   * Unknown conventions are stashed in {@link #myUnloadedElements} for round-tripping;
   * known ones get their bean deserialized and their pattern recompiled.
   */
  @Override
  public void readSettings(@NotNull Element node) {
    initDisabledState();
    for (Element extension : node.getChildren("extension")) {
      String shortName = extension.getAttributeValue("name");
      if (shortName == null) continue;
      NamingConventionBean conventionBean = myNamingConventionBeans.get(shortName);
      if (conventionBean == null) {
        // Extension not loaded in this IDE instance: preserve its raw settings.
        myUnloadedElements.put(shortName, extension);
        continue;
      }
      try {
        XmlSerializer.deserializeInto(conventionBean, extension);
        conventionBean.initPattern();
      }
      catch (XmlSerializationException e) {
        throw new InvalidDataException(e);
      }
      String enabled = extension.getAttributeValue("enabled");
      if (Boolean.parseBoolean(enabled)) {
        myDisabledShortNames.remove(shortName);
      }
    }
  }

  /**
   * Writes per-convention settings, sorted by short name for stable output.
   * Beans equal to their defaults are not serialized; an enabled convention with
   * default settings is omitted entirely. Unloaded elements are written back as-is.
   */
  @Override
  public void writeSettings(@NotNull Element node) {
    Set<String> shortNames = new TreeSet<>(myNamingConventions.keySet());
    shortNames.addAll(myUnloadedElements.keySet());
    for (String shortName : shortNames) {
      NamingConvention<T> convention = myNamingConventions.get(shortName);
      if (convention == null) {
        Element element = myUnloadedElements.get(shortName);
        if (element != null) node.addContent(element.clone());
        continue;
      }
      boolean disabled = myDisabledShortNames.contains(shortName);
      Element element = new Element("extension")
        .setAttribute("name", shortName)
        .setAttribute("enabled", disabled ? "false" : "true");
      NamingConventionBean conventionBean = myNamingConventionBeans.get(shortName);
      if (!convention.createDefaultBean().equals(conventionBean)) {
        XmlSerializer.serializeInto(conventionBean, element);
      }
      else {
        // Enabled + default settings is the implicit state: write nothing.
        if (disabled) continue;
      }
      node.addContent(element);
    }
  }

  /** @return true unless the convention with this short name is disabled */
  public boolean isConventionEnabled(String shortName) {
    return !myDisabledShortNames.contains(shortName);
  }

  /**
   * Checks {@code member}'s name and registers a problem on its name identifier
   * (falling back to the member itself) when a convention is violated.
   * Synthetic elements are skipped; the rename fix is attached only on-the-fly.
   */
  protected void checkName(@NotNull T member, @NotNull String name, @NotNull ProblemsHolder holder) {
    if (member instanceof SyntheticElement) return;
    checkName(member, shortName -> {
      LocalQuickFix[] fixes;
      if (holder.isOnTheFly()) {
        LocalQuickFix fix = createRenameFix();
        fixes = fix != null ? new LocalQuickFix[]{fix} : null;
      }
      else {
        fixes = null;
      }
      PsiElement element = ObjectUtils.notNull(member.getNameIdentifier(), member);
      if (!element.isPhysical()) {
        element = element.getNavigationElement();
      }
      holder.registerProblem(element, createErrorMessage(name, shortName), fixes);
    });
  }

  /**
   * Core check: the first applicable convention wins. If its bean inherits default
   * settings, the check is delegated to the default convention (and suppressed when
   * that one is disabled). On violation, {@code errorRegister} receives the short
   * name of the convention that actually performed the check.
   */
  protected void checkName(@NotNull T member, @NotNull Consumer<? super String> errorRegister) {
    for (NamingConvention<T> namingConvention : myNamingConventions.values()) {
      if (namingConvention.isApplicable(member)) {
        String shortName = namingConvention.getShortName();
        if (myDisabledShortNames.contains(shortName)) {
          break;
        }
        NamingConventionBean activeBean = myNamingConventionBeans.get(shortName);
        if (activeBean instanceof NamingConventionWithFallbackBean && ((NamingConventionWithFallbackBean)activeBean).isInheritDefaultSettings()) {
          LOG.assertTrue(myDefaultConventionShortName != null, activeBean + " expects that default conversion is configured");
          shortName = myDefaultConventionShortName;
          //disabled when fallback is disabled
          if (myDisabledShortNames.contains(shortName)) {
            break;
          }
          activeBean = myNamingConventionBeans.get(shortName);
          namingConvention = myNamingConventions.get(shortName);
        }
        if (!namingConvention.isValid(member, activeBean)) {
          errorRegister.accept(shortName);
        }
        break;
      }
    }
  }

  /**
   * Builds the options UI: a checkbox list of conventions (reversed registration
   * order) on the left, with a card panel on the right showing the selected
   * convention's own options panel. Toggling a checkbox enables/disables the
   * convention and greys out its panel.
   */
  @Nullable
  @Override
  public JComponent createOptionsPanel() {
    JPanel panel = new JPanel(new BorderLayout(JBUI.scale(2), JBUI.scale(2)));
    CardLayout layout = new CardLayout();
    JPanel descriptionPanel = new JPanel(layout);
    descriptionPanel.setBorder(JBUI.Borders.empty(2));
    panel.add(descriptionPanel, BorderLayout.CENTER);
    CheckBoxList<NamingConvention<T>> list = new CheckBoxList<>();
    list.setBorder(JBUI.Borders.empty(2));
    List<NamingConvention<T>> values = new ArrayList<>(myNamingConventions.values());
    Collections.reverse(values);
    for (NamingConvention<T> convention : values) {
      String shortName = convention.getShortName();
      list.addItem(convention, convention.getElementDescription(), !myDisabledShortNames.contains(shortName));
      descriptionPanel.add(myNamingConventionBeans.get(shortName).createOptionsPanel(), shortName);
    }
    list.addListSelectionListener((e) -> {
      int selectedIndex = list.getSelectedIndex();
      NamingConvention<T> item = list.getItemAt(selectedIndex);
      if (item != null) {
        String shortName = item.getShortName();
        layout.show(descriptionPanel, shortName);
        UIUtil.setEnabled(descriptionPanel, list.isItemSelected(selectedIndex), true);
      }
    });
    list.setCheckBoxListListener(new CheckBoxListListener() {
      @Override
      public void checkBoxSelectionChanged(int index, boolean value) {
        NamingConvention<T> convention = values.get(index);
        setEnabled(value, convention.getShortName());
        UIUtil.setEnabled(descriptionPanel, value, true);
      }
    });
    list.setSelectedIndex(0);
    panel.add(new JBScrollPane(list), BorderLayout.WEST);
    return panel;
  }

  /** Enables ({@code value == true}) or disables the convention with the given short name. */
  public void setEnabled(boolean value, String conventionShortName) {
    if (value) {
      myDisabledShortNames.remove(conventionShortName);
    }
    else {
      myDisabledShortNames.add(conventionShortName);
    }
  }
}
/* * Copyright (c) 2016, Oracle and/or its affiliates. * * All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, are * permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this list of * conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, this list of * conditions and the following disclaimer in the documentation and/or other materials provided * with the distribution. * * 3. Neither the name of the copyright holder nor the names of its contributors may be used to * endorse or promote products derived from this software without specific prior written * permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS * OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE * COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE * GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. 
 */
package com.oracle.truffle.llvm.nodes.impl.memory;

import com.oracle.truffle.api.dsl.NodeChild;
import com.oracle.truffle.api.dsl.NodeChildren;
import com.oracle.truffle.api.dsl.NodeField;
import com.oracle.truffle.api.dsl.Specialization;
import com.oracle.truffle.api.frame.VirtualFrame;
import com.oracle.truffle.api.interop.ForeignAccess;
import com.oracle.truffle.api.interop.Message;
import com.oracle.truffle.api.interop.TruffleObject;
import com.oracle.truffle.api.interop.UnknownIdentifierException;
import com.oracle.truffle.api.interop.UnsupportedMessageException;
import com.oracle.truffle.api.interop.UnsupportedTypeException;
import com.oracle.truffle.api.nodes.ExplodeLoop;
import com.oracle.truffle.api.nodes.Node;
import com.oracle.truffle.llvm.nodes.base.LLVMNode;
import com.oracle.truffle.llvm.nodes.impl.base.LLVMAddressNode;
import com.oracle.truffle.llvm.nodes.impl.base.LLVMFunctionNode;
import com.oracle.truffle.llvm.nodes.impl.base.floating.LLVM80BitFloatNode;
import com.oracle.truffle.llvm.nodes.impl.base.floating.LLVMDoubleNode;
import com.oracle.truffle.llvm.nodes.impl.base.floating.LLVMFloatNode;
import com.oracle.truffle.llvm.nodes.impl.base.integers.LLVMI16Node;
import com.oracle.truffle.llvm.nodes.impl.base.integers.LLVMI1Node;
import com.oracle.truffle.llvm.nodes.impl.base.integers.LLVMI32Node;
import com.oracle.truffle.llvm.nodes.impl.base.integers.LLVMI64Node;
import com.oracle.truffle.llvm.nodes.impl.base.integers.LLVMI8Node;
import com.oracle.truffle.llvm.nodes.impl.base.integers.LLVMIVarBitNode;
import com.oracle.truffle.llvm.types.LLVMAddress;
import com.oracle.truffle.llvm.types.LLVMFunctionDescriptor;
import com.oracle.truffle.llvm.types.LLVMIVarBit;
import com.oracle.truffle.llvm.types.LLVMTruffleObject;
import com.oracle.truffle.llvm.types.floating.LLVM80BitFloat;
import com.oracle.truffle.llvm.types.memory.LLVMHeap;
import com.oracle.truffle.llvm.types.memory.LLVMMemory;

/**
 * Truffle nodes implementing LLVM {@code store} semantics: each nested node type
 * writes one value of a specific LLVM type to a pointer, either into native memory
 * via {@link LLVMMemory}/{@link LLVMHeap} or into a foreign (interop) object via a
 * WRITE message. The file also contains "array literal" nodes that initialize a
 * block of memory from an array of child value nodes.
 */
@NodeChildren(value = {@NodeChild(type = LLVMAddressNode.class, value = "pointerNode")})
public abstract class LLVMStoreNode extends LLVMNode {

    // Shared interop WRITE node used by all foreign-object specializations.
    @Child protected Node foreignWrite = Message.WRITE.createNode();

    /**
     * Writes {@code value} into a foreign object at element index
     * {@code addr.getOffset() / stride} (byte offset converted to an element index).
     * Interop failures are programming errors here, hence IllegalStateException.
     */
    protected void doForeignAccess(VirtualFrame frame, LLVMTruffleObject addr, int stride, Object value) {
        try {
            ForeignAccess.sendWrite(foreignWrite, frame, addr.getObject(), (int) (addr.getOffset() / stride), value);
        } catch (UnknownIdentifierException | UnsupportedMessageException | UnsupportedTypeException e) {
            throw new IllegalStateException(e);
        }
    }

    /** Writes {@code value} into a bare foreign object at index 0 (no offset information). */
    protected void doForeignAccess(VirtualFrame frame, TruffleObject addr, Object value) {
        try {
            ForeignAccess.sendWrite(foreignWrite, frame, addr, 0, value);
        } catch (UnknownIdentifierException | UnsupportedMessageException | UnsupportedTypeException e) {
            throw new IllegalStateException(e);
        }
    }

    /** Stores an i1 (boolean) into native memory. */
    @NodeChild(type = LLVMI1Node.class, value = "valueNode")
    public abstract static class LLVMI1StoreNode extends LLVMStoreNode {
        @Specialization
        public void execute(LLVMAddress address, boolean value) {
            LLVMMemory.putI1(address, value);
        }
    }

    /** Stores an i8 into native memory. */
    @NodeChild(type = LLVMI8Node.class, value = "valueNode")
    public abstract static class LLVMI8StoreNode extends LLVMStoreNode {
        @Specialization
        public void execute(LLVMAddress address, byte value) {
            LLVMMemory.putI8(address, value);
        }
    }

    /** Stores an i16 into native memory. */
    @NodeChild(type = LLVMI16Node.class, value = "valueNode")
    public abstract static class LLVMI16StoreNode extends LLVMStoreNode {
        @Specialization
        public void execute(LLVMAddress address, short value) {
            LLVMMemory.putI16(address, value);
        }
    }

    /** Stores an i32 into native memory or into a foreign object. */
    @NodeChild(type = LLVMI32Node.class, value = "valueNode")
    public abstract static class LLVMI32StoreNode extends LLVMStoreNode {
        @Specialization
        public void execute(LLVMAddress address, int value) {
            LLVMMemory.putI32(address, value);
        }

        @Specialization
        public void execute(VirtualFrame frame, LLVMTruffleObject address, int value) {
            doForeignAccess(frame, address, LLVMI32Node.BYTE_SIZE, value);
        }

        // Bare TruffleObject: wrap to reuse the offset-based path (offset 0 -> index 0).
        // NOTE(review): LLVMDoubleStoreNode below calls doForeignAccess(frame, address, value)
        // directly for the same case — both end up at index 0, but the style is inconsistent.
        @Specialization
        public void execute(VirtualFrame frame, TruffleObject address, int value) {
            execute(frame, new LLVMTruffleObject(address), value);
        }
    }

    /** Stores an i64 into native memory. */
    @NodeChild(type = LLVMI64Node.class, value = "valueNode")
    public abstract static class LLVMI64StoreNode extends LLVMStoreNode {
        @Specialization
        public void execute(LLVMAddress address, long value) {
            LLVMMemory.putI64(address, value);
        }
    }

    /** Stores an arbitrary-width integer (iN) into native memory. */
    @NodeChild(type = LLVMIVarBitNode.class, value = "valueNode")
    public abstract static class LLVMIVarBitStoreNode extends LLVMStoreNode {
        @Specialization
        public void execute(LLVMAddress address, LLVMIVarBit value) {
            LLVMMemory.putIVarBit(address, value);
        }
    }

    /** Stores a float into native memory. */
    @NodeChild(type = LLVMFloatNode.class, value = "valueNode")
    public abstract static class LLVMFloatStoreNode extends LLVMStoreNode {
        @Specialization
        public void execute(LLVMAddress address, float value) {
            LLVMMemory.putFloat(address, value);
        }
    }

    /** Stores a double into native memory or into a foreign object. */
    @NodeChild(type = LLVMDoubleNode.class, value = "valueNode")
    public abstract static class LLVMDoubleStoreNode extends LLVMStoreNode {
        @Specialization
        public void execute(LLVMAddress address, double value) {
            LLVMMemory.putDouble(address, value);
        }

        @Specialization
        public void execute(VirtualFrame frame, LLVMTruffleObject address, double value) {
            doForeignAccess(frame, address, LLVMDoubleNode.BYTE_SIZE, value);
        }

        @Specialization
        public void execute(VirtualFrame frame, TruffleObject address, double value) {
            doForeignAccess(frame, address, value);
        }
    }

    /** Stores an x86 80-bit extended-precision float into native memory. */
    @NodeChild(type = LLVM80BitFloatNode.class, value = "valueNode")
    public abstract static class LLVM80BitFloatStoreNode extends LLVMStoreNode {
        @Specialization
        public void execute(LLVMAddress address, LLVM80BitFloat value) {
            LLVMMemory.put80BitFloat(address, value);
        }
    }

    /** Stores a pointer value into native memory. */
    @NodeChild(type = LLVMAddressNode.class, value = "valueNode")
    public abstract static class LLVMAddressStoreNode extends LLVMStoreNode {
        @Specialization
        public void execute(LLVMAddress address, LLVMAddress value) {
            LLVMMemory.putAddress(address, value);
        }
    }

    /** Stores a function pointer (as its function index) into native memory. */
    @NodeChild(type = LLVMFunctionNode.class, value = "valueNode")
    public abstract static class LLVMFunctionStoreNode extends LLVMStoreNode {
        @Specialization
        public void execute(LLVMAddress address, LLVMFunctionDescriptor function) {
            LLVMHeap.putFunctionIndex(address, function.getFunctionIndex());
        }
    }

    /**
     * Copies a struct of {@code structSize} bytes from {@code value} to {@code address}
     * (struct stores are memcpy-like, not single-value writes).
     */
    @NodeChild(type = LLVMAddressNode.class, value = "valueNode")
    @NodeField(type = int.class, name = "structSize")
    public abstract static class LLVMStructStoreNode extends LLVMStoreNode {

        public abstract int getStructSize();

        @Specialization
        public void execute(LLVMAddress address, LLVMAddress value) {
            LLVMMemory.putStruct(address, value, getStructSize());
        }
    }

    /**
     * Writes an array of i1 literals to consecutive memory slots, {@code stride}
     * bytes apart, and returns the start address.
     */
    @NodeChild(value = "address", type = LLVMAddressNode.class)
    public abstract static class LLVMI1ArrayLiteralNode extends LLVMAddressNode {

        @Children private final LLVMI1Node[] values;
        private final int stride;

        public LLVMI1ArrayLiteralNode(LLVMI1Node[] values, int stride) {
            this.values = values;
            this.stride = stride;
        }

        // NOTE(review): method name says "writeI8" but this writes i1 values —
        // likely a copy/paste leftover; the Truffle DSL does not care about the name.
        @Specialization
        @ExplodeLoop
        protected LLVMAddress writeI8(VirtualFrame frame, LLVMAddress addr) {
            LLVMAddress currentAddress = addr;
            for (int i = 0; i < values.length; i++) {
                boolean currentValue = values[i].executeI1(frame);
                LLVMMemory.putI1(currentAddress, currentValue);
                currentAddress = currentAddress.increment(stride);
            }
            return addr;
        }
    }

    /** Writes an array of i8 literals to consecutive memory slots. */
    @NodeChild(value = "address", type = LLVMAddressNode.class)
    public abstract static class LLVMI8ArrayLiteralNode extends LLVMAddressNode {

        @Children private final LLVMI8Node[] values;
        private final int stride;

        public LLVMI8ArrayLiteralNode(LLVMI8Node[] values, int stride) {
            this.values = values;
            this.stride = stride;
        }

        @Specialization
        @ExplodeLoop
        protected LLVMAddress writeI8(VirtualFrame frame, LLVMAddress addr) {
            LLVMAddress currentAddress = addr;
            for (int i = 0; i < values.length; i++) {
                byte currentValue = values[i].executeI8(frame);
                LLVMMemory.putI8(currentAddress, currentValue);
                currentAddress = currentAddress.increment(stride);
            }
            return addr;
        }
    }

    /** Writes an array of i16 literals to consecutive memory slots. */
    @NodeChild(value = "address", type = LLVMAddressNode.class)
    public abstract static class LLVMI16ArrayLiteralNode extends LLVMAddressNode {

        @Children private final LLVMI16Node[] values;
        private final int stride;

        public LLVMI16ArrayLiteralNode(LLVMI16Node[] values, int stride) {
            this.values = values;
            this.stride = stride;
        }

        // NOTE(review): misnamed "writeI8" — writes i16 values (harmless to the DSL).
        @Specialization
        @ExplodeLoop
        protected LLVMAddress writeI8(VirtualFrame frame, LLVMAddress addr) {
            LLVMAddress currentAddress = addr;
            for (int i = 0; i < values.length; i++) {
                short currentValue = values[i].executeI16(frame);
                LLVMMemory.putI16(currentAddress, currentValue);
                currentAddress = currentAddress.increment(stride);
            }
            return addr;
        }
    }

    /** Writes an array of i32 literals to consecutive memory slots. */
    @NodeChild(value = "address", type = LLVMAddressNode.class)
    public abstract static class LLVMI32ArrayLiteralNode extends LLVMAddressNode {

        @Children private final LLVMI32Node[] values;
        private final int stride;

        public LLVMI32ArrayLiteralNode(LLVMI32Node[] values, int stride) {
            this.values = values;
            this.stride = stride;
        }

        @Specialization
        @ExplodeLoop
        protected LLVMAddress writeI32(VirtualFrame frame, LLVMAddress addr) {
            LLVMAddress currentAddress = addr;
            for (int i = 0; i < values.length; i++) {
                int currentValue = values[i].executeI32(frame);
                LLVMMemory.putI32(currentAddress, currentValue);
                currentAddress = currentAddress.increment(stride);
            }
            return addr;
        }
    }

    /** Writes an array of i64 literals to consecutive memory slots. */
    @NodeChild(value = "address", type = LLVMAddressNode.class)
    public abstract static class LLVMI64ArrayLiteralNode extends LLVMAddressNode {

        @Children private final LLVMI64Node[] values;
        private final int stride;

        public LLVMI64ArrayLiteralNode(LLVMI64Node[] values, int stride) {
            this.values = values;
            this.stride = stride;
        }

        @Specialization
        @ExplodeLoop
        protected LLVMAddress writeI64(VirtualFrame frame, LLVMAddress addr) {
            LLVMAddress currentAddress = addr;
            for (int i = 0; i < values.length; i++) {
                long currentValue = values[i].executeI64(frame);
                LLVMMemory.putI64(currentAddress, currentValue);
                currentAddress = currentAddress.increment(stride);
            }
            return addr;
        }
    }

    /** Writes an array of float literals to consecutive memory slots. */
    @NodeChild(value = "address", type = LLVMAddressNode.class)
    public abstract static class LLVMFloatArrayLiteralNode extends LLVMAddressNode {

        @Children private final LLVMFloatNode[] values;
        private final int stride;

        public LLVMFloatArrayLiteralNode(LLVMFloatNode[] values, int stride) {
            this.values = values;
            this.stride = stride;
        }

        // NOTE(review): misnamed "writeI64" — writes float values (harmless to the DSL).
        @Specialization
        @ExplodeLoop
        protected LLVMAddress writeI64(VirtualFrame frame, LLVMAddress addr) {
            LLVMAddress currentAddress = addr;
            for (int i = 0; i < values.length; i++) {
                float currentValue = values[i].executeFloat(frame);
                LLVMMemory.putFloat(currentAddress, currentValue);
                currentAddress = currentAddress.increment(stride);
            }
            return addr;
        }
    }

    /** Writes an array of double literals to consecutive memory slots. */
    @NodeChild(value = "address", type = LLVMAddressNode.class)
    public abstract static class LLVMDoubleArrayLiteralNode extends LLVMAddressNode {

        @Children private final LLVMDoubleNode[] values;
        private final int stride;

        public LLVMDoubleArrayLiteralNode(LLVMDoubleNode[] values, int stride) {
            this.values = values;
            this.stride = stride;
        }

        @Specialization
        @ExplodeLoop
        protected LLVMAddress writeDouble(VirtualFrame frame, LLVMAddress addr) {
            LLVMAddress currentAddress = addr;
            for (int i = 0; i < values.length; i++) {
                double currentValue = values[i].executeDouble(frame);
                LLVMMemory.putDouble(currentAddress, currentValue);
                currentAddress = currentAddress.increment(stride);
            }
            return addr;
        }
    }

    /** Writes an array of 80-bit-float literals to consecutive memory slots. */
    @NodeChild(value = "address", type = LLVMAddressNode.class)
    public abstract static class LLVM80BitFloatArrayLiteralNode extends LLVMAddressNode {

        @Children private final LLVM80BitFloatNode[] values;
        private final int stride;

        public LLVM80BitFloatArrayLiteralNode(LLVM80BitFloatNode[] values, int stride) {
            this.values = values;
            this.stride = stride;
        }

        @Specialization
        @ExplodeLoop
        protected LLVMAddress write80BitFloat(VirtualFrame frame, LLVMAddress addr) {
            LLVMAddress currentAddress = addr;
            for (int i = 0; i < values.length; i++) {
                LLVM80BitFloat currentValue = values[i].execute80BitFloat(frame);
                LLVMMemory.put80BitFloat(currentAddress, currentValue);
                currentAddress = currentAddress.increment(stride);
            }
            return addr;
        }
    }

    /** Writes an array of pointer literals to consecutive memory slots. */
    @NodeChild(value = "address", type = LLVMAddressNode.class)
    public abstract static class LLVMAddressArrayLiteralNode extends LLVMAddressNode {

        @Children private final LLVMAddressNode[] values;
        private final int stride;

        public LLVMAddressArrayLiteralNode(LLVMAddressNode[] values, int stride) {
            this.values = values;
            this.stride = stride;
        }

        // NOTE(review): misnamed "writeDouble" — writes pointer values (harmless to the DSL).
        @Specialization
        @ExplodeLoop
        protected LLVMAddress writeDouble(VirtualFrame frame, LLVMAddress addr) {
            LLVMAddress currentAddress = addr;
            for (int i = 0; i < values.length; i++) {
                LLVMAddress currentValue = values[i].executePointee(frame);
                LLVMMemory.putAddress(currentAddress, currentValue);
                currentAddress = currentAddress.increment(stride);
            }
            return addr;
        }
    }

    /** Writes an array of function pointers (as function indices) to consecutive memory slots. */
    @NodeChild(value = "address", type = LLVMAddressNode.class)
    public abstract static class LLVMFunctionArrayLiteralNode extends LLVMAddressNode {

        @Children private final LLVMFunctionNode[] values;
        private final int stride;

        public LLVMFunctionArrayLiteralNode(LLVMFunctionNode[] values, int stride) {
            this.values = values;
            this.stride = stride;
        }

        // NOTE(review): misnamed "writeDouble" — writes function indices (harmless to the DSL).
        @Specialization
        @ExplodeLoop
        protected LLVMAddress writeDouble(VirtualFrame frame, LLVMAddress addr) {
            LLVMAddress currentAddress = addr;
            for (int i = 0; i < values.length; i++) {
                LLVMFunctionDescriptor currentValue = values[i].executeFunction(frame);
                LLVMHeap.putFunctionIndex(currentAddress, currentValue.getFunctionIndex());
                currentAddress = currentAddress.increment(stride);
            }
            return addr;
        }
    }

    /**
     * Initializes an array by mem-copying {@code stride} bytes from each source
     * pointer into consecutive destination slots (used for aggregate elements).
     */
    @NodeChild(value = "address", type = LLVMAddressNode.class)
    public abstract static class LLVMAddressArrayCopyNode extends LLVMAddressNode {

        @Children private final LLVMAddressNode[] values;
        private final int stride;

        public LLVMAddressArrayCopyNode(LLVMAddressNode[] values, int stride) {
            this.values = values;
            this.stride = stride;
        }

        // NOTE(review): misnamed "writeDouble" — performs element-wise memcpy (harmless to the DSL).
        @Specialization
        @ExplodeLoop
        protected LLVMAddress writeDouble(VirtualFrame frame, LLVMAddress addr) {
            LLVMAddress currentAddress = addr;
            for (int i = 0; i < values.length; i++) {
                LLVMAddress currentValue = values[i].executePointee(frame);
                LLVMHeap.memCopy(currentAddress, currentValue, stride);
                currentAddress = currentAddress.increment(stride);
            }
            return addr;
        }
    }
}
/*
 * WebOSWebAppSessionTest
 * Connect SDK
 *
 * Copyright (c) 2015 LG Electronics.
 * Created by Oleksii Frolov on 27 May 2015
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.connectsdk.service.sessions;

import com.connectsdk.core.MediaInfo;
import com.connectsdk.core.SubtitleInfo;
import com.connectsdk.service.DeviceService;
import com.connectsdk.service.WebOSTVService;
import com.connectsdk.service.capability.CapabilityMethods;
import com.connectsdk.service.capability.MediaPlayer;
import com.connectsdk.service.capability.listeners.ResponseListener;
import com.connectsdk.service.config.ServiceConfig;
import com.connectsdk.service.config.ServiceDescription;
import com.connectsdk.service.config.WebOSTVServiceConfig;
import com.connectsdk.service.webos.WebOSTVServiceSocketClient;

import junit.framework.Assert;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Mockito;
import org.robolectric.Robolectric;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.annotation.Config;

import android.support.annotation.NonNull;

/**
 * Unit tests for {@link WebOSWebAppSession}: verifies the JSON "mediaCommand"
 * packets sent over the service socket for playlist control and playMedia,
 * using a mocked {@link WebOSTVServiceSocketClient}.
 */
// NOTE(review): junit.framework.Assert is deprecated; org.junit.Assert is the modern equivalent.
@RunWith(RobolectricTestRunner.class)
@Config(manifest=Config.NONE)
public class WebOSWebAppSessionTest {

    private WebOSWebAppSession session;

    private LaunchSession launchSession;

    private DeviceService service;

    private WebOSTVServiceSocketClient socket;

    /** Builds a session wired to a mocked, "connected" socket with a fixed full app id. */
    @Before
    public void setUp() {
        socket = Mockito.mock(WebOSTVServiceSocketClient.class);
        Mockito.when(socket.isConnected()).thenReturn(Boolean.TRUE);
        launchSession = Mockito.mock(LaunchSession.class);
        service = Mockito.mock(WebOSTVService.class);
        session = new WebOSWebAppSession(launchSession, service);
        session.setConnected(Boolean.TRUE);
        session.socket = socket;
        session.mFullAppId = "com.webos.app.webapphost.MediaPlayer";
    }

    /** previous() must send a "playPrevious" mediaCommand packet and report success. */
    @Test
    public void testPrevious() throws JSONException {
        ResponseListener<Object> listener = Mockito.mock(ResponseListener.class);
        session.previous(listener);
        // Listener callbacks are posted to the UI thread; flush them.
        Robolectric.runUiThreadTasksIncludingDelayedTasks();

        ArgumentCaptor<JSONObject> argPacket = ArgumentCaptor.forClass(JSONObject.class);
        ArgumentCaptor<JSONObject> argPayload = ArgumentCaptor.forClass(JSONObject.class);
        Mockito.verify(socket).sendMessage(argPacket.capture(), argPayload.capture());
        Mockito.verify(listener).onSuccess(null);

        JSONObject packet = argPacket.getValue();
        JSONObject payload = argPayload.getValue();
        Assert.assertNull(payload);
        Assert.assertTrue(packet.has("payload"));
        Assert.assertEquals("playPrevious", packet.getJSONObject("payload")
                .getJSONObject("mediaCommand").getString("type"));
        Assert.assertEquals("connectsdk.mediaCommand", packet.getJSONObject("payload")
                .getString("contentType"));
    }

    /** next() must send a "playNext" mediaCommand packet and report success. */
    @Test
    public void testNext() throws JSONException {
        ResponseListener<Object> listener = Mockito.mock(ResponseListener.class);
        session.next(listener);
        Robolectric.runUiThreadTasksIncludingDelayedTasks();

        ArgumentCaptor<JSONObject> argPacket = ArgumentCaptor.forClass(JSONObject.class);
        ArgumentCaptor<JSONObject> argPayload = ArgumentCaptor.forClass(JSONObject.class);
        Mockito.verify(socket).sendMessage(argPacket.capture(), argPayload.capture());
        Mockito.verify(listener).onSuccess(null);

        JSONObject packet = argPacket.getValue();
        JSONObject payload = argPayload.getValue();
        Assert.assertNull(payload);
        Assert.assertTrue(packet.has("payload"));
        Assert.assertEquals("playNext", packet.getJSONObject("payload")
                .getJSONObject("mediaCommand").getString("type"));
        Assert.assertEquals("connectsdk.mediaCommand", packet.getJSONObject("payload")
                .getString("contentType"));
    }

    /** jumpToTrack(7, ...) must send a "jumpToTrack" command carrying index 7. */
    @Test
    public void testJumpToTrack() throws JSONException {
        ResponseListener<Object> listener = Mockito.mock(ResponseListener.class);
        session.jumpToTrack(7, listener);
        Robolectric.runUiThreadTasksIncludingDelayedTasks();

        ArgumentCaptor<JSONObject> argPacket = ArgumentCaptor.forClass(JSONObject.class);
        ArgumentCaptor<JSONObject> argPayload = ArgumentCaptor.forClass(JSONObject.class);
        Mockito.verify(socket).sendMessage(argPacket.capture(), argPayload.capture());
        Mockito.verify(listener).onSuccess(null);

        JSONObject packet = argPacket.getValue();
        JSONObject payload = argPayload.getValue();
        Assert.assertNull(payload);
        Assert.assertTrue(packet.has("payload"));
        Assert.assertEquals("jumpToTrack", packet.getJSONObject("payload")
                .getJSONObject("mediaCommand").getString("type"));
        Assert.assertEquals(7, packet.getJSONObject("payload")
                .getJSONObject("mediaCommand").getInt("index"));
        Assert.assertEquals("connectsdk.mediaCommand", packet.getJSONObject("payload")
                .getString("contentType"));
    }

    /** The session is its own playlist-control implementation. */
    @Test
    public void testGetPlaylistControl() {
        Assert.assertSame(session, session.getPlaylistControl());
    }

    /** Playlist control capability is advertised at HIGH priority. */
    @Test
    public void testGetPlaylistControlCapability() {
        Assert.assertEquals(CapabilityMethods.CapabilityPriorityLevel.HIGH,
                session.getPlaylistControlCapabilityLevel());
    }

    /** sendMessage with a null socket must create one internally rather than throw. */
    @Test
    public void testSendMessageWithEmptySocketShouldNotCrash() {
        // Service description/config are needed so the session can build a socket lazily.
        ServiceDescription description = Mockito.mock(ServiceDescription.class);
        Mockito.when(description.getIpAddress()).thenReturn("127.0.0.1");
        Mockito.when(service.getServiceDescription()).thenReturn(description);
        ServiceConfig config = Mockito.mock(WebOSTVServiceConfig.class);
        Mockito.when(service.getServiceConfig()).thenReturn(config);
        session.socket = null;
        session.setConnected(true);
        ResponseListener<Object> listener = Mockito.mock(ResponseListener.class);
        try {
            session.sendMessage("message", listener);
        } catch (RuntimeException e) {
            Assert.fail("sendMessage should not throw an exception");
        }
    }

    /** Deprecated playMedia overload must produce the same packet as the MediaInfo variant. */
    @Test
    public void testPlayMediaDeprecatedWithRequiredParameters() throws JSONException {
        MediaPlayer.LaunchListener listener = Mockito.mock(MediaPlayer.LaunchListener.class);
        final boolean shouldLoop = true;
        final MediaInfo mediaInfo = new MediaInfo.Builder("url", "type").build();
        session.playMedia(mediaInfo.getUrl(), mediaInfo.getMimeType(), null, null, null,
                shouldLoop, listener);
        verifyPlayMedia(shouldLoop, null, mediaInfo);
    }

    /** Null MediaInfo is a programming error and must raise NPE. */
    @Test(expected = NullPointerException.class)
    public void testPlayMediaWithNullParametersShouldThrowException() throws JSONException {
        MediaPlayer.LaunchListener listener = Mockito.mock(MediaPlayer.LaunchListener.class);
        final boolean shouldLoop = true;
        session.playMedia(null, shouldLoop, listener);
    }

    /** Minimal MediaInfo (url + mime type) produces a well-formed playMedia packet. */
    @Test
    public void testPlayMediaWithRequiredParameters() throws JSONException {
        MediaPlayer.LaunchListener listener = Mockito.mock(MediaPlayer.LaunchListener.class);
        final boolean shouldLoop = true;
        final MediaInfo mediaInfo = new MediaInfo.Builder("url", "type").build();
        session.playMedia(mediaInfo, shouldLoop, listener);
        verifyPlayMedia(shouldLoop, null, mediaInfo);
    }

    /** Subtitle info must be embedded as a "subtitles" object with a single track. */
    @Test
    public void testPlayMediaWithSubtitles() throws JSONException {
        MediaPlayer.LaunchListener listener = Mockito.mock(MediaPlayer.LaunchListener.class);
        final boolean shouldLoop = true;
        final SubtitleInfo subtitleInfo = new SubtitleInfo.Builder("subtitleurl")
                .setLabel("label")
                .setLanguage("en")
                .setMimeType("subtitletype")
                .build();
        final MediaInfo mediaInfo = new MediaInfo.Builder("url", "type")
                .setIcon("icon")
                .setTitle("title")
                .setDescription("description")
                .setSubtitleInfo(subtitleInfo)
                .build();
        session.playMedia(mediaInfo, shouldLoop, listener);
        verifyPlayMedia(shouldLoop, subtitleInfo, mediaInfo);
    }

    /** Compares the captured packet (by string form) against the expected playMedia request. */
    private void verifyPlayMedia(boolean shouldLoop, SubtitleInfo subtitleInfo,
                                 MediaInfo mediaInfo) throws JSONException {
        ArgumentCaptor<JSONObject> argPacket = ArgumentCaptor.forClass(JSONObject.class);
        Mockito.verify(socket).sendMessage(argPacket.capture(), Mockito.isNull(JSONObject.class));
        JSONObject capturedPacket = argPacket.getValue();
        JSONObject expectedPacket = getPlayMediaExpectedRequest(shouldLoop, subtitleInfo, mediaInfo);
        // NOTE(review): toString comparison assumes deterministic key ordering in
        // org.json.JSONObject — true for Robolectric's implementation used here.
        Assert.assertEquals(expectedPacket.toString(), capturedPacket.toString());
    }

    /** Builds the reference p2p playMedia packet expected on the socket. */
    @NonNull
    private JSONObject getPlayMediaExpectedRequest(final boolean shouldLoop,
                                                   final SubtitleInfo subtitleInfo,
                                                   final MediaInfo mediaInfo) throws JSONException {
        return new JSONObject() {{
            put("type", "p2p");
            put("to", "com.webos.app.webapphost.MediaPlayer");
            put("payload", new JSONObject() {{
                putOpt("contentType", "connectsdk.mediaCommand");
                putOpt("mediaCommand", new JSONObject() {{
                    putOpt("type", "playMedia");
                    putOpt("mediaURL", mediaInfo.getUrl());
                    if (mediaInfo.getImages() != null) {
                        putOpt("iconURL", mediaInfo.getImages().get(0).getUrl());
                    }
                    putOpt("title", mediaInfo.getTitle());
                    putOpt("description", mediaInfo.getDescription());
                    putOpt("mimeType", mediaInfo.getMimeType());
                    putOpt("shouldLoop", shouldLoop);
                    // First request id issued by the session is always "req1".
                    put("requestId", "req1");
                    if (subtitleInfo != null) {
                        putOpt("subtitles", new JSONObject() {{
                            putOpt("default", "1");
                            putOpt("enabled", "1");
                            putOpt("tracks", new JSONArray() {{
                                put(new JSONObject() {{
                                    putOpt("id", "1");
                                    putOpt("language", subtitleInfo.getLanguage());
                                    putOpt("source", subtitleInfo.getUrl());
                                    putOpt("label", subtitleInfo.getLabel());
                                }});
                            }});
                        }});
                    }
                }});
            }});
        }};
    }
}
package com.vc.util.testclient; import com.vc.model.Message; import com.vivialconnect.VivialConnectManager; import org.json.simple.parser.ParseException; import java.util.ArrayList; import java.util.List; /** * Created by cparish on 4/17/2017. */ public class VCMessageSender implements Runnable { private String fromNumber; private List<String> toNumbers; private long minMessageSleepTimeMs; private long maxMessageSleepTimeMs; private int totalMessages; private List<VCThreadedMessageResult> results = null; private String[] randomBaseMsgs; private String publicKey = ""; private String secretKey = ""; private String accountId = ""; public String info() { StringBuffer buffer = new StringBuffer(); buffer.append("fromNumber: " + fromNumber); buffer.append("toNumbers Count : " + toNumbers.size() ); buffer.append("toNumbers: " + toNumbers ); buffer.append("total messages: " + totalMessages ); return buffer.toString(); } public VCMessageSender(String fromNumber, List<String> toNumbers, long minMessageSleepTimeMs, int maxMessageSleepTimeMs, int totalMessages, List<VCThreadedMessageResult> results, String[] randomBaseMsgs, String publicKey, String secretKey, String accountId) { String[] toNumberArr = new String[toNumbers.size()]; for (int i=0;i<toNumbers.size();i++) { toNumberArr[i]= toNumbers.get(i); } this.fromNumber = fromNumber; this.toNumbers = getToNumberList(fromNumber, toNumberArr); this.minMessageSleepTimeMs = minMessageSleepTimeMs; this.maxMessageSleepTimeMs = maxMessageSleepTimeMs; this.totalMessages = totalMessages; this.results = results; this.publicKey = publicKey; this.secretKey = secretKey; this.accountId = accountId; this.randomBaseMsgs = randomBaseMsgs; } /* public VCMessageSender(String fromNumber, String[] toNumbers, long minMessageSleepTimeMs, int maxMessageSleepTimeMs, int totalMessages, List<VCThreadedMessageResult> results) { this.fromNumber = fromNumber; this.toNumbers = getToNumberList(fromNumber, toNumbers); this.minMessageSleepTimeMs = 
minMessageSleepTimeMs; this.maxMessageSleepTimeMs = maxMessageSleepTimeMs; this.totalMessages = totalMessages; this.results = results; } */ private List<String> getToNumberList(String fromNumber, String[] toNumbers){ List<String> phoneNumbers = new ArrayList<String>(); String cFromNumber = cleanNumber(fromNumber); for (int i=0; i<toNumbers.length; i++) { String tNumber = cleanNumber(toNumbers[i]); if (!cFromNumber.equals(tNumber)) { phoneNumbers.add(tNumber); } } return phoneNumbers; } public static String cleanNumber(String phoneNumber) { String newNumber = ""; for(int i=0; i<phoneNumber.length(); i++ ) { char c = phoneNumber.charAt(i); if (Character.isDigit(c)) { if (newNumber.length() == 0 && c=='1') continue; newNumber += c; } } return "+1" + newNumber; } private String generateRandomMessage(int msgCounter ) { int j = getRandomIndex(randomBaseMsgs.length); StringBuffer buffer = new StringBuffer(); buffer.append("msg-").append(msgCounter).append("-").append(randomBaseMsgs[j]); return buffer.toString(); } private String sendMessage(String toNumber, String fromNumber, String body) { boolean debug = true; if(debug) { return sendMockMessage(toNumber, fromNumber, body); } else { return sendRealMessage(toNumber, fromNumber, body); } } private String sendMockMessage(String toNumber, String fromNumber, String body) { int min = 200; int max = 800; long sleepTime = getRandomNumber(200, 800); try { Thread.sleep(sleepTime); } catch (InterruptedException ex) { ex.printStackTrace(); } return "123"; } private String sendRealMessage(String toNumber, String fromNumber, String body) { VivialConnectManager vivialConnectManager = new VivialConnectManager(secretKey, publicKey, accountId); try { Message message = vivialConnectManager.sendSMSMessage(body, toNumber, fromNumber); return "" + message.getId(); } catch (ParseException e) { e.printStackTrace(); return e.toString(); } } public void run() { int msgCounter = 1; while (msgCounter <= totalMessages) { int j = msgCounter % 
toNumbers.size(); String toNumber = toNumbers.get(j); String messageBody = generateRandomMessage(msgCounter); long start = System.currentTimeMillis(); String messageId = sendMessage(toNumber, fromNumber, messageBody); long endTime = System.currentTimeMillis(); long callTime = endTime - start; long sleepTime = getRandomNumber((int) minMessageSleepTimeMs, (int) maxMessageSleepTimeMs); long diff = sleepTime - callTime; if (diff > 0) { try { Thread.currentThread().sleep(diff); } catch (InterruptedException e) { e.printStackTrace(); } } StringBuffer logBuffer = new StringBuffer(); logBuffer.append("Thread: ").append(Thread.currentThread().getName()); logBuffer.append("counterId: ").append(msgCounter); logBuffer.append("fromNumber: ").append(fromNumber); logBuffer.append(" toNumber: ").append(toNumber); logBuffer.append(" sendTime: ").append(callTime); logBuffer.append(" sleepTime: ").append(sleepTime); logBuffer.append(" smsMsg: ").append(messageBody); System.out.println(logBuffer.toString()); VCThreadedMessageResult vcThreadedMessageResult = null; vcThreadedMessageResult = new VCThreadedMessageResult(start, endTime, callTime, Thread.currentThread().getName(), toNumber, fromNumber, sleepTime, messageBody, messageId, msgCounter); if (results != null) { results.add(vcThreadedMessageResult); } msgCounter++; } } private static int getRandomNumber(int min, int max) { int range = max - min + 1; int random = (int)(Math.random() * range) + min; return random; } private static int getRandomIndex(int length) { //int range = length; return getRandomNumber(0, (length-1)); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package gobblin.writer;

import java.io.Closeable;
import java.io.IOException;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.annotation.Nullable;

import gobblin.annotation.Alpha;

/**
 * An async data writer which can achieve the buffering and batching capability.
 * Internally it uses {@link BatchAccumulator} to accumulate input records. The records
 * will be converted to batches according to the accumulator implementation. The {@link RecordProcessor}
 * is responsible to iterate all available batches and write each batch via a {@link BatchAsyncDataWriter}
 *
 * @param <D> data record type
 */
@Alpha
public abstract class BufferedAsyncDataWriter<D> implements AsyncDataWriter<D> {

  // Background consumer that drains the accumulator and hands batches to the writer.
  private RecordProcessor<D> processor;
  private BatchAccumulator<D> accumulator;
  // Single-threaded executor running the processor; shut down right after submit
  // so it terminates once the processor's run() returns.
  private ExecutorService service;
  // Flag polled by the processor's main loop; volatile for cross-thread visibility.
  private volatile boolean running;
  private final long startTime;
  private static final Logger LOG = LoggerFactory.getLogger(BufferedAsyncDataWriter.class);

  // Adapts a RecordMetadata into the generic WriteResponse shape; byte count is unknown (-1).
  private static final WriteResponseMapper<RecordMetadata> WRITE_RESPONSE_WRAPPER =
      new WriteResponseMapper<RecordMetadata>() {
        @Override
        public WriteResponse wrap(final RecordMetadata recordMetadata) {
          return new WriteResponse<RecordMetadata>() {
            @Override
            public RecordMetadata getRawResponse() {
              return recordMetadata;
            }

            @Override
            public String getStringResponse() {
              return recordMetadata.toString();
            }

            @Override
            public long bytesWritten() {
              // Don't know how many bytes were written
              return -1;
            }
          };
        }
      };

  // NOTE(review): the constructor starts the background thread, leaking a
  // partially-constructed `this` (via `running`) to it; the catch also logs
  // without the exception cause. Flagged, not changed.
  public BufferedAsyncDataWriter (BatchAccumulator<D> accumulator, BatchAsyncDataWriter<D> dataWriter) {
    this.processor = new RecordProcessor (accumulator, dataWriter);
    this.accumulator = accumulator;
    this.service = Executors.newFixedThreadPool(1);
    this.running = true;
    this.startTime = System.currentTimeMillis();
    try {
      this.service.execute(this.processor);
      this.service.shutdown();
    } catch (Exception e) {
      LOG.error("Cannot start internal thread to consume the data");
    }
  }

  // NOTE(review): this inner class declares its own <D>, shadowing the outer
  // type parameter of the same name — confusing but compiles.
  private class RecordProcessor<D> implements Runnable, Closeable{
    BatchAccumulator<D> accumulator;
    BatchAsyncDataWriter<D> writer;

    public void close() throws IOException {
      this.writer.close();
    }

    public RecordProcessor (BatchAccumulator<D> accumulator, BatchAsyncDataWriter<D> writer) {
      this.accumulator = accumulator;
      this.writer = writer;
    }

    public void run() {
      LOG.info ("Start iterating accumulator");
      /**
       * A main loop to process available batches
       */
      // NOTE(review): if getNextAvailableBatch() returns null without blocking,
      // this loop busy-spins while `running` is true — presumably the
      // accumulator blocks internally; confirm against its implementation.
      while (running) {
        Batch<D> batch = this.accumulator.getNextAvailableBatch();
        if (batch != null) {
          this.writer.write(batch, this.createBatchCallback(batch));
        }
      }

      // Wait until all the ongoing appends finished
      accumulator.waitClose();
      LOG.info ("Start to process remaining batches");
      /**
       * A main loop to process remaining batches
       */
      Batch<D> batch;
      while ((batch = this.accumulator.getNextAvailableBatch()) != null) {
        this.writer.write(batch, this.createBatchCallback(batch));
      }

      // Wait until all the batches get acknowledged
      accumulator.flush();
    }

    /**
     * A callback which handles the post-processing logic after a batch has sent out and
     * receives the result
     */
    private WriteCallback createBatchCallback (final Batch<D> batch) {
      return new WriteCallback<Object>() {
        @Override
        public void onSuccess(WriteResponse writeResponse) {
          LOG.info ("Batch " + batch.getId() + " is on success with size " + batch.getCurrentSizeInByte()
              + " num of record " + batch.getRecords().size());
          batch.onSuccess(writeResponse);
          batch.done();
          // Return the batch's memory to the accumulator regardless of outcome.
          accumulator.deallocate(batch);
        }

        @Override
        public void onFailure(Throwable throwable) {
          LOG.info ("Batch " + batch.getId() + " is on failure");
          batch.onFailure(throwable);
          batch.done();
          accumulator.deallocate(batch);
        }
      };
    }
  }

  /**
   * Asynchronously write a record, execute the callback on success/failure
   */
  public Future<WriteResponse> write(D record, @Nullable WriteCallback callback) {
    try {
      Future<RecordMetadata> future = this.accumulator.append(record, callback);
      return new WriteResponseFuture (future, WRITE_RESPONSE_WRAPPER);
    } catch (InterruptedException e) {
      // NOTE(review): wraps without restoring the interrupt status
      // (Thread.currentThread().interrupt()) — flagged, not changed.
      throw new RuntimeException(e);
    }
  }

  /**
   * Flushes all pending writes
   */
  public void flush() throws IOException {
    this.accumulator.flush();
  }

  /**
   * Force to close all the resources and drop all the pending requests
   */
  public void forceClose() {
    LOG.info ("Force to close the buffer data writer (not supported)");
  }

  /**
   * Close all the resources, this will be blocked until all the request are sent and gets acknowledged
   */
  // (method body continues beyond this chunk)
  public void close()
throws IOException { try { this.running = false; this.accumulator.close(); if (!this.service.awaitTermination(60, TimeUnit.SECONDS)) { forceClose(); } else { LOG.info ("Closed properly: elapsed " + (System.currentTimeMillis() - startTime) + " milliseconds"); } } catch (InterruptedException e) { LOG.error ("Interruption happened during close " + e.toString()); } finally { this.processor.close(); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package gobblin.publisher; import java.io.IOException; import java.sql.Connection; import java.sql.SQLException; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; import javax.sql.DataSource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import gobblin.configuration.ConfigurationKeys; import gobblin.configuration.State; import gobblin.configuration.WorkUnitState; import gobblin.source.extractor.JobCommitPolicy; import gobblin.util.ForkOperatorUtils; import gobblin.util.jdbc.DataSourceBuilder; import gobblin.writer.commands.JdbcWriterCommands; import gobblin.writer.commands.JdbcWriterCommandsFactory; /** * Publishes data into JDBC RDBMS. Expects all the data has been already in staging table. 
*/ public class JdbcPublisher extends DataPublisher { public static final String JDBC_PUBLISHER_PREFIX = "jdbc.publisher."; public static final String JDBC_PUBLISHER_DATABASE_NAME = JDBC_PUBLISHER_PREFIX + "database_name"; public static final String JDBC_PUBLISHER_FINAL_TABLE_NAME = JDBC_PUBLISHER_PREFIX + "table_name"; public static final String JDBC_PUBLISHER_REPLACE_FINAL_TABLE = JDBC_PUBLISHER_PREFIX + "replace_table"; public static final String JDBC_PUBLISHER_USERNAME = JDBC_PUBLISHER_PREFIX + "username"; public static final String JDBC_PUBLISHER_PASSWORD = JDBC_PUBLISHER_PREFIX + "password"; public static final String JDBC_PUBLISHER_ENCRYPTION_KEY_LOC = JDBC_PUBLISHER_PREFIX + "encrypt_key_loc"; public static final String JDBC_PUBLISHER_URL = JDBC_PUBLISHER_PREFIX + "url"; public static final String JDBC_PUBLISHER_TIMEOUT = JDBC_PUBLISHER_PREFIX + "timeout"; public static final String JDBC_PUBLISHER_DRIVER = JDBC_PUBLISHER_PREFIX + "driver"; private static final Logger LOG = LoggerFactory.getLogger(JdbcPublisher.class); private final JdbcWriterCommandsFactory jdbcWriterCommandsFactory; /** * Expects all data is in staging table ready to be published. 
To validate this, it checks COMMIT_ON_FULL_SUCCESS and PUBLISH_DATA_AT_JOB_LEVEL * @param state * @param jdbcWriterCommandsFactory * @param conn */ @VisibleForTesting public JdbcPublisher(State state, JdbcWriterCommandsFactory jdbcWriterCommandsFactory) { super(state); this.jdbcWriterCommandsFactory = jdbcWriterCommandsFactory; validate(getState()); } public JdbcPublisher(State state) { this(state, new JdbcWriterCommandsFactory()); validate(getState()); } /** * @param state * @throws IllegalArgumentException If job commit policy is not COMMIT_ON_FULL_SUCCESS or is not on PUBLISH_DATA_AT_JOB_LEVEL */ private void validate(State state) { JobCommitPolicy jobCommitPolicy = JobCommitPolicy.getCommitPolicy(this.getState().getProperties()); if (JobCommitPolicy.COMMIT_ON_FULL_SUCCESS != jobCommitPolicy) { throw new IllegalArgumentException(this.getClass().getSimpleName() + " won't publish as already commited by task. Job commit policy " + jobCommitPolicy); } if (!state.getPropAsBoolean(ConfigurationKeys.PUBLISH_DATA_AT_JOB_LEVEL, ConfigurationKeys.DEFAULT_PUBLISH_DATA_AT_JOB_LEVEL)) { throw new IllegalArgumentException(this.getClass().getSimpleName() + " won't publish as " + ConfigurationKeys.PUBLISH_DATA_AT_JOB_LEVEL + " is set as false"); } } @VisibleForTesting public Connection createConnection() { DataSource dataSource = DataSourceBuilder.builder().url(this.state.getProp(JDBC_PUBLISHER_URL)) .driver(this.state.getProp(JDBC_PUBLISHER_DRIVER)).userName(this.state.getProp(JDBC_PUBLISHER_USERNAME)) .passWord(this.state.getProp(JDBC_PUBLISHER_PASSWORD)) .cryptoKeyLocation(this.state.getProp(JDBC_PUBLISHER_ENCRYPTION_KEY_LOC)).maxActiveConnections(1) .maxIdleConnections(1).state(this.state).build(); try { return dataSource.getConnection(); } catch (SQLException e) { throw new RuntimeException(e); } } @Override public void close() throws IOException {} @Override public void initialize() throws IOException {} /** * 1. Truncate destination table if requested * 2. 
Move data from staging to destination * 3. Update Workunit state * * TODO: Research on running this in parallel. While testing publishing it in parallel, it turns out delete all from the table locks the table * so that copying table threads wait until transaction lock times out and throwing exception(MySQL). Is there a way to avoid this? * * {@inheritDoc} * @see gobblin.publisher.DataPublisher#publishData(java.util.Collection) */ @Override public void publishData(Collection<? extends WorkUnitState> states) throws IOException { LOG.info("Start publishing data"); int branches = this.state.getPropAsInt(ConfigurationKeys.FORK_BRANCHES_KEY, 1); Set<String> emptiedDestTables = Sets.newHashSet(); final Connection conn = createConnection(); final JdbcWriterCommands commands = this.jdbcWriterCommandsFactory.newInstance(this.state, conn); try { conn.setAutoCommit(false); for (int i = 0; i < branches; i++) { final String destinationTable = this.state .getProp(ForkOperatorUtils.getPropertyNameForBranch(JDBC_PUBLISHER_FINAL_TABLE_NAME, branches, i)); final String databaseName = this.state.getProp(ForkOperatorUtils.getPropertyNameForBranch(JDBC_PUBLISHER_DATABASE_NAME, branches, i)); Preconditions.checkNotNull(destinationTable); if (this.state.getPropAsBoolean( ForkOperatorUtils.getPropertyNameForBranch(JDBC_PUBLISHER_REPLACE_FINAL_TABLE, branches, i), false) && !emptiedDestTables.contains(destinationTable)) { LOG.info("Deleting table " + destinationTable); commands.deleteAll(databaseName, destinationTable); emptiedDestTables.add(destinationTable); } Map<String, List<WorkUnitState>> stagingTables = getStagingTables(states, branches, i); for (Map.Entry<String, List<WorkUnitState>> entry : stagingTables.entrySet()) { String stagingTable = entry.getKey(); LOG.info("Copying data from staging table " + stagingTable + " into destination table " + destinationTable); commands.copyTable(databaseName, stagingTable, destinationTable); for (WorkUnitState workUnitState : entry.getValue()) { 
workUnitState.setWorkingState(WorkUnitState.WorkingState.COMMITTED); } } } LOG.info("Commit publish data"); conn.commit(); } catch (Exception e) { try { LOG.error("Failed publishing. Rolling back."); conn.rollback(); } catch (SQLException se) { LOG.error("Failed rolling back.", se); } throw new RuntimeException("Failed publishing", e); } finally { try { conn.close(); } catch (SQLException e) { throw new RuntimeException(e); } } } private static Map<String, List<WorkUnitState>> getStagingTables(Collection<? extends WorkUnitState> states, int branches, int i) { Map<String, List<WorkUnitState>> stagingTables = Maps.newHashMap(); for (WorkUnitState workUnitState : states) { String stagingTableKey = ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.WRITER_STAGING_TABLE, branches, i); String stagingTable = Preconditions.checkNotNull(workUnitState.getProp(stagingTableKey)); List<WorkUnitState> existing = stagingTables.get(stagingTable); if (existing == null) { existing = Lists.newArrayList(); stagingTables.put(stagingTable, existing); } existing.add(workUnitState); } return stagingTables; } @Override public void publishMetadata(Collection<? extends WorkUnitState> states) throws IOException {} }
package org.andengine.opengl.texture.atlas.bitmap;

import java.io.IOException;
import java.util.Arrays;

import org.andengine.opengl.texture.atlas.bitmap.source.AssetBitmapTextureAtlasSource;
import org.andengine.opengl.texture.atlas.bitmap.source.IBitmapTextureAtlasSource;
import org.andengine.opengl.texture.atlas.bitmap.source.ResourceBitmapTextureAtlasSource;
import org.andengine.opengl.texture.atlas.buildable.BuildableTextureAtlasTextureRegionFactory;
import org.andengine.opengl.texture.region.TextureRegion;
import org.andengine.opengl.texture.region.TextureRegionFactory;
import org.andengine.opengl.texture.region.TiledTextureRegion;
import org.andengine.util.exception.AndEngineRuntimeException;

import android.content.Context;
import android.content.res.AssetManager;
import android.content.res.Resources;

/**
 * Static factory methods that create {@link TextureRegion}s and {@link TiledTextureRegion}s on a
 * {@link BitmapTextureAtlas} or {@link BuildableBitmapTextureAtlas} from assets, resources or
 * arbitrary {@link IBitmapTextureAtlasSource}s.
 *
 * (c) 2010 Nicolas Gramlich
 * (c) 2011 Zynga Inc.
 *
 * @author Nicolas Gramlich
 * @since 18:15:14 - 09.03.2010
 */
public final class BitmapTextureAtlasTextureRegionFactory {
	// ===========================================================
	// Constants
	// ===========================================================

	// ===========================================================
	// Fields
	// ===========================================================

	// Prefix prepended to every asset path passed to the createFromAsset(...) methods.
	private static String sAssetBasePath = "";

	// ===========================================================
	// Constructors
	// ===========================================================

	// Utility class: not instantiable.
	private BitmapTextureAtlasTextureRegionFactory() {
	}

	// ===========================================================
	// Getter & Setter
	// ===========================================================

	/**
	 * @param pAssetBasePath must end with '<code>/</code>' or have <code>.length() == 0</code>.
	 */
	public static void setAssetBasePath(final String pAssetBasePath) {
		if (pAssetBasePath.endsWith("/") || pAssetBasePath.length() == 0) {
			BitmapTextureAtlasTextureRegionFactory.sAssetBasePath = pAssetBasePath;
		} else {
			throw new IllegalArgumentException("pAssetBasePath must end with '/' or be length zero.");
		}
	}

	public static String getAssetBasePath() {
		return BitmapTextureAtlasTextureRegionFactory.sAssetBasePath;
	}

	/** Resets the asset base path to the empty string. */
	public static void reset() {
		BitmapTextureAtlasTextureRegionFactory.setAssetBasePath("");
	}

	// ===========================================================
	// Methods for/from SuperClass/Interfaces
	// ===========================================================

	// ===========================================================
	// Methods
	// ===========================================================

	// ===========================================================
	// Methods using BitmapTexture
	// ===========================================================

	public static TextureRegion createFromAsset(final BitmapTextureAtlas pBitmapTextureAtlas, final Context pContext, final String pAssetPath, final int pTextureX, final int pTextureY) {
		return BitmapTextureAtlasTextureRegionFactory.createFromAsset(pBitmapTextureAtlas, pContext.getAssets(), pAssetPath, pTextureX, pTextureY);
	}

	public static TextureRegion createFromAsset(final BitmapTextureAtlas pBitmapTextureAtlas, final AssetManager pAssetManager, final String pAssetPath, final int pTextureX, final int pTextureY) {
		final IBitmapTextureAtlasSource bitmapTextureAtlasSource = AssetBitmapTextureAtlasSource.create(pAssetManager, BitmapTextureAtlasTextureRegionFactory.sAssetBasePath + pAssetPath);
		return BitmapTextureAtlasTextureRegionFactory.createFromSource(pBitmapTextureAtlas, bitmapTextureAtlasSource, pTextureX, pTextureY);
	}

	public static TiledTextureRegion createTiledFromAsset(final BitmapTextureAtlas pBitmapTextureAtlas, final Context pContext, final String pAssetPath, final int pTextureX, final int pTextureY, final int pTileColumns, final int pTileRows) {
		return BitmapTextureAtlasTextureRegionFactory.createTiledFromAsset(pBitmapTextureAtlas, pContext.getAssets(), pAssetPath, pTextureX, pTextureY, pTileColumns, pTileRows);
	}

	public static TiledTextureRegion createTiledFromAsset(final BitmapTextureAtlas pBitmapTextureAtlas, final AssetManager pAssetManager, final String pAssetPath, final int pTextureX, final int pTextureY, final int pTileColumns, final int pTileRows) {
		final IBitmapTextureAtlasSource bitmapTextureAtlasSource = AssetBitmapTextureAtlasSource.create(pAssetManager, BitmapTextureAtlasTextureRegionFactory.sAssetBasePath + pAssetPath);
		return BitmapTextureAtlasTextureRegionFactory.createTiledFromSource(pBitmapTextureAtlas, bitmapTextureAtlasSource, pTextureX, pTextureY, pTileColumns, pTileRows);
	}


	public static TextureRegion createFromResource(final BitmapTextureAtlas pBitmapTextureAtlas, final Context pContext, final int pDrawableResourceID, final int pTextureX, final int pTextureY) {
		return BitmapTextureAtlasTextureRegionFactory.createFromResource(pBitmapTextureAtlas, pContext.getResources(), pDrawableResourceID, pTextureX, pTextureY);
	}

	public static TextureRegion createFromResource(final BitmapTextureAtlas pBitmapTextureAtlas, final Resources pResources, final int pDrawableResourceID, final int pTextureX, final int pTextureY) {
		final IBitmapTextureAtlasSource bitmapTextureAtlasSource = ResourceBitmapTextureAtlasSource.create(pResources, pDrawableResourceID);
		return BitmapTextureAtlasTextureRegionFactory.createFromSource(pBitmapTextureAtlas, bitmapTextureAtlasSource, pTextureX, pTextureY);
	}

	public static TiledTextureRegion createTiledFromResource(final BitmapTextureAtlas pBitmapTextureAtlas, final Context pContext, final int pDrawableResourceID, final int pTextureX, final int pTextureY, final int pTileColumns, final int pTileRows) {
		return BitmapTextureAtlasTextureRegionFactory.createTiledFromResource(pBitmapTextureAtlas, pContext.getResources(), pDrawableResourceID, pTextureX, pTextureY, pTileColumns, pTileRows);
	}

	public static TiledTextureRegion createTiledFromResource(final BitmapTextureAtlas pBitmapTextureAtlas, final Resources pResources, final int pDrawableResourceID, final int pTextureX, final int pTextureY, final int pTileColumns, final int pTileRows) {
		final IBitmapTextureAtlasSource bitmapTextureAtlasSource = ResourceBitmapTextureAtlasSource.create(pResources, pDrawableResourceID);
		return BitmapTextureAtlasTextureRegionFactory.createTiledFromSource(pBitmapTextureAtlas, bitmapTextureAtlasSource, pTextureX, pTextureY, pTileColumns, pTileRows);
	}


	public static TextureRegion createFromSource(final BitmapTextureAtlas pBitmapTextureAtlas, final IBitmapTextureAtlasSource pBitmapTextureAtlasSource, final int pTextureX, final int pTextureY) {
		return TextureRegionFactory.createFromSource(pBitmapTextureAtlas, pBitmapTextureAtlasSource, pTextureX, pTextureY);
	}

	public static TiledTextureRegion createTiledFromSource(final BitmapTextureAtlas pBitmapTextureAtlas, final IBitmapTextureAtlasSource pBitmapTextureAtlasSource, final int pTextureX, final int pTextureY, final int pTileColumns, final int pTileRows) {
		return TextureRegionFactory.createTiledFromSource(pBitmapTextureAtlas, pBitmapTextureAtlasSource, pTextureX, pTextureY, pTileColumns, pTileRows);
	}

	// ===========================================================
	// Methods using BuildableTexture
	// ===========================================================

	public static TextureRegion createFromAsset(final BuildableBitmapTextureAtlas pBuildableBitmapTextureAtlas, final Context pContext, final String pAssetPath) {
		return BitmapTextureAtlasTextureRegionFactory.createFromAsset(pBuildableBitmapTextureAtlas, pContext.getAssets(), pAssetPath);
	}

	public static TextureRegion createFromAsset(final BuildableBitmapTextureAtlas pBuildableBitmapTextureAtlas, final AssetManager pAssetManager, final String pAssetPath) {
		return BitmapTextureAtlasTextureRegionFactory.createFromAsset(pBuildableBitmapTextureAtlas, pAssetManager, pAssetPath, false);
	}

	public static TextureRegion createFromAsset(final BuildableBitmapTextureAtlas pBuildableBitmapTextureAtlas, final Context pContext, final String pAssetPath, final boolean pRotated) {
		return BitmapTextureAtlasTextureRegionFactory.createFromAsset(pBuildableBitmapTextureAtlas, pContext.getAssets(), pAssetPath, pRotated);
	}

	public static TextureRegion createFromAsset(final BuildableBitmapTextureAtlas pBuildableBitmapTextureAtlas, final AssetManager pAssetManager, final String pAssetPath, final boolean pRotated) {
		final IBitmapTextureAtlasSource bitmapTextureAtlasSource = AssetBitmapTextureAtlasSource.create(pAssetManager, BitmapTextureAtlasTextureRegionFactory.sAssetBasePath + pAssetPath);
		return BitmapTextureAtlasTextureRegionFactory.createFromSource(pBuildableBitmapTextureAtlas, bitmapTextureAtlasSource, pRotated);
	}

	public static TiledTextureRegion createTiledFromAsset(final BuildableBitmapTextureAtlas pBuildableBitmapTextureAtlas, final Context pContext, final String pAssetPath, final int pTileColumns, final int pTileRows) {
		return BitmapTextureAtlasTextureRegionFactory.createTiledFromAsset(pBuildableBitmapTextureAtlas, pContext.getAssets(), pAssetPath, pTileColumns, pTileRows);
	}

	public static TiledTextureRegion createTiledFromAsset(final BuildableBitmapTextureAtlas pBuildableBitmapTextureAtlas, final AssetManager pAssetManager, final String pAssetPath, final int pTileColumns, final int pTileRows) {
		final IBitmapTextureAtlasSource bitmapTextureAtlasSource = AssetBitmapTextureAtlasSource.create(pAssetManager, BitmapTextureAtlasTextureRegionFactory.sAssetBasePath + pAssetPath);
		return BitmapTextureAtlasTextureRegionFactory.createTiledFromSource(pBuildableBitmapTextureAtlas, bitmapTextureAtlasSource, pTileColumns, pTileRows);
	}


	public static TextureRegion createFromResource(final BuildableBitmapTextureAtlas pBuildableBitmapTextureAtlas, final Context pContext, final int pDrawableResourceID) {
		return BitmapTextureAtlasTextureRegionFactory.createFromResource(pBuildableBitmapTextureAtlas, pContext.getResources(), pDrawableResourceID);
	}

	public static TextureRegion createFromResource(final BuildableBitmapTextureAtlas pBuildableBitmapTextureAtlas, final Resources pResources, final int pDrawableResourceID) {
		return BitmapTextureAtlasTextureRegionFactory.createFromResource(pBuildableBitmapTextureAtlas, pResources, pDrawableResourceID, false);
	}

	public static TextureRegion createFromResource(final BuildableBitmapTextureAtlas pBuildableBitmapTextureAtlas, final Context pContext, final int pDrawableResourceID, final boolean pRotated) {
		return BitmapTextureAtlasTextureRegionFactory.createFromResource(pBuildableBitmapTextureAtlas, pContext.getResources(), pDrawableResourceID, pRotated);
	}

	public static TextureRegion createFromResource(final BuildableBitmapTextureAtlas pBuildableBitmapTextureAtlas, final Resources pResources, final int pDrawableResourceID, final boolean pRotated) {
		final IBitmapTextureAtlasSource bitmapTextureAtlasSource = ResourceBitmapTextureAtlasSource.create(pResources, pDrawableResourceID);
		return BitmapTextureAtlasTextureRegionFactory.createFromSource(pBuildableBitmapTextureAtlas, bitmapTextureAtlasSource, pRotated);
	}

	public static TiledTextureRegion createTiledFromResource(final BuildableBitmapTextureAtlas pBuildableBitmapTextureAtlas, final Context pContext, final int pDrawableResourceID, final int pTileColumns, final int pTileRows) {
		return BitmapTextureAtlasTextureRegionFactory.createTiledFromResource(pBuildableBitmapTextureAtlas, pContext.getResources(), pDrawableResourceID, pTileColumns, pTileRows);
	}

	public static TiledTextureRegion createTiledFromResource(final BuildableBitmapTextureAtlas pBuildableBitmapTextureAtlas, final Resources pResources, final int pDrawableResourceID, final int pTileColumns, final int pTileRows) {
		final IBitmapTextureAtlasSource bitmapTextureAtlasSource = ResourceBitmapTextureAtlasSource.create(pResources, pDrawableResourceID);
		return BitmapTextureAtlasTextureRegionFactory.createTiledFromSource(pBuildableBitmapTextureAtlas, bitmapTextureAtlasSource, pTileColumns, pTileRows);
	}


	public static TextureRegion createFromSource(final BuildableBitmapTextureAtlas pBuildableBitmapTextureAtlas, final IBitmapTextureAtlasSource pBitmapTextureAtlasSource) {
		return BitmapTextureAtlasTextureRegionFactory.createFromSource(pBuildableBitmapTextureAtlas, pBitmapTextureAtlasSource, false);
	}

	public static TextureRegion createFromSource(final BuildableBitmapTextureAtlas pBuildableBitmapTextureAtlas, final IBitmapTextureAtlasSource pBitmapTextureAtlasSource, final boolean pRotated) {
		return BuildableTextureAtlasTextureRegionFactory.createFromSource(pBuildableBitmapTextureAtlas, pBitmapTextureAtlasSource, pRotated);
	}

	public static TiledTextureRegion createTiledFromSource(final BuildableBitmapTextureAtlas pBuildableBitmapTextureAtlas, final IBitmapTextureAtlasSource pBitmapTextureAtlasSource, final int pTileColumns, final int pTileRows) {
		return BuildableTextureAtlasTextureRegionFactory.createTiledFromSource(pBuildableBitmapTextureAtlas, pBitmapTextureAtlasSource, pTileColumns, pTileRows);
	}

	/**
	 * Loads all files from a given assets directory (in alphabetical order) as consecutive tiles of an {@link TiledTextureRegion}.
	 *
	 * @param pBuildableBitmapTextureAtlas
	 * @param pAssetManager
	 * @param pAssetSubdirectory to load all files from "gfx/flowers" put "flowers" here (assuming, that you've used {@link BitmapTextureAtlasTextureRegionFactory#setAssetBasePath(String)} with "gfx/" before.)
	 * @return
	 */
	public static TiledTextureRegion createTiledFromAssetDirectory(final BuildableBitmapTextureAtlas pBuildableBitmapTextureAtlas, final AssetManager pAssetManager, final String pAssetSubdirectory) {
		final String[] files;
		try {
			files = pAssetManager.list(BitmapTextureAtlasTextureRegionFactory.sAssetBasePath + pAssetSubdirectory);
		} catch (final IOException e) {
			throw new AndEngineRuntimeException("Listing assets subdirectory: '" + BitmapTextureAtlasTextureRegionFactory.sAssetBasePath + pAssetSubdirectory + "' failed. Does it exist?", e);
		}

		/* AssetManager.list(...) does not guarantee any ordering; sort explicitly so the
		 * alphabetical order promised by the javadoc actually holds on every device. */
		Arrays.sort(files);

		final int fileCount = files.length;
		final TextureRegion[] textures = new TextureRegion[fileCount];
		for (int i = 0; i < fileCount; i++) {
			final String assetPath = pAssetSubdirectory + "/" + files[i];
			textures[i] = BitmapTextureAtlasTextureRegionFactory.createFromAsset(pBuildableBitmapTextureAtlas, pAssetManager, assetPath);
		}

		return new TiledTextureRegion(pBuildableBitmapTextureAtlas, textures);
	}

	// ===========================================================
	// Inner and Anonymous Classes
	// ===========================================================
}
/*
Copyright 2007-2009 WebDriver committers
Copyright 2007-2009 Google Inc.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package org.openqa.selenium;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.openqa.selenium.Ignore.Driver.CHROME;
import static org.openqa.selenium.Ignore.Driver.FIREFOX;
import static org.openqa.selenium.Ignore.Driver.IE;
import static org.openqa.selenium.Ignore.Driver.IPHONE;
import static org.openqa.selenium.Ignore.Driver.SELENESE;

import java.io.File;
import java.io.IOException;
import java.util.List;

/**
 * Verifies that DOM events (focus, click, mousedown, change, submit, ...) are fired correctly
 * and in the right order when the driver interacts with page elements.
 */
public class CorrectEventFiringTest extends AbstractDriverTestCase {

  @Ignore(value = {CHROME, FIREFOX}, reason = "Webkit bug 22261. Firefox 3.6 wants focus")
  @JavascriptEnabled
  public void testShouldFireFocusEventWhenClicking() {
    driver.get(pages.javascriptPage);

    clickOnElementWhichRecordsEvents();

    assertEventFired("focus");
  }

  @JavascriptEnabled
  public void testShouldFireClickEventWhenClicking() {
    driver.get(pages.javascriptPage);

    clickOnElementWhichRecordsEvents();

    assertEventFired("click");
  }

  @JavascriptEnabled
  @Ignore(SELENESE)
  public void testShouldFireMouseDownEventWhenClicking() {
    driver.get(pages.javascriptPage);

    clickOnElementWhichRecordsEvents();

    assertEventFired("mousedown");
  }

  @JavascriptEnabled
  @Ignore(SELENESE)
  public void testShouldFireMouseUpEventWhenClicking() {
    driver.get(pages.javascriptPage);

    clickOnElementWhichRecordsEvents();

    assertEventFired("mouseup");
  }

  @JavascriptEnabled
  @Ignore(value = {SELENESE, CHROME})
  public void testShouldFireMouseOverEventWhenClicking() {
    driver.get(pages.javascriptPage);

    clickOnElementWhichRecordsEvents();

    assertEventFired("mouseover");
  }

  @JavascriptEnabled
  @Ignore({SELENESE, CHROME, FIREFOX})
  public void testShouldFireMouseMoveEventWhenClicking() {
    driver.get(pages.javascriptPage);

    clickOnElementWhichRecordsEvents();

    assertEventFired("mousemove");
  }

  @Ignore(value = {CHROME, SELENESE, FIREFOX}, reason = "Webkit bug 22261. Firefox 3.6 wants focus")
  @JavascriptEnabled
  public void testShouldFireEventsInTheRightOrder() {
    driver.get(pages.javascriptPage);

    clickOnElementWhichRecordsEvents();

    String text = driver.findElement(By.id("result")).getText();

    int lastIndex = -1;
    for (String event : new String[]{"mousedown", "focus", "mouseup", "click"}) {
      int index = text.indexOf(event);

      assertTrue(event + " did not fire at all", index != -1);
      assertTrue(event + " did not fire in the correct order", index > lastIndex);
      // BUGFIX: lastIndex was never advanced, so the ordering assertion above was vacuous
      // (it only re-checked index > -1). Track the position of the last event seen.
      lastIndex = index;
    }
  }

  @JavascriptEnabled
  @Ignore(SELENESE)
  public void testsShouldIssueMouseDownEvents() {
    driver.get(pages.javascriptPage);
    driver.findElement(By.id("mousedown")).click();

    String result = driver.findElement(By.id("result")).getText();
    assertThat(result, equalTo("mouse down"));
  }

  @JavascriptEnabled
  public void testShouldIssueClickEvents() {
    driver.get(pages.javascriptPage);
    driver.findElement(By.id("mouseclick")).click();

    String result = driver.findElement(By.id("result")).getText();
    assertThat(result, equalTo("mouse click"));
  }

  @JavascriptEnabled
  @Ignore(SELENESE)
  public void testShouldIssueMouseUpEvents() {
    driver.get(pages.javascriptPage);
    driver.findElement(By.id("mouseup")).click();

    String result = driver.findElement(By.id("result")).getText();
    assertThat(result, equalTo("mouse up"));
  }

  @JavascriptEnabled
  @Ignore(value = {IPHONE, SELENESE})
  public void testMouseEventsShouldBubbleUpToContainingElements() {
    driver.get(pages.javascriptPage);
    driver.findElement(By.id("child")).click();

    String result = driver.findElement(By.id("result")).getText();
    assertThat(result, equalTo("mouse down"));
  }

  @JavascriptEnabled
  @Ignore(value = {IPHONE, SELENESE})
  public void testShouldEmitOnChangeEventsWhenSelectingElements() {
    driver.get(pages.javascriptPage);
    //Intentionally not looking up the select tag. See selenium r7937 for details.
    List<WebElement> allOptions = driver.findElements(By.xpath("//select[@id='selector']//option"));

    String initialTextValue = driver.findElement(By.id("result")).getText();

    WebElement foo = allOptions.get(0);
    WebElement bar = allOptions.get(1);

    foo.setSelected();
    assertThat(driver.findElement(By.id("result")).getText(), equalTo(initialTextValue));
    bar.setSelected();
    assertThat(driver.findElement(By.id("result")).getText(), equalTo("bar"));
  }

  @JavascriptEnabled
  @Ignore(SELENESE)
  public void testShouldEmitOnChangeEventsWhenChangingTheStateOfACheckbox() {
    driver.get(pages.javascriptPage);
    WebElement checkbox = driver.findElement(By.id("checkbox"));

    checkbox.setSelected();
    assertThat(driver.findElement(By.id("result")).getText(), equalTo("checkbox thing"));
  }

  @JavascriptEnabled
  public void testShouldEmitClickEventWhenClickingOnATextInputElement() {
    driver.get(pages.javascriptPage);

    WebElement clicker = driver.findElement(By.id("clickField"));
    clicker.click();

    assertThat(clicker.getValue(), equalTo("Clicked"));
  }

  @JavascriptEnabled
  public void testClearingAnElementShouldCauseTheOnChangeHandlerToFire() {
    driver.get(pages.javascriptPage);

    WebElement element = driver.findElement(By.id("clearMe"));
    element.clear();

    WebElement result = driver.findElement(By.id("result"));
    assertThat(result.getText(), equalTo("Cleared"));
  }

  @JavascriptEnabled
  @Ignore(value = {SELENESE, IPHONE},
      reason = "Chrome: Non-native event firing is broken in .\n"
          + "  Selenese: Fails when running in firefox.\n"
          + "  iPhone: sendKeys implementation is incorrect")
  public void testSendingKeysToAnotherElementShouldCauseTheBlurEventToFire() {
    if (browserNeedsFocusOnThisOs(driver)) {
      System.out.println("Skipping this test because browser demands focus");
      return;
    }

    driver.get(pages.javascriptPage);
    WebElement element = driver.findElement(By.id("theworks"));
    element.sendKeys("foo");
    WebElement element2 = driver.findElement(By.id("changeable"));
    element2.sendKeys("bar");
    assertEventFired("blur");
  }

  @JavascriptEnabled
  @Ignore(value = {SELENESE, CHROME, IPHONE},
      reason = ": Non-native event firing is broken in Chrome.\n"
          + "  Selenese: Fails when running in firefox.\n"
          + "  iPhone: sendKeys implementation is incorrect")
  public void testSendingKeysToAnElementShouldCauseTheFocusEventToFire() {
    if (browserNeedsFocusOnThisOs(driver)) {
      System.out.println("Skipping this test because browser demands focus");
      return;
    }

    driver.get(pages.javascriptPage);
    WebElement element = driver.findElement(By.id("theworks"));
    element.sendKeys("foo");
    assertEventFired("focus");
  }

  @JavascriptEnabled
  @Ignore(SELENESE)
  public void testSendingKeysToAFocusedElementShouldNotBlurThatElement() {
    if (browserNeedsFocusOnThisOs(driver)) {
      System.out.println("Skipping this test because browser demands focus");
      return;
    }

    driver.get(pages.javascriptPage);
    WebElement element = driver.findElement(By.id("theworks"));
    element.click();

    // Poll (up to ~1s) until the click has actually focused the element.
    boolean focused = false;
    WebElement result = driver.findElement(By.id("result"));
    for (int i = 0; i < 5; ++i) {
      String fired = result.getText();
      if (fired.contains("focus")) {
        focused = true;
        break;
      }
      try {
        Thread.sleep(200);
      } catch (InterruptedException e) {
        // Restore the interrupt status instead of silently swallowing it, and stop waiting.
        Thread.currentThread().interrupt();
        break;
      }
    }
    if (!focused) {
      fail("Clicking on element didn't focus it in time - can't proceed so failing");
    }

    element.sendKeys("a");
    assertEventNotFired("blur");
  }

  @JavascriptEnabled
  @Ignore({IE, SELENESE})
  public void testSubmittingFormFromFormElementShouldFireOnSubmitForThatForm() {
    driver.get(pages.javascriptPage);
    WebElement formElement = driver.findElement(By.id("submitListeningForm"));
    formElement.submit();
    assertEventFired("form-onsubmit");
  }

  @JavascriptEnabled
  @Ignore({IE, SELENESE})
  public void testSubmittingFormFromFormInputSubmitElementShouldFireOnSubmitForThatForm() {
    driver.get(pages.javascriptPage);
    WebElement submit = driver.findElement(By.id("submitListeningForm-submit"));
    submit.submit();
    assertEventFired("form-onsubmit");
  }

  @JavascriptEnabled
  @Ignore({IE, SELENESE})
  public void testSubmittingFormFromFormInputTextElementShouldFireOnSubmitForThatFormAndNotClickOnThatInput() {
    driver.get(pages.javascriptPage);
    WebElement submit = driver.findElement(By.id("submitListeningForm-submit"));
    submit.submit();
    assertEventFired("form-onsubmit");
    assertEventNotFired("text-onclick");
  }

  @JavascriptEnabled
  @Ignore(value = {CHROME, SELENESE, IPHONE}, reason = "Does not yet support file uploads")
  public void testUploadingFileShouldFireOnChangeEvent() throws IOException {
    driver.get(pages.formPage);
    WebElement uploadElement = driver.findElement(By.id("upload"));
    WebElement result = driver.findElement(By.id("fileResults"));
    assertThat(result.getText(), equalTo(""));

    // BUGFIX: createTempFile does not insert a dot, so the suffix must include it;
    // "txt" produced names like "test123txt" with no extension at all.
    File file = File.createTempFile("test", ".txt");
    file.deleteOnExit();

    uploadElement.sendKeys(file.getAbsolutePath());
    // Shift focus to something else because send key doesn't make the focus leave
    driver.findElement(By.tagName("body")).click();

    assertThat(result.getText(), equalTo("changed"));
  }

  /** Clicks the page element whose handlers append every received event to #result. */
  private void clickOnElementWhichRecordsEvents() {
    driver.findElement(By.id("plainButton")).click();
  }

  /** Asserts that {@code eventName} appears in the recorded-events text. */
  private void assertEventFired(String eventName) {
    WebElement result = driver.findElement(By.id("result"));
    String text = result.getText();
    assertTrue("No " + eventName + " fired: " + text, text.contains(eventName));
  }

  /** Asserts that {@code eventName} does NOT appear in the recorded-events text. */
  private void assertEventNotFired(String eventName) {
    WebElement result = driver.findElement(By.id("result"));
    String text = result.getText();
    assertFalse(eventName + " fired: " + text, text.contains(eventName));
  }
}
package org.jetbrains.plugins.textmate.bundles; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.JsonIOException; import com.google.gson.JsonSyntaxException; import com.intellij.openapi.util.io.FileUtilRt; import com.intellij.openapi.util.text.StringUtil; import com.intellij.util.containers.FactoryMap; import com.intellij.util.containers.MultiMap; import org.jetbrains.annotations.NotNull; import org.jetbrains.plugins.textmate.plist.PListValue; import org.jetbrains.plugins.textmate.plist.Plist; import org.jetbrains.plugins.textmate.plist.PlistReader; import java.io.File; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; import java.util.*; import java.util.stream.Collectors; import java.util.stream.Stream; import static java.util.Collections.emptyList; import static org.jetbrains.plugins.textmate.Constants.*; import static org.jetbrains.plugins.textmate.plist.PListValue.*; public class VSCBundle extends Bundle { private final Map<String, Collection<String>> grammarToExtensions = new HashMap<>(); private final MultiMap<String, String> configToScopes = new MultiMap<>(); public VSCBundle(@NotNull String name, @NotNull String bundle) { super(name, bundle, BundleType.VSCODE); } @NotNull @Override public Collection<File> getGrammarFiles() { loadExtensions(); //noinspection SSBasedInspection return grammarToExtensions.keySet().stream().map((path) -> new File(bundleFile, path)).collect(Collectors.toList()); } @Override public Collection<String> getExtensions(@NotNull File file, @NotNull Plist plist) { HashSet<String> result = new HashSet<>(super.getExtensions(file, plist)); loadExtensions(); result.addAll(grammarToExtensions.getOrDefault(FileUtilRt.toSystemIndependentName( Objects.requireNonNull(FileUtilRt.getRelativePath(bundleFile, file))), emptyList())); return result; } private void loadExtensions() { if (!grammarToExtensions.isEmpty()) return; File packageJson = new File(bundleFile, 
"package.json"); try { Object json = new Gson().fromJson(new FileReader(packageJson), Object.class); if (json instanceof Map) { Object contributes = ((Map)json).get("contributes"); if (contributes instanceof Map) { Object languages = ((Map)contributes).get("languages"); Object grammars = ((Map)contributes).get("grammars"); if (languages instanceof ArrayList && grammars instanceof ArrayList) { Map<String, Collection<String>> idToExtension = FactoryMap.create(s -> new HashSet<>()); Map<String, String> idToConfig = new HashMap<>(); for (Object language : (ArrayList)languages) { if (language instanceof Map) { Object id = ((Map)language).get("id"); if (id instanceof String) { Object extensions = ((Map)language).get("extensions"); if (extensions instanceof ArrayList) { Stream<String> stream = ((ArrayList)extensions).stream().map(ext -> StringUtil.trimStart((String)ext, ".")); idToExtension.get(id).addAll(stream.collect(Collectors.toList())); } Object filenames = ((Map)language).get("filenames"); if (filenames instanceof ArrayList) { idToExtension.get(id).addAll((ArrayList)filenames); } Object configuration = ((Map)language).get("configuration"); if (configuration instanceof String) { idToConfig.put((String)id, FileUtilRt.toSystemIndependentName((String)configuration)); } } } } Map<String, Collection<String>> grammarExtensions = new HashMap<>(); MultiMap<String, String> scopeConfig = new MultiMap<>(); for (Object grammar : (ArrayList)grammars) { if (grammar instanceof Map) { Object path = ((Map)grammar).get("path"); Object language = ((Map)grammar).get("language"); Collection<String> extensions = idToExtension.get(language); if (path instanceof String) { grammarExtensions.put((String)path, extensions); } Object scopeName = ((Map)grammar).get("scopeName"); String config = idToConfig.get(language); if (scopeName instanceof String && config != null) { scopeConfig.putValue(config, (String)scopeName); } Object embedded = ((Map)grammar).get("embeddedLanguages"); if (embedded 
instanceof Map) { for (Object embeddedScope : ((Map)embedded).keySet()) { Object embeddedLanguage = ((Map)embedded).get(embeddedScope); if (embeddedScope instanceof String && embeddedLanguage instanceof String) { String embeddedConfig = idToConfig.get(embeddedLanguage); if (embeddedConfig != null) { scopeConfig.putValue(embeddedConfig, (String)embeddedScope); } } } } } } grammarToExtensions.putAll(grammarExtensions); configToScopes.putAllValues(scopeConfig); } } } } catch (FileNotFoundException | JsonSyntaxException ignored) { } } @NotNull @Override public Collection<File> getPreferenceFiles() { //noinspection SSBasedInspection return configToScopes.keySet().stream().map(config -> new File(bundleFile, config)).collect(Collectors.toList()); } @Override public List<Map.Entry<String, Plist>> loadPreferenceFile(@NotNull File file, @NotNull PlistReader plistReader) throws IOException { Plist fromJson = loadLanguageConfig(file); //noinspection SSBasedInspection return configToScopes.get(FileUtilRt.toSystemIndependentName( Objects.requireNonNull(FileUtilRt.getRelativePath(bundleFile, file)))).stream() .map(scope -> new AbstractMap.SimpleImmutableEntry<>(scope, fromJson)) .collect(Collectors.toList()); } @NotNull private static Plist loadLanguageConfig(File languageConfig) throws IOException { Gson gson = new GsonBuilder().setLenient().create(); try { Object json = gson.fromJson(new FileReader(languageConfig), Object.class); Plist settings = new Plist(); if (json instanceof Map) { settings.setEntry(HIGHLIGHTING_PAIRS_KEY, loadBrackets((Map)json, "brackets")); settings.setEntry(SMART_TYPING_PAIRS_KEY, loadBrackets((Map)json, "surroundingPairs")); settings.setEntry(SHELL_VARIABLES_KEY, array(loadComments((Map)json))); } return settings; } catch (JsonSyntaxException | JsonIOException e) { throw new IOException(e); } } private static PListValue loadBrackets(Map json, String key) { Object brackets = json.get(key); if (!(brackets instanceof ArrayList)) { return null; } 
List<PListValue> pairs = new ArrayList<>(); for (Object bracket : (ArrayList)brackets) { if (bracket instanceof ArrayList && ((ArrayList)bracket).size() == 2) { pairs.add(array(string(((ArrayList)bracket).get(0).toString()), string(((ArrayList)bracket).get(1).toString()))); } } return array(pairs); } private static List<PListValue> loadComments(Map json) { List<PListValue> variables = new ArrayList<>(); Object comments = json.get("comments"); if (comments instanceof Map) { Object line = ((Map)comments).get("lineComment"); boolean hasLine = line instanceof String; if (hasLine) { variables.add(variable(COMMENT_START_VARIABLE, ((String)line).trim() + " ")); } Object block = ((Map)comments).get("blockComment"); if (block instanceof ArrayList && ((ArrayList)block).size() == 2) { String suffix = hasLine ? "_2" : ""; variables.add(variable(COMMENT_START_VARIABLE + suffix, ((ArrayList)block).get(0).toString().trim() + " ")); variables.add(variable(COMMENT_END_VARIABLE + suffix, " " + ((ArrayList)block).get(1).toString().trim())); } } return variables; } private static PListValue variable(String name, String value) { Plist variable = new Plist(); variable.setEntry(NAME_KEY, string(name)); variable.setEntry(VALUE_KEY, string(value)); return dict(variable); } }
/* * Copyright 2014 Google Inc. All Rights Reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.vrtoolkit.cardboard.samples.treasurehunt; /** * Created by cjr on 6/18/14. */ public final class WorldLayoutData { public static final float[] CUBE_COORDS = new float[] { // Front face -1.0f, 1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, -1.0f, 1.0f, 1.0f, 1.0f, 1.0f, // Right face 1.0f, 1.0f, 1.0f, 1.0f, -1.0f, 1.0f, 1.0f, 1.0f, -1.0f, 1.0f, -1.0f, 1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, -1.0f, // Back face 1.0f, 1.0f, -1.0f, 1.0f, -1.0f, -1.0f, -1.0f, 1.0f, -1.0f, 1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, 1.0f, -1.0f, // Left face -1.0f, 1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, 1.0f, 1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, 1.0f, -1.0f, 1.0f, 1.0f, // Top face -1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, -1.0f, // Bottom face 1.0f, -1.0f, -1.0f, 1.0f, -1.0f, 1.0f, -1.0f, -1.0f, -1.0f, 1.0f, -1.0f, 1.0f, -1.0f, -1.0f, 1.0f, -1.0f, -1.0f, -1.0f, }; public static final float[] CUBE_COLORS = new float[] { // front, green 0f, 0.5273f, 0.2656f, 1.0f, 0f, 0.5273f, 0.2656f, 1.0f, 0f, 0.5273f, 0.2656f, 1.0f, 0f, 0.5273f, 0.2656f, 1.0f, 0f, 0.5273f, 0.2656f, 1.0f, 0f, 0.5273f, 0.2656f, 1.0f, // right, blue 0.0f, 0.3398f, 0.9023f, 1.0f, 0.0f, 0.3398f, 0.9023f, 1.0f, 0.0f, 0.3398f, 0.9023f, 1.0f, 0.0f, 0.3398f, 0.9023f, 1.0f, 0.0f, 0.3398f, 0.9023f, 1.0f, 0.0f, 0.3398f, 
0.9023f, 1.0f, // back, also green 0f, 0.5273f, 0.2656f, 1.0f, 0f, 0.5273f, 0.2656f, 1.0f, 0f, 0.5273f, 0.2656f, 1.0f, 0f, 0.5273f, 0.2656f, 1.0f, 0f, 0.5273f, 0.2656f, 1.0f, 0f, 0.5273f, 0.2656f, 1.0f, // left, also blue 0.0f, 0.3398f, 0.9023f, 1.0f, 0.0f, 0.3398f, 0.9023f, 1.0f, 0.0f, 0.3398f, 0.9023f, 1.0f, 0.0f, 0.3398f, 0.9023f, 1.0f, 0.0f, 0.3398f, 0.9023f, 1.0f, 0.0f, 0.3398f, 0.9023f, 1.0f, // top, red 0.8359375f, 0.17578125f, 0.125f, 1.0f, 0.8359375f, 0.17578125f, 0.125f, 1.0f, 0.8359375f, 0.17578125f, 0.125f, 1.0f, 0.8359375f, 0.17578125f, 0.125f, 1.0f, 0.8359375f, 0.17578125f, 0.125f, 1.0f, 0.8359375f, 0.17578125f, 0.125f, 1.0f, // bottom, also red 0.8359375f, 0.17578125f, 0.125f, 1.0f, 0.8359375f, 0.17578125f, 0.125f, 1.0f, 0.8359375f, 0.17578125f, 0.125f, 1.0f, 0.8359375f, 0.17578125f, 0.125f, 1.0f, 0.8359375f, 0.17578125f, 0.125f, 1.0f, 0.8359375f, 0.17578125f, 0.125f, 1.0f, }; public static final float[] CUBE_FOUND_COLORS = new float[] { // front, yellow 1.0f, 0.0f, 0.6523f, 1.0f, 1.0f, 0.0f, 0.6523f, 1.0f, 1.0f, 0.0f, 0.6523f, 1.0f, 1.0f, 0.0f, 0.6523f, 1.0f, 1.0f, 0.0f, 0.6523f, 1.0f, 1.0f, 0.0f, 0.6523f, 1.0f, // right, yellow 1.0f, 0.0f, 0.6523f, 1.0f, 1.0f, 0.0f, 0.6523f, 1.0f, 1.0f, 0.0f, 0.6523f, 1.0f, 1.0f, 0.0f, 0.6523f, 1.0f, 1.0f, 0.0f, 0.6523f, 1.0f, 1.0f, 0.0f, 0.6523f, 1.0f, // back, yellow 1.0f, 0.0f, 0.6523f, 1.0f, 1.0f, 0.0f, 0.6523f, 1.0f, 1.0f, 0.0f, 0.6523f, 1.0f, 1.0f, 0.0f, 0.6523f, 1.0f, 1.0f, 0.0f, 0.6523f, 1.0f, 1.0f, 0.0f, 0.6523f, 1.0f, // left, yellow 1.0f, 0.0f, 0.6523f, 1.0f, 1.0f, 0.0f, 0.6523f, 1.0f, 1.0f, 0.0f, 0.6523f, 1.0f, 1.0f, 0.0f, 0.6523f, 1.0f, 1.0f, 0.0f, 0.6523f, 1.0f, 1.0f, 0.0f, 0.6523f, 1.0f, // top, yellow 1.0f, 0.0f, 0.6523f, 1.0f, 1.0f, 0.0f, 0.6523f, 1.0f, 1.0f, 0.0f, 0.6523f, 1.0f, 1.0f, 0.0f, 0.6523f, 1.0f, 1.0f, 0.0f, 0.6523f, 1.0f, 1.0f, 0.0f, 0.6523f, 1.0f, // bottom, yellow 1.0f, 0.0f, 0.6523f, 1.0f, 1.0f, 0.0f, 0.6523f, 1.0f, 1.0f, 0.0f, 0.6523f, 1.0f, 1.0f, 0.0f, 0.6523f, 1.0f, 1.0f, 0.0f, 
0.6523f, 1.0f, 1.0f, 0.0f, 0.6523f, 1.0f, }; public static final float[] CUBE_NORMALS = new float[] { // Front face 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, // Right face 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, // Back face 0.0f, 0.0f, -1.0f, 0.0f, 0.0f, -1.0f, 0.0f, 0.0f, -1.0f, 0.0f, 0.0f, -1.0f, 0.0f, 0.0f, -1.0f, 0.0f, 0.0f, -1.0f, // Left face -1.0f, 0.0f, 0.0f, -1.0f, 0.0f, 0.0f, -1.0f, 0.0f, 0.0f, -1.0f, 0.0f, 0.0f, -1.0f, 0.0f, 0.0f, -1.0f, 0.0f, 0.0f, // Top face 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, // Bottom face 0.0f, -1.0f, 0.0f, 0.0f, -1.0f, 0.0f, 0.0f, -1.0f, 0.0f, 0.0f, -1.0f, 0.0f, 0.0f, -1.0f, 0.0f, 0.0f, -1.0f, 0.0f }; public static final float[] FLOOR_COORDS = new float[] { 200f, 0, -200f, -200f, 0, -200f, -200f, 0, 200f, 200f, 0, -200f, -200f, 0, 200f, 200f, 0, 200f, }; public static final float[] FLOOR_NORMALS = new float[] { 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, }; public static final float[] FLOOR_COLORS = new float[] { 0.0f, 0.3398f, 0.9023f, 1.0f, 0.0f, 0.3398f, 0.9023f, 1.0f, 0.0f, 0.3398f, 0.9023f, 1.0f, 0.0f, 0.3398f, 0.9023f, 1.0f, 0.0f, 0.3398f, 0.9023f, 1.0f, 0.0f, 0.3398f, 0.9023f, 1.0f, }; }
package com.khs.microservice.whirlpool.service; import com.google.common.io.Resources; import com.google.common.util.concurrent.ThreadFactoryBuilder; import com.google.gson.Gson; import com.khs.microservice.whirlpool.common.Command; import com.khs.microservice.whirlpool.common.CommandResponse; import com.khs.microservice.whirlpool.common.MessageConstants; import org.apache.kafka.clients.consumer.CommitFailedException; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.ConsumerRecords; import org.apache.kafka.clients.consumer.KafkaConsumer; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerRecord; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.InputStream; import java.time.Duration; import java.util.*; import java.util.concurrent.*; import java.util.concurrent.atomic.AtomicBoolean; /** * This class contains the common code for all the services */ public abstract class BaseService { protected static final Logger logger = LoggerFactory.getLogger(BaseService.class); protected ExecutorService consumerExecutor; protected ExecutorService producerExecutor; protected ExecutorService dataExecutor; protected final Queue<String> responseQueue = new ConcurrentLinkedQueue<>(); protected final AtomicBoolean keepRunning = new AtomicBoolean(true); // Keep track of the subscriptions each user has asked for info about protected static Map<String, List<String>> allSubscriptions = new ConcurrentHashMap<>(); private static final ThreadFactory consumerThreadFactory = new ThreadFactoryBuilder() .setDaemon(true) .setNameFormat("consumer-%d") .build(); private static final ThreadFactory producerThreadFactory = new ThreadFactoryBuilder() .setDaemon(true) .setNameFormat("producer-%d") .build(); private static final ThreadFactory dataThreadFactory = new ThreadFactoryBuilder() .setDaemon(true) .setNameFormat("data-%d") .build(); public BaseService() { } protected 
abstract String getCommandType(); protected abstract void collectData(Gson gson, String user, List<String> subscriptions); public void startServer(String commandTopic, String producerTopic) { consumerExecutor = Executors.newSingleThreadExecutor(consumerThreadFactory); producerExecutor = Executors.newSingleThreadExecutor(producerThreadFactory); dataExecutor = Executors.newSingleThreadExecutor(dataThreadFactory); FutureTask<String> sendTickers = new FutureTask<>(new SendDataCallable(producerTopic)); producerExecutor.execute(sendTickers); // tell the Executor there will be no further tasks once the thread ends. producerExecutor.shutdown(); FutureTask<String> readTickers = new FutureTask<>(new ReaderCallable(commandTopic)); consumerExecutor.execute(readTickers); consumerExecutor.shutdown(); FutureTask<String> dataTickers = new FutureTask<>(new DataCollectorCallable()); dataExecutor.execute(dataTickers); dataExecutor.shutdown(); Runtime.getRuntime().addShutdownHook(new Thread(() -> { logger.info("Shutting down..."); keepRunning.set(false); })); } /** * This class runs as a thread. It looks for data on the configured topic and updates the * appropriate subscription. * */ public class ReaderCallable implements Callable<String> { // one per callable as it is stateless, but not thread safe private Gson gson = new Gson(); private String topic; public ReaderCallable(String topic) { this.topic = topic; } @Override public String call() throws Exception { // and the consumer KafkaConsumer<String, String> consumer; try (InputStream props = Resources.getResource("consumer.props").openStream()) { Properties properties = new Properties(); properties.load(props); consumer = new KafkaConsumer<>(properties); } consumer.subscribe(Collections.singletonList(topic)); int timeouts = 0; try { while (keepRunning.get()) { // read records with a short timeout. If we time out, we don't really care. 
ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(500)); if (records.count() == 0) { timeouts++; } else { logger.trace(String.format("Got %d records after %d timeouts\n", records.count(), timeouts)); timeouts = 0; } CommandResponse response = new CommandResponse(); response.setType(getCommandType()); for (ConsumerRecord<String, String> record : records) { if (record.topic().equals(topic)) { List<String> items; Command command = gson.fromJson(record.value(), Command.class); String commandId = command.getId(); response.setCommand(command.getCommand()); response.setSubscription(command.getSubscription()); response.setId(commandId); response.setErrorMessage(null); if (command.getCommand() != null) { if ("add".equals(command.getCommand())) { items = allSubscriptions.get(commandId); if (items == null) { items = new CopyOnWriteArrayList<>(); } items.add(command.getSubscription()); allSubscriptions.put(commandId, items); response.setResult(MessageConstants.SUCCESS); } else if ("remove".equals(command.getCommand())) { items = allSubscriptions.get(commandId); if (items.contains(command.getSubscription())) { items.remove(command.getSubscription()); allSubscriptions.put(commandId, items); response.setResult(MessageConstants.SUCCESS); } else { response.setResult(MessageConstants.FAILURE); response.setCommand(command.getCommand()); response.setErrorMessage("Subscription: (" + command.getSubscription() + ") was not found"); } } else if ("refresh".equals(command.getCommand())) { items = allSubscriptions.get(commandId); if (items != null && items.size() > 0) { collectData(gson, commandId, items); } response.setResult(MessageConstants.SUCCESS); response.setCommand(command.getCommand()); } else { response.setResult(MessageConstants.FAILURE); response.setErrorMessage("Command not recognized. " + record.value()); } } else { response.setResult(MessageConstants.FAILURE); response.setErrorMessage("Command not recognized. 
" + record.value()); } try { consumer.commitSync(); } catch (CommitFailedException e) { logger.error("commit failed", e); } responseQueue.add(gson.toJson(response)); } else { try { consumer.commitSync(); } catch (CommitFailedException e) { logger.error("commit failed", e); } throw new IllegalStateException("Shouldn't be possible to get message on topic " + record.topic()); } } } } finally { consumer.close(); } return "done"; } } /** * This class runs as a thread. It periodically asks the service to collect data. The service places the data * on the responseQueue. * */ public class DataCollectorCallable implements Callable<String> { // one per callable as it is stateless, but not thread safe private Gson gson = new Gson(); public DataCollectorCallable() { } @Override public String call() throws Exception { try { while(keepRunning.get()) { if (!allSubscriptions.isEmpty()) { for (String user : allSubscriptions.keySet()) { List<String> subscriptions = allSubscriptions.get(user); if (!subscriptions.isEmpty()) { collectData(gson, user, subscriptions); } } } // only collect data every 30 seconds so remote services aren't overwhelmed with messages Thread.sleep(30000L); } } catch (Throwable throwable) { logger.error(throwable.getMessage(), throwable); } return "done"; } } /** * This class runs as a thread. It periodically checks for messages waiting to be sent and places them on * the configured topic. 
* */ class SendDataCallable implements Callable<String> { private String topic; public SendDataCallable(String topic) { this.topic = topic; } @Override public String call() throws Exception { // set up the producer KafkaProducer<String, String> producer; try (InputStream props = Resources.getResource("producer.props").openStream()) { Properties properties = new Properties(); properties.load(props); producer = new KafkaProducer<>(properties); } String message; try { while (keepRunning.get()) { while ((message = responseQueue.poll()) != null) { logger.debug(String.format("Sending message: '%s' to topic: '%s'", message, topic)); producer.send(new ProducerRecord<>(topic, message), (metadata, e) -> { if (e != null) { logger.error(e.getMessage(), e); } logger.trace(String.format("The offset of the record we just sent is: %d", metadata.offset())); }); } producer.flush(); // Don't busy wait Thread.sleep(500L); } } catch (Throwable throwable) { logger.error(throwable.getMessage(), throwable); } finally { producer.close(); } return "done"; } } }
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.openapi.externalSystem.model; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.util.SystemInfo; import com.intellij.openapi.util.UserDataHolderBase; import com.intellij.openapi.util.UserDataHolderEx; import com.intellij.openapi.util.registry.Registry; import com.intellij.util.containers.ContainerUtilRt; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.io.*; import java.util.*; import java.util.function.Function; /** * This class provides a generic graph infrastructure with ability to store particular data. The main purpose is to * allow easy extensible data domain construction. * <p/> * Example: we might want to describe project model like 'project' which has multiple 'module' children where every * 'module' has a collection of child 'content root' and dependencies nodes etc. When that is done, plugins can easily * enhance any project. For example, particular framework can add facet settings as one more 'project' node's child. * <p/> * Not thread-safe. 
* * @author Denis Zhdanov */ public class DataNode<T> implements Serializable, UserDataHolderEx { private static final long serialVersionUID = 1L; private static final Logger LOG = Logger.getInstance(DataNode.class); @NotNull private final List<DataNode<?>> myChildren = ContainerUtilRt.newArrayList(); @NotNull private transient List<DataNode<?>> myChildrenView = Collections.unmodifiableList(myChildren); @NotNull private transient UserDataHolderBase myUserData = new UserDataHolderBase(); @NotNull private final Key<T> myKey; private transient T myData; private byte[] myRawData; private boolean myIgnored; @Nullable private DataNode<?> myParent; public DataNode(@NotNull Key<T> key, @NotNull T data, @Nullable DataNode<?> parent) { myKey = key; myData = data; myParent = parent; } private DataNode(@NotNull Key<T> key) { myKey = key; } @Nullable public DataNode<?> getParent() { return myParent; } @NotNull public <T> DataNode<T> createChild(@NotNull Key<T> key, @NotNull T data) { DataNode<T> result = new DataNode<>(key, data, this); myChildren.add(result); return result; } @NotNull public Key<T> getKey() { return myKey; } @NotNull public T getData() { if (myData == null) { prepareData(getClass().getClassLoader(), Thread.currentThread().getContextClassLoader()); } return myData; } public boolean isIgnored() { return myIgnored; } public void setIgnored(boolean ignored) { myIgnored = ignored; } /** * This class is a generic holder for any kind of project data. That project data might originate from different locations, e.g. * core ide plugins, non-core ide plugins, third-party plugins etc. That means that when a service from a core plugin needs to * unmarshall {@link DataNode} object, its content should not be unmarshalled as well because its class might be unavailable here. * <p/> * That's why the content is delivered as a raw byte array and this method allows to build actual java object from it using * the right class loader. 
* <p/> * This method is a no-op if the content is already built. * * @param loaders class loaders which are assumed to be able to build object of the target content class */ @SuppressWarnings({"IOResourceOpenedButNotSafelyClosed"}) public void prepareData(@NotNull final ClassLoader ... loaders) { if (myData != null) { return; } try { myData = getSerializer().readData(myRawData, loaders); assert myData != null; myRawData = null; } catch (IOException|ClassNotFoundException e) { throw new IllegalStateException( String.format("Can't deserialize target data of key '%s'. Given class loaders: %s", myKey, Arrays.toString(loaders)), e ); } } /** * Allows to replace or modify data. If function returns null, data is left unchanged * @param visitor visitor. Must accept argument of type T and return value of type T */ public void visitData(@Nullable Function visitor) { if (visitor == null) { return; } final T newData = (T) visitor.apply(getData()); if (newData != null) { myData = newData; myRawData = null; } } /** * Allows to retrieve data stored for the given key at the current node or any of its parents. 
 *
 * @param key target data's key
 * @param <T> target data type
 * @return data stored for the current key and available via the current node (if any)
 */
@SuppressWarnings("unchecked")
@Nullable
public <T> T getData(@NotNull Key<T> key) {
  if (myKey.equals(key)) {
    return (T)myData;
  }
  // Walk up the parent chain looking for the first node registered under the requested key.
  for (DataNode<?> p = myParent; p != null; p = p.myParent) {
    if (p.myKey.equals(key)) {
      return (T)p.myData;
    }
  }
  return null;
}

/**
 * Same lookup as {@link #getData(Key)} but returns the matching node itself rather than its payload.
 */
@SuppressWarnings("unchecked")
@Nullable
public <T> DataNode<T> getDataNode(@NotNull Key<T> key) {
  if (myKey.equals(key)) {
    return (DataNode<T>)this;
  }
  for (DataNode<?> p = myParent; p != null; p = p.myParent) {
    if (p.myKey.equals(key)) {
      return (DataNode<T>)p;
    }
  }
  return null;
}

/**
 * Finds the closest node (this one included) whose payload is an instance of the given class.
 */
@SuppressWarnings("unchecked")
@Nullable
public <P> DataNode<P> getParent(@NotNull Class<P> dataClass) {
  if (dataClass.isInstance(myData)) {
    return (DataNode<P>)this;
  }
  for (DataNode<?> p = myParent; p != null; p = p.myParent) {
    if (dataClass.isInstance(p.myData)) {
      return (DataNode<P>)p;
    }
  }
  return null;
}

// Attaches the child and re-parents it to this node.
public void addChild(@NotNull DataNode<?> child) {
  child.myParent = this;
  myChildren.add(child);
}

// Unmodifiable view; the backing list is mutated via addChild()/clear().
@NotNull
public Collection<DataNode<?>> getChildren() {
  return myChildrenView;
}

// Custom serialization hook: the payload travels as raw bytes (myRawData), not as the live object.
private void writeObject(ObjectOutputStream out) throws IOException {
  try {
    myRawData = getDataBytes();
  }
  catch (IOException e) {
    LOG.warn("Unable to serialize the data node - " + toString());
    throw e;
  }
  out.defaultWriteObject();
}

// Restores the transient fields that defaultReadObject() cannot recreate.
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
  in.defaultReadObject();
  myChildrenView = Collections.unmodifiableList(myChildren);
  myUserData = new UserDataHolderBase();
}

/**
 * Throws {@link IOException} if the payload cannot be serialized; the serialized output itself
 * is discarded (written to a no-op sink).
 */
public void checkIsSerializable() throws IOException {
  if (myRawData != null) return; // already in serialized form, so trivially serializable
  try (ObjectOutputStream oOut = new ObjectOutputStream(NoopOutputStream.getInstance())) {
    oOut.writeObject(myData);
  }
}

// Returns the cached serialized form if present, otherwise serializes the payload now.
public byte[] getDataBytes() throws IOException {
  if (myRawData != null) return myRawData;
  return getSerializer().getBytes(myData);
}

@Override
public int hashCode() {
  // We can't use myChildren.hashCode() because it iterates whole subtree. This should not produce many collisions because 'getData()'
  // usually refers to different objects
  return 31 * myKey.hashCode() + getData().hashCode();
}

@Override
public boolean equals(Object o) {
  if (this == o) return true;
  if (o == null || getClass() != o.getClass()) return false;

  DataNode node = (DataNode)o;

  // NOTE(review): unlike hashCode(), equals() does compare children, i.e. it walks the whole
  // subtree. Equal objects still produce equal hashes, so the hashCode contract holds.
  if (!myChildren.equals(node.myChildren)) return false;
  if (!getData().equals(node.getData())) return false;
  if (!myKey.equals(node.myKey)) return false;

  return true;
}

@Override
public String toString() {
  String dataDescription;
  try {
    dataDescription = getData().toString();
  }
  catch (Exception e) {
    // toString() must never throw; fall back to a marker string and log at debug level.
    dataDescription = "failed to load";
    LOG.debug(e);
  }
  return String.format("%s: %s", myKey, dataDescription);
}

/**
 * Detaches this node and clears its payload and children.
 *
 * @param removeFromGraph when true the node is also removed from its parent's child list
 */
public void clear(boolean removeFromGraph) {
  if (removeFromGraph && myParent != null) {
    for (Iterator<DataNode<?>> iterator = myParent.myChildren.iterator(); iterator.hasNext(); ) {
      DataNode<?> dataNode = iterator.next();
      // NOTE(review): identityHashCode equality does not guarantee object identity (hash values
      // can collide for distinct objects); 'dataNode == this' would be the exact check - confirm.
      if (System.identityHashCode(dataNode) == System.identityHashCode(this)) {
        iterator.remove();
        break;
      }
    }
  }
  myParent = null;
  myRawData = null;
  myChildren.clear();
}

// Serializer selection is driven by the 'ext.project.data.serializer' registry key:
// 'auto' picks JDK serialization on Java 9+ and FST otherwise; unknown values fall back to JDK.
private DataNodeSerializer<T> getSerializer() {
  switch (Registry.stringValue("ext.project.data.serializer")) {
    case "auto":
      if (SystemInfo.IS_AT_LEAST_JAVA9) {
        return JDKSerializer.getInstance();
      }
      else {
        return FSTSerializer.getInstance();
      }
    case "jdk":
      return JDKSerializer.getInstance();
    case "fst":
      return FSTSerializer.getInstance();
  }
  return JDKSerializer.getInstance();
}

// Deep copy of this node and its whole subtree.
@NotNull
public DataNode<T> graphCopy() {
  return copy(this, null);
}

// Shallow copy: this node only, without parent or children.
@NotNull
public DataNode<T> nodeCopy() {
  return nodeCopy(this);
}

@Nullable
@Override
public <U> U getUserData(@NotNull com.intellij.openapi.util.Key<U> key) {
  return (U)myUserData.getUserData(key);
}

@Override
public <U> void putUserData(@NotNull com.intellij.openapi.util.Key<U> key, U value) {
  myUserData.putUserData(key, value);
}

// Removal is expressed as storing null, mirroring UserDataHolderBase semantics.
public <U> void removeUserData(@NotNull com.intellij.openapi.util.Key<U> key) {
  myUserData.putUserData(key, null);
}

@NotNull
@Override
public <T> T putUserDataIfAbsent(@NotNull com.intellij.openapi.util.Key<T> key, @NotNull T value) {
  return myUserData.putUserDataIfAbsent(key, value);
}

@Override
public <T> boolean replace(@NotNull com.intellij.openapi.util.Key<T> key, @Nullable T oldValue, @Nullable T newValue) {
  return myUserData.replace(key, oldValue, newValue);
}

public <T> void putCopyableUserData(@NotNull com.intellij.openapi.util.Key<T> key, T value) {
  myUserData.putCopyableUserData(key, value);
}

public boolean isUserDataEmpty() {
  return myUserData.isUserDataEmpty();
}

public <T> T getCopyableUserData(@NotNull com.intellij.openapi.util.Key<T> key) {
  return myUserData.getCopyableUserData(key);
}

/**
 * Shallow copy of the given node: payload, raw bytes, ignored flag and copyable user data are
 * carried over; parent and children are not.
 */
@NotNull
public static <T> DataNode<T> nodeCopy(@NotNull DataNode<T> dataNode) {
  DataNode<T> copy = new DataNode<>(dataNode.myKey);
  copy.myData = dataNode.myData;
  copy.myRawData = dataNode.myRawData;
  copy.myIgnored = dataNode.myIgnored;
  dataNode.myUserData.copyCopyableDataTo(copy.myUserData);
  return copy;
}

// Recursive helper behind graphCopy().
@NotNull
private static <T> DataNode<T> copy(@NotNull DataNode<T> dataNode, @Nullable DataNode<?> newParent) {
  DataNode<T> copy = nodeCopy(dataNode);
  copy.myParent = newParent;
  for (DataNode<?> child : dataNode.myChildren) {
    copy.addChild(copy(child, copy));
  }
  return copy;
}

// /dev/null-style sink used by checkIsSerializable() to serialize without keeping the bytes.
private static class NoopOutputStream extends OutputStream {
  @SuppressWarnings("IOResourceOpenedButNotSafelyClosed")
  private static final NoopOutputStream ourInstance = new NoopOutputStream();

  public static NoopOutputStream getInstance() {
    return ourInstance;
  }

  private NoopOutputStream() {}

  @Override
  public void write(int b) throws IOException {}
}
}
/* * Licensed to Metamarkets Group Inc. (Metamarkets) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. Metamarkets licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package io.druid.sql.calcite.http; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.druid.jackson.DefaultObjectMapper; import io.druid.query.QueryInterruptedException; import io.druid.sql.calcite.planner.Calcites; import io.druid.sql.calcite.planner.DruidOperatorTable; import io.druid.sql.calcite.planner.PlannerConfig; import io.druid.sql.calcite.planner.PlannerContext; import io.druid.sql.calcite.planner.PlannerFactory; import io.druid.sql.calcite.util.CalciteTests; import io.druid.sql.calcite.util.QueryLogHook; import io.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker; import io.druid.sql.http.SqlQuery; import io.druid.sql.http.SqlResource; import org.apache.calcite.schema.SchemaPlus; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.rules.TemporaryFolder; import javax.ws.rs.core.Response; import javax.ws.rs.core.StreamingOutput; import java.io.ByteArrayOutputStream; import java.util.List; 
import java.util.Map;

/**
 * Tests for {@code SqlResource}, the HTTP endpoint that executes SQL queries against Druid.
 * Each test POSTs a {@link SqlQuery} via {@link #doPost} and inspects either the JSON rows
 * returned on success or the {@link QueryInterruptedException} decoded from an error response.
 */
public class SqlResourceTest
{
  private static final ObjectMapper JSON_MAPPER = new DefaultObjectMapper();

  @Rule
  public ExpectedException expectedException = ExpectedException.none();

  @Rule
  public TemporaryFolder temporaryFolder = new TemporaryFolder();

  @Rule
  public QueryLogHook queryLogHook = QueryLogHook.create();

  // Segment walker backing the mock schema; created in setUp, closed in tearDown.
  private SpecificSegmentsQuerySegmentWalker walker = null;
  private SqlResource resource;

  @Before
  public void setUp() throws Exception
  {
    Calcites.setSystemProperties();
    walker = CalciteTests.createMockWalker(temporaryFolder.newFolder());
    final PlannerConfig plannerConfig = new PlannerConfig();
    final SchemaPlus rootSchema = Calcites.createRootSchema(
        CalciteTests.createMockSchema(walker, plannerConfig)
    );
    final DruidOperatorTable operatorTable = CalciteTests.createOperatorTable();
    resource = new SqlResource(JSON_MAPPER, new PlannerFactory(rootSchema, walker, operatorTable, plannerConfig));
  }

  @After
  public void tearDown() throws Exception
  {
    walker.close();
    walker = null;
  }

  @Test
  public void testCountStar() throws Exception
  {
    final List<Map<String, Object>> rows = doPost(
        new SqlQuery("SELECT COUNT(*) AS cnt FROM druid.foo", null)
    );
    Assert.assertEquals(
        ImmutableList.of(
            ImmutableMap.of("cnt", 6)
        ),
        rows
    );
  }

  // With no time-zone context, timestamps are rendered in UTC.
  @Test
  public void testTimestampsInResponse() throws Exception
  {
    final List<Map<String, Object>> rows = doPost(
        new SqlQuery("SELECT __time, CAST(__time AS DATE) AS t2 FROM druid.foo LIMIT 1", null)
    );
    Assert.assertEquals(
        ImmutableList.of(
            ImmutableMap.of("__time", "2000-01-01T00:00:00.000Z", "t2", "2000-01-01T00:00:00.000Z")
        ),
        rows
    );
  }

  // The sqlTimeZone context parameter shifts rendered timestamps into the requested zone.
  @Test
  public void testTimestampsInResponseLosAngelesTimeZone() throws Exception
  {
    final List<Map<String, Object>> rows = doPost(
        new SqlQuery(
            "SELECT __time, CAST(__time AS DATE) AS t2 FROM druid.foo LIMIT 1",
            ImmutableMap.<String, Object>of(PlannerContext.CTX_SQL_TIME_ZONE, "America/Los_Angeles")
        )
    );
    Assert.assertEquals(
        ImmutableList.of(
            ImmutableMap.of("__time", "1999-12-31T16:00:00.000-08:00", "t2", "1999-12-31T00:00:00.000-08:00")
        ),
        rows
    );
  }

  @Test
  public void testFieldAliasingSelect() throws Exception
  {
    final List<Map<String, Object>> rows = doPost(
        new SqlQuery("SELECT dim2 \"x\", dim2 \"y\" FROM druid.foo LIMIT 1", null)
    );
    Assert.assertEquals(
        ImmutableList.of(
            ImmutableMap.of("x", "a", "y", "a")
        ),
        rows
    );
  }

  @Test
  public void testFieldAliasingGroupBy() throws Exception
  {
    final List<Map<String, Object>> rows = doPost(
        new SqlQuery("SELECT dim2 \"x\", dim2 \"y\" FROM druid.foo GROUP BY dim2", null)
    );
    Assert.assertEquals(
        ImmutableList.of(
            ImmutableMap.of("x", "", "y", ""),
            ImmutableMap.of("x", "a", "y", "a"),
            ImmutableMap.of("x", "abc", "y", "abc")
        ),
        rows
    );
  }

  // EXPLAIN returns a single row whose PLAN column is the planned Druid query tree.
  @Test
  public void testExplainCountStar() throws Exception
  {
    final List<Map<String, Object>> rows = doPost(
        new SqlQuery("EXPLAIN PLAN FOR SELECT COUNT(*) AS cnt FROM druid.foo", null)
    );
    Assert.assertEquals(
        ImmutableList.of(
            ImmutableMap.<String, Object>of(
                "PLAN",
                "DruidQueryRel(dataSource=[foo], dimensions=[[]], aggregations=[[Aggregation{aggregatorFactories=[CountAggregatorFactory{name='a0'}], postAggregator=null, finalizingPostAggregatorFactory=null}]])\n"
            )
        ),
        rows
    );
  }

  // A reference to a nonexistent column is surfaced as a QueryInterruptedException by doPost.
  @Test
  public void testCannotValidate() throws Exception
  {
    expectedException.expect(QueryInterruptedException.class);
    expectedException.expectMessage("Column 'dim3' not found in any table");

    doPost(
        new SqlQuery("SELECT dim3 FROM druid.foo", null)
    );

    Assert.fail(); // unreachable - doPost above must throw
  }

  // A query that validates but cannot be planned also surfaces as QueryInterruptedException.
  @Test
  public void testCannotConvert() throws Exception
  {
    expectedException.expect(QueryInterruptedException.class);
    expectedException.expectMessage("Cannot build plan for query: SELECT TRIM(dim1) FROM druid.foo");

    // TRIM unsupported
    doPost(new SqlQuery("SELECT TRIM(dim1) FROM druid.foo", null));

    Assert.fail(); // unreachable - doPost above must throw
  }

  /**
   * POSTs the query to the resource. On HTTP 200 the streamed JSON entity is parsed into rows;
   * on any other status the entity bytes are decoded as a QueryInterruptedException and thrown.
   */
  private List<Map<String, Object>> doPost(final SqlQuery query) throws Exception
  {
    final Response response = resource.doPost(query);
    if (response.getStatus() == 200) {
      final StreamingOutput output = (StreamingOutput) response.getEntity();
      final ByteArrayOutputStream baos = new ByteArrayOutputStream();
      output.write(baos);
      return JSON_MAPPER.readValue(
          baos.toByteArray(),
          new TypeReference<List<Map<String, Object>>>()
          {
          }
      );
    } else {
      throw JSON_MAPPER.readValue((byte[]) response.getEntity(), QueryInterruptedException.class);
    }
  }
}
/*
 *  Copyright 2001-2013 Stephen Colebourne
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */
package org.joda.time;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;

import junit.framework.TestCase;
import junit.framework.TestSuite;

/**
 * This class is a Junit unit test for Hours.
 * Covers the cached constants, factory methods, conversions, arithmetic
 * (including overflow behaviour) and serialization of {@link Hours}.
 *
 * @author Stephen Colebourne
 */
public class TestHours extends TestCase {
    // Test in 2002/03 as time zones are more well known
    // (before the late 90's they were all over the place)
    private static final DateTimeZone PARIS = DateTimeZone.forID("Europe/Paris");

    public static void main(String[] args) {
        junit.textui.TestRunner.run(suite());
    }

    public static TestSuite suite() {
        return new TestSuite(TestHours.class);
    }

    public TestHours(String name) {
        super(name);
    }

    protected void setUp() throws Exception {
    }

    protected void tearDown() throws Exception {
    }

    //-----------------------------------------------------------------------
    public void testConstants() {
        assertEquals(0, Hours.ZERO.getHours());
        assertEquals(1, Hours.ONE.getHours());
        assertEquals(2, Hours.TWO.getHours());
        assertEquals(3, Hours.THREE.getHours());
        assertEquals(4, Hours.FOUR.getHours());
        assertEquals(5, Hours.FIVE.getHours());
        assertEquals(6, Hours.SIX.getHours());
        assertEquals(7, Hours.SEVEN.getHours());
        assertEquals(8, Hours.EIGHT.getHours());
        assertEquals(Integer.MAX_VALUE, Hours.MAX_VALUE.getHours());
        assertEquals(Integer.MIN_VALUE, Hours.MIN_VALUE.getHours());
    }

    //-----------------------------------------------------------------------
    // The factory must return the cached constants for 0-8 and MIN/MAX (hence assertSame).
    public void testFactory_hours_int() {
        assertSame(Hours.ZERO, Hours.hours(0));
        assertSame(Hours.ONE, Hours.hours(1));
        assertSame(Hours.TWO, Hours.hours(2));
        assertSame(Hours.THREE, Hours.hours(3));
        assertSame(Hours.FOUR, Hours.hours(4));
        assertSame(Hours.FIVE, Hours.hours(5));
        assertSame(Hours.SIX, Hours.hours(6));
        assertSame(Hours.SEVEN, Hours.hours(7));
        assertSame(Hours.EIGHT, Hours.hours(8));
        assertSame(Hours.MAX_VALUE, Hours.hours(Integer.MAX_VALUE));
        assertSame(Hours.MIN_VALUE, Hours.hours(Integer.MIN_VALUE));
        assertEquals(-1, Hours.hours(-1).getHours());
        assertEquals(9, Hours.hours(9).getHours());
    }

    //-----------------------------------------------------------------------
    public void testFactory_hoursBetween_RInstant() {
        DateTime start = new DateTime(2006, 6, 9, 12, 0, 0, 0, PARIS);
        DateTime end1 = new DateTime(2006, 6, 9, 15, 0, 0, 0, PARIS);
        DateTime end2 = new DateTime(2006, 6, 9, 18, 0, 0, 0, PARIS);

        assertEquals(3, Hours.hoursBetween(start, end1).getHours());
        assertEquals(0, Hours.hoursBetween(start, start).getHours());
        assertEquals(0, Hours.hoursBetween(end1, end1).getHours());
        assertEquals(-3, Hours.hoursBetween(end1, start).getHours());
        assertEquals(6, Hours.hoursBetween(start, end2).getHours());
    }

    public void testFactory_hoursBetween_RPartial() {
        LocalTime start = new LocalTime(12, 0);
        LocalTime end1 = new LocalTime(15, 0);
        @SuppressWarnings("deprecation")
        TimeOfDay end2 = new TimeOfDay(18, 0);

        assertEquals(3, Hours.hoursBetween(start, end1).getHours());
        assertEquals(0, Hours.hoursBetween(start, start).getHours());
        assertEquals(0, Hours.hoursBetween(end1, end1).getHours());
        assertEquals(-3, Hours.hoursBetween(end1, start).getHours());
        assertEquals(6, Hours.hoursBetween(start, end2).getHours());
    }

    public void testFactory_hoursIn_RInterval() {
        DateTime start = new DateTime(2006, 6, 9, 12, 0, 0, 0, PARIS);
        DateTime end1 = new DateTime(2006, 6, 9, 15, 0, 0, 0, PARIS);
        DateTime end2 = new DateTime(2006, 6, 9, 18, 0, 0, 0, PARIS);

        // A null interval counts as zero hours.
        assertEquals(0, Hours.hoursIn((ReadableInterval) null).getHours());
        assertEquals(3, Hours.hoursIn(new Interval(start, end1)).getHours());
        assertEquals(0, Hours.hoursIn(new Interval(start, start)).getHours());
        assertEquals(0, Hours.hoursIn(new Interval(end1, end1)).getHours());
        assertEquals(6, Hours.hoursIn(new Interval(start, end2)).getHours());
    }

    public void testFactory_standardHoursIn_RPeriod() {
        assertEquals(0, Hours.standardHoursIn((ReadablePeriod) null).getHours());
        assertEquals(0, Hours.standardHoursIn(Period.ZERO).getHours());
        assertEquals(1, Hours.standardHoursIn(new Period(0, 0, 0, 0, 1, 0, 0, 0)).getHours());
        assertEquals(123, Hours.standardHoursIn(Period.hours(123)).getHours());
        assertEquals(-987, Hours.standardHoursIn(Period.hours(-987)).getHours());
        // Partial hours truncate towards zero.
        assertEquals(1, Hours.standardHoursIn(Period.minutes(119)).getHours());
        assertEquals(2, Hours.standardHoursIn(Period.minutes(120)).getHours());
        assertEquals(2, Hours.standardHoursIn(Period.minutes(121)).getHours());
        assertEquals(48, Hours.standardHoursIn(Period.days(2)).getHours());
        try {
            // Months have no fixed length in hours, so the conversion must fail.
            Hours.standardHoursIn(Period.months(1));
            fail();
        } catch (IllegalArgumentException ex) {
            // expected
        }
    }

    public void testFactory_parseHours_String() {
        assertEquals(0, Hours.parseHours((String) null).getHours());
        assertEquals(0, Hours.parseHours("PT0H").getHours());
        assertEquals(1, Hours.parseHours("PT1H").getHours());
        assertEquals(-3, Hours.parseHours("PT-3H").getHours());
        // Other fields are accepted only when zero.
        assertEquals(2, Hours.parseHours("P0Y0M0DT2H").getHours());
        assertEquals(2, Hours.parseHours("PT2H0M").getHours());
        try {
            Hours.parseHours("P1Y1D");
            fail();
        } catch (IllegalArgumentException ex) {
            // expected
        }
        try {
            Hours.parseHours("P1DT1H");
            fail();
        } catch (IllegalArgumentException ex) {
            // expected
        }
    }

    //-----------------------------------------------------------------------
    public void testGetMethods() {
        Hours test = Hours.hours(20);
        assertEquals(20, test.getHours());
    }

    public void testGetFieldType() {
        Hours test = Hours.hours(20);
        assertEquals(DurationFieldType.hours(), test.getFieldType());
    }

    public void testGetPeriodType() {
        Hours test = Hours.hours(20);
        assertEquals(PeriodType.hours(), test.getPeriodType());
    }

    //-----------------------------------------------------------------------
    // Comparisons against null treat null as zero hours.
    public void testIsGreaterThan() {
        assertEquals(true, Hours.THREE.isGreaterThan(Hours.TWO));
        assertEquals(false, Hours.THREE.isGreaterThan(Hours.THREE));
        assertEquals(false, Hours.TWO.isGreaterThan(Hours.THREE));
        assertEquals(true, Hours.ONE.isGreaterThan(null));
        assertEquals(false, Hours.hours(-1).isGreaterThan(null));
    }

    public void testIsLessThan() {
        assertEquals(false, Hours.THREE.isLessThan(Hours.TWO));
        assertEquals(false, Hours.THREE.isLessThan(Hours.THREE));
        assertEquals(true, Hours.TWO.isLessThan(Hours.THREE));
        assertEquals(false, Hours.ONE.isLessThan(null));
        assertEquals(true, Hours.hours(-1).isLessThan(null));
    }

    //-----------------------------------------------------------------------
    public void testToString() {
        Hours test = Hours.hours(20);
        assertEquals("PT20H", test.toString());

        test = Hours.hours(-20);
        assertEquals("PT-20H", test.toString());
    }

    //-----------------------------------------------------------------------
    // Round-tripping through serialization must resolve back to the cached instance (assertSame).
    public void testSerialization() throws Exception {
        Hours test = Hours.SEVEN;

        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        ObjectOutputStream oos = new ObjectOutputStream(baos);
        oos.writeObject(test);
        byte[] bytes = baos.toByteArray();
        oos.close();

        ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
        ObjectInputStream ois = new ObjectInputStream(bais);
        Hours result = (Hours) ois.readObject();
        ois.close();

        assertSame(test, result);
    }

    //-----------------------------------------------------------------------
    public void testToStandardWeeks() {
        Hours test = Hours.hours(24 * 7 * 2);
        Weeks expected = Weeks.weeks(2);
        assertEquals(expected, test.toStandardWeeks());
    }

    public void testToStandardDays() {
        Hours test = Hours.hours(24 * 2);
        Days expected = Days.days(2);
        assertEquals(expected, test.toStandardDays());
    }

    public void testToStandardMinutes() {
        Hours test = Hours.hours(3);
        Minutes expected = Minutes.minutes(3 * 60);
        assertEquals(expected, test.toStandardMinutes());

        try {
            // MAX_VALUE hours overflows an int number of minutes.
            Hours.MAX_VALUE.toStandardMinutes();
            fail();
        } catch (ArithmeticException ex) {
            // expected
        }
    }

    public void testToStandardSeconds() {
        Hours test = Hours.hours(3);
        Seconds expected = Seconds.seconds(3 * 60 * 60);
        assertEquals(expected, test.toStandardSeconds());

        try {
            Hours.MAX_VALUE.toStandardSeconds();
            fail();
        } catch (ArithmeticException ex) {
            // expected
        }
    }

    public void testToStandardDuration() {
        Hours test = Hours.hours(20);
        Duration expected = new Duration(20L * DateTimeConstants.MILLIS_PER_HOUR);
        assertEquals(expected, test.toStandardDuration());

        // Duration holds millis as a long, so even MAX_VALUE hours fits without overflow.
        expected = new Duration(((long) Integer.MAX_VALUE) * DateTimeConstants.MILLIS_PER_HOUR);
        assertEquals(expected, Hours.MAX_VALUE.toStandardDuration());
    }

    //-----------------------------------------------------------------------
    // Arithmetic is immutable: operands must be unchanged after each operation.
    public void testPlus_int() {
        Hours test2 = Hours.hours(2);
        Hours result = test2.plus(3);
        assertEquals(2, test2.getHours());
        assertEquals(5, result.getHours());

        assertEquals(1, Hours.ONE.plus(0).getHours());

        try {
            Hours.MAX_VALUE.plus(1);
            fail();
        } catch (ArithmeticException ex) {
            // expected
        }
    }

    public void testPlus_Hours() {
        Hours test2 = Hours.hours(2);
        Hours test3 = Hours.hours(3);
        Hours result = test2.plus(test3);
        assertEquals(2, test2.getHours());
        assertEquals(3, test3.getHours());
        assertEquals(5, result.getHours());

        assertEquals(1, Hours.ONE.plus(Hours.ZERO).getHours());
        // A null argument is treated as zero.
        assertEquals(1, Hours.ONE.plus((Hours) null).getHours());

        try {
            Hours.MAX_VALUE.plus(Hours.ONE);
            fail();
        } catch (ArithmeticException ex) {
            // expected
        }
    }

    public void testMinus_int() {
        Hours test2 = Hours.hours(2);
        Hours result = test2.minus(3);
        assertEquals(2, test2.getHours());
        assertEquals(-1, result.getHours());

        assertEquals(1, Hours.ONE.minus(0).getHours());

        try {
            Hours.MIN_VALUE.minus(1);
            fail();
        } catch (ArithmeticException ex) {
            // expected
        }
    }

    public void testMinus_Hours() {
        Hours test2 = Hours.hours(2);
        Hours test3 = Hours.hours(3);
        Hours result = test2.minus(test3);
        assertEquals(2, test2.getHours());
        assertEquals(3, test3.getHours());
        assertEquals(-1, result.getHours());

        assertEquals(1, Hours.ONE.minus(Hours.ZERO).getHours());
        // A null argument is treated as zero.
        assertEquals(1, Hours.ONE.minus((Hours) null).getHours());

        try {
            Hours.MIN_VALUE.minus(Hours.ONE);
            fail();
        } catch (ArithmeticException ex) {
            // expected
        }
    }

    public void testMultipliedBy_int() {
        Hours test = Hours.hours(2);
        assertEquals(6, test.multipliedBy(3).getHours());
        assertEquals(2, test.getHours());
        assertEquals(-6, test.multipliedBy(-3).getHours());
        // Multiplying by one returns the same instance.
        assertSame(test, test.multipliedBy(1));

        Hours halfMax = Hours.hours(Integer.MAX_VALUE / 2 + 1);
        try {
            halfMax.multipliedBy(2);
            fail();
        } catch (ArithmeticException ex) {
            // expected
        }
    }

    public void testDividedBy_int() {
        Hours test = Hours.hours(12);
        assertEquals(6, test.dividedBy(2).getHours());
        assertEquals(12, test.getHours());
        // Integer division truncates towards zero.
        assertEquals(4, test.dividedBy(3).getHours());
        assertEquals(3, test.dividedBy(4).getHours());
        assertEquals(2, test.dividedBy(5).getHours());
        assertEquals(2, test.dividedBy(6).getHours());
        assertSame(test, test.dividedBy(1));

        try {
            Hours.ONE.dividedBy(0);
            fail();
        } catch (ArithmeticException ex) {
            // expected
        }
    }

    public void testNegated() {
        Hours test = Hours.hours(12);
        assertEquals(-12, test.negated().getHours());
        assertEquals(12, test.getHours());

        try {
            // -Integer.MIN_VALUE is not representable as an int.
            Hours.MIN_VALUE.negated();
            fail();
        } catch (ArithmeticException ex) {
            // expected
        }
    }

    //-----------------------------------------------------------------------
    public void testAddToLocalDate() {
        // 26 hours rolls the date over by one day plus two hours.
        Hours test = Hours.hours(26);
        LocalDateTime date = new LocalDateTime(2006, 6, 1, 0, 0, 0, 0);
        LocalDateTime expected = new LocalDateTime(2006, 6, 2, 2, 0, 0, 0);
        assertEquals(expected, date.plus(test));
    }

}
package gov.nih.nci.ncicb.cadsr.common.persistence.dao.jdbc;

import gov.nih.nci.ncicb.cadsr.common.dto.AddressTransferObject;
import gov.nih.nci.ncicb.cadsr.common.dto.ContactCommunicationV2TransferObject;
import gov.nih.nci.ncicb.cadsr.common.dto.ContactTransferObject;
import gov.nih.nci.ncicb.cadsr.common.dto.PersonTransferObject;
import gov.nih.nci.ncicb.cadsr.common.persistence.dao.ContactCommunicationV2DAO;
import gov.nih.nci.ncicb.cadsr.common.resource.Address;
import gov.nih.nci.ncicb.cadsr.common.resource.Contact;
import gov.nih.nci.ncicb.cadsr.common.resource.ContactCommunicationV2;
import gov.nih.nci.ncicb.cadsr.common.resource.Person;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import javax.sql.DataSource;

import org.apache.log4j.Logger;
import org.springframework.dao.DataAccessException;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.jdbc.core.namedparam.MapSqlParameterSource;
import org.springframework.jdbc.object.MappingSqlQuery;

/**
 * JDBC implementation of {@link ContactCommunicationV2DAO}: reads and writes
 * contact-communication records (phone/email/fax/in-person) for admin components,
 * resolving them through either a person contact or an organization contact.
 */
public class JDBCContactCommunicationDAOV2 extends JDBCAdminComponentDAOV2
        implements ContactCommunicationV2DAO {

    private static Logger logger = Logger.getLogger(JDBCContactCommunicationDAOV2.class.getName());

    public JDBCContactCommunicationDAOV2(DataSource dataSource) {
        super(dataSource);
    }

    /**
     * Inserts one contact-communication row linked to the given organization.
     * (Method name typo "Commnunication" is kept: it is part of the DAO interface.)
     *
     * @param ac_idseq   admin-component id (currently unused by the insert itself)
     * @param org_idseq  organization id the communication is attached to; must not be null
     * @param contact    type/value/rank/creator of the communication
     * @return number of rows inserted (1 on success)
     * @throws DataAccessException if the insert fails; rethrown after logging
     */
    public int createContactCommnunication(String ac_idseq, String org_idseq,
                                           ContactCommunicationV2TransferObject contact) {
        //relationship: sbr.AC_CONTACTS_VIEW
        //contact commun record: sbrext.contacts_view_ext
        String seqid = generateGUID();
        String sql = "INSERT INTO sbr.contact_comms_view "
                + " (ccomm_idseq, org_idseq, per_idseq, ctl_name, rank_order, "
                + " cyber_address, created_By) "
                + " VALUES (:ccomm_idseq, :org_idseq, :per_idseq, :ctl_name, :rank_order, "
                + " :cyber_address, :createdBy)";
        MapSqlParameterSource params = new MapSqlParameterSource();
        params.addValue("ccomm_idseq", seqid);
        //un-nillable: otherwise, get "unique constraint violated" error
        params.addValue("org_idseq", org_idseq);
        params.addValue("per_idseq", null);
        params.addValue("ctl_name", contact.getType());
        params.addValue("rank_order", contact.getRankOrder()); //un-nullable
        params.addValue("cyber_address", contact.getValue());
        params.addValue("createdBy", contact.getCreatedBy());
        try {
            return this.namedParameterJdbcTemplate.update(sql, params);
        }
        catch (DataAccessException de) {
            // Log with the full stack trace (previously only the message, at debug level).
            logger.error("createContactCommnunication failed for org_idseq=" + org_idseq, de);
            throw de;
        }
    }

    /**
     * Resolves an organization's idseq by its exact name.
     * <p>
     * BUG FIX: the previous implementation executed the query and then returned
     * {@code null} unconditionally, discarding the result.
     *
     * @param org_name exact organization name
     * @return the first matching ORG_IDSEQ, or null when no organization matches
     */
    public String getOrganizationIdseqByName(String org_name) {
        String sql = "select ORG_IDSEQ from sbr.ORGANIZATIONS_VIEW where name=:org_name";
        MapSqlParameterSource params = new MapSqlParameterSource();
        params.addValue("org_name", org_name);
        List<String> ids = this.namedParameterJdbcTemplate.query(sql, params,
            new RowMapper<String>() {
                public String mapRow(ResultSet rs, int rowNum) throws SQLException {
                    return rs.getString("ORG_IDSEQ");
                }
            });
        return ids.isEmpty() ? null : ids.get(0);
    }

    /**
     * Collects all V2 contact communications reachable from an admin component:
     * first via its person contacts, then via its organization contacts.
     * (based on JDBCAdminComponentDAO#getContacts)
     *
     * @param acIdseq admin-component id
     * @return combined list; empty when the component has no contacts
     */
    public List<ContactCommunicationV2> getContactCommunicationV2sForAC(String acIdseq) {
        List<ContactCommunicationV2> ccV2List = new ArrayList<ContactCommunicationV2>();

        PersonContact2ByACIdQuery personQuery = new PersonContact2ByACIdQuery();
        personQuery.setDataSource(getDataSource());
        List<Contact> personContacts = personQuery.getPersonContacts(acIdseq);

        ContactCommunicationsV2Query commQuery = new ContactCommunicationsV2Query();
        commQuery.setDataSource(getDataSource());
        for (Contact personContact : personContacts) {
            Person person = personContact.getPerson();
            ccV2List.addAll(commQuery.getContactCommsbyPerson(person));
        }

        OrgContactDataByACIdQuery orgQuery = new OrgContactDataByACIdQuery();
        orgQuery.setDataSource(getDataSource());
        List<OrganizationData> orgContacts = orgQuery.getOrgContacts(acIdseq);
        for (OrganizationData orgData : orgContacts) {
            ccV2List.addAll(commQuery.getContactCommsbyOrg(orgData));
        }
        return ccV2List;
    }

    /**
     * Loads contact-communication rows for either a person or an organization.
     * Only the types shown on V2 forms are returned (PHONE, EMAIL, FAX, In Person - no "MAIL").
     */
    class ContactCommunicationsV2Query extends MappingSqlQuery {

        ContactCommunicationsV2Query() {
            super();
        }

        // SECURITY NOTE(review): idSeq is concatenated into the SQL rather than bound as a
        // parameter. The values come from database columns (per_idseq/org_idseq), not direct
        // user input, but this should be migrated to declareParameter() binding.
        // NOTE(review): setSql() after the query is compiled is rejected by Spring's
        // RdbmsOperation; this relies on the existing lazy-compile behavior - confirm before
        // reusing one instance across calls with different SQL.
        public void setQuerySql(String idType, String idSeq) {
            String querySql = " select cc.CCOMM_IDSEQ, cc.CTL_NAME, cc.CYBER_ADDRESS, "
                    + " cc.RANK_ORDER, cc.DATE_CREATED, cc.CREATED_BY, cc.DATE_MODIFIED, cc.MODIFIED_BY "
                    + " from sbr.contact_comms_view cc "
                    + " where " + idType + " = '" + idSeq + "'"
                    + " and ( CTL_NAME='PHONE' OR CTL_NAME='EMAIL' OR CTL_NAME='FAX' OR CTL_NAME='In Person') "
                    + " ORDER BY rank_order";
            super.setSql(querySql);
            // Note: We are only supporting types in V2 form format. (i.e. no "MAIL")
        }

        protected Object mapRow(ResultSet rs, int rownum) throws SQLException {
            ContactCommunicationV2 cc = new ContactCommunicationV2TransferObject();
            cc.setId(rs.getString("ccomm_idseq"));
            cc.setType(rs.getString("ctl_name"));
            cc.setValue(rs.getString("cyber_address"));
            cc.setRankOrder(rs.getInt("rank_order"));
            cc.setDateCreated(rs.getTimestamp("date_created"));
            cc.setCreatedBy(rs.getString("created_by"));
            cc.setDateModified(rs.getTimestamp("date_modified"));
            cc.setModifiedBy(rs.getString("modified_by"));
            return cc;
        }

        /** Communications linked directly to a person; the person is attached to each result. */
        @SuppressWarnings("unchecked")
        protected List<ContactCommunicationV2> getContactCommsbyPerson(Person person) {
            this.setQuerySql("per_idseq", person.getId());
            List<ContactCommunicationV2> ccList = execute();
            for (ContactCommunicationV2 cc : ccList) {
                cc.setPerson(person); // TODO: Person is wrong format
            }
            return ccList;
        }

        /** Communications linked to an organization; name and RAI are attached to each result. */
        @SuppressWarnings("unchecked")
        protected List<ContactCommunicationV2> getContactCommsbyOrg(OrganizationData org) {
            this.setQuerySql("org_idseq", org.org_id);
            List<ContactCommunicationV2> ccList = execute();
            for (ContactCommunicationV2 cc : ccList) {
                cc.setOrganizationName(org.organizationName);
                cc.setOrganizationRAI(org.organizationRAI);
            }
            return ccList;
        }
    }

    // -- copied from JDBCAdminComponentDAO --- PersonContact extended to fill in needed fields,
    // OrgContact slimmed down and added RAI
    /**
     * Loads person contacts (with mailing / package-delivery addresses) for an admin component.
     * The mapper is stateful: it merges address rows belonging to the same person/contact.
     */
    class PersonContact2ByACIdQuery extends MappingSqlQuery {
        String last_accId = null;
        Contact currentContact = null;
        List<Contact> contactList = new ArrayList<Contact>();
        Person currPerson = null;

        PersonContact2ByACIdQuery() {
            super();
        }

        // SECURITY NOTE(review): acidSeq is concatenated into the SQL; migrate to a bound
        // parameter via declareParameter().
        public void setQuerySql(String acidSeq) {
            String querySql = " SELECT acc.acc_idseq, acc.org_idseq, acc.per_idseq, acc.contact_role,"
                    + " per.LNAME, per.FNAME, addr.CADDR_IDSEQ,"
                    + " addr.ADDR_LINE1, addr.ADDR_LINE2, addr.CADDR_IDSEQ, addr.CITY, addr.POSTAL_CODE, addr.STATE_PROV,"
                    + " addr.COUNTRY, addr.rank_order as addr_rank_order, addr.atl_name, per.position "
                    + " FROM sbr.ac_contacts_view acc, sbr.persons_view per, sbr.contact_addresses_view addr "
                    + " where acc.ac_idseq = '" + acidSeq + "' and "
                    + " acc.per_idseq = per.per_idseq and addr.PER_IDSEQ = per.PER_IDSEQ "
                    + " and (addr.atl_name = 'MAILING' or addr.atl_name = 'Package Delivery')"
                    + " ORDER BY acc.acc_idseq, acc.rank_order ";
            // Note: We are only supporting type (atl_name) in V2 form format.
            super.setSql(querySql);
        }

        protected Object mapRow(ResultSet rs, int rownum) throws SQLException {
            String accId = rs.getString("acc_idseq");

            Address address = new AddressTransferObject();
            address.setAddressLine1(rs.getString("addr_line1"));
            address.setAddressLine2(rs.getString("addr_line2"));
            address.setId(rs.getString("CADDR_IDSEQ"));
            address.setCity(rs.getString("city"));
            address.setPostalCode(rs.getString("POSTAL_CODE"));
            address.setState(rs.getString("STATE_PROV"));
            address.setCountry(rs.getString("COUNTRY"));
            address.setRank(rs.getInt("addr_rank_order"));
            address.setType(rs.getString("atl_name"));

            String personId = rs.getString("per_idseq");
            // Rows are ordered, so a change of person id starts a new Person aggregate.
            if (currPerson == null || !currPerson.getId().equals(personId)) {
                currPerson = new PersonTransferObject();
                currPerson.setFirstName(rs.getString("fname"));
                currPerson.setLastName(rs.getString("lname"));
                currPerson.setId(rs.getString("per_idseq"));
                currPerson.setPosition(rs.getString("position"));
                currPerson.setAddresses(new ArrayList());
            }
            currPerson.getAddresses().add(address);

            if (currentContact == null || !currentContact.getIdseq().equals(accId)) {
                currentContact = new ContactTransferObject();
                currentContact.setIdseq(accId);
                currentContact.setContactRole(rs.getString("contact_role"));
                contactList.add(currentContact);
            }
            currentContact.setPerson(currPerson);
            return currentContact;
        }

        /** Runs the query; results are accumulated by mapRow into {@link #contactList}. */
        protected List<Contact> getPersonContacts(String acIdSeq) {
            // Reset accumulator state so a reused instance does not return stale contacts.
            contactList = new ArrayList<Contact>();
            currentContact = null;
            currPerson = null;
            setQuerySql(acIdSeq);
            this.execute();
            return contactList;
        }
    }

    /** Minimal organization projection: id, name and RAI. */
    class OrganizationData {
        OrganizationData() {}
        protected String organizationName;
        protected String organizationRAI;
        protected String org_id;
    }

    /** Loads the organization contacts of an admin component. */
    class OrgContactDataByACIdQuery extends MappingSqlQuery {

        OrgContactDataByACIdQuery() {
            super();
        }

        // SECURITY NOTE(review): acidSeq is concatenated into the SQL; migrate to a bound
        // parameter via declareParameter().
        public void setQuerySql(String acidSeq) {
            String querySql = " SELECT acc.acc_idseq, acc.rank_order, acc.org_idseq,"
                    + " org.name, org.rai"
                    + " FROM sbr.ac_contacts_view acc, sbr.organizations_view org "
                    + " where acc.ac_idseq = '" + acidSeq + "' and "
                    + " acc.org_idseq = org.org_idseq"
                    + " ORDER BY acc.acc_idseq, acc.rank_order ";
            super.setSql(querySql);
        }

        protected Object mapRow(ResultSet rs, int rownum) throws SQLException {
            OrganizationData currOrg = new OrganizationData();
            currOrg.org_id = rs.getString("org_idseq");
            currOrg.organizationName = rs.getString("name");
            currOrg.organizationRAI = rs.getString("rai");
            return currOrg;
        }

        @SuppressWarnings("unchecked")
        protected List<OrganizationData> getOrgContacts(String acIdSeq) {
            setQuerySql(acIdSeq);
            return this.execute();
        }
    }
}
/*
    Copyright (c) 2007, 2008, 2012 Paul Richards <paul.richards@gmail.com>

    Permission to use, copy, modify, and/or distribute this software for any
    purpose with or without fee is hereby granted, provided that the above
    copyright notice and this permission notice appear in all copies.

    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/

package fractals;

import java.awt.Color;
import java.awt.Cursor;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Rectangle;
import java.awt.event.WindowEvent;
import java.awt.event.WindowListener;
import java.awt.geom.AffineTransform;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import javax.swing.JComponent;

/**
 * Swing component that paints a pannable/zoomable/rotatable fractal canvas.
 * Tiles are rendered asynchronously on a thread pool and cached in a
 * {@link CollectionOfTiles}; a periodic task repaints when a better blit is available.
 */
public class CanvasView extends JComponent
{
    private static final long serialVersionUID = 6622327481400970118L;

    private final CollectionOfTiles canvas;
    private final TileProvider<RenderableTile> source;

    /** Mutex for modifying the renderingTasks map. */
    private final Object lockThing = new Object();

    /**
        All tiles that have ever been submitted to the thread pool gizmo for rendering.
        Entries are not removed after the rendering has finished, and are instead
        only removed once the tile has been evicted from the cache.
    */
    private final Map<TilePosition, Future> renderingTasks = new HashMap<TilePosition, Future>();

    /** Periodic repaint task.
    */
    private Future updateTask;

    // View transform applied to all painting; mutated by zoomBy/moveBy/rotateBy.
    private AffineTransform transform = new AffineTransform();

    CanvasView(int width, int height, TileProvider<RenderableTile> source)
    {
        // Configure the canvas with 6 megapixels of cache
        this.canvas = new CollectionOfTiles((6 * 1000000) / (TilePosition.SIZE * TilePosition.SIZE));
        this.source = source;
        this.setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR));
        CanvasViewInputHandler listener = new CanvasViewInputHandler(this);
        this.addMouseListener(listener);
        this.addMouseMotionListener(listener);
        this.addMouseWheelListener(listener);
        this.addKeyListener(listener);
        this.setFocusable(true);
        this.setDoubleBuffered(true);
    }

    // Schedules a repaint check every 500 ms; repaints only when a better blit is available.
    private ScheduledFuture startUpdateTask()
    {
        final CanvasView self = this;
        Runnable r = new Runnable() {
            public void run() {
                if (canvas.wouldLookBetterWithAnotherBlit()) {
                    self.repaint();
                }
            }
        };
        return Utilities.getLightThreadPool().scheduleWithFixedDelay(r, 500, 500, TimeUnit.MILLISECONDS);
    }

    /** Renders one tile off the EDT and evicts the bookkeeping entry for any tile it displaced. */
    private final class RenderTileRunner implements Runnable
    {
        private final TilePosition position;

        RenderTileRunner(TilePosition position)
        {
            this.position = position;
        }

        public void run()
        {
            RenderableTile t = source.getTile(position);
            TilePosition removed = canvas.addTile(t);
            if (removed != null) {
                synchronized(lockThing) {
                    renderingTasks.remove(removed);
                }
            }
        }
    }

    // Zoom centered on the fixed point (400, 300); NOTE(review): looks like a hard-coded
    // view center rather than width/2, height/2 - confirm intended.
    public void zoomBy(int scales)
    {
        double scaleFactor = Math.pow(1.6, scales);
        AffineTransform zoomTransform = new AffineTransform();
        zoomTransform.translate(400, 300);
        zoomTransform.scale(scaleFactor, scaleFactor);
        zoomTransform.translate(-400, -300);
        transform.preConcatenate(zoomTransform);
        repaint();
    }

    public void moveBy(int dispX, int dispY)
    {
        AffineTransform translateTransform = new AffineTransform();
        translateTransform.translate(dispX, dispY);
        transform.preConcatenate(translateTransform);
        repaint();
    }

    // Rotation about the same fixed center point (400, 300).
    public void rotateBy(double angleInRadians)
    {
        AffineTransform rotateTransform = new AffineTransform();
        rotateTransform.translate(400, 300);
        rotateTransform.rotate(angleInRadians);
        rotateTransform.translate(-400, -300);
        transform.preConcatenate(rotateTransform);
        repaint();
    }

    @Override
    public void paint(Graphics g)
    {
        paint((Graphics2D)g);
    }

    // Blits cached tiles, then (de)queues rendering work: cancels queued-but-unneeded tiles
    // and submits newly needed ones.
    public void paint(Graphics2D g)
    {
        Rectangle bounds = g.getClipBounds();
        g.setColor(Color.ORANGE);
        g.fillRect(bounds.x, bounds.y, bounds.width, bounds.height);
        g.transform(transform);
        final Set<TilePosition> neededTiles = canvas.blitImmediately(g);
        synchronized(lockThing) {
            for (Iterator<Map.Entry<TilePosition, Future> > i = renderingTasks.entrySet().iterator(); i.hasNext(); ) {
                final Map.Entry<TilePosition, Future> entry = i.next();
                if (neededTiles.contains(entry.getKey())) {
                    // A tile we have already queued for rendering has been requested again,
                    // so don't requeue.
                    neededTiles.remove(entry.getKey());
                } else {
                    if (entry.getValue().isDone() == false) {
                        // A tile we have queued but hasn't yet rendered is no longer required.
                        entry.getValue().cancel(true);
                        i.remove();
                    }
                }
            }
            for (TilePosition pos: neededTiles) {
                renderingTasks.put(pos, Utilities.getHeavyThreadPool().submit(new RenderTileRunner(pos)));
            }
        }
    }

    // NOTE(review): these two methods synchronize on 'this' while renderingTasks is guarded
    // by 'lockThing' everywhere else - the locking discipline is inconsistent; confirm whether
    // renderingTasks access here should also hold lockThing.
    synchronized void stopAllThreads()
    {
        updateTask.cancel(false);
        updateTask = null;
        for (Future f: renderingTasks.values()) {
            f.cancel(true);
        }
        renderingTasks.clear();
    }

    synchronized void startAllThreads()
    {
        updateTask = startUpdateTask();
    }

    /**
        This component has a number of background threads which it uses
        for rendering.  These must be terminated when the component is no
        longer visible.  The best way I can see to do this is to attach
        a WindowListener to the top level window.  The user of this class
        is expected to call this method and attach the returned window
        listener to the top level window.
*/ WindowListener createWindowListenerForThreadManagement() { return new WindowListener(){ public void windowOpened(WindowEvent e) { startAllThreads(); } public void windowClosing(WindowEvent e) { } public void windowClosed(WindowEvent e) { stopAllThreads(); } public void windowIconified(WindowEvent e) { } public void windowDeiconified(WindowEvent e) { } public void windowActivated(WindowEvent e) { } public void windowDeactivated(WindowEvent e) { } }; } }
/* * $Header: /var/chroot/cvs/cvs/factsheetDesigner/extern/jakarta-slide-webdavclient-src-2.1-IPlus Edit/clientlib/src/java/org/apache/webdav/lib/Lock.java,v 1.1 2006-01-18 10:11:26 peter-cvs Exp $ * $Revision: 1.1 $ * $Date: 2006-01-18 10:11:26 $ * * ==================================================================== * * Copyright 1999-2002 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.webdav.lib; import org.apache.webdav.lib.methods.DepthSupport; /** * This class represents a lock on a resource. * * @version $Revision: 1.1 $ */ public class Lock { // -------------------------------------------------------------- Constants /** * The property name. */ public static final String TAG_NAME = "activelock"; /** * The write constant in the locktype. */ public static final int TYPE_WRITE = 0; /** * Type indicating lock is a transaction lock. */ public static final int TYPE_TRANSACTION = 1; /** * The exclusive constant in the lockscope. */ public static final int SCOPE_EXCLUSIVE = 0; /** * The shared constant in the lockscope. */ public static final int SCOPE_SHARED = 1; // ----------------------------------------------------------- Constructors /** * Default constructor for the lockentry. */ public Lock(int lockScope, int lockType) { this.lockScope = lockScope; this.lockType = lockType; } /** * Default constructor for the activelock. 
*/ public Lock(int lockScope, int lockType, int depth, String owner, int timeout, String lockToken) { this.lockScope = lockScope; this.lockType = lockType; this.depth = depth; this.owner = owner; this.timeout = timeout; this.lockToken = lockToken; } public Lock(int lockScope, int lockType, int depth, String owner, int timeout, String lockToken, String principalUrl) { this.lockScope = lockScope; this.lockType = lockType; this.depth = depth; this.owner = owner; this.timeout = timeout; this.lockToken = lockToken; this.principalUrl = principalUrl; } /** * Default constructor for the activelock. * @deprecated The timeout value MUST NOT be greater than 2^32-1. */ public Lock(int lockScope, int lockType, int depth, String owner, long timeout, String lockToken) { this(lockScope, lockType, depth, owner, (int) timeout, lockToken); } // ------------------------------------------------------ Instance Variable protected int lockScope = -1; protected int lockType = -1; protected int depth = -1; protected String owner = null; protected int timeout = -1; protected String lockToken = null; protected String principalUrl = null; // --------------------------------------------------------- Public Methods /** * Get whether a lock is an exclusive lock, or a shared lock. * * @return The lock scope. If it's not set, it could be -1. */ public int getLockScope() { return lockScope; } /** * Get the access type of a lock. * * @return The lock type. If it's not set, it could be -1. */ public int getLockType() { return lockType; } /** * Get the value of the depth. * * @return The depth vlaue. If it's not set, it could be -1. */ public int getDepth() { return depth; } /** * Get information about the principal taking out a lock. * * @return The owner. */ public String getOwner() { return owner; } /** * Get the <code>principal-URL</code> property of the lock, if one. * @return an URL as String */ public String getPrincipalUrl() { return principalUrl; } /** * Get the timeout associated with a lock. 
* * @return The timeout vlaue. If it's not set, it could be -1. */ public int getTimeout() { return timeout; } /** * Get the access type of a lock. * * @return The lock token. */ public String getLockToken() { return lockToken; } public String toString() { StringBuffer tmp=new StringBuffer(); if (lockScope==Lock.SCOPE_EXCLUSIVE) { tmp.append("Exclusive"); } else if (lockScope==Lock.SCOPE_SHARED) { tmp.append("Shared"); } if (lockType==Lock.TYPE_WRITE) { tmp.append(" write lock"); } if (depth==DepthSupport.DEPTH_INFINITY) { tmp.append(" depth:infinity"); } else if (depth==-1) { // unknown } else { tmp.append(" depth:" + depth); } if (owner!=null) tmp.append(" owner:" + owner); if (timeout!=-1) tmp.append(" timeout:" + timeout); if (lockToken!=null) tmp.append(" token:" + lockToken); return tmp.toString(); } }
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/ads/googleads/v9/resources/webpage_view.proto
// NOTE(review): machine-generated protobuf code. Do not hand-edit — any
// change will be lost when the .proto is recompiled; fix the .proto or the
// generator invocation instead.

package com.google.ads.googleads.v9.resources;

/**
 * <pre>
 * A webpage view.
 * </pre>
 *
 * Protobuf type {@code google.ads.googleads.v9.resources.WebpageView}
 */
public final class WebpageView extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.ads.googleads.v9.resources.WebpageView)
    WebpageViewOrBuilder {
private static final long serialVersionUID = 0L;
  // Use WebpageView.newBuilder() to construct.
  private WebpageView(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private WebpageView() {
    resourceName_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new WebpageView();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-format parsing constructor: field 1 (tag 10) is resource_name;
  // any other field is preserved in unknownFields.
  private WebpageView(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10: {
            java.lang.String s = input.readStringRequireUtf8();

            resourceName_ = s;
            break;
          }
          default: {
            if (!parseUnknownField(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v9.resources.WebpageViewProto.internal_static_google_ads_googleads_v9_resources_WebpageView_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v9.resources.WebpageViewProto.internal_static_google_ads_googleads_v9_resources_WebpageView_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v9.resources.WebpageView.class, com.google.ads.googleads.v9.resources.WebpageView.Builder.class);
  }

  public static final int RESOURCE_NAME_FIELD_NUMBER = 1;
  private volatile java.lang.Object resourceName_;
  /**
   * <pre>
   * Output only. The resource name of the webpage view.
   * Webpage view resource names have the form:
   * `customers/{customer_id}/webpageViews/{ad_group_id}~{criterion_id}`
   * </pre>
   *
   * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
   * @return The resourceName.
   */
  @java.lang.Override
  public java.lang.String getResourceName() {
    java.lang.Object ref = resourceName_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Caches the decoded String back into the field (standard protobuf
      // lazy String/ByteString interning).
      resourceName_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * Output only. The resource name of the webpage view.
   * Webpage view resource names have the form:
   * `customers/{customer_id}/webpageViews/{ad_group_id}~{criterion_id}`
   * </pre>
   *
   * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
   * @return The bytes for resourceName.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getResourceNameBytes() {
    java.lang.Object ref = resourceName_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      resourceName_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, resourceName_);
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, resourceName_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v9.resources.WebpageView)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v9.resources.WebpageView other = (com.google.ads.googleads.v9.resources.WebpageView) obj;

    if (!getResourceName()
        .equals(other.getResourceName())) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER;
    hash = (53 * hash) + getResourceName().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.ads.googleads.v9.resources.WebpageView parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v9.resources.WebpageView parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.resources.WebpageView parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v9.resources.WebpageView parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.resources.WebpageView parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v9.resources.WebpageView parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.resources.WebpageView parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v9.resources.WebpageView parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.resources.WebpageView parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v9.resources.WebpageView parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.resources.WebpageView parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v9.resources.WebpageView parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v9.resources.WebpageView prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * A webpage view.
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v9.resources.WebpageView}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v9.resources.WebpageView)
      com.google.ads.googleads.v9.resources.WebpageViewOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v9.resources.WebpageViewProto.internal_static_google_ads_googleads_v9_resources_WebpageView_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v9.resources.WebpageViewProto.internal_static_google_ads_googleads_v9_resources_WebpageView_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v9.resources.WebpageView.class, com.google.ads.googleads.v9.resources.WebpageView.Builder.class);
    }

    // Construct using com.google.ads.googleads.v9.resources.WebpageView.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      resourceName_ = "";

      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v9.resources.WebpageViewProto.internal_static_google_ads_googleads_v9_resources_WebpageView_descriptor;
    }

    @java.lang.Override
    public com.google.ads.googleads.v9.resources.WebpageView getDefaultInstanceForType() {
      return com.google.ads.googleads.v9.resources.WebpageView.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.ads.googleads.v9.resources.WebpageView build() {
      com.google.ads.googleads.v9.resources.WebpageView result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.ads.googleads.v9.resources.WebpageView buildPartial() {
      com.google.ads.googleads.v9.resources.WebpageView result = new com.google.ads.googleads.v9.resources.WebpageView(this);
      result.resourceName_ = resourceName_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v9.resources.WebpageView) {
        return mergeFrom((com.google.ads.googleads.v9.resources.WebpageView)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.ads.googleads.v9.resources.WebpageView other) {
      if (other == com.google.ads.googleads.v9.resources.WebpageView.getDefaultInstance()) return this;
      if (!other.getResourceName().isEmpty()) {
        resourceName_ = other.resourceName_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.ads.googleads.v9.resources.WebpageView parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.ads.googleads.v9.resources.WebpageView) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private java.lang.Object resourceName_ = "";
    /**
     * <pre>
     * Output only. The resource name of the webpage view.
     * Webpage view resource names have the form:
     * `customers/{customer_id}/webpageViews/{ad_group_id}~{criterion_id}`
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @return The resourceName.
     */
    public java.lang.String getResourceName() {
      java.lang.Object ref = resourceName_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        resourceName_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Output only. The resource name of the webpage view.
     * Webpage view resource names have the form:
     * `customers/{customer_id}/webpageViews/{ad_group_id}~{criterion_id}`
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @return The bytes for resourceName.
     */
    public com.google.protobuf.ByteString
        getResourceNameBytes() {
      java.lang.Object ref = resourceName_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        resourceName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Output only. The resource name of the webpage view.
     * Webpage view resource names have the form:
     * `customers/{customer_id}/webpageViews/{ad_group_id}~{criterion_id}`
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @param value The resourceName to set.
     * @return This builder for chaining.
     */
    public Builder setResourceName(
        java.lang.String value) {
      if (value == null) {
    throw new NullPointerException();
  }

      resourceName_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. The resource name of the webpage view.
     * Webpage view resource names have the form:
     * `customers/{customer_id}/webpageViews/{ad_group_id}~{criterion_id}`
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @return This builder for chaining.
     */
    public Builder clearResourceName() {

      resourceName_ = getDefaultInstance().getResourceName();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. The resource name of the webpage view.
     * Webpage view resource names have the form:
     * `customers/{customer_id}/webpageViews/{ad_group_id}~{criterion_id}`
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
     * @param value The bytes for resourceName to set.
     * @return This builder for chaining.
     */
    public Builder setResourceNameBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);

      resourceName_ = value;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v9.resources.WebpageView)
  }

  // @@protoc_insertion_point(class_scope:google.ads.googleads.v9.resources.WebpageView)
  private static final com.google.ads.googleads.v9.resources.WebpageView DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v9.resources.WebpageView();
  }

  public static com.google.ads.googleads.v9.resources.WebpageView getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<WebpageView>
      PARSER = new com.google.protobuf.AbstractParser<WebpageView>() {
    @java.lang.Override
    public WebpageView parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new WebpageView(input, extensionRegistry);
    }
  };

  public static com.google.protobuf.Parser<WebpageView> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<WebpageView> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.ads.googleads.v9.resources.WebpageView getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
/*
 * Copyright 1999-2010 Luca Garulli (l.garulli--at--orientechnologies.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.orientechnologies.orient.test.database.auto;

import java.util.List;
import java.util.Map;

import org.testng.Assert;
import org.testng.annotations.Parameters;
import org.testng.annotations.Test;

import com.orientechnologies.orient.core.db.document.ODatabaseDocument;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.sql.OCommandSQL;
import com.orientechnologies.orient.core.sql.query.OSQLSynchQuery;

/**
 * TestNG suite exercising the SQL UPDATE command (set/add/remove/increment,
 * positional parameters and rid-based WHERE clauses).
 *
 * NOTE(review): tests open/close the database per method and run sequentially
 * (sequential = true, dependsOnMethods); several assertions (e.g. exactly 3
 * 'Obama' Profiles) presumably rely on a pre-seeded demo database — confirm
 * against the test harness setup.
 */
@Test(groups = "sql-update", sequential = true)
public class SQLUpdateTest {
  private ODatabaseDocument database;
  // Shared across dependent tests: record count from the "add" test is
  // re-checked by the "remove" test.
  private int               updatedRecords;

  @Parameters(value = "url")
  public SQLUpdateTest(String iURL) {
    database = new ODatabaseDocumentTx(iURL);
  }

  /** UPDATE ... WHERE with multiple SET assignments; expects 3 matching rows. */
  @Test
  public void updateWithWhereOperator() {
    database.open("admin", "admin");

    Integer records = (Integer) database.command(
        new OCommandSQL("update Profile set salary = 120.30, location = 3:2, salary_cloned = salary where surname = 'Obama'"))
        .execute();

    Assert.assertEquals(records.intValue(), 3);

    database.close();
  }

  /** UPDATE targeting a single record by @rid passed as a positional parameter. */
  @Test
  public void updateWithWhereRid() {
    database.open("admin", "admin");

    List<ODocument> result = database.command(new OCommandSQL("select @rid as rid from Profile where surname = 'Obama'")).execute();
    Assert.assertEquals(result.size(), 3);

    Integer records = (Integer) database.command(new OCommandSQL("update Profile set salary = 133.00 where @rid = ?")).execute(
        result.get(0).field("rid"));

    Assert.assertEquals(records.intValue(), 1);

    database.close();
  }

  /** UPDATE ... ADD on a collection field; stores the count for the remove test. */
  @Test(dependsOnMethods = "updateWithWhereOperator")
  public void updateCollectionsAddWithWhereOperator() {
    database.open("admin", "admin");
    updatedRecords = (Integer) database.command(new OCommandSQL("update Account add addresses = #12:0")).execute();
    database.close();
  }

  /** UPDATE ... REMOVE must touch exactly the rows the add test touched. */
  @Test(dependsOnMethods = "updateCollectionsAddWithWhereOperator")
  public void updateCollectionsRemoveWithWhereOperator() {
    database.open("admin", "admin");

    final int records = (Integer) database.command(new OCommandSQL("update Account remove addresses = #12:0")).execute();

    Assert.assertEquals(records, updatedRecords);

    database.close();
  }

  /** SET with a collection literal; verifies persisted size/content, then restores. */
  @Test(dependsOnMethods = "updateCollectionsRemoveWithWhereOperator")
  public void updateCollectionsWithSetOperator() {
    database.open("admin", "admin");

    List<ODocument> docs = database.query(new OSQLSynchQuery<ODocument>("select from Account"));

    for (ODocument doc : docs) {

      final int records = (Integer) database.command(
          new OCommandSQL("update Account set addresses = [#12:0, #12:1,#12:2] where @rid = " + doc.getIdentity())).execute();

      Assert.assertEquals(records, 1);

      ODocument loadedDoc = database.load(doc.getIdentity(), "*:-1", true);
      Assert.assertEquals(((List<?>) loadedDoc.field("addresses")).size(), 3);
      Assert.assertEquals(((OIdentifiable) ((List<?>) loadedDoc.field("addresses")).get(0)).getIdentity().toString(), "#12:0");
      // Restore the original collection so later tests see unchanged data.
      loadedDoc.field("addresses", doc.field("addresses"));
      database.save(loadedDoc);
    }

    database.close();
  }

  /** SET with a map literal: old keys must be gone, new keys present. */
  @Test(dependsOnMethods = "updateCollectionsRemoveWithWhereOperator")
  public void updateMapsWithSetOperator() {
    database.open("admin", "admin");

    ODocument doc = (ODocument) database
        .command(
            new OCommandSQL(
                "insert into cluster:default (equaledges, name, properties) values ('no', 'circleUpdate', {'round':'eeee', 'blaaa':'zigzag'} )"))
        .execute();

    Integer records = (Integer) database.command(
        new OCommandSQL("update " + doc.getIdentity()
            + " set properties = {'roundOne':'ffff', 'bla':'zagzig','testTestTEST':'okOkOK'}")).execute();

    Assert.assertEquals(records.intValue(), 1);

    ODocument loadedDoc = database.load(doc.getIdentity(), "*:-1", true);

    Assert.assertTrue(loadedDoc.field("properties") instanceof Map);

    @SuppressWarnings("unchecked")
    Map<Object, Object> entries = ((Map<Object, Object>) loadedDoc.field("properties"));
    Assert.assertEquals(entries.size(), 3);

    Assert.assertNull(entries.get("round"));
    Assert.assertNull(entries.get("blaaa"));

    Assert.assertEquals(entries.get("roundOne"), "ffff");
    Assert.assertEquals(entries.get("bla"), "zagzig");
    Assert.assertEquals(entries.get("testTestTEST"), "okOkOK");

    database.close();
  }

  /** UPDATE with no WHERE clause must hit every Profile record. */
  @Test(dependsOnMethods = "updateCollectionsRemoveWithWhereOperator")
  public void updateAllOperator() {
    database.open("admin", "admin");

    Long total = database.countClass("Profile");

    Integer records = (Integer) database.command(new OCommandSQL("update Profile set sex = 'male'")).execute();

    Assert.assertEquals(records.intValue(), total.intValue());

    database.close();
  }

  /** Positional parameter in SET, with LIMIT 1. */
  @Test
  public void updateWithWildcards() {
    database.open("admin", "admin");

    int updated = (Integer) database.command(new OCommandSQL("update Profile set sex = ? where sex = 'male' limit 1")).execute(
        "male");

    Assert.assertEquals(updated, 1);

    database.close();
  }

  /** Positional parameters in SET and WHERE, in every combination. */
  @Test
  public void updateWithWildcardsOnSetAndWhere() {
    database.open("admin", "admin");

    ODocument doc = new ODocument("Person");
    doc.field("name", "Raf");
    doc.field("city", "Torino");
    doc.field("gender", "fmale");
    doc.save();

    checkUpdatedDoc(database, "Raf", "Torino", "fmale");

    /* THESE COMMANDS ARE OK */
    OCommandSQL updatecommand = new OCommandSQL("update Person set gender = 'female' where name = 'Raf'");
    database.command(updatecommand).execute("Raf");
    checkUpdatedDoc(database, "Raf", "Torino", "female");

    updatecommand = new OCommandSQL("update Person set city = 'Turin' where name = ?");
    database.command(updatecommand).execute("Raf");
    checkUpdatedDoc(database, "Raf", "Turin", "female");

    updatecommand = new OCommandSQL("update Person set gender = ? where name = 'Raf'");
    database.command(updatecommand).execute("F");
    checkUpdatedDoc(database, "Raf", "Turin", "F");

    updatecommand = new OCommandSQL("update Person set gender = ?, city = ? where name = 'Raf'");
    database.command(updatecommand).execute("FEMALE", "TORINO");
    checkUpdatedDoc(database, "Raf", "TORINO", "FEMALE");

    updatecommand = new OCommandSQL("update Person set gender = ? where name = ?");
    database.command(updatecommand).execute("f", "Raf");
    checkUpdatedDoc(database, "Raf", "TORINO", "f");

    database.close();
  }

  /**
   * UPDATE ... INCREMENT: +10 then -10 must restore the original salaries.
   * NOTE(review): no @Test annotation — still picked up because the class is
   * annotated @Test, so it runs with the suite's group/sequencing.
   */
  public void updateIncrement() {
    database.open("admin", "admin");

    List<ODocument> result1 = database.command(new OCommandSQL("select salary from Account where salary is defined")).execute();
    Assert.assertFalse(result1.isEmpty());

    updatedRecords = (Integer) database.command(new OCommandSQL("update Account increment salary = 10 where salary is defined")).execute();
    Assert.assertTrue(updatedRecords > 0);

    List<ODocument> result2 = database.command(new OCommandSQL("select salary from Account where salary is defined")).execute();
    Assert.assertFalse(result2.isEmpty());
    Assert.assertEquals(result2.size(), result1.size());

    for (int i = 0; i < result1.size(); ++i) {
      float salary1 = (Float) result1.get(i).field("salary");
      float salary2 = (Float) result2.get(i).field("salary");
      Assert.assertEquals(salary2, salary1 + 10);
    }

    updatedRecords = (Integer) database.command(new OCommandSQL("update Account increment salary = -10 where salary is defined")).execute();
    Assert.assertTrue(updatedRecords > 0);

    List<ODocument> result3 = database.command(new OCommandSQL("select salary from Account where salary is defined")).execute();
    Assert.assertFalse(result3.isEmpty());
    Assert.assertEquals(result3.size(), result1.size());

    for (int i = 0; i < result1.size(); ++i) {
      float salary1 = (Float) result1.get(i).field("salary");
      float salary3 = (Float) result3.get(i).field("salary");
      Assert.assertEquals(salary3, salary1);
    }

    database.close();
  }

  /**
   * Asserts the first Person row matches the expected name/city/gender.
   * NOTE(review): only checks result.get(0) — assumes at most one Person
  * record exists during these tests.
   */
  private void checkUpdatedDoc(ODatabaseDocument database, String expectedName, String expectedCity, String expectedGender) {
    List<ODocument> result = database.query(new OSQLSynchQuery<Object>("select * from person"));
    ODocument oDoc = result.get(0);
    Assert.assertEquals(expectedName, oDoc.field("name"));
    Assert.assertEquals(expectedCity, oDoc.field("city"));
    Assert.assertEquals(expectedGender, oDoc.field("gender"));
  }
}
/*
 * This file is part of SQL Workbench/J, http://www.sql-workbench.net
 *
 * Copyright 2002-2015 Thomas Kellerer.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * To contact the author please send an email to: support@sql-workbench.net
 */
package workbench.sql.parser;

import java.io.IOException;

import workbench.sql.ScriptCommandDefinition;

import org.junit.Test;

import static org.junit.Assert.*;

/**
 * Tests for {@link ScriptIterator} implementations.
 *
 * Each public test method iterates over all {@link ParserType} values and
 * delegates to a private {@code doTest*} helper so every parser flavor is
 * exercised with the same script fixtures.
 *
 * @author Thomas Kellerer
 */
public class ScriptIteratorTest
{

  public ScriptIteratorTest()
  {
  }

  /**
   * A script mixing ';' terminators with blank lines: blank lines must not
   * produce extra (empty) statements when empty-line-delimiter mode is on.
   */
  @Test
  public void testMixedEmptyLinesWithTerminator()
    throws Exception
  {
    for (ParserType type : ParserType.values())
    {
      doTestMixedEmptyLinesWithTerminator(new LexerBasedParser(type));
    }
  }

  private void doTestMixedEmptyLinesWithTerminator(ScriptIterator parser)
    throws Exception
  {
    String sql = "select * from foo;\n\n" + "select * from bar;\n";
    parser.setEmptyLineIsDelimiter(true);
    parser.setScript(sql);
    parser.setStoreStatementText(true);

    // Two statements expected, without the trailing ';' or blank lines.
    ScriptCommandDefinition cmd = parser.getNextCommand();
    assertNotNull(cmd);
    assertEquals("select * from foo", cmd.getSQL());

    cmd = parser.getNextCommand();
    assertNotNull(cmd);
    assertEquals("select * from bar", cmd.getSQL());

    sql = "select * from foo;\n" + "select * from bar;\n" + "select * from foobar;\n" + "\n" + "select * from foo;";
    parser.setScript(sql);
    parser.setStoreStatementText(true);

    cmd = parser.getNextCommand();
    assertNotNull(cmd);
    assertEquals("select * from foo", cmd.getSQL());

    cmd = parser.getNextCommand();
    assertNotNull(cmd);
    assertEquals("select * from bar", cmd.getSQL());

    cmd = parser.getNextCommand();
    assertNotNull(cmd);
    assertEquals("select * from foobar", cmd.getSQL());

    cmd = parser.getNextCommand();
    assertNotNull(cmd);
    assertEquals("select * from foo", cmd.getSQL());

    // Script exhausted.
    cmd = parser.getNextCommand();
    assertNull(cmd);
  }

  /**
   * Blank lines alone (no ';') must act as statement delimiters when
   * empty-line-delimiter mode is enabled, while line breaks inside a
   * statement are preserved verbatim.
   */
  @Test
  public void testEmptyLineDelimiter()
    throws Exception
  {
    for (ParserType type : ParserType.values())
    {
      doTestEmptyLineDelimiter(new LexerBasedParser(type));
    }
  }

  private void doTestEmptyLineDelimiter(final ScriptIterator parser)
    throws Exception
  {
    String sql = "select * from test\n\n" + "select * from person\n";
    parser.setScript(sql);
    parser.setEmptyLineIsDelimiter(true);
    parser.setStoreStatementText(true);

    ScriptCommandDefinition cmd = parser.getNextCommand();
    assertNotNull(cmd);
    assertEquals("select * from test", cmd.getSQL().trim());

    cmd = parser.getNextCommand();
    assertNotNull(cmd);
    assertEquals("select * from person", cmd.getSQL().trim());

    // Windows line endings inside a single statement must survive untouched.
    sql = "select a,b,c\r\nfrom test\r\nwhere x = 1";
    parser.setScript(sql);
    parser.setEmptyLineIsDelimiter(true);
    parser.setStoreStatementText(true);

    cmd = parser.getNextCommand();
    assertNotNull(cmd);
    assertEquals("select a,b,c\r\nfrom test\r\nwhere x = 1", cmd.getSQL());

    sql = "select *\nfrom foo\n\nselect * from bar";
    parser.setScript(sql);
    parser.setStoreStatementText(true);

    cmd = parser.getNextCommand();
    assertNotNull(cmd);
    assertEquals("select *\nfrom foo", cmd.getSQL());

    cmd = parser.getNextCommand();
    assertNotNull(cmd);
    assertEquals("select * from bar", cmd.getSQL());

    cmd = parser.getNextCommand();
    assertNull(cmd);
  }

  /**
   * A ';' inside a quoted string literal must not terminate the statement.
   */
  @Test
  public void testQuotedDelimiter()
    throws Exception
  {
    for (ParserType type : ParserType.values())
    {
      doTestQuotedDelimiter(new LexerBasedParser(type));
    }
  }

  private void doTestQuotedDelimiter(ScriptIterator parser)
    throws Exception
  {
    String sql = "select 'test\n;lines' from test;";
    parser.setScript(sql);
    parser.setStoreStatementText(true);

    ScriptCommandDefinition cmd = parser.getNextCommand();
    assertNotNull(cmd);
    assertEquals("select 'test\n;lines' from test", cmd.getSQL());

    cmd = parser.getNextCommand();
    assertNull(cmd);
  }

  /**
   * Trailing whitespace after the last statement must not break the reported
   * end position; with setStoreStatementText(false) getSQL() returns null.
   */
  @Test
  public void testWhiteSpaceAtEnd()
    throws Exception
  {
    for (ParserType type : ParserType.values())
    {
      doTestWhiteSpaceAtEnd(new LexerBasedParser(type));
    }
  }

  // Was public; made private for consistency with the other doTest* helpers.
  private void doTestWhiteSpaceAtEnd(ScriptIterator parser)
    throws IOException
  {
    String sql = "create table target_table (id integer);\n" + "wbcopy \n";
    parser.setScript(sql);
    parser.setCheckEscapedQuotes(false);
    parser.setEmptyLineIsDelimiter(false);
    parser.setStoreStatementText(false); // no text stored, only positions

    ScriptCommandDefinition cmd = parser.getNextCommand();
    assertNotNull(cmd);
    assertNull(cmd.getSQL());

    cmd = parser.getNextCommand();
    assertNotNull(cmd);
    assertNull(cmd.getSQL());
    // The second statement runs to the very end of the script, including whitespace.
    assertEquals(sql.length(), cmd.getEndPositionInScript());
  }

  /**
   * Backslash-escaped quotes (\') inside literals must not end the string
   * when setCheckEscapedQuotes(true) is active.
   */
  @Test
  public void testEscapedQuotes()
  {
    for (ParserType type : ParserType.values())
    {
      doTestEscapedQuotes(new LexerBasedParser(type));
    }
  }

  // Was public; made private for consistency with the other doTest* helpers.
  private void doTestEscapedQuotes(ScriptIterator parser)
  {
    parser.setCheckEscapedQuotes(true);
    parser.setScript(
      "insert into foo (data) values ('foo\\'s data1');\n" +
      "insert into foo (data) values ('foo\\'s data2');" +
      "commit;\n");
    parser.setStoreStatementText(true);

    ScriptCommandDefinition c = parser.getNextCommand();
    assertNotNull(c);
    assertTrue(c.getSQL().startsWith("insert"));

    c = parser.getNextCommand();
    assertNotNull(c);
    assertTrue(c.getSQL().startsWith("insert"));

    c = parser.getNextCommand();
    assertNotNull(c);
    assertEquals("commit", c.getSQL());
  }

}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.script;

import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.breaker.CircuitBreakingException;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.env.Environment;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.junit.Before;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;

import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.sameInstance;

//TODO: this needs to be a base test class, and all scripting engines extend it
public class ScriptServiceTests extends ESTestCase {

    private ResourceWatcherService resourceWatcherService;
    private ScriptEngineService scriptEngineService;
    private ScriptEngineService dangerousScriptEngineService;
    private Map<String, ScriptEngineService> scriptEnginesByLangMap;
    private ScriptEngineRegistry scriptEngineRegistry;
    private ScriptContextRegistry scriptContextRegistry;
    private ScriptSettings scriptSettings;
    private ScriptContext[] scriptContexts;
    private ScriptService scriptService;
    private Path scriptsFilePath;
    private Settings baseSettings;

    // Default enabled state per script source: only file scripts are allowed by default.
    private static final Map<ScriptType, Boolean> DEFAULT_SCRIPT_ENABLED = new HashMap<>();

    static {
        DEFAULT_SCRIPT_ENABLED.put(ScriptType.FILE, true);
        DEFAULT_SCRIPT_ENABLED.put(ScriptType.STORED, false);
        DEFAULT_SCRIPT_ENABLED.put(ScriptType.INLINE, false);
    }

    /**
     * Builds the registries, a scripts directory under a temp config folder, and a
     * randomized set of custom script contexts shared by all tests in this class.
     */
    @Before
    public void setup() throws IOException {
        Path genericConfigFolder = createTempDir();
        baseSettings = Settings.builder()
                .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
                .put(Environment.PATH_CONF_SETTING.getKey(), genericConfigFolder)
                // very high limit so the compilation circuit breaker does not interfere
                // with tests that are not about it
                .put(ScriptService.SCRIPT_MAX_COMPILATIONS_PER_MINUTE.getKey(), 10000)
                .build();
        resourceWatcherService = new ResourceWatcherService(baseSettings, null);
        scriptEngineService = new TestEngineService();
        dangerousScriptEngineService = new TestDangerousEngineService();
        TestEngineService defaultScriptServiceEngine = new TestEngineService(Script.DEFAULT_SCRIPT_LANG) {};
        scriptEnginesByLangMap = ScriptModesTests.buildScriptEnginesByLangMap(
                new HashSet<>(Arrays.asList(scriptEngineService, defaultScriptServiceEngine)));
        //randomly register custom script contexts
        int randomInt = randomIntBetween(0, 3);
        //prevent duplicates using map
        Map<String, ScriptContext.Plugin> contexts = new HashMap<>();
        for (int i = 0; i < randomInt; i++) {
            String plugin;
            do {
                plugin = randomAlphaOfLength(randomIntBetween(1, 10));
            } while (ScriptContextRegistry.RESERVED_SCRIPT_CONTEXTS.contains(plugin));
            String operation;
            do {
                operation = randomAlphaOfLength(randomIntBetween(1, 30));
            } while (ScriptContextRegistry.RESERVED_SCRIPT_CONTEXTS.contains(operation));
            String context = plugin + "_" + operation;
            contexts.put(context, new ScriptContext.Plugin(plugin, operation));
        }
        scriptEngineRegistry = new ScriptEngineRegistry(Arrays.asList(scriptEngineService, dangerousScriptEngineService,
                defaultScriptServiceEngine));
        scriptContextRegistry = new ScriptContextRegistry(contexts.values());
        scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
        scriptContexts = scriptContextRegistry.scriptContexts().toArray(new ScriptContext[scriptContextRegistry.scriptContexts().size()]);
        logger.info("--> setup script service");
        scriptsFilePath = genericConfigFolder.resolve("scripts");
        Files.createDirectories(scriptsFilePath);
    }

    /**
     * Creates the ScriptService under test; the cluster-state lookup is stubbed so
     * stored-script retrieval never needs a real cluster.
     */
    private void buildScriptService(Settings additionalSettings) throws IOException {
        Settings finalSettings = Settings.builder().put(baseSettings).put(additionalSettings).build();
        Environment environment = new Environment(finalSettings);
        // TODO:
        scriptService = new ScriptService(finalSettings, environment, resourceWatcherService, scriptEngineRegistry,
                scriptContextRegistry, scriptSettings) {
            @Override
            StoredScriptSource getScriptFromClusterState(String id, String lang) {
                //mock the script that gets retrieved from an index
                return new StoredScriptSource(lang, "100", Collections.emptyMap());
            }
        };
    }

    // The per-minute compilation limit must trip exactly after the configured number
    // of compilations, for several limit values including 0 and MAX_VALUE.
    public void testCompilationCircuitBreaking() throws Exception {
        buildScriptService(Settings.EMPTY);
        scriptService.setMaxCompilationsPerMinute(1);
        scriptService.checkCompilationLimit(); // should pass
        expectThrows(CircuitBreakingException.class, () -> scriptService.checkCompilationLimit());
        scriptService.setMaxCompilationsPerMinute(2);
        scriptService.checkCompilationLimit(); // should pass
        scriptService.checkCompilationLimit(); // should pass
        expectThrows(CircuitBreakingException.class, () -> scriptService.checkCompilationLimit());
        int count = randomIntBetween(5, 50);
        scriptService.setMaxCompilationsPerMinute(count);
        for (int i = 0; i < count; i++) {
            scriptService.checkCompilationLimit(); // should pass
        }
        expectThrows(CircuitBreakingException.class, () -> scriptService.checkCompilationLimit());
        scriptService.setMaxCompilationsPerMinute(0);
        expectThrows(CircuitBreakingException.class, () -> scriptService.checkCompilationLimit());
        scriptService.setMaxCompilationsPerMinute(Integer.MAX_VALUE);
        int largeLimit = randomIntBetween(1000, 10000);
        for (int i = 0; i < largeLimit; i++) {
            scriptService.checkCompilationLimit();
        }
    }

    // The legacy script.disable_dynamic setting must be rejected with a pointer to
    // the fine-grained replacement settings.
    public void testNotSupportedDisableDynamicSetting() throws IOException {
        try {
            buildScriptService(Settings.builder().put(ScriptService.DISABLE_DYNAMIC_SCRIPTING_SETTING,
                    randomUnicodeOfLength(randomIntBetween(1, 10))).build());
            fail("script service should have thrown exception due to non supported script.disable_dynamic setting");
        } catch(IllegalArgumentException e) {
            assertThat(e.getMessage(), containsString(ScriptService.DISABLE_DYNAMIC_SCRIPTING_SETTING
                    + " is not a supported setting, replace with fine-grained script settings"));
        }
    }

    // Files without an extension must be ignored by the file-script watcher;
    // deleting the watched file must make the script unresolvable again.
    public void testScriptsWithoutExtensions() throws IOException {
        buildScriptService(Settings.EMPTY);
        Path testFileNoExt = scriptsFilePath.resolve("test_no_ext");
        Path testFileWithExt = scriptsFilePath.resolve("test_script.test");
        Streams.copy("test_file_no_ext".getBytes("UTF-8"), Files.newOutputStream(testFileNoExt));
        Streams.copy("test_file".getBytes("UTF-8"), Files.newOutputStream(testFileWithExt));
        resourceWatcherService.notifyNow();

        CompiledScript compiledScript = scriptService.compile(new Script(ScriptType.FILE, "test", "test_script", Collections.emptyMap()),
                ScriptContext.Standard.SEARCH);
        assertThat(compiledScript.compiled(), equalTo((Object) "compiled_test_file"));

        Files.delete(testFileNoExt);
        Files.delete(testFileWithExt);
        resourceWatcherService.notifyNow();

        try {
            scriptService.compile(new Script(ScriptType.FILE, "test", "test_script", Collections.emptyMap()),
                    ScriptContext.Standard.SEARCH);
            fail("the script test_script should no longer exist");
        } catch (IllegalArgumentException ex) {
            assertThat(ex.getMessage(), containsString("unable to find file script [test_script] using lang [test]"));
        }
    }

    // Hidden files (dot-prefixed) in the scripts directory must be skipped.
    public void testScriptCompiledOnceHiddenFileDetected() throws IOException {
        buildScriptService(Settings.EMPTY);

        Path testHiddenFile = scriptsFilePath.resolve(".hidden_file");
        Streams.copy("test_hidden_file".getBytes("UTF-8"), Files.newOutputStream(testHiddenFile));

        Path testFileScript = scriptsFilePath.resolve("file_script.test");
        Streams.copy("test_file_script".getBytes("UTF-8"), Files.newOutputStream(testFileScript));
        resourceWatcherService.notifyNow();

        CompiledScript compiledScript = scriptService.compile(new Script(ScriptType.FILE, "test", "file_script", Collections.emptyMap()),
                ScriptContext.Standard.SEARCH);
        assertThat(compiledScript.compiled(), equalTo((Object) "compiled_test_file_script"));

        Files.delete(testHiddenFile);
        Files.delete(testFileScript);
        resourceWatcherService.notifyNow();
    }

    // Compiling the same inline script twice must hit the cache (same compiled instance).
    public void testInlineScriptCompiledOnceCache() throws IOException {
        buildScriptService(Settings.EMPTY);
        CompiledScript compiledScript1 = scriptService.compile(new Script(ScriptType.INLINE, "test", "1+1", Collections.emptyMap()),
                randomFrom(scriptContexts));
        CompiledScript compiledScript2 = scriptService.compile(new Script(ScriptType.INLINE, "test", "1+1", Collections.emptyMap()),
                randomFrom(scriptContexts));
        assertThat(compiledScript1.compiled(), sameInstance(compiledScript2.compiled()));
    }

    // With no settings (or only the no-op default "script.file: true"), only FILE
    // scripts compile; INLINE and STORED are rejected for the dangerous engine.
    public void testDefaultBehaviourFineGrainedSettings() throws IOException {
        Settings.Builder builder = Settings.builder();
        //rarely inject the default settings, which have no effect
        if (rarely()) {
            builder.put("script.file", "true");
        }
        buildScriptService(builder.build());
        createFileScripts("mustache", "dtest");

        for (ScriptContext scriptContext : scriptContexts) {
            // only file scripts are accepted by default
            assertCompileRejected("dtest", "script", ScriptType.INLINE, scriptContext);
            assertCompileRejected("dtest", "script", ScriptType.STORED, scriptContext);
            assertCompileAccepted("dtest", "file_script", ScriptType.FILE, scriptContext);
        }
    }

    // Randomized matrix over source-, context- and engine-level settings; verifies the
    // documented fallback order: engine-specific > context > source > built-in default.
    public void testFineGrainedSettings() throws IOException {
        //collect the fine-grained settings to set for this run
        int numScriptSettings = randomIntBetween(0, ScriptType.values().length);
        Map<ScriptType, Boolean> scriptSourceSettings = new HashMap<>();
        for (int i = 0; i < numScriptSettings; i++) {
            ScriptType scriptType;
            do {
                scriptType = randomFrom(ScriptType.values());
            } while (scriptSourceSettings.containsKey(scriptType));
            scriptSourceSettings.put(scriptType, randomBoolean());
        }
        int numScriptContextSettings = randomIntBetween(0, this.scriptContextRegistry.scriptContexts().size());
        Map<ScriptContext, Boolean> scriptContextSettings = new HashMap<>();
        for (int i = 0; i < numScriptContextSettings; i++) {
            ScriptContext scriptContext;
            do {
                scriptContext = randomFrom(this.scriptContexts);
            } while (scriptContextSettings.containsKey(scriptContext));
            scriptContextSettings.put(scriptContext, randomBoolean());
        }
        int numEngineSettings = randomIntBetween(0, ScriptType.values().length * scriptContexts.length);
        Map<String, Boolean> engineSettings = new HashMap<>();
        for (int i = 0; i < numEngineSettings; i++) {
            String settingKey;
            do {
                ScriptType scriptType = randomFrom(ScriptType.values());
                ScriptContext scriptContext = randomFrom(this.scriptContexts);
                settingKey = scriptEngineService.getType() + "." + scriptType + "." + scriptContext.getKey();
            } while (engineSettings.containsKey(settingKey));
            engineSettings.put(settingKey, randomBoolean());
        }
        //set the selected fine-grained settings
        Settings.Builder builder = Settings.builder();
        for (Map.Entry<ScriptType, Boolean> entry : scriptSourceSettings.entrySet()) {
            if (entry.getValue()) {
                builder.put("script" + "." + entry.getKey().getName(), "true");
            } else {
                builder.put("script" + "." + entry.getKey().getName(), "false");
            }
        }
        for (Map.Entry<ScriptContext, Boolean> entry : scriptContextSettings.entrySet()) {
            if (entry.getValue()) {
                builder.put("script" + "." + entry.getKey().getKey(), "true");
            } else {
                builder.put("script" + "." + entry.getKey().getKey(), "false");
            }
        }
        for (Map.Entry<String, Boolean> entry : engineSettings.entrySet()) {
            int delimiter = entry.getKey().indexOf('.');
            String part1 = entry.getKey().substring(0, delimiter);
            String part2 = entry.getKey().substring(delimiter + 1);

            String lang = randomFrom(scriptEnginesByLangMap.get(part1).getType());
            if (entry.getValue()) {
                builder.put("script.engine" + "." + lang + "." + part2, "true");
            } else {
                builder.put("script.engine" + "." + lang + "." + part2, "false");
            }
        }

        buildScriptService(builder.build());
        createFileScripts("expression", "mustache", "dtest");

        for (ScriptType scriptType : ScriptType.values()) {
            //make sure file scripts have a different name than inline ones.
            //Otherwise they are always considered file ones as they can be found in the static cache.
            String script = scriptType == ScriptType.FILE ? "file_script" : "script";
            for (ScriptContext scriptContext : this.scriptContexts) {
                //fallback mechanism: 1) engine specific settings 2) op based settings 3) source based settings
                Boolean scriptEnabled = engineSettings.get(dangerousScriptEngineService.getType() + "." + scriptType + "."
                        + scriptContext.getKey());
                if (scriptEnabled == null) {
                    scriptEnabled = scriptContextSettings.get(scriptContext);
                }
                if (scriptEnabled == null) {
                    scriptEnabled = scriptSourceSettings.get(scriptType);
                }
                if (scriptEnabled == null) {
                    scriptEnabled = DEFAULT_SCRIPT_ENABLED.get(scriptType);
                }

                String lang = dangerousScriptEngineService.getType();
                if (scriptEnabled) {
                    assertCompileAccepted(lang, script, scriptType, scriptContext);
                } else {
                    assertCompileRejected(lang, script, scriptType, scriptContext);
                }
            }
        }
    }

    // Compiling against a context that was never registered must be rejected.
    public void testCompileNonRegisteredContext() throws IOException {
        buildScriptService(Settings.EMPTY);
        String pluginName;
        String unknownContext;
        do {
            pluginName = randomAlphaOfLength(randomIntBetween(1, 10));
            unknownContext = randomAlphaOfLength(randomIntBetween(1, 30));
        } while(scriptContextRegistry.isSupportedContext(new ScriptContext.Plugin(pluginName, unknownContext)));

        String type = scriptEngineService.getType();
        try {
            scriptService.compile(new Script(randomFrom(ScriptType.values()), type, "test", Collections.emptyMap()),
                    new ScriptContext.Plugin(pluginName, unknownContext));
            fail("script compilation should have been rejected");
        } catch(IllegalArgumentException e) {
            assertThat(e.getMessage(), containsString("script context [" + pluginName + "_" + unknownContext + "] not supported"));
        }
    }

    public void testCompileCountedInCompilationStats() throws IOException {
        buildScriptService(Settings.EMPTY);
        scriptService.compile(new Script(ScriptType.INLINE, "test", "1+1", Collections.emptyMap()), randomFrom(scriptContexts));
        assertEquals(1L, scriptService.stats().getCompilations());
    }

    // executable() on an already-compiled script must not count as a second compilation.
    public void testExecutableCountedInCompilationStats() throws IOException {
        buildScriptService(Settings.EMPTY);
        Script script = new Script(ScriptType.INLINE, "test", "1+1", Collections.emptyMap());
        CompiledScript compiledScript = scriptService.compile(script, randomFrom(scriptContexts));
        scriptService.executable(compiledScript, script.getParams());
        assertEquals(1L, scriptService.stats().getCompilations());
    }

    public void testSearchCountedInCompilationStats() throws IOException {
        buildScriptService(Settings.EMPTY);
        scriptService.search(null, new Script(ScriptType.INLINE, "test", "1+1", Collections.emptyMap()), randomFrom(scriptContexts));
        assertEquals(1L, scriptService.stats().getCompilations());
    }

    public void testMultipleCompilationsCountedInCompilationStats() throws IOException {
        buildScriptService(Settings.EMPTY);
        int numberOfCompilations = randomIntBetween(1, 1024);
        for (int i = 0; i < numberOfCompilations; i++) {
            scriptService
                    .compile(new Script(ScriptType.INLINE, "test", i + " + " + i, Collections.emptyMap()), randomFrom(scriptContexts));
        }
        assertEquals(numberOfCompilations, scriptService.stats().getCompilations());
    }

    // A cache hit must not bump the compilation counter.
    public void testCompilationStatsOnCacheHit() throws IOException {
        Settings.Builder builder = Settings.builder();
        builder.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING.getKey(), 1);
        builder.put("script.inline", "true");
        buildScriptService(builder.build());
        Script script = new Script(ScriptType.INLINE, "test", "1+1", Collections.emptyMap());
        scriptService.compile(script, randomFrom(scriptContexts));
        scriptService.compile(script, randomFrom(scriptContexts));
        assertEquals(1L, scriptService.stats().getCompilations());
    }

    public void testFileScriptCountedInCompilationStats() throws IOException {
        buildScriptService(Settings.EMPTY);
        createFileScripts("test");
        scriptService.compile(new Script(ScriptType.FILE, "test", "file_script", Collections.emptyMap()), randomFrom(scriptContexts));
        assertEquals(1L, scriptService.stats().getCompilations());
    }

    public void testIndexedScriptCountedInCompilationStats() throws IOException {
        buildScriptService(Settings.EMPTY);
        scriptService.compile(new Script(ScriptType.STORED, "test", "script", Collections.emptyMap()), randomFrom(scriptContexts));
        assertEquals(1L, scriptService.stats().getCompilations());
    }

    // With a cache of size 1, a second distinct script evicts the first.
    public void testCacheEvictionCountedInCacheEvictionsStats() throws IOException {
        Settings.Builder builder = Settings.builder();
        builder.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING.getKey(), 1);
        builder.put("script.inline", "true");
        buildScriptService(builder.build());
        scriptService.compile(new Script(ScriptType.INLINE, "test", "1+1", Collections.emptyMap()), randomFrom(scriptContexts));
        scriptService.compile(new Script(ScriptType.INLINE, "test", "2+2", Collections.emptyMap()), randomFrom(scriptContexts));
        assertEquals(2L, scriptService.stats().getCompilations());
        assertEquals(1L, scriptService.stats().getCacheEvictions());
    }

    public void testDefaultLanguage() throws IOException {
        Settings.Builder builder = Settings.builder();
        builder.put("script.inline", "true");
        buildScriptService(builder.build());
        CompiledScript script = scriptService.compile(
            new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, "1 + 1", Collections.emptyMap()), randomFrom(scriptContexts));
        assertEquals(script.lang(), Script.DEFAULT_SCRIPT_LANG);
    }

    public void testStoreScript() throws Exception {
        BytesReference script = XContentFactory.jsonBuilder().startObject()
                    .field("script", "abc")
                .endObject().bytes();

        ScriptMetaData scriptMetaData = ScriptMetaData.putStoredScript(null, "_id",
            StoredScriptSource.parse("_lang", script, XContentType.JSON));
        assertNotNull(scriptMetaData);
        assertEquals("abc", scriptMetaData.getStoredScript("_id", "_lang").getCode());
    }

    // Deleting a stored script twice must fail with ResourceNotFoundException.
    public void testDeleteScript() throws Exception {
        ScriptMetaData scriptMetaData = ScriptMetaData.putStoredScript(null, "_id",
            StoredScriptSource.parse("_lang", new BytesArray("{\"script\":\"abc\"}"), XContentType.JSON));
        scriptMetaData = ScriptMetaData.deleteStoredScript(scriptMetaData, "_id", "_lang");
        assertNotNull(scriptMetaData);
        assertNull(scriptMetaData.getStoredScript("_id", "_lang"));

        ScriptMetaData errorMetaData = scriptMetaData;
        ResourceNotFoundException e = expectThrows(ResourceNotFoundException.class, () -> {
            ScriptMetaData.deleteStoredScript(errorMetaData, "_id", "_lang");
        });
        assertEquals("stored script [_id] using lang [_lang] does not exist and cannot be deleted", e.getMessage());
    }

    public void testGetStoredScript() throws Exception {
        buildScriptService(Settings.EMPTY);
        ClusterState cs = ClusterState.builder(new ClusterName("_name"))
            .metaData(MetaData.builder()
                .putCustom(ScriptMetaData.TYPE,
                    new ScriptMetaData.Builder(null).storeScript("_id",
                        StoredScriptSource.parse("_lang", new BytesArray("{\"script\":\"abc\"}"), XContentType.JSON)).build()))
            .build();

        assertEquals("abc", scriptService.getStoredScript(cs, new GetStoredScriptRequest("_id", "_lang")).getCode());

        assertNull(scriptService.getStoredScript(cs, new GetStoredScriptRequest("_id2", "_lang")));

        cs = ClusterState.builder(new ClusterName("_name")).build();
        assertNull(scriptService.getStoredScript(cs, new GetStoredScriptRequest("_id", "_lang")));
    }

    /** Writes a trivial "file_script.&lt;lang&gt;" file per language and wakes the watcher. */
    private void createFileScripts(String... langs) throws IOException {
        for (String lang : langs) {
            Path scriptPath = scriptsFilePath.resolve("file_script." + lang);
            Streams.copy("10".getBytes("UTF-8"), Files.newOutputStream(scriptPath));
        }
        resourceWatcherService.notifyNow();
    }

    // Rejection is signalled by IllegalStateException from compile().
    private void assertCompileRejected(String lang, String script, ScriptType scriptType, ScriptContext scriptContext) {
        try {
            scriptService.compile(new Script(scriptType, lang, script, Collections.emptyMap()), scriptContext);
            fail("compile should have been rejected for lang [" + lang + "], script_type [" + scriptType + "], scripted_op [" + scriptContext + "]");
        } catch(IllegalStateException e) {
            //all good
        }
    }

    private void assertCompileAccepted(String lang, String script, ScriptType scriptType, ScriptContext scriptContext) {
        assertThat(
                scriptService.compile(new Script(scriptType, lang, script, Collections.emptyMap()), scriptContext),
                notNullValue()
        );
    }

    /** Minimal engine whose "compilation" is just prefixing the source with "compiled_". */
    public static class TestEngineService implements ScriptEngineService {

        public static final String NAME = "test";

        private final String name;

        public TestEngineService() {
            this(NAME);
        }

        public TestEngineService(String name) {
            this.name = name;
        }

        @Override
        public String getType() {
            return name;
        }

        @Override
        public String getExtension() {
            return name;
        }

        @Override
        public Object compile(String scriptName, String scriptText, Map<String, String> params) {
            return "compiled_" + scriptText;
        }

        @Override
        public ExecutableScript executable(final CompiledScript compiledScript, @Nullable Map<String, Object> vars) {
            return null;
        }

        @Override
        public SearchScript search(CompiledScript compiledScript, SearchLookup lookup, @Nullable Map<String, Object> vars) {
            return null;
        }

        @Override
        public void close() {
        }

        // Inline scripts are always allowed for this engine.
        @Override
        public boolean isInlineScriptEnabled() {
            return true;
        }
    }

    /** Like TestEngineService but without isInlineScriptEnabled, so inline is NOT sandboxed. */
    public static class TestDangerousEngineService implements ScriptEngineService {

        public static final String NAME = "dtest";

        @Override
        public String getType() {
            return NAME;
        }

        @Override
        public String getExtension() {
            return NAME;
        }

        @Override
        public Object compile(String scriptName, String scriptSource, Map<String, String> params) {
            return "compiled_" + scriptSource;
        }

        @Override
        public ExecutableScript executable(final CompiledScript compiledScript, @Nullable Map<String, Object> vars) {
            return null;
        }

        @Override
        public SearchScript search(CompiledScript compiledScript, SearchLookup lookup, @Nullable Map<String, Object> vars) {
            return null;
        }

        @Override
        public void close() {
        }
    }
}
package org.json;

/*
Copyright (c) 2002 JSON.org

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

The Software shall be used for Good, not Evil.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/

import java.util.Map;

/**
 * The XMLTokener extends the JSONTokener to provide additional methods
 * for the parsing of XML texts.
 * @author JSON.org
 * @version 2010-12-24
 */
public class XMLTokener extends JSONTokener {


   /** The table of entity values. It initially contains Character values for
    * amp, apos, gt, lt, quot.
    */
   public static final Map<String, Character> entity;

   static {
       // Parameterized map (was a raw type); initial capacity sized for the
       // five predefined XML entities.
       entity = new java.util.HashMap<String, Character>(8);
       entity.put("amp",  XML.AMP);
       entity.put("apos", XML.APOS);
       entity.put("gt",   XML.GT);
       entity.put("lt",   XML.LT);
       entity.put("quot", XML.QUOT);
   }

   /**
    * Construct an XMLTokener from a string.
    * @param s A source string.
    */
   public XMLTokener(String s) {
       super(s);
   }

   /**
    * Get the text in the CDATA block.
    * @return The string up to the <code>]]&gt;</code>.
    * @throws JSONException If the <code>]]&gt;</code> is not found.
    */
   public String nextCDATA() throws JSONException {
       char         c;
       int          i;
       StringBuilder sb = new StringBuilder();
       for (;;) {
           c = next();
           if (end()) {
               throw syntaxError("Unclosed CDATA");
           }
           sb.append(c);
           // Check whether the last three characters form the "]]>" terminator;
           // if so, strip them off and return the accumulated content.
           i = sb.length() - 3;
           if (i >= 0 && sb.charAt(i) == ']' &&
                         sb.charAt(i + 1) == ']' && sb.charAt(i + 2) == '>') {
               sb.setLength(i);
               return sb.toString();
           }
       }
   }


   /**
    * Get the next XML outer token, trimming whitespace. There are two kinds
    * of tokens: the '&lt;' character which begins a markup tag, and the content
    * text between markup tags.
    *
    * @return  A string, or a '&lt;' Character, or null if there is no more
    * source text.
    * @throws JSONException
    */
   public Object nextContent() throws JSONException {
       char         c;
       StringBuilder sb;
       do {
           c = next();
       } while (Character.isWhitespace(c));
       if (c == 0) {
           return null;
       }
       if (c == '<') {
           return XML.LT;
       }
       // Accumulate text content until the next tag or end of input.
       sb = new StringBuilder();
       for (;;) {
           if (c == '<' || c == 0) {
               back();
               return sb.toString().trim();
           }
           if (c == '&') {
               sb.append(nextEntity(c));
           } else {
               sb.append(c);
           }
           c = next();
       }
   }


   /**
    * Return the next entity. These entities are translated to Characters:
    *     <code>&amp; &apos; &gt; &lt; &quot;</code>.
    * @param ampersand An ampersand character.
    * @return  A Character or an entity String if the entity is not recognized.
    * @throws JSONException If missing ';' in XML entity.
    */
   public Object nextEntity(char ampersand) throws JSONException {
       StringBuilder sb = new StringBuilder();
       for (;;) {
           char c = next();
           if (Character.isLetterOrDigit(c) || c == '#') {
               sb.append(Character.toLowerCase(c));
           } else if (c == ';') {
               break;
           } else {
               throw syntaxError("Missing ';' in XML entity: &" + sb);
           }
       }
       String string = sb.toString();
       Object object = entity.get(string);
       // Unknown entities are passed through verbatim, e.g. "&foo;".
       return object != null ? object : ampersand + string + ";";
   }


   /**
    * Returns the next XML meta token. This is used for skipping over &lt;!...&gt;
    * and &lt;?...?&gt; structures.
    * @return Syntax characters (<code>&lt; &gt; / = ! ?</code>) are returned as
    * Character, and strings and names are returned as Boolean. We don't care
    * what the values actually are.
    * @throws JSONException If a string is not properly closed or if the XML
    * is badly structured.
    */
   public Object nextMeta() throws JSONException {
       char c;
       char q;
       do {
           c = next();
       } while (Character.isWhitespace(c));
       switch (c) {
       case 0:
           throw syntaxError("Misshaped meta tag");
       case '<':
           return XML.LT;
       case '>':
           return XML.GT;
       case '/':
           return XML.SLASH;
       case '=':
           return XML.EQ;
       case '!':
           return XML.BANG;
       case '?':
           return XML.QUEST;
       case '"':
       case '\'':
           // Quoted string: scan to the matching quote; content is discarded.
           q = c;
           for (;;) {
               c = next();
               if (c == 0) {
                   throw syntaxError("Unterminated string");
               }
               if (c == q) {
                   return Boolean.TRUE;
               }
           }
       default:
           // Name token: scan until whitespace or a syntax character; content
           // is discarded.
           for (;;) {
               c = next();
               if (Character.isWhitespace(c)) {
                   return Boolean.TRUE;
               }
               switch (c) {
               case 0:
               case '<':
               case '>':
               case '/':
               case '=':
               case '!':
               case '?':
               case '"':
               case '\'':
                   back();
                   return Boolean.TRUE;
               }
           }
       }
   }


   /**
    * Get the next XML Token. These tokens are found inside of angle
    * brackets. It may be one of these characters: <code>/ &gt; = ! ?</code> or it
    * may be a string wrapped in single quotes or double quotes, or it may be a
    * name.
    * @return a String or a Character.
    * @throws JSONException If the XML is not well formed.
    */
   public Object nextToken() throws JSONException {
       char c;
       char q;
       StringBuilder sb;
       do {
           c = next();
       } while (Character.isWhitespace(c));
       switch (c) {
       case 0:
           throw syntaxError("Misshaped element");
       case '<':
           throw syntaxError("Misplaced '<'");
       case '>':
           return XML.GT;
       case '/':
           return XML.SLASH;
       case '=':
           return XML.EQ;
       case '!':
           return XML.BANG;
       case '?':
           return XML.QUEST;

// Quoted string

       case '"':
       case '\'':
           q = c;
           sb = new StringBuilder();
           for (;;) {
               c = next();
               if (c == 0) {
                   throw syntaxError("Unterminated string");
               }
               if (c == q) {
                   return sb.toString();
               }
               if (c == '&') {
                   sb.append(nextEntity(c));
               } else {
                   sb.append(c);
               }
           }
       default:

// Name

           sb = new StringBuilder();
           for (;;) {
               sb.append(c);
               c = next();
               if (Character.isWhitespace(c)) {
                   return sb.toString();
               }
               switch (c) {
               case 0:
                   return sb.toString();
               case '>':
               case '/':
               case '=':
               case '!':
               case '?':
               case '[':
               case ']':
                   back();
                   return sb.toString();
               case '<':
               case '"':
               case '\'':
                   throw syntaxError("Bad character in a name");
               }
           }
       }
   }


   /**
    * Skip characters until past the requested string.
    * If it is not found, we are left at the end of the source with a result of false.
    * @param to A string to skip past.
    * @return true if the string was found and skipped past; false if the end
    * of the source was reached first.
    * @throws JSONException
    */
   public boolean skipPast(String to) throws JSONException {
       boolean b;
       char c;
       int i;
       int j;
       int offset = 0;
       int length = to.length();
       char[] circle = new char[length];

       /*
        * First fill the circle buffer with as many characters as are in the
        * to string. If we reach an early end, bail.
        */

       for (i = 0; i < length; i += 1) {
           c = next();
           if (c == 0) {
               return false;
           }
           circle[i] = c;
       }
       /*
        * We will loop, possibly for all of the remaining characters.
        */
       for (;;) {
           j = offset;
           b = true;
           /*
            * Compare the circle buffer with the to string.
            */
           for (i = 0; i < length; i += 1) {
               if (circle[j] != to.charAt(i)) {
                   b = false;
                   break;
               }
               j += 1;
               if (j >= length) {
                   j -= length;
               }
           }
           /*
            * If we exit the loop with b intact, then victory is ours.
            */
           if (b) {
               return true;
           }
           /*
            * Get the next character. If there isn't one, then defeat is ours.
            */
           c = next();
           if (c == 0) {
               return false;
           }
           /*
            * Shove the character in the circle buffer and advance the
            * circle offset. The offset is mod n.
            */
           circle[offset] = c;
           offset += 1;
           if (offset >= length) {
               offset -= length;
           }
       }
   }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.pirk.schema.query;

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;

import javax.xml.XMLConstants;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.pirk.utils.PIRException;
import org.apache.pirk.utils.SystemConfiguration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;

/**
 * Class to load any query schemas specified in the properties file, 'query.schemas'
 * <p>
 * Schemas should be specified as follows:
 *
 * <pre>
 * {@code
 * <schema>
 *    <schemaName> name of the schema </schemaName>
 *    <dataSchemaName> name of the data schema over which this query is run </dataSchemaName>
 *    <selectorName> name of the element in the data schema that will be the selector </selectorName>
 *    <elements>
 *       <name> element name of element in the data schema to include in the query response; just
 *              as with the data schema, the element name is case sensitive</name>
 *    </elements>
 *    <filter> (optional) name of the filter class to use to filter the data </filter>
 *    <filterNames> (optional)
 *       <name> element name of element in the data schema to apply pre-processing filters </name>
 *    </filterNames>
 *    <additional> (optional) additional fields for the query schema, in <key,value> pairs
 *       <field>
 *          <key> key corresponding the the field </key>
 *          <value> value corresponding to the field </value>
 *       </field>
 *    </additional>
 *  </schema>
 * }
 * </pre>
 * <p>
 * TODO: Allow the schema to specify how many array elements to return per element, if the element is an array type
 */
public class QuerySchemaLoader
{
  private static final Logger logger = LoggerFactory.getLogger(QuerySchemaLoader.class);

  static
  {
    logger.info("Loading query schemas: ");
    try
    {
      initialize();
    } catch (PIRException e)
    {
      logger.error(e.getLocalizedMessage());
    }
  }

  /* Kept for compatibility */
  /**
   * Initializes the static {@link QuerySchemaRegistry} with a list of query schema names.
   *
   * @throws PIRException
   *           - failed to initialize
   */
  public static void initialize() throws PIRException
  {
    initialize(false, null);
  }

  /* Kept for compatibility */
  /**
   * Initializes the static {@link QuerySchemaRegistry} with a list of available query schema names.
   *
   * @param hdfs
   *          If true, specifies that the query schema is an hdfs file; if false, that it is a regular file.
   * @param fs
   *          Used only when {@code hdfs} is true; the {@link FileSystem} handle for the hdfs in which the query schema exists
   * @throws PIRException
   *           - failed to initialize the query schemas because they could not be read or are invalid.
   */
  public static void initialize(boolean hdfs, FileSystem fs) throws PIRException
  {
    String querySchemas = SystemConfiguration.getProperty("query.schemas", "none");
    if (querySchemas.equals("none"))
    {
      logger.info("query.schemas = none");
      return;
    }

    String[] querySchemaFiles = querySchemas.split(",");
    try
    {
      for (String schemaFile : querySchemaFiles)
      {
        QuerySchema querySchema = readSchemaFile(schemaFile, fs, hdfs);
        QuerySchemaRegistry.put(querySchema);
      }
    } catch (IOException e)
    {
      throw new PIRException("Error reading query schema", e);
    }
  }

  /**
   * Reads and parses a single schema file, from hdfs or the local filesystem.
   *
   * @param schemaFile
   *          Path of the schema file to read.
   * @param fs
   *          FileSystem handle; used only when {@code hdfs} is true.
   * @param hdfs
   *          If true the file is opened via the hdfs {@code fs}; otherwise via the local filesystem.
   * @return The parsed query schema.
   * @throws IOException
   *           - failed to read the schema file
   * @throws PIRException
   *           - the schema description is invalid
   */
  private static QuerySchema readSchemaFile(String schemaFile, FileSystem fs, boolean hdfs) throws IOException, PIRException
  {
    logger.info("Loading query schemaFile = " + schemaFile);

    // Parse and load the schema file into a QuerySchema object.
    QuerySchemaLoader loader = new QuerySchemaLoader();
    if (hdfs)
    {
      logger.info("hdfs: filePath = " + schemaFile);
    }
    else
    {
      logger.info("localFS: inputFile = " + schemaFile);
    }

    // try-with-resources guarantees the stream is closed even when parsing fails.
    try (InputStream is = hdfs ? fs.open(new Path(schemaFile)) : new FileInputStream(schemaFile))
    {
      return loader.loadSchema(is);
    }
  }

  /**
   * Default constructor.
   */
  public QuerySchemaLoader()
  {
  }

  /**
   * Returns the query schema as defined in XML format on the given stream.
   *
   * @param stream
   *          The source of the XML query schema description.
   * @return The query schema.
   * @throws IOException
   *           A problem occurred reading from the given stream.
   * @throws PIRException
   *           The schema description is invalid.
   */
  public QuerySchema loadSchema(InputStream stream) throws IOException, PIRException
  {
    // Read in and parse the XML file.
    Document doc = parseXMLDocument(stream);

    // Used to build the final schema.
    QuerySchemaBuilder schemaBuilder = new QuerySchemaBuilder();

    // Extract the schemaName.
    String schemaName = extractValue(doc, "schemaName");
    schemaBuilder.setName(schemaName);
    logger.info("schemaName = " + schemaName);

    // Extract the dataSchemaName.
    String dataSchemaName = extractValue(doc, "dataSchemaName");
    schemaBuilder.setDataSchemaName(dataSchemaName);
    logger.info("dataSchemaName = " + dataSchemaName);

    // Extract the selectorName.
    String selectorName = extractValue(doc, "selectorName");
    schemaBuilder.setSelectorName(selectorName);
    logger.info("selectorName = " + selectorName);

    // Extract the query elements. Exactly one <elements> block is required.
    NodeList elementsList = doc.getElementsByTagName("elements");
    if (elementsList.getLength() != 1)
    {
      throw new PIRException("elementsList.getLength() = " + elementsList.getLength() + " -- should be 1");
    }
    Element elements = (Element) elementsList.item(0);

    // LinkedHashSet preserves document order of the element names.
    LinkedHashSet<String> elementNames = new LinkedHashSet<>();
    NodeList nList = elements.getElementsByTagName("name");
    for (int i = 0; i < nList.getLength(); i++)
    {
      Node nNode = nList.item(i);
      if (nNode.getNodeType() == Node.ELEMENT_NODE)
      {
        elementNames.add(nNode.getFirstChild().getNodeValue().trim());
      }
    }
    schemaBuilder.setQueryElementNames(elementNames);

    // Extract the filter, if it exists
    if (doc.getElementsByTagName("filter").item(0) != null)
    {
      schemaBuilder.setFilterTypeName(doc.getElementsByTagName("filter").item(0).getTextContent().trim());
    }

    // Create a filter over the query elements.
    schemaBuilder.setFilteredElementNames(extractFilteredElementNames(doc));

    // Extract the additional fields, if they exists
    Map<String,String> additionalFields = new HashMap<>();
    if (doc.getElementsByTagName("additional").item(0) != null)
    {
      // There must be at least one <field> inside <additional>.
      NodeList fieldList = doc.getElementsByTagName("field");
      int numFields = fieldList.getLength();
      if (numFields == 0)
      {
        throw new PIRException("numFields = " + numFields + " -- should be at least one");
      }
      for (int i = 0; i < numFields; ++i)
      {
        Element fields = (Element) fieldList.item(i);
        NodeList kv = fields.getChildNodes();
        additionalFields.put(getNodeValue("key", kv), getNodeValue("value", kv));
      }
    }
    schemaBuilder.setAdditionalFields(additionalFields);

    // Create and return the query schema object.
    return schemaBuilder.build();
  }

  /**
   * Parses and normalizes the XML document available on the given stream.
   *
   * @param stream
   *          The input stream.
   * @return A Document representing the XML document.
   * @throws IOException
   *           - failed to read input
   * @throws PIRException
   *           - file could not be parsed
   */
  private Document parseXMLDocument(InputStream stream) throws IOException, PIRException
  {
    Document doc;
    try
    {
      DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
      // Harden the parser against XML External Entity (XXE) injection: schema
      // files may come from configurable, potentially untrusted locations.
      factory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true);
      factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
      factory.setXIncludeAware(false);
      factory.setExpandEntityReferences(false);
      DocumentBuilder dBuilder = factory.newDocumentBuilder();
      doc = dBuilder.parse(stream);
    } catch (ParserConfigurationException | SAXException e)
    {
      throw new PIRException("Schema parsing error", e);
    }
    doc.getDocumentElement().normalize();
    logger.info("Root element: " + doc.getDocumentElement().getNodeName());

    return doc;
  }

  /**
   * Returns the possibly empty set of element names over which the filter is applied, maintaining document order.
   *
   * @param doc
   *          An XML document specifying names upon which we will filter the query.
   * @return The set of names upon which we will filter the query.
   * @throws PIRException
   *           - Filter lists not found
   */
  private Set<String> extractFilteredElementNames(Document doc) throws PIRException
  {
    // LinkedHashSet so iteration follows document order, as the contract above promises.
    Set<String> filteredNamesSet = new LinkedHashSet<>();

    NodeList filterNamesList = doc.getElementsByTagName("filterNames");
    if (filterNamesList.getLength() != 0)
    {
      if (filterNamesList.getLength() > 1)
      {
        throw new PIRException("filterNamesList.getLength() = " + filterNamesList.getLength() + " -- should be 0 or 1");
      }

      // Extract element names from the list.
      NodeList filterNList = ((Element) filterNamesList.item(0)).getElementsByTagName("name");
      for (int i = 0; i < filterNList.getLength(); i++)
      {
        Node nNode = filterNList.item(i);
        if (nNode.getNodeType() == Node.ELEMENT_NODE)
        {
          // Pull the name and add to the set.
          String name = nNode.getFirstChild().getNodeValue().trim();
          filteredNamesSet.add(name);

          logger.info("filterName = " + name);
        }
      }
    }
    return filteredNamesSet;
  }

  /**
   * Extracts a top level, single value from the XML structure.
   *
   * Throws an exception if there is not exactly one tag with the given name.
   *
   * @param doc
   *          The XML document from which we extract data
   * @param tagName
   *          The name of the tag we wish to extract from the {@code doc}
   * @return The text content of the tag.
   * @throws PIRException
   *           - XML Document is Empty
   */
  private String extractValue(Document doc, String tagName) throws PIRException
  {
    NodeList itemList = doc.getElementsByTagName(tagName);
    if (itemList.getLength() != 1)
    {
      throw new PIRException("itemList.getLength() = " + itemList.getLength() + " -- should be 1");
    }
    return itemList.item(0).getTextContent().trim();
  }

  /**
   * Extracts the value corresponding to a given tag from the XML nodeList
   *
   * @param tagName
   *          The name of the tag for which to extract the value
   * @param nodes
   *          The NodeList
   * @return The given value
   */
  private String getNodeValue(String tagName, NodeList nodes)
  {
    String value = "";
    // Last matching tag wins; empty string when no tag matches.
    for (int x = 0; x < nodes.getLength(); x++)
    {
      Node node = nodes.item(x);
      if (node.getNodeName().equals(tagName))
      {
        value = node.getChildNodes().item(0).getNodeValue().trim();
      }
    }
    return value;
  }
}
package mod.upcraftlp.spookycraft.client.models.skeletals; import mod.upcraftlp.spookycraft.entity.monster.EntitySkeletalRabbit; import net.minecraft.client.model.ModelBase; import net.minecraft.client.model.ModelRenderer; import net.minecraft.client.renderer.GlStateManager; import net.minecraft.entity.Entity; import net.minecraft.entity.EntityLivingBase; import net.minecraft.util.math.MathHelper; import net.minecraftforge.fml.relauncher.Side; import net.minecraftforge.fml.relauncher.SideOnly; @SideOnly(Side.CLIENT) public class ModelSkeletalRabbit extends ModelBase { /** The Rabbit's Left Foot */ private final ModelRenderer rabbitLeftFoot; /** The Rabbit's Right Foot */ private final ModelRenderer rabbitRightFoot; /** The Rabbit's Left Thigh */ private final ModelRenderer rabbitLeftThigh; /** The Rabbit's Right Thigh */ private final ModelRenderer rabbitRightThigh; /** The Rabbit's Body */ private final ModelRenderer rabbitBody; /** The Rabbit's Left Arm */ private final ModelRenderer rabbitLeftArm; /** The Rabbit's Right Arm */ private final ModelRenderer rabbitRightArm; /** The Rabbit's Head */ private final ModelRenderer rabbitHead; /** The Rabbit's Right Ear */ private final ModelRenderer rabbitRightEar; /** The Rabbit's Left Ear */ private final ModelRenderer rabbitLeftEar; /** The Rabbit's Tail */ private final ModelRenderer rabbitTail; /** The Rabbit's Nose */ private final ModelRenderer rabbitNose; private float jumpRotation; public ModelSkeletalRabbit() { this.setTextureOffset("head.main", 0, 0); this.setTextureOffset("head.nose", 0, 24); this.setTextureOffset("head.ear1", 0, 10); this.setTextureOffset("head.ear2", 6, 10); this.rabbitLeftFoot = new ModelRenderer(this, 26, 24); this.rabbitLeftFoot.addBox(-1.0F, 5.5F, -3.7F, 2, 1, 7); this.rabbitLeftFoot.setRotationPoint(3.0F, 17.5F, 3.7F); this.rabbitLeftFoot.mirror = true; this.setRotationOffset(this.rabbitLeftFoot, 0.0F, 0.0F, 0.0F); this.rabbitRightFoot = new ModelRenderer(this, 8, 24); 
this.rabbitRightFoot.addBox(-1.0F, 5.5F, -3.7F, 2, 1, 7); this.rabbitRightFoot.setRotationPoint(-3.0F, 17.5F, 3.7F); this.rabbitRightFoot.mirror = true; this.setRotationOffset(this.rabbitRightFoot, 0.0F, 0.0F, 0.0F); this.rabbitLeftThigh = new ModelRenderer(this, 30, 15); this.rabbitLeftThigh.addBox(-1.0F, 0.0F, 0.0F, 2, 4, 5); this.rabbitLeftThigh.setRotationPoint(3.0F, 17.5F, 3.7F); this.rabbitLeftThigh.mirror = true; this.setRotationOffset(this.rabbitLeftThigh, -0.34906584F, 0.0F, 0.0F); this.rabbitRightThigh = new ModelRenderer(this, 16, 15); this.rabbitRightThigh.addBox(-1.0F, 0.0F, 0.0F, 2, 4, 5); this.rabbitRightThigh.setRotationPoint(-3.0F, 17.5F, 3.7F); this.rabbitRightThigh.mirror = true; this.setRotationOffset(this.rabbitRightThigh, -0.34906584F, 0.0F, 0.0F); this.rabbitBody = new ModelRenderer(this, 0, 0); this.rabbitBody.addBox(-3.0F, -2.0F, -10.0F, 6, 5, 10); this.rabbitBody.setRotationPoint(0.0F, 19.0F, 8.0F); this.rabbitBody.mirror = true; this.setRotationOffset(this.rabbitBody, -0.34906584F, 0.0F, 0.0F); this.rabbitLeftArm = new ModelRenderer(this, 8, 15); this.rabbitLeftArm.addBox(-1.0F, 0.0F, -1.0F, 2, 7, 2); this.rabbitLeftArm.setRotationPoint(3.0F, 17.0F, -1.0F); this.rabbitLeftArm.mirror = true; this.setRotationOffset(this.rabbitLeftArm, -0.17453292F, 0.0F, 0.0F); this.rabbitRightArm = new ModelRenderer(this, 0, 15); this.rabbitRightArm.addBox(-1.0F, 0.0F, -1.0F, 2, 7, 2); this.rabbitRightArm.setRotationPoint(-3.0F, 17.0F, -1.0F); this.rabbitRightArm.mirror = true; this.setRotationOffset(this.rabbitRightArm, -0.17453292F, 0.0F, 0.0F); this.rabbitHead = new ModelRenderer(this, 32, 0); this.rabbitHead.addBox(-2.5F, -4.0F, -5.0F, 5, 4, 5); this.rabbitHead.setRotationPoint(0.0F, 16.0F, -1.0F); this.rabbitHead.mirror = true; this.setRotationOffset(this.rabbitHead, 0.0F, 0.0F, 0.0F); this.rabbitRightEar = new ModelRenderer(this, 52, 0); this.rabbitRightEar.addBox(-2.5F, -9.0F, -1.0F, 2, 5, 1); this.rabbitRightEar.setRotationPoint(0.0F, 16.0F, 
-1.0F); this.rabbitRightEar.mirror = true; this.setRotationOffset(this.rabbitRightEar, 0.0F, -0.2617994F, 0.0F); this.rabbitLeftEar = new ModelRenderer(this, 58, 0); this.rabbitLeftEar.addBox(0.5F, -9.0F, -1.0F, 2, 5, 1); this.rabbitLeftEar.setRotationPoint(0.0F, 16.0F, -1.0F); this.rabbitLeftEar.mirror = true; this.setRotationOffset(this.rabbitLeftEar, 0.0F, 0.2617994F, 0.0F); this.rabbitTail = new ModelRenderer(this, 52, 6); this.rabbitTail.addBox(-1.5F, -1.5F, 0.0F, 3, 3, 2); this.rabbitTail.setRotationPoint(0.0F, 20.0F, 7.0F); this.rabbitTail.mirror = true; this.setRotationOffset(this.rabbitTail, -0.3490659F, 0.0F, 0.0F); this.rabbitNose = new ModelRenderer(this, 32, 9); this.rabbitNose.addBox(-0.5F, -2.5F, -5.5F, 1, 1, 1); this.rabbitNose.setRotationPoint(0.0F, 16.0F, -1.0F); this.rabbitNose.mirror = true; this.setRotationOffset(this.rabbitNose, 0.0F, 0.0F, 0.0F); } private void setRotationOffset(ModelRenderer renderer, float x, float y, float z) { renderer.rotateAngleX = x; renderer.rotateAngleY = y; renderer.rotateAngleZ = z; } /** * Sets the models various rotation angles then renders the model. 
*/ public void render(Entity entityIn, float limbSwing, float limbSwingAmount, float ageInTicks, float netHeadYaw, float headPitch, float scale) { this.setRotationAngles(limbSwing, limbSwingAmount, ageInTicks, netHeadYaw, headPitch, scale, entityIn); if (this.isChild) { float f = 1.5F; GlStateManager.pushMatrix(); GlStateManager.scale(0.56666666F, 0.56666666F, 0.56666666F); GlStateManager.translate(0.0F, 22.0F * scale, 2.0F * scale); this.rabbitHead.render(scale); this.rabbitLeftEar.render(scale); this.rabbitRightEar.render(scale); this.rabbitNose.render(scale); GlStateManager.popMatrix(); GlStateManager.pushMatrix(); GlStateManager.scale(0.4F, 0.4F, 0.4F); GlStateManager.translate(0.0F, 36.0F * scale, 0.0F); this.rabbitLeftFoot.render(scale); this.rabbitRightFoot.render(scale); this.rabbitLeftThigh.render(scale); this.rabbitRightThigh.render(scale); this.rabbitBody.render(scale); this.rabbitLeftArm.render(scale); this.rabbitRightArm.render(scale); this.rabbitTail.render(scale); GlStateManager.popMatrix(); } else { GlStateManager.pushMatrix(); GlStateManager.scale(0.6F, 0.6F, 0.6F); GlStateManager.translate(0.0F, 16.0F * scale, 0.0F); this.rabbitLeftFoot.render(scale); this.rabbitRightFoot.render(scale); this.rabbitLeftThigh.render(scale); this.rabbitRightThigh.render(scale); this.rabbitBody.render(scale); this.rabbitLeftArm.render(scale); this.rabbitRightArm.render(scale); this.rabbitHead.render(scale); this.rabbitRightEar.render(scale); this.rabbitLeftEar.render(scale); this.rabbitTail.render(scale); this.rabbitNose.render(scale); GlStateManager.popMatrix(); } } /** * Sets the model's various rotation angles. For bipeds, par1 and par2 are * used for animating the movement of arms and legs, where par1 represents * the time(so that arms and legs swing back and forth) and par2 represents * how "far" arms and legs can swing at most. 
*/ public void setRotationAngles(float limbSwing, float limbSwingAmount, float ageInTicks, float netHeadYaw, float headPitch, float scaleFactor, Entity entityIn) { float f = ageInTicks - (float) entityIn.ticksExisted; EntitySkeletalRabbit entityrabbit = (EntitySkeletalRabbit) entityIn; this.rabbitNose.rotateAngleX = headPitch * 0.017453292F; this.rabbitHead.rotateAngleX = headPitch * 0.017453292F; this.rabbitRightEar.rotateAngleX = headPitch * 0.017453292F; this.rabbitLeftEar.rotateAngleX = headPitch * 0.017453292F; this.rabbitNose.rotateAngleY = netHeadYaw * 0.017453292F; this.rabbitHead.rotateAngleY = netHeadYaw * 0.017453292F; this.rabbitRightEar.rotateAngleY = this.rabbitNose.rotateAngleY - 0.2617994F; this.rabbitLeftEar.rotateAngleY = this.rabbitNose.rotateAngleY + 0.2617994F; this.jumpRotation = MathHelper.sin(entityrabbit.setJumpCompletion(f) * (float) Math.PI); this.rabbitLeftThigh.rotateAngleX = (this.jumpRotation * 50.0F - 21.0F) * 0.017453292F; this.rabbitRightThigh.rotateAngleX = (this.jumpRotation * 50.0F - 21.0F) * 0.017453292F; this.rabbitLeftFoot.rotateAngleX = this.jumpRotation * 50.0F * 0.017453292F; this.rabbitRightFoot.rotateAngleX = this.jumpRotation * 50.0F * 0.017453292F; this.rabbitLeftArm.rotateAngleX = (this.jumpRotation * -40.0F - 11.0F) * 0.017453292F; this.rabbitRightArm.rotateAngleX = (this.jumpRotation * -40.0F - 11.0F) * 0.017453292F; } /** * Used for easily adding entity-dependent animations. The second and third * float params here are the same second and third as in the * setRotationAngles method. */ public void setLivingAnimations(EntityLivingBase entitylivingbaseIn, float limbSwing, float limbSwingAmount, float partialTickTime) { super.setLivingAnimations(entitylivingbaseIn, limbSwing, limbSwingAmount, partialTickTime); this.jumpRotation = MathHelper .sin(((EntitySkeletalRabbit) entitylivingbaseIn).setJumpCompletion(partialTickTime) * (float) Math.PI); } }
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.idea.maven.project; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.ReadAction; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.module.Module; import com.intellij.openapi.project.Project; import com.intellij.openapi.roots.ProjectFileIndex; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.vfs.LocalFileSystem; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.util.containers.ArrayListSet; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.Stack; import consulo.util.collection.HashingStrategy; import consulo.util.collection.Sets; import consulo.util.dataholder.Key; import org.jdom.Element; import org.jdom.output.Format; import org.jdom.output.XMLOutputter; import org.jetbrains.annotations.TestOnly; import org.jetbrains.idea.maven.dom.references.MavenFilteredPropertyPsiReferenceProvider; import org.jetbrains.idea.maven.model.*; import org.jetbrains.idea.maven.server.MavenEmbedderWrapper; import org.jetbrains.idea.maven.server.NativeMavenProjectHolder; import org.jetbrains.idea.maven.utils.*; import javax.annotation.Nonnull; import javax.annotation.Nullable; import java.io.*; import java.util.*; import java.util.concurrent.locks.Lock; 
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.regex.Pattern;
import java.util.zip.CRC32;

/**
 * In-memory tree of the Maven projects known to the workspace: root projects,
 * their modules, aggregator/module and id/file mappings, plus the lists of
 * managed and ignored pom files and the explicit profile selection.
 * Settings state is guarded by myStateLock; tree structure by myStructureLock.
 */
public class MavenProjectsTree {
	private static final Logger LOG = Logger.getInstance(MavenProjectsTree.class);

	// Bumped whenever the on-disk cache format changes; read() rejects other versions.
	private static final String STORAGE_VERSION = MavenProjectsTree.class.getSimpleName() + ".6";

	// Guards the "settings" part of the state (paths, patterns, profiles).
	private final Object myStateLock = new Object();
	// Guards the structural part (projects and mappings); see readLock()/writeLock().
	private final ReentrantReadWriteLock myStructureLock = new ReentrantReadWriteLock();
	private final Lock myStructureReadLock = myStructureLock.readLock();
	private final Lock myStructureWriteLock = myStructureLock.writeLock();

	// TODO replace with sets
	private volatile Set<String> myManagedFilesPaths = new LinkedHashSet<String>();
	private volatile List<String> myIgnoredFilesPaths = new ArrayList<String>();
	private volatile List<String> myIgnoredFilesPatterns = new ArrayList<String>();
	private volatile Pattern myIgnoredFilesPatternsCache; // lazily compiled from myIgnoredFilesPatterns

	private MavenExplicitProfiles myExplicitProfiles = MavenExplicitProfiles.NONE;
	// Explicitly selected profiles that are currently unavailable; restored by
	// updateExplicitProfiles() once they reappear.
	private final MavenExplicitProfiles myTemporarilyRemovedExplicitProfiles = new MavenExplicitProfiles(new HashSet<String>(), new HashSet<String>());

	private final List<MavenProject> myRootProjects = new ArrayList<MavenProject>();

	private final Map<MavenProject, MavenProjectTimestamp> myTimestamps = new HashMap<MavenProject, MavenProjectTimestamp>();
	private final MavenWorkspaceMap myWorkspaceMap = new MavenWorkspaceMap();
	private final Map<MavenId, MavenProject> myMavenIdToProjectMapping = new HashMap<MavenId, MavenProject>();
	private final Map<VirtualFile, MavenProject> myVirtualFileToProjectMapping = new HashMap<VirtualFile, MavenProject>();
	private final Map<MavenProject, List<MavenProject>> myAggregatorToModuleMapping = new HashMap<MavenProject, List<MavenProject>>();
	private final Map<MavenProject, MavenProject> myModuleToAggregatorMapping = new HashMap<MavenProject, MavenProject>();

	private final List<Listener> myListeners = ContainerUtil.createLockFreeCopyOnWriteList();

	// Lets the pom reader resolve parent/module coordinates to a file already in this tree.
	private final MavenProjectReaderProjectLocator myProjectLocator = new MavenProjectReaderProjectLocator() {
		public VirtualFile findProjectFile(MavenId coordinates) {
			MavenProject project = findProject(coordinates);
			return project == null ? null : project.getFile();
		}
	};

	/**
	 * Deserializes a tree previously written by save().
	 * Returns null when the file was written by an incompatible version.
	 * On IOException the (presumably corrupt) file is deleted and the
	 * exception is rethrown.
	 */
	@Nullable
	public static MavenProjectsTree read(File file) throws IOException {
		MavenProjectsTree result = new MavenProjectsTree();

		DataInputStream in = new DataInputStream(new BufferedInputStream(new FileInputStream(file)));
		try {
			try {
				if(!STORAGE_VERSION.equals(in.readUTF())) {
					return null;
				}
				result.myManagedFilesPaths = readCollection(in, new LinkedHashSet<String>());
				result.myIgnoredFilesPaths = readCollection(in, new ArrayList<String>());
				result.myIgnoredFilesPatterns = readCollection(in, new ArrayList<String>());
				result.myExplicitProfiles = new MavenExplicitProfiles(readCollection(in, new HashSet<String>()), readCollection(in, new HashSet<String>()));
				result.myRootProjects.addAll(readProjectsRecursively(in, result));
			}
			catch(IOException e) {
				in.close();
				file.delete(); // drop the corrupt cache so the next start recovers
				throw e;
			}
			catch(Throwable e) {
				throw new IOException(e);
			}
		}
		finally {
			in.close();
		}
		return result;
	}

	// Reads a counted list of UTF strings into the given collection.
	private static <T extends Collection<String>> T readCollection(DataInputStream in, T result) throws IOException {
		int count = in.readInt();
		while(count-- > 0) {
			result.add(in.readUTF());
		}
		return result;
	}

	// Writes the collection as a count followed by UTF strings (mirror of readCollection).
	private static void writeCollection(DataOutputStream out, Collection<String> list) throws IOException {
		out.writeInt(list.size());
		for(String each : list) {
			out.writeUTF(each);
		}
	}

	/**
	 * Reads a counted list of projects, each followed by its timestamp and its
	 * modules (same depth-first shape as written by writeProjectsRecursively),
	 * registering everything in the given tree's mappings.
	 */
	private static List<MavenProject> readProjectsRecursively(DataInputStream in, MavenProjectsTree tree) throws IOException {
		int count = in.readInt();
		List<MavenProject> result = new ArrayList<MavenProject>(count);
		while(count-- > 0) {
			MavenProject project = MavenProject.read(in);
			MavenProjectTimestamp timestamp = MavenProjectTimestamp.read(in);
			List<MavenProject> modules = readProjectsRecursively(in, tree);
			if(project != null) {
				result.add(project);
				tree.myTimestamps.put(project, timestamp);
				tree.myVirtualFileToProjectMapping.put(project.getFile(), project);
				tree.fillIDMaps(project);
				tree.myAggregatorToModuleMapping.put(project, modules);
				for(MavenProject eachModule : modules) {
					tree.myModuleToAggregatorMapping.put(eachModule, project);
				}
			}
		}
		return result;
	}

	/**
	 * Serializes the tree (settings plus the project hierarchy) to the given
	 * file, creating parent directories as needed.
	 */
	public void save(File file) throws IOException {
		synchronized(myStateLock) {
			readLock();
			try {
				file.getParentFile().mkdirs();
				DataOutputStream out = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(file)));
				try {
					out.writeUTF(STORAGE_VERSION);
					writeCollection(out, myManagedFilesPaths);
					writeCollection(out, myIgnoredFilesPaths);
					writeCollection(out, myIgnoredFilesPatterns);
					writeCollection(out, myExplicitProfiles.getEnabledProfiles());
					writeCollection(out, myExplicitProfiles.getDisabledProfiles());
					writeProjectsRecursively(out, myRootProjects);
				}
				finally {
					out.close();
				}
			}
			finally {
				readUnlock();
			}
		}
	}

	// Depth-first: each project, then its timestamp, then its modules recursively.
	private void writeProjectsRecursively(DataOutputStream out, List<MavenProject> list) throws IOException {
		out.writeInt(list.size());
		for(MavenProject each : list) {
			each.write(out);
			myTimestamps.get(each).write(out);
			writeProjectsRecursively(out, getModules(each));
		}
	}

	/** Returns a snapshot of the registered (managed) pom file paths. */
	public List<String> getManagedFilesPaths() {
		synchronized(myStateLock) {
			return new ArrayList<String>(myManagedFilesPaths);
		}
	}

	/** Replaces the managed files and the explicit profile selection. */
	public void resetManagedFilesPathsAndProfiles(List<String> paths, MavenExplicitProfiles profiles) {
		synchronized(myStateLock) {
			myManagedFilesPaths = new LinkedHashSet<String>(paths);
		}
		setExplicitProfiles(profiles);
	}

	@TestOnly
	public void resetManagedFilesAndProfiles(List<VirtualFile> files, MavenExplicitProfiles profiles) {
		resetManagedFilesPathsAndProfiles(MavenUtil.collectPaths(files), profiles);
	}

	/** Adds the given files and profiles on top of the currently managed ones. */
	public void addManagedFilesWithProfiles(List<VirtualFile> files, MavenExplicitProfiles profiles) {
		List<String> newFiles;
		MavenExplicitProfiles newProfiles;
		synchronized(myStateLock) {
			newFiles = new ArrayList<String>(myManagedFilesPaths);
			newFiles.addAll(MavenUtil.collectPaths(files));
			newProfiles = myExplicitProfiles.clone();
			newProfiles.getEnabledProfiles().addAll(profiles.getEnabledProfiles());
			newProfiles.getDisabledProfiles().addAll(profiles.getDisabledProfiles());
		}
		resetManagedFilesPathsAndProfiles(newFiles, newProfiles);
	}

	// NOTE(review): unlike add/reset above, this mutates the set in place and
	// fires no events — confirm callers trigger their own refresh.
	public void removeManagedFiles(List<VirtualFile> files) {
		synchronized(myStateLock) {
			myManagedFilesPaths.removeAll(MavenUtil.collectPaths(files));
		}
	}

	/** Resolves the managed paths to VirtualFiles, silently skipping missing ones. */
	public List<VirtualFile> getExistingManagedFiles() {
		List<VirtualFile> result = new ArrayList<VirtualFile>();
		for(String path : getManagedFilesPaths()) {
			VirtualFile f = LocalFileSystem.getInstance().refreshAndFindFileByPath(path);
			if(f != null) {
				result.add(f);
			}
		}
		return result;
	}

	public List<String> getIgnoredFilesPaths() {
		synchronized(myStateLock) {
			return new ArrayList<String>(myIgnoredFilesPaths);
		}
	}

	public void setIgnoredFilesPaths(final List<String> paths) {
		doChangeIgnoreStatus(new Runnable() {
			public void run() {
				myIgnoredFilesPaths = new ArrayList<String>(paths);
			}
		});
	}

	public void removeIgnoredFilesPaths(final Collection<String> paths) {
		doChangeIgnoreStatus(new Runnable() {
			public void run() {
				myIgnoredFilesPaths.removeAll(paths);
			}
		});
	}

	/** True if the project's pom was ignored explicitly by path (patterns not consulted). */
	public boolean getIgnoredState(MavenProject project) {
		synchronized(myStateLock) {
			return myIgnoredFilesPaths.contains(project.getPath());
		}
	}

	public void setIgnoredState(List<MavenProject> projects, boolean ignored) {
		setIgnoredState(projects, ignored, false);
	}

	public void setIgnoredState(List<MavenProject> projects, boolean ignored, boolean fromImport) {
		doSetIgnoredState(projects, ignored, fromImport);
	}

	private void doSetIgnoredState(List<MavenProject> projects, final boolean ignored, boolean fromImport) {
		final List<String> paths = MavenUtil.collectPaths(MavenUtil.collectFiles(projects));
		doChangeIgnoreStatus(new Runnable() {
			public void run() {
				if(ignored) {
					myIgnoredFilesPaths.addAll(paths);
				}
				else {
					myIgnoredFilesPaths.removeAll(paths);
				}
			}
		}, fromImport);
	}

	public List<String> getIgnoredFilesPatterns() {
		synchronized(myStateLock) {
			return new ArrayList<String>(myIgnoredFilesPatterns);
		}
	}

	public void setIgnoredFilesPatterns(final List<String> patterns) {
		doChangeIgnoreStatus(new Runnable() {
			public void run() {
				myIgnoredFilesPatternsCache = null; // invalidate the compiled pattern
				myIgnoredFilesPatterns = new ArrayList<String>(patterns);
			}
		});
	}

	private void doChangeIgnoreStatus(Runnable runnable) {
		doChangeIgnoreStatus(runnable, false);
	}

	/**
	 * Runs the mutation under myStateLock, diffs the ignored-project set
	 * before/after, and notifies listeners only when it actually changed.
	 */
	private void doChangeIgnoreStatus(Runnable runnable, boolean fromImport) {
		List<MavenProject> ignoredBefore;
		List<MavenProject> ignoredAfter;

		synchronized(myStateLock) {
			ignoredBefore = getIgnoredProjects();
			runnable.run();
			ignoredAfter = getIgnoredProjects();
		}

		List<MavenProject> ignored = new ArrayList<MavenProject>(ignoredAfter);
		ignored.removeAll(ignoredBefore);

		List<MavenProject> unignored = new ArrayList<MavenProject>(ignoredBefore);
		unignored.removeAll(ignoredAfter);

		if(ignored.isEmpty() && unignored.isEmpty()) {
			return;
		}

		fireProjectsIgnoredStateChanged(ignored, unignored, fromImport);
	}

	private List<MavenProject> getIgnoredProjects() {
		List<MavenProject> result = new ArrayList<MavenProject>();
		for(MavenProject each : getProjects()) {
			if(isIgnored(each)) {
				result.add(each);
			}
		}
		return result;
	}

	/** Ignored either explicitly by path or by one of the file-name patterns. */
	public boolean isIgnored(MavenProject project) {
		String path = project.getPath();
		synchronized(myStateLock) {
			return myIgnoredFilesPaths.contains(path) || matchesIgnoredFilesPatterns(path);
		}
	}

	private boolean matchesIgnoredFilesPatterns(String path) {
		synchronized(myStateLock) {
			if(myIgnoredFilesPatternsCache == null) {
				myIgnoredFilesPatternsCache = Pattern.compile(Strings.translateMasks(myIgnoredFilesPatterns));
			}
			return myIgnoredFilesPatternsCache.matcher(path).matches();
		}
	}

	public MavenExplicitProfiles getExplicitProfiles() {
		synchronized(myStateLock) {
			return myExplicitProfiles.clone();
		}
	}

	public void setExplicitProfiles(MavenExplicitProfiles explicitProfiles) {
		synchronized(myStateLock) {
			myExplicitProfiles = explicitProfiles.clone();
		}
		fireProfilesChanged();
	}

	/**
	 * Re-validates the explicit profile selection against the profiles
	 * currently available, parking vanished ones in
	 * myTemporarilyRemovedExplicitProfiles and restoring them when they
	 * become available again.
	 */
	private void updateExplicitProfiles() {
		Collection<String> available = getAvailableProfiles();

		synchronized(myStateLock) {
			updateExplicitProfiles(myExplicitProfiles.getEnabledProfiles(), myTemporarilyRemovedExplicitProfiles.getEnabledProfiles(), available);
			updateExplicitProfiles(myExplicitProfiles.getDisabledProfiles(), myTemporarilyRemovedExplicitProfiles.getDisabledProfiles(), available);
		}
	}

	private void updateExplicitProfiles(Collection<String> explicitProfiles, Collection<String> temporarilyRemovedExplicitProfiles, Collection<String> available) {
		// Park profiles that disappeared...
		Collection<String> removedProfiles = new HashSet<String>(explicitProfiles);
		removedProfiles.removeAll(available);
		temporarilyRemovedExplicitProfiles.addAll(removedProfiles);

		// ...and restore previously parked ones that are available again.
		Collection<String> restoredProfiles = new HashSet<String>(temporarilyRemovedExplicitProfiles);
		restoredProfiles.retainAll(available);
		temporarilyRemovedExplicitProfiles.removeAll(restoredProfiles);

		explicitProfiles.removeAll(removedProfiles);
		explicitProfiles.addAll(restoredProfiles);
	}

	/** Union of the profile ids declared by all projects in the tree. */
	public Collection<String> getAvailableProfiles() {
		Collection<String> res = new HashSet<String>();
		for(MavenProject each : getProjects()) {
			res.addAll(each.getProfilesIds());
		}
		return res;
	}

	/**
	 * Pairs every available profile with its state: EXPLICIT (enabled by the
	 * user), IMPLICIT (activated by Maven itself) or NONE.
	 */
	public Collection<Pair<String, MavenProfileKind>> getProfilesWithStates() {
		Collection<Pair<String, MavenProfileKind>> result = new ArrayListSet<Pair<String, MavenProfileKind>>();

		Collection<String> available = new HashSet<String>();
		Collection<String> active = new HashSet<String>();
		for(MavenProject each : getProjects()) {
			available.addAll(each.getProfilesIds());
			active.addAll(each.getActivatedProfilesIds().getEnabledProfiles());
		}

		Collection<String> enabledProfiles = getExplicitProfiles().getEnabledProfiles();
		Collection<String> disabledProfiles = getExplicitProfiles().getDisabledProfiles();

		for(String each : available) {
			MavenProfileKind state;
			if(disabledProfiles.contains(each)) {
				state = MavenProfileKind.NONE;
			}
			else
			if(enabledProfiles.contains(each)) {
				state = MavenProfileKind.EXPLICIT;
			}
			else if(active.contains(each)) {
				state = MavenProfileKind.IMPLICIT;
			}
			else {
				state = MavenProfileKind.NONE;
			}
			result.add(Pair.create(each, state));
		}
		return result;
	}

	/**
	 * Re-reads every managed pom (recursively through modules) and deletes
	 * root projects whose managed file has disappeared.
	 */
	public void updateAll(boolean force, MavenGeneralSettings generalSettings, MavenProgressIndicator process) {
		List<VirtualFile> managedFiles = getExistingManagedFiles();
		MavenExplicitProfiles explicitProfiles = getExplicitProfiles();

		MavenProjectReader projectReader = new MavenProjectReader();
		update(managedFiles, true, force, explicitProfiles, projectReader, generalSettings, process);

		List<VirtualFile> obsoleteFiles = getRootProjectsFiles();
		obsoleteFiles.removeAll(managedFiles);
		delete(projectReader, obsoleteFiles, explicitProfiles, generalSettings, process);
	}

	/** Non-recursive update of only the given pom files. */
	public void update(Collection<VirtualFile> files, boolean force, MavenGeneralSettings generalSettings, MavenProgressIndicator process) {
		update(files, false, force, getExplicitProfiles(), new MavenProjectReader(), generalSettings, process);
	}

	private void update(Collection<VirtualFile> files,
						boolean recursive,
						boolean force,
						MavenExplicitProfiles explicitProfiles,
						MavenProjectReader projectReader,
						MavenGeneralSettings generalSettings,
						MavenProgressIndicator process) {
		if(files.isEmpty()) {
			return;
		}

		UpdateContext updateContext = new UpdateContext();
		Stack<MavenProject> updateStack = new Stack<MavenProject>();

		for(VirtualFile each : files) {
			MavenProject mavenProject = findProject(each);
			if(mavenProject == null) {
				doAdd(each, recursive, explicitProfiles, updateContext, updateStack, projectReader, generalSettings, process);
			}
			else {
				doUpdate(mavenProject, findAggregator(mavenProject), false, recursive, force, explicitProfiles, updateContext, updateStack, projectReader, generalSettings, process);
			}
		}

		updateExplicitProfiles();
		updateContext.fireUpdatedIfNecessary();
	}

	// Adds a brand-new project file, attaching it to the existing project that
	// lists it as a module (if any).
	private void doAdd(final VirtualFile f,
					   boolean recursuve,
					   MavenExplicitProfiles explicitProfiles,
					   UpdateContext updateContext,
					   Stack<MavenProject> updateStack,
					   MavenProjectReader reader,
					   MavenGeneralSettings generalSettings,
					   MavenProgressIndicator process) {
		MavenProject newMavenProject = new MavenProject(f);

		MavenProject intendedAggregator = null;
		for(MavenProject each : getProjects()) {
			if(each.getExistingModuleFiles().contains(f)) {
				intendedAggregator = each;
				break;
			}
		}

		doUpdate(newMavenProject, intendedAggregator, true, recursuve, false, explicitProfiles, updateContext, updateStack, reader, generalSettings, process);
	}

	/**
	 * Core (re)read of a single project: re-reads the pom when its timestamp
	 * changed (or force is set), reattaches it to its aggregator, reconciles
	 * its module list and re-reads inheritors. Cycles through modules or
	 * inheritance are broken via updateStack.
	 */
	private void doUpdate(MavenProject mavenProject,
						  MavenProject aggregator,
						  boolean isNew,
						  boolean recursive,
						  boolean force,
						  MavenExplicitProfiles explicitProfiles,
						  UpdateContext updateContext,
						  Stack<MavenProject> updateStack,
						  MavenProjectReader reader,
						  MavenGeneralSettings generalSettings,
						  MavenProgressIndicator process) {
		if(updateStack.contains(mavenProject)) {
			MavenLog.LOG.info("Recursion detected in " + mavenProject.getFile());
			return;
		}
		updateStack.push(mavenProject);

		process.setText(ProjectBundle.message("maven.reading.pom", mavenProject.getPath()));
		process.setText2("");

		List<MavenProject> prevModules = getModules(mavenProject);
		Set<MavenProject> prevInheritors = new HashSet<MavenProject>();
		if(!isNew) {
			prevInheritors.addAll(findInheritors(mavenProject));
		}

		// A stale timestamp means the pom (or anything its model depends on) changed.
		MavenProjectTimestamp timestamp = calculateTimestamp(mavenProject, explicitProfiles, generalSettings);
		boolean isChanged = force || !timestamp.equals(myTimestamps.get(mavenProject));

		MavenProjectChanges changes = force ? MavenProjectChanges.ALL : MavenProjectChanges.NONE;
		if(isChanged) {
			writeLock();
			try {
				if(!isNew) {
					clearIDMaps(mavenProject);
				}
			}
			finally {
				writeUnlock();
			}
			MavenId oldParentId = mavenProject.getParentId();
			// The pom is read outside the structure lock; only map updates are locked.
			changes = changes.mergedWith(mavenProject.read(generalSettings, explicitProfiles, reader, myProjectLocator));

			writeLock();
			try {
				myVirtualFileToProjectMapping.put(mavenProject.getFile(), mavenProject);
				fillIDMaps(mavenProject);
			}
			finally {
				writeUnlock();
			}

			if(!Comparing.equal(oldParentId, mavenProject.getParentId())) {
				// ensure timestamp reflects actual parent's timestamp
				timestamp = calculateTimestamp(mavenProject, explicitProfiles, generalSettings);
			}
			myTimestamps.put(mavenProject, timestamp);
		}

		boolean reconnected = isNew;
		if(isNew) {
			connect(aggregator, mavenProject);
		}
		else {
			reconnected = reconnect(aggregator, mavenProject);
		}

		if(isChanged || reconnected) {
			updateContext.update(mavenProject, changes);
		}

		// Modules that vanished from the pom either become roots (if still
		// managed) or are removed from the tree altogether.
		List<VirtualFile> existingModuleFiles = mavenProject.getExistingModuleFiles();
		List<MavenProject> modulesToRemove = new ArrayList<MavenProject>();
		List<MavenProject> modulesToBecomeRoots = new ArrayList<MavenProject>();

		for(MavenProject each : prevModules) {
			VirtualFile moduleFile = each.getFile();
			if(!existingModuleFiles.contains(moduleFile)) {
				if(isManagedFile(moduleFile)) {
					modulesToBecomeRoots.add(each);
				}
				else {
					modulesToRemove.add(each);
				}
			}
		}
		for(MavenProject each : modulesToRemove) {
			removeModule(mavenProject, each);
			doDelete(mavenProject, each, updateContext);
			prevInheritors.removeAll(updateContext.deletedProjects);
		}

		for(MavenProject each : modulesToBecomeRoots) {
			if(reconnect(null, each)) {
				updateContext.update(each, MavenProjectChanges.NONE);
			}
		}

		for(VirtualFile each : existingModuleFiles) {
			MavenProject module = findProject(each);
			boolean isNewModule = module == null;
			if(isNewModule) {
				module = new MavenProject(each);
			}
			else {
				MavenProject currentAggregator = findAggregator(module);
				if(currentAggregator != null && currentAggregator != mavenProject) {
					MavenLog.LOG.info("Module " + each + " is already included into " + mavenProject.getFile());
					continue;
				}
			}

			if(isChanged || isNewModule || recursive) {
				doUpdate(module, mavenProject, isNewModule, recursive, recursive ? force : false,
						// do not force update modules if only this project was requested to be updated
						explicitProfiles, updateContext, updateStack, reader, generalSettings, process);
			}
			else {
				if(reconnect(mavenProject, module)) {
					updateContext.update(module, MavenProjectChanges.NONE);
				}
			}
		}

		prevInheritors.addAll(findInheritors(mavenProject));
		for(MavenProject each : prevInheritors) {
			doUpdate(each, findAggregator(each), false, false,
					// no need to go recursively in case of inheritance, only when updating modules
					false, explicitProfiles, updateContext, updateStack, reader, generalSettings, process);
		}

		updateStack.pop();
	}

	/**
	 * Snapshot of everything the project's effective model depends on: the
	 * pom itself, the parent's read stamp, profiles.xml, user/global settings
	 * files and the explicit profile selection.
	 */
	private MavenProjectTimestamp calculateTimestamp(final MavenProject mavenProject, final MavenExplicitProfiles explicitProfiles, final MavenGeneralSettings generalSettings) {
		return ReadAction.compute(() -> {
			long pomTimestamp = getFileTimestamp(mavenProject.getFile());
			MavenProject parent = findParent(mavenProject);
			long parentLastReadStamp = parent == null ?
-1 : parent.getLastReadStamp(); VirtualFile profilesXmlFile = mavenProject.getProfilesXmlFile(); long profilesTimestamp = getFileTimestamp(profilesXmlFile); long userSettingsTimestamp = getFileTimestamp(generalSettings.getEffectiveUserSettingsFile()); long globalSettingsTimestamp = getFileTimestamp(generalSettings.getEffectiveGlobalSettingsFile()); int profilesHashCode = explicitProfiles.hashCode(); return new MavenProjectTimestamp(pomTimestamp, parentLastReadStamp, profilesTimestamp, userSettingsTimestamp, globalSettingsTimestamp, profilesHashCode); }); } private static long getFileTimestamp(VirtualFile file) { if(file == null || !file.isValid()) { return -1; } return file.getTimeStamp(); } public boolean isManagedFile(VirtualFile moduleFile) { return isManagedFile(moduleFile.getPath()); } public boolean isManagedFile(String path) { synchronized(myStateLock) { for(String each : myManagedFilesPaths) { if(FileUtil.pathsEqual(each, path)) { return true; } } return false; } } public boolean isPotentialProject(String path) { if(isManagedFile(path)) { return true; } for(MavenProject each : getProjects()) { if(FileUtil.pathsEqual(path, each.getPath())) { return true; } if(each.getModulePaths().contains(path)) { return true; } } return false; } public void delete(List<VirtualFile> files, MavenGeneralSettings generalSettings, MavenProgressIndicator process) { delete(new MavenProjectReader(), files, getExplicitProfiles(), generalSettings, process); } private void delete(MavenProjectReader projectReader, List<VirtualFile> files, MavenExplicitProfiles explicitProfiles, MavenGeneralSettings generalSettings, MavenProgressIndicator process) { if(files.isEmpty()) { return; } UpdateContext updateContext = new UpdateContext(); Stack<MavenProject> updateStack = new Stack<MavenProject>(); Set<MavenProject> inheritorsToUpdate = new HashSet<MavenProject>(); for(VirtualFile each : files) { MavenProject mavenProject = findProject(each); if(mavenProject == null) { return; } 
inheritorsToUpdate.addAll(findInheritors(mavenProject)); doDelete(findAggregator(mavenProject), mavenProject, updateContext); } inheritorsToUpdate.removeAll(updateContext.deletedProjects); for(MavenProject each : inheritorsToUpdate) { doUpdate(each, null, false, false, false, explicitProfiles, updateContext, updateStack, projectReader, generalSettings, process); } updateExplicitProfiles(); updateContext.fireUpdatedIfNecessary(); } private void doDelete(MavenProject aggregator, MavenProject project, UpdateContext updateContext) { for(MavenProject each : getModules(project)) { if(isManagedFile(each.getPath())) { if(reconnect(null, each)) { updateContext.update(each, MavenProjectChanges.NONE); } } else { doDelete(project, each, updateContext); } } writeLock(); try { if(aggregator != null) { removeModule(aggregator, project); } else { myRootProjects.remove(project); } myTimestamps.remove(project); myVirtualFileToProjectMapping.remove(project.getFile()); clearIDMaps(project); myAggregatorToModuleMapping.remove(project); myModuleToAggregatorMapping.remove(project); } finally { writeUnlock(); } updateContext.deleted(project); } private void fillIDMaps(MavenProject mavenProject) { MavenId id = mavenProject.getMavenId(); myWorkspaceMap.register(id, new File(mavenProject.getFile().getPath())); myMavenIdToProjectMapping.put(id, mavenProject); } private void clearIDMaps(MavenProject mavenProject) { MavenId id = mavenProject.getMavenId(); myWorkspaceMap.unregister(id); myMavenIdToProjectMapping.remove(id); } private void connect(MavenProject newAggregator, MavenProject project) { writeLock(); try { if(newAggregator != null) { addModule(newAggregator, project); } else { myRootProjects.add(project); } } finally { writeUnlock(); } } private boolean reconnect(MavenProject newAggregator, MavenProject project) { MavenProject prevAggregator = findAggregator(project); if(prevAggregator == newAggregator) { return false; } writeLock(); try { if(prevAggregator != null) { 
				removeModule(prevAggregator, project);
			}
			else {
				myRootProjects.remove(project);
			}

			if(newAggregator != null) {
				addModule(newAggregator, project);
			}
			else {
				myRootProjects.add(project);
			}
		}
		finally {
			writeUnlock();
		}

		return true;
	}

	public boolean hasProjects() {
		readLock();
		try {
			return !myRootProjects.isEmpty();
		}
		finally {
			readUnlock();
		}
	}

	public List<MavenProject> getRootProjects() {
		readLock();
		try {
			return new ArrayList<MavenProject>(myRootProjects);
		}
		finally {
			readUnlock();
		}
	}

	// Feeds a 32-bit value into the CRC byte by byte, low byte first.
	private static void updateCrc(CRC32 crc, int x) {
		crc.update(x & 0xFF);
		x >>>= 8;
		crc.update(x & 0xFF);
		x >>>= 8;
		crc.update(x & 0xFF);
		x >>>= 8;
		crc.update(x);
	}

	private static void updateCrc(CRC32 crc, long l) {
		updateCrc(crc, (int) l);
		updateCrc(crc, (int) (l >>> 32));
	}

	// null is hashed as a distinct marker byte so null and "" differ in the CRC.
	private static void updateCrc(CRC32 crc, @Nullable String s) {
		if(s == null) {
			crc.update(111);
		}
		else {
			updateCrc(crc, s.hashCode());
			crc.update(s.length() & 0xFF);
		}
	}

	/** Extensions excluded from resource filtering by the maven-resources-plugin config. */
	@Nonnull
	public static Collection<String> getFilterExclusions(MavenProject mavenProject) {
		Element config = mavenProject.getPluginConfiguration("org.apache.maven.plugins", "maven-resources-plugin");
		if(config == null) {
			return Collections.emptySet();
		}
		final List<String> customNonFilteredExtensions = MavenJDOMUtil.findChildrenValuesByPath(config, "nonFilteredFileExtensions", "nonFilteredFileExtension");
		if(customNonFilteredExtensions.isEmpty()) {
			return Collections.emptySet();
		}
		return Collections.unmodifiableList(customNonFilteredExtensions);
	}

	/**
	 * CRC over everything that affects resource filtering (profiles, ids,
	 * resource/plugin configuration, filter property files' timestamps) so
	 * callers can cheaply detect that filtered resources need regeneration.
	 * Must be invoked inside a read action.
	 */
	public int getFilterConfigCrc(ProjectFileIndex fileIndex) {
		ApplicationManager.getApplication().assertReadAccessAllowed();

		readLock();
		try {
			final CRC32 crc = new CRC32();

			MavenExplicitProfiles profiles = myExplicitProfiles;
			if(profiles != null) {
				updateCrc(crc, profiles.hashCode());
			}

			Collection<MavenProject> allProjects = myVirtualFileToProjectMapping.values();
			crc.update(allProjects.size() & 0xFF);
			for(MavenProject mavenProject : allProjects) {
				VirtualFile pomFile = mavenProject.getFile();
				Module module = fileIndex.getModuleForFile(pomFile);
				if(module == null) {
					continue;
				}
				// Skip poms that do not sit at their module's content root.
				if(!Comparing.equal(fileIndex.getContentRootForFile(pomFile), pomFile.getParent())) {
					continue;
				}

				updateCrc(crc, module.getName());

				MavenId mavenId = mavenProject.getMavenId();
				updateCrc(crc, mavenId.getGroupId());
				updateCrc(crc, mavenId.getArtifactId());
				updateCrc(crc, mavenId.getVersion());

				MavenId parentId = mavenProject.getParentId();
				if(parentId != null) {
					updateCrc(crc, parentId.getGroupId());
					updateCrc(crc, parentId.getArtifactId());
					updateCrc(crc, parentId.getVersion());
				}

				updateCrc(crc, mavenProject.getDirectory());
				updateCrc(crc, MavenFilteredPropertyPsiReferenceProvider.getDelimitersPattern(mavenProject).pattern());
				updateCrc(crc, mavenProject.getModelMap().hashCode());
				updateCrc(crc, mavenProject.getResources().hashCode());
				updateCrc(crc, mavenProject.getTestResources().hashCode());
				updateCrc(crc, getFilterExclusions(mavenProject).hashCode());
				updateCrc(crc, mavenProject.getProperties().hashCode());

				for(String each : mavenProject.getFilterPropertiesFiles()) {
					File file = new File(each);
					updateCrc(crc, file.lastModified());
				}

				// Serialize plugin configs straight into the CRC without
				// building intermediate strings.
				XMLOutputter outputter = new XMLOutputter(Format.getCompactFormat());

				Writer crcWriter = new Writer() {
					@Override
					public void write(char[] cbuf, int off, int len) throws IOException {
						for(int i = off, end = off + len; i < end; i++) {
							crc.update(cbuf[i]);
						}
					}

					@Override
					public void flush() throws IOException {
					}

					@Override
					public void close() throws IOException {
					}
				};

				try {
					Element resourcePluginCfg = mavenProject.getPluginConfiguration("org.apache.maven.plugins", "maven-resources-plugin");
					if(resourcePluginCfg != null) {
						outputter.output(resourcePluginCfg, crcWriter);
					}

					Element warPluginCfg = mavenProject.getPluginConfiguration("org.apache.maven.plugins", "maven-war-plugin");
					if(warPluginCfg != null) {
						outputter.output(warPluginCfg, crcWriter);
					}
				}
				catch(IOException e) {
					LOG.error(e);
				}
			}

			return (int) crc.getValue();
		}
		finally {
			readUnlock();
		}
	}

	public List<VirtualFile> getRootProjectsFiles() {
		return MavenUtil.collectFiles(getRootProjects());
	}

	public List<MavenProject> getProjects() {
		readLock();
		try {
			return new ArrayList<MavenProject>(myVirtualFileToProjectMapping.values());
		}
		finally {
			readUnlock();
		}
	}

	public List<MavenProject> getNonIgnoredProjects() {
		readLock();
		try {
			List<MavenProject> result = new ArrayList<MavenProject>();
			for(MavenProject each : myVirtualFileToProjectMapping.values()) {
				if(!isIgnored(each)) {
					result.add(each);
				}
			}
			return result;
		}
		finally {
			readUnlock();
		}
	}

	public List<VirtualFile> getProjectsFiles() {
		readLock();
		try {
			return new ArrayList<VirtualFile>(myVirtualFileToProjectMapping.keySet());
		}
		finally {
			readUnlock();
		}
	}

	@Nullable
	public MavenProject findProject(VirtualFile f) {
		readLock();
		try {
			return myVirtualFileToProjectMapping.get(f);
		}
		finally {
			readUnlock();
		}
	}

	@Nullable
	public MavenProject findProject(MavenId id) {
		readLock();
		try {
			return myMavenIdToProjectMapping.get(id);
		}
		finally {
			readUnlock();
		}
	}

	@Nullable
	public MavenProject findProject(MavenArtifact artifact) {
		return findProject(artifact.getMavenId());
	}

	// Snapshot copy so embedders can use the map without holding our lock.
	private MavenWorkspaceMap getWorkspaceMap() {
		readLock();
		try {
			return myWorkspaceMap.copy();
		}
		finally {
			readUnlock();
		}
	}

	public MavenProject findAggregator(MavenProject project) {
		readLock();
		try {
			return myModuleToAggregatorMapping.get(project);
		}
		finally {
			readUnlock();
		}
	}

	/** Walks the aggregator chain up to the top-most project. */
	public MavenProject findRootProject(@Nonnull MavenProject project) {
		readLock();
		try {
			MavenProject rootProject = project;
			while(true) {
				MavenProject aggregator = myModuleToAggregatorMapping.get(rootProject);
				if(aggregator == null) {
					return rootProject;
				}
				rootProject = aggregator;
			}
		}
		finally {
			readUnlock();
		}
	}

	public boolean isRootProject(@Nonnull MavenProject project) {
		readLock();
		try {
			return myModuleToAggregatorMapping.get(project) == null;
		}
		finally {
			readUnlock();
		}
	}

	public List<MavenProject> getModules(MavenProject aggregator) {
		readLock();
		try {
			List<MavenProject> modules =
					myAggregatorToModuleMapping.get(aggregator);
			return modules == null ? Collections.<MavenProject>emptyList() : new ArrayList<MavenProject>(modules);
		}
		finally {
			readUnlock();
		}
	}

	private void addModule(MavenProject aggregator, MavenProject module) {
		writeLock();
		try {
			List<MavenProject> modules = myAggregatorToModuleMapping.get(aggregator);
			if(modules == null) {
				modules = new ArrayList<MavenProject>();
				myAggregatorToModuleMapping.put(aggregator, modules);
			}
			modules.add(module);

			myModuleToAggregatorMapping.put(module, aggregator);
		}
		finally {
			writeUnlock();
		}
	}

	private void removeModule(MavenProject aggregator, MavenProject module) {
		writeLock();
		try {
			List<MavenProject> modules = myAggregatorToModuleMapping.get(aggregator);
			if(modules == null) {
				return;
			}
			modules.remove(module);
			myModuleToAggregatorMapping.remove(module);
		}
		finally {
			writeUnlock();
		}
	}

	private MavenProject findParent(MavenProject project) {
		return findProject(project.getParentId());
	}

	/** Projects whose parent coordinates match the given project's id. */
	public Collection<MavenProject> findInheritors(MavenProject project) {
		readLock();
		try {
			List<MavenProject> result = null;
			MavenId id = project.getMavenId();

			for(MavenProject each : myVirtualFileToProjectMapping.values()) {
				if(each == project) {
					continue;
				}
				if(id.equals(each.getParentId())) {
					if(result == null) {
						result = new ArrayList<MavenProject>();
					}
					result.add(each);
				}
			}

			return result == null ? Collections.<MavenProject>emptyList() : result;
		}
		finally {
			readUnlock();
		}
	}

	/**
	 * Projects that depend on any of the given ones, either by listing one as
	 * a module (matched by path) or by declaring a dependency on its id.
	 */
	public List<MavenProject> getDependentProjects(Collection<MavenProject> projects) {
		readLock();
		try {
			List<MavenProject> result = null;

			Set<MavenCoordinate> projectIds = Sets.newHashSet(new MavenCoordinateHashCodeStrategy());
			for(MavenProject project : projects) {
				projectIds.add(project.getMavenId());
			}

			final Set<File> projectPaths = Sets.newHashSet(FileUtil.FILE_HASHING_STRATEGY);
			for(MavenProject project : projects) {
				projectPaths.add(new File(project.getFile().getPath()));
			}

			for(MavenProject project : myVirtualFileToProjectMapping.values()) {
				boolean isDependent = false;

				Set<String> pathsInStack = project.getModulePaths();
				for(final String path : pathsInStack) {
					if(projectPaths.contains(new File(path))) {
						isDependent = true;
						break;
					}
				}

				if(!isDependent) {
					for(MavenArtifact dep : project.getDependencies()) {
						if(projectIds.contains(dep)) {
							isDependent = true;
							break;
						}
					}
				}

				if(isDependent) {
					if(result == null) {
						result = new ArrayList<MavenProject>();
					}
					result.add(project);
				}
			}

			return result == null ? Collections.<MavenProject>emptyList() : result;
		}
		finally {
			readUnlock();
		}
	}

	@TestOnly
	public void resolve(@Nonnull Project project,
						@Nonnull MavenProject mavenProject,
						@Nonnull MavenGeneralSettings generalSettings,
						@Nonnull MavenEmbeddersManager embeddersManager,
						@Nonnull MavenConsole console,
						@Nonnull MavenProgressIndicator process) throws MavenProcessCanceledException {
		resolve(project, mavenProject, generalSettings, embeddersManager, console, new ResolveContext(), process);
	}

	/**
	 * Resolves the project's dependencies with an embedder and notifies
	 * listeners with the result. The embedder is always released.
	 */
	public void resolve(@Nonnull Project project,
						@Nonnull MavenProject mavenProject,
						@Nonnull MavenGeneralSettings generalSettings,
						@Nonnull MavenEmbeddersManager embeddersManager,
						@Nonnull MavenConsole console,
						@Nonnull ResolveContext context,
						@Nonnull MavenProgressIndicator process) throws MavenProcessCanceledException {
		MavenEmbedderWrapper embedder = embeddersManager.getEmbedder(MavenEmbeddersManager.FOR_DEPENDENCIES_RESOLVE);
		embedder.customizeForResolve(getWorkspaceMap(), console, process, generalSettings.isAlwaysUpdateSnapshots());

		try {
			process.checkCanceled();
			process.setText(ProjectBundle.message("maven.resolving.pom", mavenProject.getDisplayName()));
			process.setText2("");
			Pair<MavenProjectChanges, NativeMavenProjectHolder> resolveResult = mavenProject.resolve(project, generalSettings, embedder, new MavenProjectReader(), myProjectLocator, context);

			fireProjectResolved(Pair.create(mavenProject, resolveResult.first), resolveResult.second);
		}
		finally {
			embeddersManager.release(embedder);
		}
	}

	/** Resolves the project's declared plugins and refreshes the downloaded files in the VFS. */
	public void resolvePlugins(@Nonnull MavenProject mavenProject,
							   @Nonnull NativeMavenProjectHolder nativeMavenProject,
							   @Nonnull MavenEmbeddersManager embeddersManager,
							   @Nonnull MavenConsole console,
							   @Nonnull MavenProgressIndicator process) throws MavenProcessCanceledException {
		MavenEmbedderWrapper embedder = embeddersManager.getEmbedder(MavenEmbeddersManager.FOR_PLUGINS_RESOLVE);
		embedder.customizeForResolve(console, process);
		embedder.clearCachesFor(mavenProject.getMavenId());

		Set<File>
filesToRefresh = new HashSet<File>(); try { process.setText(ProjectBundle.message("maven.downloading.pom.plugins", mavenProject.getDisplayName())); for(MavenPlugin each : mavenProject.getDeclaredPlugins()) { process.checkCanceled(); Collection<MavenArtifact> artifacts = embedder.resolvePlugin(each, mavenProject.getRemoteRepositories(), nativeMavenProject, false); for(MavenArtifact artifact : artifacts) { File pluginJar = artifact.getFile(); File pluginDir = pluginJar.getParentFile(); if(pluginDir != null) { filesToRefresh.add(pluginDir); // Refresh both *.pom and *.jar files. } } } mavenProject.resetCache(); firePluginsResolved(mavenProject); } finally { if(filesToRefresh.size() > 0) { LocalFileSystem.getInstance().refreshIoFiles(filesToRefresh); } embeddersManager.release(embedder); } } public void resolveFolders(@Nonnull final MavenProject mavenProject, @Nonnull final MavenImportingSettings importingSettings, @Nonnull final MavenEmbeddersManager embeddersManager, @Nonnull final MavenConsole console, @Nonnull final MavenProgressIndicator process) throws MavenProcessCanceledException { executeWithEmbedder(mavenProject, embeddersManager, MavenEmbeddersManager.FOR_FOLDERS_RESOLVE, console, process, new EmbedderTask() { public void run(MavenEmbedderWrapper embedder) throws MavenProcessCanceledException { process.checkCanceled(); process.setText(ProjectBundle.message("maven.updating.folders.pom", mavenProject.getDisplayName())); process.setText2(""); Pair<Boolean, MavenProjectChanges> resolveResult = mavenProject.resolveFolders(embedder, importingSettings, console); if(resolveResult.first) { fireFoldersResolved(Pair.create(mavenProject, resolveResult.second)); } } }); } public MavenArtifactDownloader.DownloadResult downloadSourcesAndJavadocs(@Nonnull Project project, @Nonnull Collection<MavenProject> projects, @Nullable Collection<MavenArtifact> artifacts, boolean downloadSources, boolean downloadDocs, @Nonnull MavenEmbeddersManager embeddersManager, @Nonnull 
MavenConsole console, @Nonnull MavenProgressIndicator process) throws MavenProcessCanceledException { MavenEmbedderWrapper embedder = embeddersManager.getEmbedder(MavenEmbeddersManager.FOR_DOWNLOAD); embedder.customizeForResolve(console, process); try { MavenArtifactDownloader.DownloadResult result = MavenArtifactDownloader.download(project, this, projects, artifacts, downloadSources, downloadDocs, embedder, process); for(MavenProject each : projects) { fireArtifactsDownloaded(each); } return result; } finally { embeddersManager.release(embedder); } } public void executeWithEmbedder(@Nonnull MavenProject mavenProject, @Nonnull MavenEmbeddersManager embeddersManager, @Nonnull Key embedderKind, @Nonnull MavenConsole console, @Nonnull MavenProgressIndicator process, @Nonnull EmbedderTask task) throws MavenProcessCanceledException { MavenEmbedderWrapper embedder = embeddersManager.getEmbedder(embedderKind); embedder.customizeForResolve(getWorkspaceMap(), console, process, false); embedder.clearCachesFor(mavenProject.getMavenId()); try { task.run(embedder); } finally { embeddersManager.release(embedder); } } public <Result> Result visit(Visitor<Result> visitor) { for(MavenProject each : getRootProjects()) { if(visitor.isDone()) { break; } doVisit(each, visitor); } return visitor.getResult(); } private <Result> void doVisit(MavenProject project, Visitor<Result> visitor) { if(!visitor.isDone() && visitor.shouldVisit(project)) { visitor.visit(project); for(MavenProject each : getModules(project)) { if(visitor.isDone()) { break; } doVisit(each, visitor); } visitor.leave(project); } } private void writeLock() { myStructureWriteLock.lock(); } private void writeUnlock() { myStructureWriteLock.unlock(); } private void readLock() { myStructureReadLock.lock(); } private void readUnlock() { myStructureReadLock.unlock(); } public void addListener(Listener l) { myListeners.add(l); } private void fireProfilesChanged() { for(Listener each : myListeners) { each.profilesChanged(); } } 
private void fireProjectsIgnoredStateChanged(List<MavenProject> ignored, List<MavenProject> unignored, boolean fromImport) { for(Listener each : myListeners) { each.projectsIgnoredStateChanged(ignored, unignored, fromImport); } } private void fireProjectsUpdated(List<Pair<MavenProject, MavenProjectChanges>> updated, List<MavenProject> deleted) { for(Listener each : myListeners) { each.projectsUpdated(updated, deleted); } } private void fireProjectResolved(Pair<MavenProject, MavenProjectChanges> projectWithChanges, NativeMavenProjectHolder nativeMavenProject) { for(Listener each : myListeners) { each.projectResolved(projectWithChanges, nativeMavenProject); } } private void firePluginsResolved(MavenProject project) { for(Listener each : myListeners) { each.pluginsResolved(project); } } private void fireFoldersResolved(Pair<MavenProject, MavenProjectChanges> projectWithChanges) { for(Listener each : myListeners) { each.foldersResolved(projectWithChanges); } } private void fireArtifactsDownloaded(MavenProject project) { for(Listener each : myListeners) { each.artifactsDownloaded(project); } } private class UpdateContext { public final Map<MavenProject, MavenProjectChanges> updatedProjectsWithChanges = new LinkedHashMap<MavenProject, MavenProjectChanges>(); public final Set<MavenProject> deletedProjects = new LinkedHashSet<MavenProject>(); public void update(MavenProject project, MavenProjectChanges changes) { deletedProjects.remove(project); updatedProjectsWithChanges.put(project, changes.mergedWith(updatedProjectsWithChanges.get(project))); } public void deleted(MavenProject project) { updatedProjectsWithChanges.remove(project); deletedProjects.add(project); } public void deleted(Collection<MavenProject> projects) { for(MavenProject each : projects) { deleted(each); } } public void fireUpdatedIfNecessary() { if(updatedProjectsWithChanges.isEmpty() && deletedProjects.isEmpty()) { return; } List<MavenProject> mavenProjects = deletedProjects.isEmpty() ? 
Collections.<MavenProject>emptyList() : new ArrayList<MavenProject>(deletedProjects); List<Pair<MavenProject, MavenProjectChanges>> updated = updatedProjectsWithChanges.isEmpty() ? Collections.<Pair<MavenProject, MavenProjectChanges>>emptyList() : MavenUtil.mapToList (updatedProjectsWithChanges); fireProjectsUpdated(updated, mavenProjects); } } public interface EmbedderTask { void run(MavenEmbedderWrapper embedder) throws MavenProcessCanceledException; } public abstract static class Visitor<Result> { private Result result; public boolean shouldVisit(MavenProject project) { return true; } public abstract void visit(MavenProject project); public void leave(MavenProject node) { } public void setResult(Result result) { this.result = result; } public Result getResult() { return result; } public boolean isDone() { return result != null; } } public abstract static class SimpleVisitor extends Visitor<Object> { } private static class MavenProjectTimestamp { private final long myPomTimestamp; private final long myParentLastReadStamp; private final long myProfilesTimestamp; private final long myUserSettingsTimestamp; private final long myGlobalSettingsTimestamp; private final long myExplicitProfilesHashCode; private MavenProjectTimestamp(long pomTimestamp, long parentLastReadStamp, long profilesTimestamp, long userSettingsTimestamp, long globalSettingsTimestamp, long explicitProfilesHashCode) { myPomTimestamp = pomTimestamp; myParentLastReadStamp = parentLastReadStamp; myProfilesTimestamp = profilesTimestamp; myUserSettingsTimestamp = userSettingsTimestamp; myGlobalSettingsTimestamp = globalSettingsTimestamp; myExplicitProfilesHashCode = explicitProfilesHashCode; } public static MavenProjectTimestamp read(DataInputStream in) throws IOException { return new MavenProjectTimestamp(in.readLong(), in.readLong(), in.readLong(), in.readLong(), in.readLong(), in.readLong()); } public void write(DataOutputStream out) throws IOException { out.writeLong(myPomTimestamp); 
out.writeLong(myParentLastReadStamp); out.writeLong(myProfilesTimestamp); out.writeLong(myUserSettingsTimestamp); out.writeLong(myGlobalSettingsTimestamp); out.writeLong(myExplicitProfilesHashCode); } @Override public String toString() { return "(" + myPomTimestamp + ":" + myParentLastReadStamp + ":" + myProfilesTimestamp + ":" + myUserSettingsTimestamp + ":" + myGlobalSettingsTimestamp + ":" + myExplicitProfilesHashCode + ")"; } @Override public boolean equals(Object o) { if(this == o) { return true; } if(o == null || getClass() != o.getClass()) { return false; } MavenProjectTimestamp timestamp = (MavenProjectTimestamp) o; if(myPomTimestamp != timestamp.myPomTimestamp) { return false; } if(myParentLastReadStamp != timestamp.myParentLastReadStamp) { return false; } if(myProfilesTimestamp != timestamp.myProfilesTimestamp) { return false; } if(myUserSettingsTimestamp != timestamp.myUserSettingsTimestamp) { return false; } if(myGlobalSettingsTimestamp != timestamp.myGlobalSettingsTimestamp) { return false; } if(myExplicitProfilesHashCode != timestamp.myExplicitProfilesHashCode) { return false; } return true; } @Override public int hashCode() { int result = 0; result = 31 * result + (int) (myPomTimestamp ^ (myPomTimestamp >>> 32)); result = 31 * result + (int) (myParentLastReadStamp ^ (myParentLastReadStamp >>> 32)); result = 31 * result + (int) (myProfilesTimestamp ^ (myProfilesTimestamp >>> 32)); result = 31 * result + (int) (myUserSettingsTimestamp ^ (myUserSettingsTimestamp >>> 32)); result = 31 * result + (int) (myGlobalSettingsTimestamp ^ (myGlobalSettingsTimestamp >>> 32)); result = 31 * result + (int) (myExplicitProfilesHashCode ^ (myExplicitProfilesHashCode >>> 32)); return result; } } public interface Listener extends EventListener { default void profilesChanged() { } default void projectsIgnoredStateChanged(List<MavenProject> ignored, List<MavenProject> unignored, boolean fromImport) { } default void projectsUpdated(List<Pair<MavenProject, 
MavenProjectChanges>> updated, List<MavenProject> deleted) { } default void projectResolved(Pair<MavenProject, MavenProjectChanges> projectWithChanges, @Nullable NativeMavenProjectHolder nativeMavenProject) { } default void pluginsResolved(MavenProject project) { } default void foldersResolved(Pair<MavenProject, MavenProjectChanges> projectWithChanges) { } default void artifactsDownloaded(MavenProject project) { } } @Deprecated public static class ListenerAdapter implements Listener { public void profilesChanged() { } public void projectsIgnoredStateChanged(List<MavenProject> ignored, List<MavenProject> unignored, boolean fromImport) { } public void projectsUpdated(List<Pair<MavenProject, MavenProjectChanges>> updated, List<MavenProject> deleted) { } public void projectResolved(Pair<MavenProject, MavenProjectChanges> projectWithChanges, @Nullable NativeMavenProjectHolder nativeMavenProject) { } public void pluginsResolved(MavenProject project) { } public void foldersResolved(Pair<MavenProject, MavenProjectChanges> projectWithChanges) { } public void artifactsDownloaded(MavenProject project) { } } private static class MavenCoordinateHashCodeStrategy implements HashingStrategy<MavenCoordinate> { @Override public int hashCode(MavenCoordinate object) { String artifactId = object.getArtifactId(); return artifactId == null ? 0 : artifactId.hashCode(); } @Override public boolean equals(MavenCoordinate o1, MavenCoordinate o2) { return Comparing.equal(o1.getArtifactId(), o2.getArtifactId()) && Comparing.equal(o1.getVersion(), o2.getVersion()) && Comparing.equal(o1.getGroupId(), o2.getGroupId()); } } }
// Copyright (C) 2009 Per M.A. Bothner.
// This is free software; for terms and warranty disclaimer see ../../COPYING.
// NOTE(review): this file arrived with its newlines stripped; the collapsed "//"
// comments swallowed following code (the license header swallowed the package and
// import declarations; the "Might be more efficient..." note swallowed the
// lastModifiedTime lookup; the 'except comment swallowed "case 'O':").
// Line structure restored; tokens are otherwise unchanged.

package kawa.standard;
import kawa.lang.*;
import gnu.expr.*;
import gnu.lists.*;
import gnu.kawa.io.FilePath;
import gnu.kawa.io.Path;
import gnu.mapping.*;
import java.io.File;
import gnu.bytecode.ObjectType;
import java.util.*;
import kawa.lang.Translator.FormStack;

/** Implement R6RS import form.
 * This actually only implements simplified import;
 * we assume it has been simplified by import macro defined in syntax.scm.
 */
public class ImportFromLibrary extends Syntax
{
    public static final ImportFromLibrary instance = new ImportFromLibrary();

    // Class-name prefixes tried, in order, when mapping a library name to a class.
    public static String[] classPrefixPath = { "", "kawa.lib." };

    private static final String BUILTIN = "<builtin>";
    private static final String MISSING = null;

    /** SRFI 97 table: { srfi-number, srfi-name, implementing class or BUILTIN/MISSING }. */
    static final String[][] SRFI97Map = {
        { "1", "lists", "gnu.kawa.slib.srfi1" },
        { "2", "and-let*", "gnu.kawa.slib.srfi2" },
        { "5", "let", MISSING },
        { "6", "basic-string-ports", BUILTIN },
        { "8", "receive", "gnu.kawa.slib.receive" },
        { "9", "records", BUILTIN },
        { "11", "let-values", BUILTIN },
        { "13", "strings", "gnu.kawa.slib.srfi13" },
        { "14", "char-sets", "gnu.kawa.slib.srfi14" },
        { "16", "case-lambda", BUILTIN },
        { "17", "generalized-set!", BUILTIN },
        { "18", "multithreading", MISSING },
        { "19", "time", MISSING },
        { "21", "real-time-multithreading", MISSING },
        { "23", "error", BUILTIN },
        { "25", "multi-dimensional-arrays", BUILTIN },
        { "26", "cut", "gnu.kawa.slib.cut" },
        { "27", "random-bits", MISSING },
        { "28", "basic-format-strings", BUILTIN },
        { "29", "localization", MISSING },
        { "31", "rec", MISSING },
        { "35", "conditions", "gnu.kawa.slib.conditions" },
        { "37", "args-fold", "gnu.kawa.slib.srfi37" },
        { "38", "with-shared-structure", MISSING },
        { "39", "parameters", BUILTIN },
        // Note the default for (srfi :41) should be "streams". We put that last,
        // since the label is searched from high index to low index.
        { "41", "streams.primitive", "gnu.kawa.slib.StreamsPrimitive" },
        { "41", "streams.derived", "gnu.kawa.slib.StreamsDerived" },
        { "41", "streams", "gnu.kawa.slib.Streams" },
        { "42", "eager-comprehensions", MISSING },
        { "43", "vectors", MISSING },
        { "44", "collections", MISSING },
        { "45", "lazy", MISSING },
        { "46", "syntax-rules", MISSING },
        { "47", "arrays", MISSING },
        { "48", "intermediate-format-strings", MISSING },
        { "51", "rest-values", MISSING },
        { "54", "cat", MISSING },
        { "57", "records", MISSING },
        { "59", "vicinities", MISSING },
        { "60", "integer-bits", "gnu.kawa.slib.srfi60" },
        { "61", "cond", MISSING },
        { "63", "arrays", MISSING },
        { "64", "testing", "gnu.kawa.slib.testing" },
        { "66", "octet-vectors", MISSING },
        { "67", "compare-procedures", MISSING },
        { "69", "basic-hash-tables", "gnu.kawa.slib.srfi69" },
        { "71", "let", MISSING },
        { "74", "blobs", MISSING },
        { "78", "lightweight-testing", MISSING },
        { "86", "mu-and-nu", MISSING },
        { "87", "case", BUILTIN },
        { "95", "sorting-and-merging", "kawa.lib.srfi95" },
        { "98", "os-environment-variables", BUILTIN },
        { "101", "random-access-lists", "gnu.kawa.slib.ralists" }
    };

    /** Scan each import-set of an (import ...) form. */
    @Override
    public void scanForm(Pair st, ScopeExp defs, Translator tr) {
        Object obj = st.getCdr();
        while (obj instanceof Pair) {
            Pair pair = (Pair) obj;
            Object save1 = tr.pushPositionOf(pair);
            scanImportSet(pair.getCar(), defs, tr, null);
            tr.popPositionOf(save1);
            obj = pair.getCdr();
        }
        if (obj != LList.Empty)
            tr.error('e', "improper list");
    }

    /** Map a "srfi." library name through SRFI97Map.
     * @return the implementing class name, the special BUILTIN marker, or the
     *   original {@code lname} (also reports errors for malformed references).
     */
    public static String checkSrfi(String lname, Translator tr) {
        if (lname.startsWith("srfi.")) {
            String demangled = Compilation.demangleSymbolic(lname.substring(5));
            int dot = demangled.indexOf('.');
            String srfiName;
            StringBuilder badNameBuffer = null;
            if (dot < 0) {
                srfiName = null;
                dot = demangled.length();
            }
            else
                srfiName = demangled.substring(dot+1);
            // Parse the SRFI number; accepts both NNN and :NNN.
            String srfiNumber = null;
            if (dot > 0) {
                int numStart = demangled.charAt(0) == ':' ? 1 : 0;
                for (int i = numStart;  ;  i++) {
                    if (i == dot) {
                        srfiNumber = demangled.substring(numStart, dot);
                        break;
                    }
                    if (Character.digit(demangled.charAt(i), 10) < 0)
                        break;
                }
            }
            if (srfiNumber == null) {
                tr.error('e', "SRFI library reference must have the form: (srfi NNN [name]) or (srfi :NNN [name])");
                return lname;
            }
            // Search from high index to low index so the unnamed default wins (see table note).
            int srfiIndex = SRFI97Map.length;
            for (;;) {
                if (--srfiIndex < 0) {
                    break;
                }
                if (!SRFI97Map[srfiIndex][0].equals(srfiNumber))
                    continue;
                String srfiNameExpected = SRFI97Map[srfiIndex][1];
                String srfiClass = SRFI97Map[srfiIndex][2];
                if (srfiName == null || srfiName.equals(srfiNameExpected))
                    return srfiClass != MISSING ? srfiClass : lname;
                if (badNameBuffer == null) {
                    badNameBuffer = new StringBuilder("the name of SRFI ");
                    badNameBuffer.append(srfiNumber);
                    badNameBuffer.append(" should be '");
                }
                else
                    badNameBuffer.append(" or '");
                badNameBuffer.append(srfiNameExpected);
                badNameBuffer.append('\'');
            }
            if (badNameBuffer != null) {
                tr.error('e', badNameBuffer.toString());
                return BUILTIN;
            }
        }
        return lname;
    }

    /** Process one import-set, recursing through only/except/rename/prefix wrappers. */
    void scanImportSet(Object imports, ScopeExp defs, Translator tr,
                       require.DeclSetMapper mapper) {
        if (imports instanceof SimpleSymbol) {
            String sname = imports.toString();
            handleImport(sname, null, Compilation.mangleQualifiedName(sname), defs, tr, mapper);
            return;
        }
        int specLength = Translator.listLength(imports);
        if (specLength <= 0) {
            Object save1 = tr.pushPositionOf(imports);
            tr.error('e', "import specifier is not a proper list");
            tr.popPositionOf(save1);
            return;
        }
        Pair pimport = (Pair) imports;
        Object first = pimport.getCar();
        Object rest = pimport.getCdr();
        Pair cdrPair = specLength >= 2 ? (Pair) rest : null;
        char kind = '\0';
        if (first == onlySymbol)
            kind = 'O';
        else if (first == exceptSymbol)
            kind = 'E';
        else if (first == renameSymbol)
            kind = 'R';
        else if (first == prefixSymbol)
            kind = 'P';
        else if (first == librarySymbol && specLength == 2
                 && cdrPair.getCar() instanceof Pair)
            pimport = (Pair) cdrPair.getCar();
        else if (first == classSymbol && specLength >= 2
                 && cdrPair.getCar() instanceof SimpleSymbol) {
            // (class PREFIX NAME-OR-(NAME NEW-NAME) ...) - import Java classes directly.
            Map<Symbol, Expression> decls = new LinkedHashMap<Symbol, Expression>();
            SimpleSymbol name1 = (SimpleSymbol) cdrPair.getCar();
            String str1 = name1.getName();
            rest = cdrPair.getCdr();
            if (rest == LList.Empty) {
                tr.error('e', "class-prefix must be followed by class-names");
            }
            while (rest != LList.Empty) {
                cdrPair = (Pair) rest;
                Object part2 = cdrPair.getCar();
                String cname = null;
                SimpleSymbol dname = null;
                if (part2 instanceof SimpleSymbol) {
                    dname = (SimpleSymbol) part2;
                    String str2 = dname.getName();
                    cname = name1+"."+str2;
                }
                else if (part2 instanceof Pair && Translator.listLength(part2) == 2) {
                    Pair rpair1 = (Pair) part2;
                    Pair rpair2 = (Pair) rpair1.getCdr();
                    Object rname1 = rpair1.getCar();
                    Object rname2 = rpair2.getCar();
                    if (rname1 instanceof SimpleSymbol && rname2 instanceof SimpleSymbol) {
                        cname = name1 + "." + ((SimpleSymbol) rname1).getName();
                        dname = (SimpleSymbol) rname2;
                    }
                }
                if (dname == null) {
                    tr.error('e', "imported class-name must be NAME or (NAME NEW-NAME)");
                }
                else {
                    try {
                        Class clas = ObjectType.getContextClass(cname);
                        decls.put(dname, tr.makeQuoteExp(clas));
                    }
                    catch (ClassNotFoundException ex) {
                        tr.error('e', "no class found named "+cname);
                    }
                }
                rest = cdrPair.getCdr();
            }
            if (mapper != null)
                decls = mapper.map(decls, tr);
            // Define each surviving name as a constant alias for the class.
            for (Map.Entry<Symbol,Expression> entry : decls.entrySet()) {
                Symbol aname = entry.getKey();
                Declaration decl = tr.define(aname, defs);
                decl.setAlias(true);
                decl.setFlag(Declaration.IS_CONSTANT|Declaration.EARLY_INIT);
                SetExp sexp = new SetExp(decl, entry.getValue());
                tr.setLineOf(sexp);
                decl.noteValueFromSet(sexp);
                sexp.setDefining(true);
                tr.formStack.push(sexp);
            }
            return;
        }
        if (specLength >= 2 && kind != '\0') {
            // Wrap the inner import-set with a mapper for only/except/rename/prefix.
            ImportSetMapper nmapper = new ImportSetMapper(kind, cdrPair.getCdr(), specLength-2);
            nmapper.chain = mapper;
            scanImportSet(cdrPair.getCar(), defs, tr, nmapper);
            return;
        }
        // Plain library reference: collect name parts, an optional version spec,
        // and an optional trailing source-file string.
        String explicitSource = null;
        Object versionSpec = null;
        StringBuilder cbuf = new StringBuilder(); // for class name
        StringBuilder sbuf = new StringBuilder(); // for source file name
        Object libref = pimport;
        while (libref instanceof Pair) {
            Pair pair = (Pair) libref;
            Object car = pair.getCar();
            Object cdr = pair.getCdr();
            if (car instanceof Pair) {
                if (versionSpec != null) {
                    tr.error('e', "duplicate version reference - was "+versionSpec);
                }
                versionSpec = car;
            }
            else if (car instanceof String) {
                if (cdr instanceof Pair)
                    tr.error('e', "source specifier must be last element in library reference");
                explicitSource = (String) car;
            }
            else {
                if (cbuf.length() > 0)
                    cbuf.append('.');
                if (sbuf.length() > 0)
                    sbuf.append('/');
                String part = car.toString();
                cbuf.append(Compilation.mangleClassName(part));
                sbuf.append(part);
            }
            libref = cdr;
        }
        handleImport(sbuf.toString(), explicitSource, cbuf.toString(), defs, tr, mapper);
    }

    /** Do the actual work of importing a module.
     * @param implicitSource Source name inferred from library name,
     *   with '/' as separator.  Does not include a file extension.
     * @param explicitSource If non-null, an explicitly specified
     *   source file name.
     */
    public static void handleImport(String implicitSource, String explicitSource,
                                    String requestedClass, ScopeExp defs,
                                    Translator tr, require.DeclSetMapper mapper) {
        ModuleManager mmanager = ModuleManager.getInstance();
        ModuleInfo minfo = null;
        String lname = checkSrfi(requestedClass, tr);
        if (lname == BUILTIN)
            return; // nothing to do
        boolean foundSrfi = lname != requestedClass;
        // First try to find an already-known module or an existing compiled class.
        int classPrefixPathLength = classPrefixPath.length;
        Class existingClass = null;
        for (int i = 0; i < classPrefixPathLength; i++) {
            String tname = classPrefixPath[i] + lname;
            minfo = mmanager.searchWithClassName(tname);
            if (minfo != null)
                break;
            try {
                existingClass = ObjectType.getContextClass(tname);
                break;
            }
            catch (Exception ex) {
            }
            catch (NoClassDefFoundError ex) {
            }
        }
        ModuleInfo curinfo = tr.getMinfo();
        Path currentSource = tr.getSourceAbsPath();
        String currentExtension = currentSource == null ? null
            : currentSource.getExtension();
        if (currentExtension == null) {
            List<String> langExtensions = tr.getLanguage().getExtensions();
            if (! langExtensions.isEmpty())
                currentExtension = langExtensions.get(0);
        }
        boolean hasDot;
        boolean isAbsolute;
        if (explicitSource != null) {
            hasDot = explicitSource.indexOf("./") >= 0;
            isAbsolute = Path.valueOf(explicitSource).isAbsolute();
        }
        else {
            hasDot = false;
            isAbsolute = false;
        }
        String currentClassName = curinfo.getClassName();
        // Is the current module a file - as opposed to (say) a tty?
        boolean currentIsFile = currentSource != null && currentSource.isPlainFile();
        Path currentRoot = currentIsFile ? currentSource.getDirectory()
            : Path.currentPath();
        if (currentIsFile && ! (explicitSource != null && (hasDot || isAbsolute))) {
            // Walk up one directory per '.' in the current class/package name so
            // relative lookups start from the package root.
            int currentDots = 0;
            String prefix = currentClassName != null ? currentClassName
                : tr.classPrefix != null ? tr.classPrefix : "";
            for (int i = prefix.length(); --i >= 0; )
                if (prefix.charAt(i) == '.')
                    currentDots++;
            if (currentDots > 0) {
                StringBuilder ups = new StringBuilder("..");
                for (int i = currentDots; --i > 0; )
                    ups.append("/..");
                currentRoot = currentRoot.resolve(ups.toString());
            }
        }
        List<CharSequence> srcSearchPath;
        boolean skipSourceSearch = minfo != null && explicitSource == null;
        if (isAbsolute || hasDot || skipSourceSearch) {
            srcSearchPath = new ArrayList<CharSequence>();
            if (! skipSourceSearch)
                srcSearchPath.add(currentRoot.toString());
        }
        else
            srcSearchPath = getImportSearchPath();
        String pathStr = null;
        for (CharSequence searchElement : srcSearchPath) {
            if (isAbsolute)
                pathStr = explicitSource;
            else {
                String pathElement = searchElement.toString();
                int selectorEnd;
                int star;
                int prefixLength = 0;
                StringBuilder pbuf = new StringBuilder();
                // A "<p1 p2 ...>rest" search element only applies to libraries whose
                // name starts with the p1/p2/... prefix; the prefix is stripped.
                if (pathElement.length() >= 3 && pathElement.charAt(0) == '<'
                    && (selectorEnd = pathElement.indexOf('>')+1) > 0) {
                    StringBuilder prefixBuf = new StringBuilder();
                    boolean slashNeeded = false;
                    for (int i = 1; i < selectorEnd-1; i++) {
                        char ch = pathElement.charAt(i);
                        if (ch == ' ') {
                            if (prefixBuf.length() > 0)
                                slashNeeded = true;
                        }
                        else {
                            if (slashNeeded)
                                prefixBuf.append('/');
                            prefixBuf.append(ch);
                            prefixLength += slashNeeded ? 2 : 1;
                            slashNeeded = false;
                        }
                    }
                    if (! implicitSource.startsWith(prefixBuf.toString()))
                        continue;
                    if (implicitSource.length() != prefixLength) {
                        if (implicitSource.charAt(prefixLength) != '/')
                            continue;
                        prefixLength++;
                    }
                    star = pathElement.indexOf('*', selectorEnd);
                    if (star < 0) {
                        pathElement = pathElement.substring(selectorEnd);
                    }
                }
                else {
                    // No "<...>..." selector
                    star = pathElement.indexOf('*');
                    selectorEnd = 0;
                    if (foundSrfi && explicitSource == null)
                        continue;
                }
                if (star >= 0) {
                    // '*' in the search element is replaced by the library name.
                    pbuf.append(pathElement.substring(selectorEnd, star));
                    pbuf.append(implicitSource.substring(prefixLength));
                    pbuf.append(pathElement.substring(star+1));
                }
                else {
                    if (! ".".equals(pathElement)) {
                        pbuf.append(pathElement);
                        pbuf.append('/');
                    }
                    if (explicitSource != null)
                        pbuf.append(explicitSource);
                    else {
                        pbuf.append(implicitSource);
                        if (currentExtension != null) {
                            pbuf.append('.');
                            pbuf.append(currentExtension);
                        }
                    }
                }
                pathStr = pbuf.toString();
            }
            Path path = currentRoot.resolve(pathStr).getCanonical();
            // Might be more efficient to first check the ModuleManager,
            // before asking the file-system. FIXME
            long lastModifiedTime = path.getLastModified();
            if (lastModifiedTime != 0) {
                if (minfo != null) {
                    String pstring = path.toString();
                    Path infoPath = minfo.getSourceAbsPath();
                    if (infoPath == null || ! (pstring.equals(infoPath.toString()))) {
                        tr.error('w', "ignoring source file at "+pstring
                                 +" - instead using class "+minfo.getClassName()
                                 +(infoPath==null?"" :(" from "+infoPath.toString())));
                    }
                }
                else
                    minfo = mmanager.findWithSourcePath(path, pathStr);
                // Should save lastModifiedTime in minfo FIXME
                if (foundSrfi)
                    lname = requestedClass;
                break;
            }
        }
        if (existingClass != null) {
            if (minfo == null)
                minfo = mmanager.findWithClass(existingClass);
            else
                minfo.setModuleClass(existingClass);
        }
        if (minfo == null)
            tr.error('e', "unknown library ("+implicitSource.replace('/', ' ')+")");
        else
            require.importDefinitions(lname, minfo, mapper, tr.formStack, defs, tr);
    }

    public Expression rewriteForm(Pair form, Translator tr) {
        return tr.syntaxError(getName()+" is only allowed in a <body>");
    }

    /** Applies only/except/rename/prefix filtering to an imported declaration set. */
    static class ImportSetMapper implements require.DeclSetMapper {
        char kind;
        Object list;
        int listLength;
        require.DeclSetMapper chain; // next mapper to apply, for nested wrappers

        public ImportSetMapper(char kind, Object list, int listLength) {
            this.kind = kind;
            this.list = list;
            this.listLength = listLength;
        }

        public Map<Symbol, Expression> map(Map<Symbol, Expression> decls, Compilation comp) {
            Translator tr = (Translator) comp;
            Object lst = this.list;
            Map<Symbol,Expression> nmap = decls;
            switch (kind) {
            case 'E': // 'except; list has the form (name ...)
            case 'O': // 'only; list has the form (name ...)
                if (kind == 'O')
                    nmap = new LinkedHashMap<Symbol,Expression>();
                while (lst instanceof Pair) {
                    Pair pair = (Pair) lst;
                    Object save1 = tr.pushPositionOf(pair);
                    Object name = Translator.stripSyntax(pair.getCar());
                    Symbol oldsym = null;
                    Symbol newsym = null;
                    if (name instanceof Symbol) {
                        oldsym = (Symbol) name;
                        newsym = oldsym;
                    }
                    else if (kind == 'O' && name instanceof Pair
                             && Translator.listLength(name) == 2) {
                        // 'only also accepts (OLD-NAME NEW-NAME) entries.
                        Pair rpair1 = (Pair) name;
                        Object rname1 = rpair1.getCar();
                        Object rname2 = ((Pair) rpair1.getCdr()).getCar();
                        if (rname1 instanceof Symbol && rname2 instanceof Symbol) {
                            oldsym = (Symbol) rname1;
                            newsym = (Symbol) rname2;
                        }
                    }
                    if (oldsym == null)
                        tr.error('e', "non-symbol in name list");
                    else {
                        Expression old = decls.get(oldsym);
                        if (old == null)
                            tr.error('e', "unknown symbol in import set: "+oldsym);
                        else if (kind == 'E')
                            nmap.remove(oldsym);
                        else
                            nmap.put(newsym, old);
                    }
                    tr.popPositionOf(save1);
                    lst = pair.getCdr();
                }
                break;
            case 'R': // 'rename; list has the form: ((oldname newname) ...)
                Symbol[] pendingSymbols = new Symbol[listLength];
                Expression[] pendingDecls = new Expression[listLength];
                int npending = 0;
                while (lst instanceof Pair) {
                    Pair pair = (Pair) lst;
                    Object save1 = tr.pushPositionOf(pair);
                    Object entry = pair.getCar();
                    int entryLen = Translator.listLength(entry);
                    if (entryLen == 2) {
                        Pair p1 = (Pair) entry;
                        Object oldname = p1.getCar();
                        Object newname = ((Pair) p1.getCdr()).getCar();
                        if (oldname instanceof Symbol && newname instanceof Symbol) {
                            Symbol oldSymbol = (Symbol) oldname;
                            Symbol newSymbol = (Symbol) newname;
                            Expression oldValue = decls.remove(oldSymbol);
                            if (oldValue == null)
                                tr.error('e', "missing binding "+oldSymbol);
                            else {
                                pendingSymbols[npending] = newSymbol;
                                pendingDecls[npending] = oldValue;
                                npending++;
                            }
                        }
                        else
                            entryLen = -1;
                    }
                    if (entryLen != 2)
                        tr.error('e', "entry is not a pair of names");
                    tr.popPositionOf(save1);
                    lst = pair.getCdr();
                }
                // Re-insert renamed bindings only after all removals, so swaps work.
                for (int i = 0; i < npending; i++) {
                    Symbol newSymbol = pendingSymbols[i];
                    Expression decl = pendingDecls[i];
                    if (decls.put(newSymbol, decl) != null)
                        tr.error('e', "duplicate binding for "+newSymbol);
                }
                break;
            case 'P': // 'prefix; list has the form: (name-prefix)
                nmap = new LinkedHashMap<Symbol,Expression>();
                if (listLength != 1
                    || ! (((Pair) list).getCar() instanceof SimpleSymbol))
                    tr.error('e', "bad syntax for prefix import specifier");
                else {
                    String prefix = ((SimpleSymbol) ((Pair) list).getCar()).getName();
                    for (Map.Entry<Symbol,Expression> entry : decls.entrySet()) {
                        Symbol aname = entry.getKey();
                        Expression old = entry.getValue();
                        Symbol nname = Symbol.valueOf(prefix+aname);
                        nmap.put(nname, old);
                    }
                }
                break;
            }
            if (chain != null)
                nmap = chain.map(nmap, tr);
            return nmap;
        }
    }

    /** Check if library (in r7rs import syntax) exists.
     * @return if library exists: class name of (existing) library class,
     * or the special BUILTIN value; otherwise null.
     */
    public String libraryExists(Object list, Translator tr) {
        String lname = module_name.listToModuleName(list, tr);
        lname = checkSrfi(lname, tr);
        if (lname == BUILTIN)
            return lname;
        int classPrefixPathLength = classPrefixPath.length;
        for (int i = 0; i < classPrefixPathLength; i++) {
            String className = classPrefixPath[i] + lname;
            try {
                ObjectType.getContextClass(className);
                return className;
            }
            catch (Exception ex) {
                continue;
            }
        }
        return null;
    }

    /** Per-thread import search path; see getImportSearchPath. */
    public static final ThreadLocal<List<CharSequence>> searchPath
        = new InheritableThreadLocal<List<CharSequence>>();

    public static List<CharSequence> getImportSearchPath() {
        return Include.getSearchPath(searchPath, "kawa.import.path", ".");
    }

    public static final SimpleSymbol classSymbol = Symbol.valueOf("class");
    public static final SimpleSymbol exceptSymbol = Symbol.valueOf("except");
    public static final SimpleSymbol librarySymbol = Symbol.valueOf("library");
    public static final SimpleSymbol onlySymbol = Symbol.valueOf("only");
    public static final SimpleSymbol prefixSymbol = Symbol.valueOf("prefix");
    public static final SimpleSymbol renameSymbol = Symbol.valueOf("rename");
}
// Copyright 2018 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.lcovmerger; import com.google.common.annotations.VisibleForTesting; import java.util.Collection; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.TreeMap; import java.util.stream.Collectors; import java.util.stream.Stream; /* * Stores coverage information for a specific source file. */ class SourceFileCoverage { private final String sourceFileName; private final TreeMap<String, Integer> lineNumbers; // function name to line numbers private final TreeMap<String, Integer> functionsExecution; // function name to execution count private final TreeMap<Integer, BranchCoverage> branches; // line number to branch private final TreeMap<Integer, LineCoverage> lines; // line number to line execution SourceFileCoverage(String sourcefile) { this.sourceFileName = sourcefile; this.functionsExecution = new TreeMap<>(); this.lineNumbers = new TreeMap<>(); this.lines = new TreeMap<>(); this.branches = new TreeMap<>(); } SourceFileCoverage(SourceFileCoverage other) { this.sourceFileName = other.sourceFileName; this.functionsExecution = new TreeMap<>(); this.lineNumbers = new TreeMap<>(); this.lines = new TreeMap<>(); this.branches = new TreeMap<>(); this.lineNumbers.putAll(other.lineNumbers); this.functionsExecution.putAll(other.functionsExecution); this.branches.putAll(other.branches); 
this.lines.putAll(other.lines); } /* * Returns the merged functions found in the two given {@code SourceFileCoverage}s. */ @VisibleForTesting static TreeMap<String, Integer> mergeLineNumbers(SourceFileCoverage s1, SourceFileCoverage s2) { TreeMap<String, Integer> merged = new TreeMap<>(); merged.putAll(s1.lineNumbers); merged.putAll(s2.lineNumbers); return merged; } /* * * Returns the merged execution count found in the two given {@code SourceFileCoverage}s. */ @VisibleForTesting static TreeMap<String, Integer> mergeFunctionsExecution( SourceFileCoverage s1, SourceFileCoverage s2) { return Stream.of( s1.functionsExecution, s2.functionsExecution) .map(Map::entrySet) .flatMap(Collection::stream) .collect( Collectors.toMap( Map.Entry::getKey, Map.Entry::getValue, Integer::sum, TreeMap::new )); } /* * * Returns the merged branches found in the two given {@code SourceFileCoverage}s. */ @VisibleForTesting static TreeMap<Integer, BranchCoverage> mergeBranches( SourceFileCoverage s1, SourceFileCoverage s2) { return Stream.of(s1.branches, s2.branches) .map(Map::entrySet) .flatMap(Collection::stream) .collect( Collectors.toMap( Map.Entry::getKey, Map.Entry::getValue, BranchCoverage::merge, TreeMap::new ) ); } static int getNumberOfBranchesHit(SourceFileCoverage sourceFileCoverage) { return (int) sourceFileCoverage.branches.entrySet().stream() .filter(branch -> branch.getValue().wasExecuted()) .count(); } /* * Returns the merged line execution found in the two given {@code SourceFileCoverage}s. 
*/ @VisibleForTesting static TreeMap<Integer, LineCoverage> mergeLines( SourceFileCoverage s1, SourceFileCoverage s2) { return Stream.of(s1.lines, s2.lines) .map(Map::entrySet) .flatMap(Collection::stream) .collect( Collectors.toMap( Map.Entry::getKey, Map.Entry::getValue, LineCoverage::merge, TreeMap::new ) ); } private static int getNumberOfExecutedLines(SourceFileCoverage sourceFileCoverage) { return (int) sourceFileCoverage.lines.entrySet().stream() .filter(line -> line.getValue().executionCount() > 0) .count(); } /** * Merges all the fields of {@code other} with the current {@link SourceFileCoverage} into a new * {@link SourceFileCoverage} * * Assumes both the current and the given {@link SourceFileCoverage} have the same * {@code sourceFileName}. * * @return a new {@link SourceFileCoverage} that contains the merged coverage. */ static SourceFileCoverage merge(SourceFileCoverage source1, SourceFileCoverage source2) { assert source1.sourceFileName.equals(source2.sourceFileName); SourceFileCoverage merged = new SourceFileCoverage(source2.sourceFileName); merged.addAllLineNumbers(mergeLineNumbers(source1, source2)); merged.addAllFunctionsExecution(mergeFunctionsExecution(source1, source2)); merged.addAllBranches(mergeBranches(source1, source2)); merged.addAllLines(mergeLines(source1, source2)); return merged; } String sourceFileName() { return sourceFileName; } int nrFunctionsFound() { return functionsExecution.size(); } int nrFunctionsHit() { return (int) functionsExecution.entrySet().stream() .filter(function -> function.getValue() > 0) .count(); } int nrBranchesFound() { return branches.size(); } int nrBranchesHit() { return getNumberOfBranchesHit(this); } int nrOfLinesWithNonZeroExecution() { return getNumberOfExecutedLines(this); } int nrOfInstrumentedLines() { return this.lines.size(); } Collection<LineCoverage> getAllLineExecution() { return lines.values(); } @VisibleForTesting TreeMap<String, Integer> getLineNumbers() { return lineNumbers; } 
Set<Entry<String, Integer>> getAllLineNumbers() { return lineNumbers.entrySet(); } @VisibleForTesting TreeMap<String, Integer> getFunctionsExecution() { return functionsExecution; } Set<Entry<String, Integer>> getAllExecutionCount() { return functionsExecution.entrySet(); } Collection<BranchCoverage> getAllBranches() { return branches.values(); } @VisibleForTesting Map<Integer, LineCoverage> getLines() { return lines; } void addLineNumber(String functionName, Integer lineNumber) { this.lineNumbers.put(functionName, lineNumber); } void addAllLineNumbers(TreeMap<String, Integer> lineNumber) { this.lineNumbers.putAll(lineNumber); } void addFunctionExecution(String functionName, Integer executionCount) { this.functionsExecution.put(functionName, executionCount); } void addAllFunctionsExecution(TreeMap<String, Integer> functionsExecution) { this.functionsExecution.putAll(functionsExecution); } void addBranch(Integer lineNumber, BranchCoverage branch) { if (this.branches.get(lineNumber) != null) { this.branches.put(lineNumber, BranchCoverage.merge(this.branches.get(lineNumber), branch)); return; } this.branches.put(lineNumber, branch); } void addAllBranches(TreeMap<Integer, BranchCoverage> branches) { this.branches.putAll(branches); } void addLine(Integer lineNumber, LineCoverage line) { if (this.lines.get(lineNumber) != null) { this.lines.put(lineNumber, LineCoverage.merge(line, this.lines.get(lineNumber))); return; } this.lines.put(lineNumber, line); } void addAllLines(TreeMap<Integer, LineCoverage> lines) { this.lines.putAll(lines); } }
package com.asi.service.lookup; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.RequestHeader; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; import com.asi.core.repo.lookup.LookupValuesRepo; import com.asi.ext.api.util.ApplicationConstants; import com.asi.service.lookup.vo.CategoriesList; import com.asi.service.lookup.vo.ThemesList; @RestController @RequestMapping("lookup") public class LookupService { @Autowired private LookupValuesRepo lookupValueRepository; @RequestMapping(value = "categoriesList", headers="content-type=application/json, application/xml" ,produces={"application/xml", "application/json"} ) public ResponseEntity<CategoriesList> getcategoriesList() { CategoriesList categoriesList = lookupValueRepository.getAllCategories(); return new ResponseEntity<CategoriesList> (categoriesList, null, HttpStatus.OK); } @RequestMapping(value = "themes", headers="content-type=application/json, application/xml" ,produces={"application/xml", "application/json"} ) public ResponseEntity<ThemesList> getThemesList() { ThemesList themesList = lookupValueRepository.getAllThemes(); return new ResponseEntity<ThemesList> (themesList, null, HttpStatus.OK); } @RequestMapping(value = "colors", headers="content-type=application/json, application/xml" ,produces={"application/xml", "application/json"} ) public ResponseEntity<ColorsList> getColorsList() { ColorsList colorsList = lookupValueRepository.getAllColors(); return new ResponseEntity<ColorsList> (colorsList, null, HttpStatus.OK); } @RequestMapping(value = "materials", headers="content-type=application/json, application/xml" ,produces={"application/xml", "application/json"} ) public ResponseEntity<MaterialsList> getMaterialsList() { MaterialsList materialsList = 
lookupValueRepository.getAllMaterials(); return new ResponseEntity<MaterialsList> (materialsList, null, HttpStatus.OK); } @RequestMapping(value = "shapes", headers="content-type=application/json, application/xml" ,produces={"application/xml", "application/json"} ) public ResponseEntity<ShapesList> getShapesList() { ShapesList shapesList = lookupValueRepository.getAllShapes(); return new ResponseEntity<ShapesList> (shapesList, null, HttpStatus.OK); } @RequestMapping(value = "packages", headers="content-type=application/json, application/xml" ,produces={"application/xml", "application/json"} ) public ResponseEntity<PackagesList> getPackagingList() { PackagesList packagesList = lookupValueRepository.getAllPackages(); return new ResponseEntity<PackagesList> (packagesList, null, HttpStatus.OK); } @RequestMapping(value = "safetywarnings", headers="content-type=application/json, application/xml" ,produces={"application/xml", "application/json"} ) public ResponseEntity<SafetyWarningsList> getSafetyWarnings() { SafetyWarningsList safetyList = lookupValueRepository.getSafetyWarningsList(); return new ResponseEntity<SafetyWarningsList> (safetyList, null, HttpStatus.OK); } @RequestMapping(value = "imprintmethods", headers="content-type=application/json, application/xml" ,produces={"application/xml", "application/json"} ) public ResponseEntity<ImprintMethodsList> getImprintMethods() { ImprintMethodsList imprintMethodList = lookupValueRepository.getImprintMethodsList(); return new ResponseEntity<ImprintMethodsList> (imprintMethodList, null, HttpStatus.OK); } @RequestMapping(value = "artworks", headers="content-type=application/json, application/xml" ,produces={"application/xml", "application/json"} ) public ResponseEntity<ArtworksList> getArtworks() { ArtworksList artworksList = lookupValueRepository.getArtworksList(); return new ResponseEntity<ArtworksList> (artworksList, null, HttpStatus.OK); } @RequestMapping(value = "compliances", headers="content-type=application/json, 
application/xml" ,produces={"application/xml", "application/json"} ) public ResponseEntity<ComplianceList> getComplianceCerts() { ComplianceList complianceList = lookupValueRepository.getComplianceList(); return new ResponseEntity<ComplianceList> (complianceList, null, HttpStatus.OK); } @RequestMapping(value = "discountrates", headers="content-type=application/json, application/xml" ,produces={"application/xml", "application/json"} ) public ResponseEntity<DiscountRatesList> getDiscountRates() { DiscountRatesList discountList = lookupValueRepository.getDiscountList(); return new ResponseEntity<DiscountRatesList> (discountList, null, HttpStatus.OK); } @RequestMapping(value = "currencies", headers="content-type=application/json, application/xml" ,produces={"application/xml", "application/json"} ) public ResponseEntity<CurrencyList> getCurrencyNames() { CurrencyList currencyList = lookupValueRepository.getCurrenciesList(); return new ResponseEntity<CurrencyList> (currencyList, null, HttpStatus.OK); } @RequestMapping(value = "criteriacodes", headers="content-type=application/json, application/xml" ,produces={"application/xml", "application/json"} ) public ResponseEntity<CriteriaCodesList> getCriteriaCodesList() { CriteriaCodesList criteriaCodesList = lookupValueRepository.getCriteriaCodesList(); return new ResponseEntity<CriteriaCodesList> (criteriaCodesList, null, HttpStatus.OK); } @RequestMapping(value = "basepricecriterias", headers="content-type=application/json, application/xml" ,produces={"application/xml", "application/json"} ) public ResponseEntity<CriteriaCodesList> getBaseCriteriaCodesList() { CriteriaCodesList criteriaCodesList = lookupValueRepository.getCriteriaCodesListByType("baseprice"); return new ResponseEntity<CriteriaCodesList> (criteriaCodesList, null, HttpStatus.OK); } @RequestMapping(value = "upchargecriterias", headers="content-type=application/json, application/xml" ,produces={"application/xml", "application/json"} ) public 
ResponseEntity<CriteriaCodesList> getUpchargeCriteriaCodesList() { CriteriaCodesList criteriaCodesList = lookupValueRepository.getCriteriaCodesListByType("upcharge"); return new ResponseEntity<CriteriaCodesList> (criteriaCodesList, null, HttpStatus.OK); } @RequestMapping(value = "productnumbercriterias", headers="content-type=application/json, application/xml" ,produces={"application/xml", "application/json"} ) public ResponseEntity<CriteriaCodesList> getProductNumberCriteriaCodesList() { CriteriaCodesList criteriaCodesList = lookupValueRepository.getCriteriaCodesListByType("productnumber"); return new ResponseEntity<CriteriaCodesList> (criteriaCodesList, null, HttpStatus.OK); } @RequestMapping(value = "availabilitycriterias", headers="content-type=application/json, application/xml" ,produces={"application/xml", "application/json"} ) public ResponseEntity<CriteriaCodesList> getAvailabilityCriteriaCodesList() { CriteriaCodesList criteriaCodesList = lookupValueRepository.getCriteriaCodesList(); return new ResponseEntity<CriteriaCodesList> (criteriaCodesList, null, HttpStatus.OK); } @RequestMapping(value = "upchargetypes", headers="content-type=application/json, application/xml" ,produces={"application/xml", "application/json"} ) public ResponseEntity<UpchargeTypesList> getUpchargeTypesList() { UpchargeTypesList upchargeTypesList = lookupValueRepository.getUpchargeTypes(); return new ResponseEntity<UpchargeTypesList> (upchargeTypesList, null, HttpStatus.OK); } @RequestMapping(value = "upchargelevels", headers="content-type=application/json, application/xml" ,produces={"application/xml", "application/json"} ) public ResponseEntity<UpchargeLevelsList> getUpchargeLevelsList() { UpchargeLevelsList upchargeLevelsList = lookupValueRepository.getUpchargeLevels(); return new ResponseEntity<UpchargeLevelsList> (upchargeLevelsList, null, HttpStatus.OK); } @RequestMapping(value = "pricemodifiers", headers="content-type=application/json, application/xml" 
,produces={"application/xml", "application/json"} ) public ResponseEntity<PriceModifiers> getPriceModifiersList() { PriceModifiers priceModifiers = lookupValueRepository.getPriceModifiers(); return new ResponseEntity<PriceModifiers> (priceModifiers, null, HttpStatus.OK); } @RequestMapping(value = "sizes/apparelbra", headers="content-type=application/json, application/xml" ,produces={"application/xml", "application/json"} ) public ResponseEntity<SizeUnits> getApperalBrasList() { SizeUnits sizeUnits = new SizeUnits(); sizeUnits.setSizes(lookupValueRepository.getSizeUnitsInfo(ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_BRA,ApplicationConstants.CONST_STRING_UNIT)); return new ResponseEntity<SizeUnits> (sizeUnits, null, HttpStatus.OK); } @RequestMapping(value = "sizes/dressshirt", headers="content-type=application/json, application/xml" ,produces={"application/xml", "application/json"} ) public ResponseEntity<SizeUnits> getDressShirtsList() { SizeUnits sizeUnits = new SizeUnits(); sizeUnits.setSizes(lookupValueRepository.getSizeUnitsInfo(ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_DRS_SHRT_SIZE,ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_DRS_SHRT_SIZE_NECK)); //List<String> dressShirtUnits=sizeUnits.getValues(); // sizeUnits=lookupValueRepository.getSizeUnitsInfo(dressShirtUnits,ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_DRS_SHRT_SIZE,ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_DRS_SHRT_SIZE_SLVS); return new ResponseEntity<SizeUnits> (sizeUnits, null, HttpStatus.OK); } @RequestMapping(value = "sizes/hoiseryuniform", headers="content-type=application/json, application/xml" ,produces={"application/xml", "application/json"} ) public ResponseEntity<SizeUnits> getHoiseryUniformList() { SizeUnits sizeUnits = new SizeUnits(); sizeUnits.setSizes(lookupValueRepository.getSizeUnitsInfo(ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_HSR_UNIFORM,ApplicationConstants.CONST_STRING_UNIT)); return new ResponseEntity<SizeUnits> (sizeUnits, null, HttpStatus.OK); } 
@RequestMapping(value = "sizes/infanttoddler", headers="content-type=application/json, application/xml" ,produces={"application/xml", "application/json"} ) public ResponseEntity<SizeUnits> getInfantToddlerList() { SizeUnits sizeUnits = new SizeUnits(); sizeUnits.setSizes(lookupValueRepository.getSizeUnitsInfo(ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_INF_TLDR,ApplicationConstants.CONST_STRING_UNIT)); return new ResponseEntity<SizeUnits> (sizeUnits, null, HttpStatus.OK); } @RequestMapping(value = "sizes/apparelpants", headers="content-type=application/json, application/xml" ,produces={"application/xml", "application/json"} ) public ResponseEntity<SizeUnits> getApparelPantsList() { SizeUnits sizeUnits = new SizeUnits(); sizeUnits.setSizes(lookupValueRepository.getSizeUnitsInfo(ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_PANT_SIZE,ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_PANT_SIZE_INSEAM)); return new ResponseEntity<SizeUnits> (sizeUnits, null, HttpStatus.OK); } @RequestMapping(value = "sizes/standardnumbered", headers="content-type=application/json, application/xml" ,produces={"application/xml", "application/json"} ) public ResponseEntity<SizeUnits> getStandardNumberedList() { SizeUnits sizeUnits = new SizeUnits(); sizeUnits.setSizes(lookupValueRepository.getSizeUnitsInfo(ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_STD_NUM,ApplicationConstants.CONST_STRING_UNIT)); return new ResponseEntity<SizeUnits> (sizeUnits, null, HttpStatus.OK); } @RequestMapping(value = "sizes/volumeweight", headers="content-type=application/json, application/xml" ,produces={"application/xml", "application/json"} ) public ResponseEntity<SizeUnits> getVolumeWeightList() { SizeUnits sizeUnits = new SizeUnits(); sizeUnits.setSizes(lookupValueRepository.getSizeUnitsInfo(ApplicationConstants.CONST_SIZE_GROUP_SHIPPING_VOL_WEI,ApplicationConstants.CONST_STRING_UNIT)); return new ResponseEntity<SizeUnits> (sizeUnits, null, HttpStatus.OK); } @RequestMapping(value = "sizes/capacity", 
headers="content-type=application/json, application/xml" ,produces={"application/xml", "application/json"} ) public ResponseEntity<SizeUnits> getCapacityList() { SizeUnits sizeUnits = new SizeUnits(); sizeUnits.setSizes(lookupValueRepository.getSizeUnitsInfo(ApplicationConstants.CONST_SIZE_GROUP_CAPACITY,ApplicationConstants.CONST_STRING_UNIT)); return new ResponseEntity<SizeUnits> (sizeUnits, null, HttpStatus.OK); } /*@RequestMapping(value = "sizes/dimension/attributes", headers="content-type=application/json, application/xml" ,produces={"application/xml", "application/json"} ) public ResponseEntity<SizeUnits> getDimensionAttributesList() { SizeUnits sizeUnits = null;//lookupValueRepository.getSizeAttributesInfo(null,ApplicationConstants.CONST_SIZE_GROUP_DIMENSION,ApplicationConstants.CONST_STRING_UNIT); return new ResponseEntity<SizeUnits> (sizeUnits, null, HttpStatus.OK); }*/ @RequestMapping(value = "sizes/dimension/units", headers="content-type=application/json, application/xml" ,produces={"application/xml", "application/json"} ) public ResponseEntity<SizeUnits> getDimensionUnitsList() { SizeUnits sizeUnits = lookupValueRepository.getSizeUnitsOfMeasurements(null,ApplicationConstants.CONST_SIZE_GROUP_DIMENSION,ApplicationConstants.CONST_STRING_UNIT); return new ResponseEntity<SizeUnits> (sizeUnits, null, HttpStatus.OK); } @RequestMapping(value = "sizes/capacity/units", headers="content-type=application/json, application/xml" ,produces={"application/xml", "application/json"} ) public ResponseEntity<SizeUnits> getCapacityUnitsList() { SizeUnits sizeUnits = lookupValueRepository.getSizeUnitsOfMeasurements(null,ApplicationConstants.CONST_SIZE_GROUP_CAPACITY,ApplicationConstants.CONST_STRING_UNIT); return new ResponseEntity<SizeUnits> (sizeUnits, null, HttpStatus.OK); } @RequestMapping(value = "sizes/volumeweight/units", headers="content-type=application/json, application/xml" ,produces={"application/xml", "application/json"} ) public ResponseEntity<SizeUnits> 
getVolumeWeightUnitsList() { SizeUnits sizeUnits = lookupValueRepository.getSizeUnitsOfMeasurements(null,ApplicationConstants.CONST_SIZE_GROUP_SHIPPING_VOL_WEI,ApplicationConstants.CONST_STRING_UNIT); return new ResponseEntity<SizeUnits> (sizeUnits, null, HttpStatus.OK); } @RequestMapping(value = "sizes/other", headers="content-type=application/json, application/xml" ,produces={"application/xml", "application/json"} ) public ResponseEntity<SizeUnits> getOtherSizesList() { SizeUnits sizeUnits = new SizeUnits(); sizeUnits.setSizes(lookupValueRepository.getSizeUnitsInfo(ApplicationConstants.CONST_SIZE_OTHER_CODE,ApplicationConstants.CONST_STRING_UNIT)); return new ResponseEntity<SizeUnits> (sizeUnits, null, HttpStatus.OK); } @RequestMapping(value = "mediacitations", headers="content-type=application/json, application/xml" ,produces={"application/xml", "application/json"} ) public ResponseEntity<MediaCitation> getMediaCitationsList(@RequestHeader("AuthToken") String authToken) { MediaCitation mediaCitation = new MediaCitation(); mediaCitation.setMediaCitation(lookupValueRepository.getMediaCitationsList(authToken)); return new ResponseEntity<MediaCitation> (mediaCitation, null, HttpStatus.OK); } @RequestMapping(value = "selectedlinenames", headers="content-type=application/json, application/xml" ,produces={"application/xml", "application/json"} ) public ResponseEntity<LineNames> getLineNamesList(@RequestHeader("AuthToken") String authToken) { LineNames lineNames = new LineNames(); lineNames.setLineNames(lookupValueRepository.getLineNamesList(authToken)); return new ResponseEntity<LineNames> (lineNames, null, HttpStatus.OK); } @RequestMapping(value = "fobpoints", headers="content-type=application/json, application/xml" ,produces={"application/xml", "application/json"} ) public ResponseEntity<FobPoints> getFobPointsList(@RequestHeader("AuthToken") String authToken) { FobPoints fobPoints = new FobPoints(); 
fobPoints.setFobpoints(lookupValueRepository.getFobPointsList(authToken)); return new ResponseEntity<FobPoints> (fobPoints, null, HttpStatus.OK); } // }
package gov.cdc.epiinfo.analysis; import android.app.AlertDialog; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.database.Cursor; import android.net.Uri; import android.os.AsyncTask; import android.os.Build; import android.os.Environment; import androidx.core.content.FileProvider; import androidx.core.view.MenuItemCompat; import android.view.MenuItem; import android.widget.Toast; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.Calendar; import java.util.Date; import gov.cdc.epiinfo.EpiDbHelper; import gov.cdc.epiinfo.FormMetadata; import gov.cdc.epiinfo.R; import gov.cdc.epiinfo.etc.ShareProvider; public class CsvFileGenerator { private FileWriter fileWriter; private Context ctx; private EpiDbHelper mDbHelper; private FormMetadata formMetadata; private String viewName; private String nowString; private MenuItem menuItem; public void Generate(Context ctx, EpiDbHelper mDbHelper, FormMetadata formMetadata, String viewName, MenuItem menuItem) { this.ctx = ctx; this.mDbHelper = mDbHelper; this.formMetadata = formMetadata; this.viewName = viewName; this.menuItem = menuItem; Calendar cal = Calendar.getInstance(); nowString = "_" + cal.get(Calendar.YEAR) + String.format("%02d", cal.get(Calendar.MONTH) + 1) + String.format("%02d", cal.get(Calendar.DATE)) + String.format("%02d", cal.get(Calendar.HOUR)) + String.format("%02d", cal.get(Calendar.MINUTE)); Toast.makeText(ctx, ctx.getString(R.string.please_wait), Toast.LENGTH_LONG).show(); new Generator().executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR); } private class Generator extends AsyncTask<Void,Void,Boolean> { @Override protected void onPostExecute(Boolean success) { if (success) { File file = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOWNLOADS) + "/EpiInfo/Temp/" + viewName + nowString + ".csv"); if 
(file.exists()) { if (menuItem == null) { Uri path; if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.KITKAT) { path = Uri.fromFile(file); } else { path = FileProvider.getUriForFile(ctx, ctx.getString(R.string.file_provider_authority), file); } Intent fileIntent = new Intent(Intent.ACTION_VIEW); fileIntent.setFlags(Intent.FLAG_GRANT_READ_URI_PERMISSION); fileIntent.setDataAndType(path, "text/csv"); try { ctx.startActivity(fileIntent); } catch (Exception e) { AlertDialog.Builder builder = new AlertDialog.Builder(ctx); builder.setMessage(ctx.getString(R.string.analysis_no_sheets)) .setCancelable(false) .setPositiveButton(ctx.getString(R.string.ok), new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { dialog.cancel(); } }); builder.create(); builder.show(); } } else { ShareProvider shareActionProvider = new ShareProvider(ctx); Intent shareIntent = new Intent(Intent.ACTION_SEND); shareIntent.setType("text/csv"); Uri path; if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.KITKAT) { path = Uri.fromFile(file); } else { path = FileProvider.getUriForFile(ctx, ctx.getString(R.string.file_provider_authority), file); } shareIntent.putExtra(Intent.EXTRA_STREAM, path); shareActionProvider.setShareIntent(shareIntent); MenuItemCompat.setActionProvider(menuItem, shareActionProvider); } } } } @Override protected Boolean doInBackground(Void... 
params) { boolean success = false; try { File path = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOWNLOADS); path.mkdirs(); File tempPath = new File(path, "/EpiInfo/Temp/"); tempPath.mkdirs(); fileWriter = new FileWriter(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOWNLOADS)+"/EpiInfo/Temp/" + viewName + nowString + ".csv"); Cursor c = mDbHelper.fetchWhere_all(null); if (c.moveToFirst()) { for (int x=0;x<formMetadata.DataFields.size();x++) { fileWriter.append(formMetadata.DataFields.get(x).getName()); fileWriter.append(","); } fileWriter.append("globalRecordId"); if (mDbHelper.isRelatedTable) { fileWriter.append(",FKEY"); } fileWriter.append("\n"); do { for (int x=0;x<formMetadata.DataFields.size();x++) { if (formMetadata.DataFields.get(x).getType().equals("11") || formMetadata.DataFields.get(x).getType().equals("12") || formMetadata.DataFields.get(x).getType().equals("17") || formMetadata.DataFields.get(x).getType().equals("18") || formMetadata.DataFields.get(x).getType().equals("19")) { fileWriter.append(c.getInt(c.getColumnIndexOrThrow(formMetadata.DataFields.get(x).getName())) + ""); } else if (formMetadata.DataFields.get(x).getType().equals("5")) { if (c.getDouble(c.getColumnIndexOrThrow(formMetadata.DataFields.get(x).getName())) < Double.POSITIVE_INFINITY) { fileWriter.append("\""); fileWriter.append(c.getDouble(c.getColumnIndexOrThrow(formMetadata.DataFields.get(x).getName())) + ""); fileWriter.append("\""); } } else if (formMetadata.DataFields.get(x).getType().equals("7")) { if (!c.getString(c.getColumnIndexOrThrow(formMetadata.DataFields.get(x).getName())).equals("")) { String jsonDate = ""; try { DateFormat dateFormat = new SimpleDateFormat("M/d/yyyy h:mm:ss a"); Date date = DateFormat.getDateInstance().parse(c.getString(c.getColumnIndexOrThrow(formMetadata.DataFields.get(x).getName()))); jsonDate = dateFormat.format(date); } catch (Exception ex) { jsonDate = 
c.getString(c.getColumnIndexOrThrow(formMetadata.DataFields.get(x).getName())); } fileWriter.append("\""); fileWriter.append(jsonDate); fileWriter.append("\""); } } else if (formMetadata.DataFields.get(x).getType().equals("10")) { if (c.getInt(c.getColumnIndexOrThrow(formMetadata.DataFields.get(x).getName())) == 1) { fileWriter.append("true"); } else { fileWriter.append("false"); } } else { fileWriter.append("\""); fileWriter.append(c.getString(c.getColumnIndexOrThrow(formMetadata.DataFields.get(x).getName()))); fileWriter.append("\""); } fileWriter.append(","); } String guidValue = c.getString(c.getColumnIndexOrThrow("globalRecordId")); fileWriter.append(guidValue); if (mDbHelper.isRelatedTable) { fileWriter.append(","); fileWriter.append(c.getString(c.getColumnIndexOrThrow("FKEY"))); } fileWriter.append("\n"); } while (c.moveToNext()); } success = true; } catch (Exception ex) { success = false; } finally { try { fileWriter.flush(); fileWriter.close(); } catch (IOException e) { } } return success; } } }
/******************************************************************************* * Caleydo - Visualization for Molecular Biology - http://caleydo.org * Copyright (c) The Caleydo Team. All rights reserved. * Licensed under the new BSD license, available at http://caleydo.org/license ******************************************************************************/ package org.caleydo.core.util.system; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.MalformedURLException; import java.net.URISyntaxException; import java.net.URL; import java.net.URLConnection; import java.nio.charset.Charset; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardCopyOption; import java.util.Set; import org.caleydo.core.internal.Activator; import org.caleydo.core.manager.GeneralManager; import org.caleydo.core.util.logging.Logger; import org.eclipse.core.runtime.IProgressMonitor; import org.eclipse.jface.operation.IRunnableWithProgress; import com.google.common.collect.BiMap; import com.google.common.collect.HashBiMap; /** * helper class for handling remote files and local caches of them * * @author Samuel Gratzl * */ public final class RemoteFile implements IRunnableWithProgress { private static final Logger log = Logger.create(RemoteFile.class); private static final String PATTERN = "Downloading: %s (%d MB)"; private static final int BUFFER_SIZE = 4096; private static final int WORK_TRIGGER_FREQUENCY = 64; private static final int CONNECT_TIMEOUT = 10 * 1000; // [ms] private static final boolean WORK_OFFLINE = Boolean.getBoolean("org.caleydo.cache.offline"); private final URL url; private final File file; private boolean successful = true; private Exception caught = null; private RemoteFile(URL url, String localSuffix) { this.url = url; File f = 
RemoteFileCache.inCache(url); if (f == null) f = RemoteFileCache.reserve(url, localSuffix); this.file = f; } /** * @return the url, see {@link #url} */ public URL getUrl() { return url; } /** * factory for creating a {@link RemoteFile} * * @param url * @return */ public static RemoteFile of(URL url) { return new RemoteFile(url, ""); } public static RemoteFile of(URL url, String localSuffix) { return new RemoteFile(url, localSuffix); } /** * checks whether the file is already in the cache * * @param checkModificationDate * check also whether the remote and local modification date matches * @return */ public boolean inCache(boolean checkModificationDate) { if (WORK_OFFLINE) { return file.exists(); } if (!file.exists()) return false; if (!checkModificationDate) return true; long have = file.lastModified(); Activator.updateProxySettings(url); URLConnection connection; try { connection = url.openConnection(); connection.setConnectTimeout(CONNECT_TIMEOUT); connection.connect(); long expected = connection.getLastModified(); return expected == have; } catch (IOException e) { log.warn("can't check modification date of: " + url, e); return true; // assume correct as we can't verify it } } public boolean delete() { if (file.exists()) return file.delete(); return true; } /** * @return the file, see {@link #file} */ public File getFile() { return file; } /** * the exception caught during {@link #run(IProgressMonitor)} or null if none occurred * * @return the caught, see {@link #caught} */ public Exception getCaught() { return caught; } public File getOrLoad(boolean checkModificationDate, IProgressMonitor monitor) { return getOrLoad(checkModificationDate, monitor, PATTERN); } public File getOrLoad(boolean checkModificationDate, IProgressMonitor monitor, String pattern) { if (!inCache(checkModificationDate)) { if (WORK_OFFLINE && !file.exists()) { // can't load can't download return null; } delete(); run(monitor, pattern); if (!file.exists()) return null; return file; } return 
file; } @Override public void run(IProgressMonitor monitor) { run(monitor, PATTERN); } public void run(IProgressMonitor monitor, String pattern) { if (inCache(false)) { monitor.done(); return; } if (WORK_OFFLINE) { monitor.done(); successful = false; return; } Activator.updateProxySettings(url); successful = false; caught = null; File tmp = new File(file.getAbsolutePath() + "-tmp"); tmp.getParentFile().mkdirs(); long lastModified = 0; try (OutputStream out = new BufferedOutputStream(new FileOutputStream(tmp))) { URLConnection connection = url.openConnection(); connection.connect(); int length = connection.getContentLength(); lastModified = connection.getLastModified(); if (length < 0) length = IProgressMonitor.UNKNOWN; monitor.beginTask(String.format(pattern, url, length / 1024 / 1024), length); try (InputStream in = new BufferedInputStream(connection.getInputStream())) { byte[] data = new byte[BUFFER_SIZE]; int count = 0; int acc = 0; int i = 0; while ((count = in.read(data)) != -1) { acc += count; out.write(data, 0, count); if (i++ >= WORK_TRIGGER_FREQUENCY) { i -= WORK_TRIGGER_FREQUENCY; if (monitor.isCanceled()) { break; } monitor.worked(acc); acc = 0; } } } if (!monitor.isCanceled()) { monitor.done(); successful = true; } } catch (IOException e) { log.error("can't download file: " + url, e); caught = e; } if (successful) { try { Files.move(tmp.toPath(), file.toPath(), StandardCopyOption.REPLACE_EXISTING); if (lastModified != 0) file.setLastModified(lastModified); RemoteFileCache.putToCache(url, file); } catch (IOException e) { log.error("can't move file: " + url, e); caught = e; } } else { tmp.delete(); } } public static void main(String[] args) throws MalformedURLException, URISyntaxException { File f = new File(RemoteFileCache.cacheDir, "asdfas/tests.txt"); System.out.println(f.getAbsolutePath()); System.out.println(f.getName()); dump("https://data.icg.tugraz.at/caleydo/ download/2.2/caleydo_2.2_linux_x86-64.deb"); 
dump("https://data.icg.tugraz.at/caleydo/download/2.2/tmp.pnp?adsfa&bsbad"); } /** * @param string * @throws MalformedURLException * @throws URISyntaxException */ private static void dump(String string) throws MalformedURLException, URISyntaxException { URL url = new URL(string); System.out.println("ext:" + url.toExternalForm()); System.out.println("host: " + url.getHost()); System.out.println("path: " + url.getPath()); System.out.println("file: " + url.getFile()); } private static final class RemoteFileCache { private static final File cacheDir = new File(GeneralManager.CALEYDO_HOME_PATH, "cache"); private static final File cacheContentsFile = new File(cacheDir, "contents.properties"); private static final BiMap<String, String> cacheContents = HashBiMap.create(); static { if (cacheContentsFile.exists()) { try { for (String line : Files.readAllLines(cacheContentsFile.toPath(), Charset.forName("UTF-8"))) { if (line.isEmpty()) continue; String[] url_path = line.split("\t"); cacheContents.put(url_path[0], url_path[1]); } } catch (IOException e) { log.error("can't read " + cacheContentsFile, e); } } } static synchronized void putToCache(URL url, File file) { String relative = toRelative(file); cacheContents.put(url.toExternalForm(), relative); try (FileWriter out = new FileWriter(cacheContentsFile, true)) { out.write(url.toExternalForm()); out.write('\t'); out.write(relative); out.write('\n'); } catch (IOException e) { log.error("can't write " + cacheContentsFile, e); } } private static String toRelative(File file) { Path relative = cacheDir.toPath().relativize(file.toPath()); return relative.toString(); } static synchronized File reserve(URL url, String localSuffix) { String path = url.getPath(); int i = path.lastIndexOf('.'); String suffix = localSuffix; if (i > 0) { suffix = path.substring(i); path = path.substring(0, i); } try {// try whether we can use the path as a file name path new File(path).getCanonicalPath(); } catch (IOException e) { path = "unparseable"; } 
String key = toUnique(path, suffix); cacheContents.put(url.toExternalForm(), key); return new File(cacheDir, key); } /** * converts the given path with the given suffix to a unique derivat that doesn't yet exist * * @param local * @return */ private static String toUnique(String path, String suffix) { Set<String> values = cacheContents.values(); String test = (path + suffix).toLowerCase(); if (!values.contains(test) && !cacheContentsFile.getName().equalsIgnoreCase(suffix)) return path + suffix; int next = 0; do { test = (path + (next++) + suffix).toLowerCase(); } while (values.contains(test) || cacheContentsFile.getName().equalsIgnoreCase(suffix)); return test; } static synchronized File inCache(URL url) { String key = url.toExternalForm(); if (cacheContents.containsKey(key)) { String s = cacheContents.get(key); return new File(cacheDir, s); } return null; } } }
package com.example.testplugin; import android.app.Activity; import android.content.ComponentName; import android.content.Intent; import android.content.ServiceConnection; import android.os.Bundle; import android.os.IBinder; import android.os.Process; import android.os.RemoteException; import android.util.Log; import android.view.View; import android.widget.Button; import android.widget.RemoteViews; public class PluginActivityA extends Activity { private static final String TAG = "PluginActivityA"; private Button mBtn1; private Button mBtn2; private Button mBtn3; private Button mBtn4; private Button mBtn5; private Button mBtn6; private Button mBtn7; private Button mBtn8; private Button mBtn9; private Button mBtn10; private Button mBtn11; private Button mBtn12; private Button mBtn13; private ServiceConnection mConn1 = new ServiceConnection() { @Override public void onServiceConnected(ComponentName name, IBinder service) { Log.d(TAG, "onServiceConnected() mConn1 ComponentName = " + name + ", IBinder = " + service); ITestBinder testString = ITestBinder.Stub.asInterface(service); try { Log.d(TAG, "onServiceConnected() test binder testString.getTestString() = " + testString.getTestString()); } catch (RemoteException e) { e.printStackTrace(); } } @Override public void onServiceDisconnected(ComponentName name) { Log.d(TAG, "onServiceDisconnected() mConn1 ComponentName = " + name); } }; private ServiceConnection mConn2 = new ServiceConnection() { @Override public void onServiceConnected(ComponentName name, IBinder service) { Log.d(TAG, "onServiceConnected() mConn2 ComponentName = " + name + ", IBinder = " + service); } @Override public void onServiceDisconnected(ComponentName name) { Log.d(TAG, "onServiceDisconnected() mConn2 ComponentName = " + name); } }; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_plugin_activity_a); // test RemoteViews RemoteViews remoteViews = new 
RemoteViews(getPackageName(), 1); remoteViews.getLayoutId(); Intent intent = getIntent(); Bundle extras = intent.getExtras(); Log.d(TAG, "onCreate() intent = " + intent + " , extras = " + (extras != null ? extras.keySet() : "null")); mBtn1 = (Button) findViewById(R.id.btn1); mBtn1.setText("start plugin activity main"); mBtn1.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { Intent intent = new Intent(PluginActivityA.this, PluginMainActivity.class); startActivity(intent); } }); mBtn2 = (Button) findViewById(R.id.btn2); mBtn2.setText("start plugin service"); mBtn2.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { Intent intent = new Intent(PluginActivityA.this, PluginService.class); intent.putExtra("plugin_obj", new PluginObject("lavazza")); ComponentName componentName = startService(intent); Log.d(TAG,"startService() return componentName = " + componentName); } }); mBtn3 = (Button) findViewById(R.id.btn3); mBtn3.setText("stop plugin service"); mBtn3.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { Intent intent = new Intent(PluginActivityA.this, PluginService.class); stopService(intent); } }); mBtn4 = (Button) findViewById(R.id.btn4); mBtn4.setText("bind plugin service with conn1"); mBtn4.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { Intent intent = new Intent(PluginActivityA.this, PluginService.class); bindService(intent, mConn1, BIND_AUTO_CREATE); } }); mBtn5 = (Button) findViewById(R.id.btn5); mBtn5.setText("unbind plugin service with conn1"); mBtn5.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { unbindService(mConn1); } }); mBtn6 = (Button) findViewById(R.id.btn6); mBtn6.setText("bind plugin service with conn2"); mBtn6.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { Intent intent = new Intent(PluginActivityA.this, 
PluginService.class); intent.setAction("asdasd"); bindService(intent, mConn2, BIND_AUTO_CREATE); } }); mBtn7 = (Button) findViewById(R.id.btn7); mBtn7.setText("unbind plugin service conn2"); mBtn7.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { unbindService(mConn2); } }); mBtn8 = (Button) findViewById(R.id.btn8); mBtn8.setText("kill process"); mBtn8.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { android.os.Process.killProcess(Process.myPid()); } }); mBtn9 = (Button) findViewById(R.id.btn9); mBtn9.setText("start plugin serviceA"); mBtn9.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { Intent intent = new Intent(PluginActivityA.this, PluginServiceA.class); startService(intent); } }); mBtn10 = (Button) findViewById(R.id.btn10); mBtn10.setText("stop plugin serviceA with service.stopSelf"); mBtn10.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { Intent intent = new Intent(PluginActivityA.this, PluginServiceA.class); intent.setAction("killself"); startService(intent); } }); mBtn11 = (Button) findViewById(R.id.btn11); mBtn11.setText("bing plugin serviceA with conn1"); mBtn11.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { Intent intent = new Intent(PluginActivityA.this, PluginServiceA.class); bindService(intent, mConn1, BIND_AUTO_CREATE); } }); mBtn12 = (Button) findViewById(R.id.btn12); mBtn12.setText("unbind plugin serviceA with conn1"); mBtn12.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { Intent intent = new Intent(PluginActivityA.this, PluginServiceA.class); unbindService(mConn1); } }); mBtn13 = (Button) findViewById(R.id.btn13); mBtn13.setText("send plugin broadcast "); mBtn13.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { sendBroadcast(new Intent(PluginMainActivity.BROADCAST_ACTION_1)); 
sendBroadcast(new Intent(PluginMainActivity.BROADCAST_ACTION_2)); } }); showPluginInfo(); } @Override protected void onResume() { super.onResume(); Log.d(TAG, "onResume() "); } @Override public void onDestroy(){ super.onDestroy(); Log.d(TAG, "onDestroy() "); } private void showPluginInfo() { ClassLoader classLoader = getClassLoader(); Log.d(TAG, "classloader = " + classLoader); Log.d(TAG, "parent classloader = " + classLoader.getParent()); Log.d(TAG, "getApplication() = " + getApplication()); Log.d(TAG, "getPackageName() = " + getPackageName()); } }
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. // Code generated by Microsoft (R) AutoRest Code Generator. package com.azure.resourcemanager.network.fluent.models; import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.network.models.ConnectionMonitorDestination; import com.azure.resourcemanager.network.models.ConnectionMonitorEndpoint; import com.azure.resourcemanager.network.models.ConnectionMonitorOutput; import com.azure.resourcemanager.network.models.ConnectionMonitorSource; import com.azure.resourcemanager.network.models.ConnectionMonitorTestConfiguration; import com.azure.resourcemanager.network.models.ConnectionMonitorTestGroup; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; import java.util.List; import java.util.Map; /** Parameters that define the operation to create a connection monitor. */ @Fluent public final class ConnectionMonitorInner { @JsonIgnore private final ClientLogger logger = new ClientLogger(ConnectionMonitorInner.class); /* * Connection monitor location. */ @JsonProperty(value = "location") private String location; /* * Connection monitor tags. */ @JsonProperty(value = "tags") @JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS) private Map<String, String> tags; /* * Properties of the connection monitor. */ @JsonProperty(value = "properties", required = true) private ConnectionMonitorParameters innerProperties = new ConnectionMonitorParameters(); /** * Get the location property: Connection monitor location. * * @return the location value. */ public String location() { return this.location; } /** * Set the location property: Connection monitor location. * * @param location the location value to set. * @return the ConnectionMonitorInner object itself. 
*/ public ConnectionMonitorInner withLocation(String location) { this.location = location; return this; } /** * Get the tags property: Connection monitor tags. * * @return the tags value. */ public Map<String, String> tags() { return this.tags; } /** * Set the tags property: Connection monitor tags. * * @param tags the tags value to set. * @return the ConnectionMonitorInner object itself. */ public ConnectionMonitorInner withTags(Map<String, String> tags) { this.tags = tags; return this; } /** * Get the innerProperties property: Properties of the connection monitor. * * @return the innerProperties value. */ private ConnectionMonitorParameters innerProperties() { return this.innerProperties; } /** * Get the source property: Describes the source of connection monitor. * * @return the source value. */ public ConnectionMonitorSource source() { return this.innerProperties() == null ? null : this.innerProperties().source(); } /** * Set the source property: Describes the source of connection monitor. * * @param source the source value to set. * @return the ConnectionMonitorInner object itself. */ public ConnectionMonitorInner withSource(ConnectionMonitorSource source) { if (this.innerProperties() == null) { this.innerProperties = new ConnectionMonitorParameters(); } this.innerProperties().withSource(source); return this; } /** * Get the destination property: Describes the destination of connection monitor. * * @return the destination value. */ public ConnectionMonitorDestination destination() { return this.innerProperties() == null ? null : this.innerProperties().destination(); } /** * Set the destination property: Describes the destination of connection monitor. * * @param destination the destination value to set. * @return the ConnectionMonitorInner object itself. 
*/ public ConnectionMonitorInner withDestination(ConnectionMonitorDestination destination) { if (this.innerProperties() == null) { this.innerProperties = new ConnectionMonitorParameters(); } this.innerProperties().withDestination(destination); return this; } /** * Get the autoStart property: Determines if the connection monitor will start automatically once created. * * @return the autoStart value. */ public Boolean autoStart() { return this.innerProperties() == null ? null : this.innerProperties().autoStart(); } /** * Set the autoStart property: Determines if the connection monitor will start automatically once created. * * @param autoStart the autoStart value to set. * @return the ConnectionMonitorInner object itself. */ public ConnectionMonitorInner withAutoStart(Boolean autoStart) { if (this.innerProperties() == null) { this.innerProperties = new ConnectionMonitorParameters(); } this.innerProperties().withAutoStart(autoStart); return this; } /** * Get the monitoringIntervalInSeconds property: Monitoring interval in seconds. * * @return the monitoringIntervalInSeconds value. */ public Integer monitoringIntervalInSeconds() { return this.innerProperties() == null ? null : this.innerProperties().monitoringIntervalInSeconds(); } /** * Set the monitoringIntervalInSeconds property: Monitoring interval in seconds. * * @param monitoringIntervalInSeconds the monitoringIntervalInSeconds value to set. * @return the ConnectionMonitorInner object itself. */ public ConnectionMonitorInner withMonitoringIntervalInSeconds(Integer monitoringIntervalInSeconds) { if (this.innerProperties() == null) { this.innerProperties = new ConnectionMonitorParameters(); } this.innerProperties().withMonitoringIntervalInSeconds(monitoringIntervalInSeconds); return this; } /** * Get the endpoints property: List of connection monitor endpoints. * * @return the endpoints value. */ public List<ConnectionMonitorEndpoint> endpoints() { return this.innerProperties() == null ? 
null : this.innerProperties().endpoints(); } /** * Set the endpoints property: List of connection monitor endpoints. * * @param endpoints the endpoints value to set. * @return the ConnectionMonitorInner object itself. */ public ConnectionMonitorInner withEndpoints(List<ConnectionMonitorEndpoint> endpoints) { if (this.innerProperties() == null) { this.innerProperties = new ConnectionMonitorParameters(); } this.innerProperties().withEndpoints(endpoints); return this; } /** * Get the testConfigurations property: List of connection monitor test configurations. * * @return the testConfigurations value. */ public List<ConnectionMonitorTestConfiguration> testConfigurations() { return this.innerProperties() == null ? null : this.innerProperties().testConfigurations(); } /** * Set the testConfigurations property: List of connection monitor test configurations. * * @param testConfigurations the testConfigurations value to set. * @return the ConnectionMonitorInner object itself. */ public ConnectionMonitorInner withTestConfigurations(List<ConnectionMonitorTestConfiguration> testConfigurations) { if (this.innerProperties() == null) { this.innerProperties = new ConnectionMonitorParameters(); } this.innerProperties().withTestConfigurations(testConfigurations); return this; } /** * Get the testGroups property: List of connection monitor test groups. * * @return the testGroups value. */ public List<ConnectionMonitorTestGroup> testGroups() { return this.innerProperties() == null ? null : this.innerProperties().testGroups(); } /** * Set the testGroups property: List of connection monitor test groups. * * @param testGroups the testGroups value to set. * @return the ConnectionMonitorInner object itself. 
*/ public ConnectionMonitorInner withTestGroups(List<ConnectionMonitorTestGroup> testGroups) { if (this.innerProperties() == null) { this.innerProperties = new ConnectionMonitorParameters(); } this.innerProperties().withTestGroups(testGroups); return this; } /** * Get the outputs property: List of connection monitor outputs. * * @return the outputs value. */ public List<ConnectionMonitorOutput> outputs() { return this.innerProperties() == null ? null : this.innerProperties().outputs(); } /** * Set the outputs property: List of connection monitor outputs. * * @param outputs the outputs value to set. * @return the ConnectionMonitorInner object itself. */ public ConnectionMonitorInner withOutputs(List<ConnectionMonitorOutput> outputs) { if (this.innerProperties() == null) { this.innerProperties = new ConnectionMonitorParameters(); } this.innerProperties().withOutputs(outputs); return this; } /** * Get the notes property: Optional notes to be associated with the connection monitor. * * @return the notes value. */ public String notes() { return this.innerProperties() == null ? null : this.innerProperties().notes(); } /** * Set the notes property: Optional notes to be associated with the connection monitor. * * @param notes the notes value to set. * @return the ConnectionMonitorInner object itself. */ public ConnectionMonitorInner withNotes(String notes) { if (this.innerProperties() == null) { this.innerProperties = new ConnectionMonitorParameters(); } this.innerProperties().withNotes(notes); return this; } /** * Validates the instance. * * @throws IllegalArgumentException thrown if the instance is not valid. */ public void validate() { if (innerProperties() == null) { throw logger .logExceptionAsError( new IllegalArgumentException( "Missing required property innerProperties in model ConnectionMonitorInner")); } else { innerProperties().validate(); } } }
package com.airbnb.epoxy;

import androidx.annotation.LayoutRes;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import java.lang.CharSequence;
import java.lang.IllegalArgumentException;
import java.lang.IllegalStateException;
import java.lang.Number;
import java.lang.Object;
import java.lang.Override;
import java.lang.String;
import java.util.BitSet;

/**
 * Generated file. Do not modify!
 */
public class OnViewRecycledViewModel_ extends EpoxyModel<OnViewRecycledView> implements GeneratedModel<OnViewRecycledView>, OnViewRecycledViewModelBuilder {
  // Tracks which required @EpoxyAttribute setters have been called (bit 0 = title).
  private final BitSet assignedAttributes_epoxyGeneratedModel = new BitSet(1);

  private OnModelBoundListener<OnViewRecycledViewModel_, OnViewRecycledView> onModelBoundListener_epoxyGeneratedModel;

  private OnModelUnboundListener<OnViewRecycledViewModel_, OnViewRecycledView> onModelUnboundListener_epoxyGeneratedModel;

  private OnModelVisibilityStateChangedListener<OnViewRecycledViewModel_, OnViewRecycledView> onModelVisibilityStateChangedListener_epoxyGeneratedModel;

  private OnModelVisibilityChangedListener<OnViewRecycledViewModel_, OnViewRecycledView> onModelVisibilityChangedListener_epoxyGeneratedModel;

  /**
   * Bitset index: 0
   */
  @NonNull
  private CharSequence title_CharSequence;

  @Override
  public void addTo(EpoxyController controller) {
    super.addTo(controller);
    addWithDebugValidation(controller);
    // Required attributes must be set before the model is added to a controller.
    if (!assignedAttributes_epoxyGeneratedModel.get(0)) {
      throw new IllegalStateException("A value is required for setTitle");
    }
  }

  @Override
  public void handlePreBind(final EpoxyViewHolder holder, final OnViewRecycledView object,
      final int position) {
    validateStateHasNotChangedSinceAdded("The model was changed between being added to the controller and being bound.", position);
  }

  @Override
  public void bind(final OnViewRecycledView object) {
    super.bind(object);
    object.setTitle(title_CharSequence);
  }

  @Override
  public void bind(final OnViewRecycledView object, EpoxyModel previousModel) {
    // Diff bind: when the previous model is of a different type, fall back to a full bind.
    if (!(previousModel instanceof OnViewRecycledViewModel_)) {
      bind(object);
      return;
    }
    OnViewRecycledViewModel_ that = (OnViewRecycledViewModel_) previousModel;
    super.bind(object);

    // Only re-apply the attribute if it changed relative to the previously bound model.
    if ((title_CharSequence != null ? !title_CharSequence.equals(that.title_CharSequence) : that.title_CharSequence != null)) {
      object.setTitle(title_CharSequence);
    }
  }

  @Override
  public void handlePostBind(final OnViewRecycledView object, int position) {
    if (onModelBoundListener_epoxyGeneratedModel != null) {
      onModelBoundListener_epoxyGeneratedModel.onModelBound(this, object, position);
    }
    validateStateHasNotChangedSinceAdded("The model was changed during the bind call.", position);
  }

  /**
   * Register a listener that will be called when this model is bound to a view.
   * <p>
   * The listener will contribute to this model's hashCode state per the {@link
   * com.airbnb.epoxy.EpoxyAttribute.Option#DoNotHash} rules.
   * <p>
   * You may clear the listener by setting a null value, or by calling {@link #reset()}
   */
  public OnViewRecycledViewModel_ onBind(
      OnModelBoundListener<OnViewRecycledViewModel_, OnViewRecycledView> listener) {
    onMutation();
    this.onModelBoundListener_epoxyGeneratedModel = listener;
    return this;
  }

  @Override
  public void unbind(OnViewRecycledView object) {
    super.unbind(object);
    if (onModelUnboundListener_epoxyGeneratedModel != null) {
      onModelUnboundListener_epoxyGeneratedModel.onModelUnbound(this, object);
    }
    // Forward recycle callbacks declared via @OnViewRecycled on the view.
    object.onRecycled1();
    object.onRecycled2();
  }

  /**
   * Register a listener that will be called when this model is unbound from a view.
   * <p>
   * The listener will contribute to this model's hashCode state per the {@link
   * com.airbnb.epoxy.EpoxyAttribute.Option#DoNotHash} rules.
   * <p>
   * You may clear the listener by setting a null value, or by calling {@link #reset()}
   */
  public OnViewRecycledViewModel_ onUnbind(
      OnModelUnboundListener<OnViewRecycledViewModel_, OnViewRecycledView> listener) {
    onMutation();
    this.onModelUnboundListener_epoxyGeneratedModel = listener;
    return this;
  }

  @Override
  public void onVisibilityStateChanged(int visibilityState, final OnViewRecycledView object) {
    if (onModelVisibilityStateChangedListener_epoxyGeneratedModel != null) {
      onModelVisibilityStateChangedListener_epoxyGeneratedModel.onVisibilityStateChanged(this, object, visibilityState);
    }
    super.onVisibilityStateChanged(visibilityState, object);
  }

  /**
   * Register a listener that will be called when this model visibility state has changed.
   * <p>
   * The listener will contribute to this model's hashCode state per the {@link
   * com.airbnb.epoxy.EpoxyAttribute.Option#DoNotHash} rules.
   */
  public OnViewRecycledViewModel_ onVisibilityStateChanged(
      OnModelVisibilityStateChangedListener<OnViewRecycledViewModel_, OnViewRecycledView> listener) {
    onMutation();
    this.onModelVisibilityStateChangedListener_epoxyGeneratedModel = listener;
    return this;
  }

  @Override
  public void onVisibilityChanged(float percentVisibleHeight, float percentVisibleWidth,
      int visibleHeight, int visibleWidth, final OnViewRecycledView object) {
    if (onModelVisibilityChangedListener_epoxyGeneratedModel != null) {
      onModelVisibilityChangedListener_epoxyGeneratedModel.onVisibilityChanged(this, object, percentVisibleHeight, percentVisibleWidth, visibleHeight, visibleWidth);
    }
    super.onVisibilityChanged(percentVisibleHeight, percentVisibleWidth, visibleHeight, visibleWidth, object);
  }

  /**
   * Register a listener that will be called when this model visibility has changed.
   * <p>
   * The listener will contribute to this model's hashCode state per the {@link
   * com.airbnb.epoxy.EpoxyAttribute.Option#DoNotHash} rules.
   */
  public OnViewRecycledViewModel_ onVisibilityChanged(
      OnModelVisibilityChangedListener<OnViewRecycledViewModel_, OnViewRecycledView> listener) {
    onMutation();
    this.onModelVisibilityChangedListener_epoxyGeneratedModel = listener;
    return this;
  }

  /**
   * <i>Required.</i>
   *
   * @see OnViewRecycledView#setTitle(CharSequence)
   */
  public OnViewRecycledViewModel_ title(@NonNull CharSequence title) {
    if (title == null) {
      throw new IllegalArgumentException("title cannot be null");
    }
    // Mark the required attribute as assigned so addTo() validation passes.
    assignedAttributes_epoxyGeneratedModel.set(0);
    onMutation();
    this.title_CharSequence = title;
    return this;
  }

  @NonNull
  public CharSequence title() {
    return title_CharSequence;
  }

  @Override
  public OnViewRecycledViewModel_ id(long id) {
    super.id(id);
    return this;
  }

  @Override
  public OnViewRecycledViewModel_ id(@Nullable Number... ids) {
    super.id(ids);
    return this;
  }

  @Override
  public OnViewRecycledViewModel_ id(long id1, long id2) {
    super.id(id1, id2);
    return this;
  }

  @Override
  public OnViewRecycledViewModel_ id(@Nullable CharSequence key) {
    super.id(key);
    return this;
  }

  @Override
  public OnViewRecycledViewModel_ id(@Nullable CharSequence key,
      @Nullable CharSequence... otherKeys) {
    super.id(key, otherKeys);
    return this;
  }

  @Override
  public OnViewRecycledViewModel_ id(@Nullable CharSequence key, long id) {
    super.id(key, id);
    return this;
  }

  @Override
  public OnViewRecycledViewModel_ layout(@LayoutRes int layoutRes) {
    super.layout(layoutRes);
    return this;
  }

  @Override
  public OnViewRecycledViewModel_ spanSizeOverride(
      @Nullable EpoxyModel.SpanSizeOverrideCallback spanSizeCallback) {
    super.spanSizeOverride(spanSizeCallback);
    return this;
  }

  @Override
  public OnViewRecycledViewModel_ show() {
    super.show();
    return this;
  }

  @Override
  public OnViewRecycledViewModel_ show(boolean show) {
    super.show(show);
    return this;
  }

  @Override
  public OnViewRecycledViewModel_ hide() {
    super.hide();
    return this;
  }

  @Override
  @LayoutRes
  protected int getDefaultLayout() {
    return 1;
  }

  @Override
  public OnViewRecycledViewModel_ reset() {
    // Clears listeners and all attribute state, returning the model to a reusable blank state.
    onModelBoundListener_epoxyGeneratedModel = null;
    onModelUnboundListener_epoxyGeneratedModel = null;
    onModelVisibilityStateChangedListener_epoxyGeneratedModel = null;
    onModelVisibilityChangedListener_epoxyGeneratedModel = null;
    assignedAttributes_epoxyGeneratedModel.clear();
    this.title_CharSequence = null;
    super.reset();
    return this;
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (!(o instanceof OnViewRecycledViewModel_)) {
      return false;
    }
    if (!super.equals(o)) {
      return false;
    }
    OnViewRecycledViewModel_ that = (OnViewRecycledViewModel_) o;
    // Listeners participate only by presence/absence (DoNotHash), not by identity.
    if (((onModelBoundListener_epoxyGeneratedModel == null) != (that.onModelBoundListener_epoxyGeneratedModel == null))) {
      return false;
    }
    if (((onModelUnboundListener_epoxyGeneratedModel == null) != (that.onModelUnboundListener_epoxyGeneratedModel == null))) {
      return false;
    }
    if (((onModelVisibilityStateChangedListener_epoxyGeneratedModel == null) != (that.onModelVisibilityStateChangedListener_epoxyGeneratedModel == null))) {
      return false;
    }
    if (((onModelVisibilityChangedListener_epoxyGeneratedModel == null) != (that.onModelVisibilityChangedListener_epoxyGeneratedModel == null))) {
      return false;
    }
    if ((title_CharSequence != null ? !title_CharSequence.equals(that.title_CharSequence) : that.title_CharSequence != null)) {
      return false;
    }
    return true;
  }

  @Override
  public int hashCode() {
    int _result = super.hashCode();
    // Listeners hash by presence/absence only, consistent with equals().
    _result = 31 * _result + (onModelBoundListener_epoxyGeneratedModel != null ? 1 : 0);
    _result = 31 * _result + (onModelUnboundListener_epoxyGeneratedModel != null ? 1 : 0);
    _result = 31 * _result + (onModelVisibilityStateChangedListener_epoxyGeneratedModel != null ? 1 : 0);
    _result = 31 * _result + (onModelVisibilityChangedListener_epoxyGeneratedModel != null ? 1 : 0);
    _result = 31 * _result + (title_CharSequence != null ? title_CharSequence.hashCode() : 0);
    return _result;
  }

  @Override
  public String toString() {
    return "OnViewRecycledViewModel_{" +
        "title_CharSequence=" + title_CharSequence +
        "}" + super.toString();
  }

  @Override
  public int getSpanSize(int totalSpanCount, int position, int itemCount) {
    // Full-width: the model always spans the entire grid row.
    return totalSpanCount;
  }
}
/*
 * Copyright 2021 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.ads.googleads.v8.services.stub;

import com.google.ads.googleads.v8.resources.GeographicView;
import com.google.ads.googleads.v8.services.GetGeographicViewRequest;
import com.google.api.core.ApiFunction;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.GrpcTransportChannel;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import java.io.IOException;
import java.util.List;
import javax.annotation.Generated;
import org.threeten.bp.Duration;

// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
 * Settings class to configure an instance of {@link GeographicViewServiceStub}.
 *
 * <p>The default instance has everything set to sensible defaults:
 *
 * <ul>
 *   <li> The default service address (googleads.googleapis.com) and default port (443) are used.
 *   <li> Credentials are acquired automatically through Application Default Credentials.
 *   <li> Retries are configured for idempotent methods but not for non-idempotent methods.
 * </ul>
 *
 * <p>The builder of this class is recursive, so contained classes are themselves builders. When
 * build() is called, the tree of builders is called to create the complete settings object.
 *
 * <p>For example, to set the total timeout of getGeographicView to 30 seconds:
 *
 * <pre>{@code
 * GeographicViewServiceStubSettings.Builder geographicViewServiceSettingsBuilder =
 *     GeographicViewServiceStubSettings.newBuilder();
 * geographicViewServiceSettingsBuilder
 *     .getGeographicViewSettings()
 *     .setRetrySettings(
 *         geographicViewServiceSettingsBuilder
 *             .getGeographicViewSettings()
 *             .getRetrySettings()
 *             .toBuilder()
 *             .setTotalTimeout(Duration.ofSeconds(30))
 *             .build());
 * GeographicViewServiceStubSettings geographicViewServiceSettings =
 *     geographicViewServiceSettingsBuilder.build();
 * }</pre>
 */
// NOTE: generated by gapic-generator-java — regenerate rather than hand-editing.
@Generated("by gapic-generator-java")
public class GeographicViewServiceStubSettings
    extends StubSettings<GeographicViewServiceStubSettings> {
  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder().add("https://www.googleapis.com/auth/adwords").build();

  // Per-RPC call settings; this service exposes a single unary method.
  private final UnaryCallSettings<GetGeographicViewRequest, GeographicView>
      getGeographicViewSettings;

  /** Returns the object with the settings used for calls to getGeographicView. */
  public UnaryCallSettings<GetGeographicViewRequest, GeographicView> getGeographicViewSettings() {
    return getGeographicViewSettings;
  }

  /**
   * Instantiates the concrete transport stub for these settings. Only the gRPC transport is
   * supported; any other configured transport name results in UnsupportedOperationException.
   */
  @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
  public GeographicViewServiceStub createStub() throws IOException {
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(GrpcTransportChannel.getGrpcTransportName())) {
      return GrpcGeographicViewServiceStub.create(this);
    }
    throw new UnsupportedOperationException(
        String.format(
            "Transport not supported: %s", getTransportChannelProvider().getTransportName()));
  }

  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }

  /** Returns the default service endpoint. */
  public static String getDefaultEndpoint() {
    return "googleads.googleapis.com:443";
  }

  /** Returns the default mTLS service endpoint. */
  public static String getDefaultMtlsEndpoint() {
    return "googleads.mtls.googleapis.com:443";
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder()
        .setScopesToApply(DEFAULT_SERVICE_SCOPES)
        .setUseJwtAccessWithScope(true);
  }

  /** Returns a builder for the default ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return InstantiatingGrpcChannelProvider.newBuilder()
        .setMaxInboundMessageSize(Integer.MAX_VALUE);
  }

  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultGrpcTransportProviderBuilder().build();
  }

  @BetaApi("The surface for customizing headers is not stable yet and may change in the future.")
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken(
            "gapic", GaxProperties.getLibraryVersion(GeographicViewServiceStubSettings.class))
        .setTransportToken(
            GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }

  protected GeographicViewServiceStubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);

    // Freeze the per-method settings from the builder tree.
    getGeographicViewSettings = settingsBuilder.getGeographicViewSettings().build();
  }

  /** Builder for GeographicViewServiceStubSettings. */
  public static class Builder
      extends StubSettings.Builder<GeographicViewServiceStubSettings, Builder> {
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
    private final UnaryCallSettings.Builder<GetGeographicViewRequest, GeographicView>
        getGeographicViewSettings;

    // Named retry-code sets referenced by initDefaults() below.
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      definitions.put(
          "retry_policy_0_codes",
          ImmutableSet.copyOf(
              Lists.<StatusCode.Code>newArrayList(
                  StatusCode.Code.UNAVAILABLE, StatusCode.Code.DEADLINE_EXCEEDED)));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }

    // Named retry-parameter sets referenced by initDefaults() below.
    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelay(Duration.ofMillis(5000L))
              .setRetryDelayMultiplier(1.3)
              .setMaxRetryDelay(Duration.ofMillis(60000L))
              .setInitialRpcTimeout(Duration.ofMillis(3600000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeout(Duration.ofMillis(3600000L))
              .setTotalTimeout(Duration.ofMillis(3600000L))
              .build();
      definitions.put("retry_policy_0_params", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }

    protected Builder() {
      this(((ClientContext) null));
    }

    protected Builder(ClientContext clientContext) {
      super(clientContext);

      getGeographicViewSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(getGeographicViewSettings);
      initDefaults(this);
    }

    protected Builder(GeographicViewServiceStubSettings settings) {
      super(settings);

      getGeographicViewSettings = settings.getGeographicViewSettings.toBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(getGeographicViewSettings);
    }

    private static Builder createDefault() {
      Builder builder = new Builder(((ClientContext) null));

      builder.setTransportChannelProvider(defaultTransportChannelProvider());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
      builder.setEndpoint(getDefaultEndpoint());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);

      return initDefaults(builder);
    }

    // Applies the named retry codes/params to each method's call settings.
    private static Builder initDefaults(Builder builder) {
      builder
          .getGeographicViewSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      return builder;
    }

    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
      super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
      return this;
    }

    public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
      return unaryMethodSettingsBuilders;
    }

    /** Returns the builder for the settings used for calls to getGeographicView. */
    public UnaryCallSettings.Builder<GetGeographicViewRequest, GeographicView>
        getGeographicViewSettings() {
      return getGeographicViewSettings;
    }

    @Override
    public GeographicViewServiceStubSettings build() throws IOException {
      return new GeographicViewServiceStubSettings(this);
    }
  }
}
package org.sakaiproject.component.app.messageforums.entity;

import java.io.Reader;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.sakaiproject.api.app.messageforums.DiscussionForum;
import org.sakaiproject.api.app.messageforums.DiscussionTopic;
import org.sakaiproject.api.app.messageforums.Message;
import org.sakaiproject.api.app.messageforums.MessageForumsMessageManager;
import org.sakaiproject.api.app.messageforums.Topic;
import org.sakaiproject.api.app.messageforums.ui.DiscussionForumManager;
import org.sakaiproject.api.app.messageforums.ui.UIPermissionsManager;
import org.sakaiproject.component.api.ServerConfigurationService;
import org.sakaiproject.entitybroker.DeveloperHelperService;
import org.sakaiproject.entitybroker.EntityBroker;
import org.sakaiproject.entitybroker.EntityReference;
import org.sakaiproject.event.api.Event;
import org.sakaiproject.search.api.EntityContentProducer;
import org.sakaiproject.search.api.PortalUrlEnabledProducer;
import org.sakaiproject.search.api.SearchIndexBuilder;
import org.sakaiproject.search.api.SearchService;
import org.sakaiproject.search.model.SearchBuilderItem;
import org.sakaiproject.util.FormattedText;

/**
 * Feeds Message Forums messages to the Sakai search service.
 *
 * <p>Registers itself with the {@link SearchIndexBuilder} at init time (only when the
 * {@code search.enable} property is "true"), maps add/remove events to index actions,
 * and resolves message references of the form
 * {@code /forums/site/<siteId>/Message/<messageId>} into indexable title/body content.
 */
public class MessageForumsEntityContentProducer implements EntityContentProducer,
        PortalUrlEnabledProducer {

    private static Log log = LogFactory.getLog(MessageForumsEntityContentProducer.class);

    // runtime dependency: event names that should trigger (re-)indexing of a message
    private List addEvents = null;

    // runtime dependency: event names that should trigger removal from the index
    private List removeEvents = null;

    /**
     * @param addEvents
     *        The addEvents to set.
     */
    public void setAddEvents(List addEvents) {
        this.addEvents = addEvents;
    }

    public void setRemoveEvents(List removeEvents) {
        this.removeEvents = removeEvents;
    }

    // runtime dependency: tool registration name; also used as the reference prefix in matches()
    private String toolName = null;

    /**
     * @param toolName
     *        The toolName to set.
     */
    public void setToolName(String toolName) {
        this.toolName = toolName;
    }

    private DeveloperHelperService developerHelperService;

    public void setDeveloperHelperService(DeveloperHelperService developerHelperService) {
        this.developerHelperService = developerHelperService;
    }

    private ServerConfigurationService serverConfigurationService;

    public void setServerConfigurationService(ServerConfigurationService serverConfigurationService) {
        this.serverConfigurationService = serverConfigurationService;
    }

    // injected dependency
    private SearchService searchService = null;

    /**
     * @param searchService the searchService to set
     */
    public void setSearchService(SearchService searchService) {
        this.searchService = searchService;
    }

    // injected dependency
    private SearchIndexBuilder searchIndexBuilder = null;

    /**
     * @param searchIndexBuilder the searchIndexBuilder to set
     */
    public void setSearchIndexBuilder(SearchIndexBuilder searchIndexBuilder) {
        this.searchIndexBuilder = searchIndexBuilder;
    }

    /**
     * Forums Services
     */
    private MessageForumsMessageManager messageForumsMessageManager;

    public void setMessageForumsMessageManager(MessageForumsMessageManager messageForumsMessageManager) {
        this.messageForumsMessageManager = messageForumsMessageManager;
    }

    private DiscussionForumManager discussionForumManager;

    public void setDiscussionForumManager(DiscussionForumManager discussionForumManager) {
        this.discussionForumManager = discussionForumManager;
    }

    public void setUIPermissionManager(UIPermissionsManager permissionManager) {
        uIPermissionManager = permissionManager;
    }

    private UIPermissionsManager uIPermissionManager;

    private EntityBroker entityBroker;

    public void setEntityBroker(EntityBroker eb) {
        this.entityBroker = eb;
    }

    /**
     * Registers the add/remove event functions and this content producer with the
     * search service — only when the {@code search.enable} property is "true".
     */
    public void init() {
        if ("true".equals(serverConfigurationService.getString("search.enable", "false"))) {
            for (Iterator i = addEvents.iterator(); i.hasNext();) {
                searchService.registerFunction((String) i.next());
            }
            for (Iterator i = removeEvents.iterator(); i.hasNext();) {
                searchService.registerFunction((String) i.next());
            }
            searchIndexBuilder.registerEntityContentProducer(this);
        }
    }

    /**
     * Returns true when the current user may read the referenced message's topic.
     * Resolves the message's topic, then checks read permission on the enclosing
     * discussion topic/forum.
     */
    // NOTE(review): getMessageById may return null for a stale reference, which would
    // NPE on m.getTopic() — other methods in this class null-check the message; confirm.
    public boolean canRead(String reference) {
        String msgId = EntityReference.getIdFromRefByKey(reference, "Message");
        Message m = messageForumsMessageManager.getMessageById(Long.valueOf(msgId));
        Topic topic = m.getTopic();
        boolean canRead = false;
        DiscussionTopic dt = discussionForumManager.getTopicById(topic.getId());
        if (dt != null) {
            DiscussionForum df = discussionForumManager.getForumById(dt.getOpenForum().getId());
            canRead = uIPermissionManager.isRead(dt, df);
        }
        return canRead;
    }

    /**
     * Maps an event name onto a search index action: ADD for configured add events,
     * DELETE for configured remove events, UNKNOWN otherwise.
     */
    public Integer getAction(Event event) {
        String evt = event.getEvent();
        if (evt == null)
            return SearchBuilderItem.ACTION_UNKNOWN;
        for (Iterator i = addEvents.iterator(); i.hasNext();) {
            String match = (String) i.next();
            if (evt.equals(match)) {
                return SearchBuilderItem.ACTION_ADD;
            }
        }
        for (Iterator i = removeEvents.iterator(); i.hasNext();) {
            String match = (String) i.next();
            if (evt.equals(match)) {
                return SearchBuilderItem.ACTION_DELETE;
            }
        }
        return SearchBuilderItem.ACTION_UNKNOWN;
    }

    /** Returns the site container reference ("/site/&lt;siteId&gt;") for a message reference. */
    public String getContainer(String ref) {
        log.debug("getContainer(String " + ref + ")");
        return "/site/" + getSiteId(ref);
    }

    /**
     * Builds the indexable plain-text content for a message: author, title, and the
     * body converted from formatted text. Returns an empty string when the message
     * cannot be found.
     */
    public String getContent(String reference) {
        log.debug("getting content for " + reference);
        String msgId = EntityReference.getIdFromRefByKey(reference, "Message");
        Message m = messageForumsMessageManager.getMessageById(Long.valueOf(msgId));
        StringBuilder sb = new StringBuilder();
        if (m != null) {
            sb.append("author: " + m.getAuthor());
            sb.append(" title: " + m.getTitle());
            sb.append(" body: " + FormattedText.convertFormattedTextToPlaintext(m.getBody()));
            /* causes hibernate lazy init error
            List attachments = m.getAttachments();
            if (attachments != null && attachments.size() > 0) {
                for (int q = 0; q < attachments.size(); q++) {
                    Attachment at = (Attachment) attachments.get(q);
                    String id = at.getAttachmentId();
                    EntityContentProducer ecp = searchIndexBuilder
                            .newEntityContentProducer(id);
                    String attachementDigest = ecp.getContent(id);
                    sb.append("\n attachement: \n");
                    sb.append(attachementDigest);
                    sb.append("\n");
                }
            }
            */
        }
        return sb.toString();
    }

    /** Reader view over {@link #getContent(String)}. */
    public Reader getContentReader(String reference) {
        return new StringReader(getContent(reference));
    }

    public Map getCustomProperties(String ref) {
        // TODO Auto-generated method stub
        return null;
    }

    public String getCustomRDF(String ref) {
        // TODO Auto-generated method stub
        return null;
    }

    /** Extracts the message id from a reference. */
    public String getId(String ref) {
        return EntityReference.getIdFromRefByKey(ref, "Message");
    }

    /**
     * Deprecated method
     */
    public List getSiteContent(String context) {
        log.warn("DEPRECATED method getSiteContent called");
        return null;
    }

    /**
     * Returns an iterator over the references of every forum message in the given
     * site (context), in the form produced by buildRefForContextAndId.
     */
    public Iterator getSiteContentIterator(String context) {
        log.debug("getSiteContentIterator(String " + context + ")");
        List<Message> messages = messageForumsMessageManager.getAllMessagesInSite(context);
        log.debug("got a list of " + messages.size() + "messages");
        List<String> out = new ArrayList<String>();
        for (int i = 0; i < messages.size(); i++) {
            Message m = (Message) messages.get(i);
            String ref = buildRefForContextAndId(context, m.getId());
            log.debug("adding " + ref);
            out.add(ref);
        }
        return out.iterator();
    }

    /** Builds a message reference for a site + message id pair. */
    private String buildRefForContextAndId(String context, Long messageId) {
        ///forums/site/705b3a28-e04d-4858-8b66-77fa9d9c9121/Message/2
        String ret = "/forums/site/" + context + "/Message/" + messageId.toString();
        return ret;
    }

    /** Extracts the site id from a reference. */
    public String getSiteId(String reference) {
        return EntityReference.getIdFromRefByKey(reference, "site");
    }

    public String getSubType(String ref) {
        log.debug("getSubType(" + ref);
        return "message";
    }

    /** Returns the message title, or null when the message cannot be found. */
    public String getTitle(String reference) {
        log.debug("getTitle: " + reference);
        String msgId = EntityReference.getIdFromRefByKey(reference, "Message");
        Message m = messageForumsMessageManager.getMessageById(Long.valueOf(msgId));
        if (m != null)
            return m.getTitle();
        return null;
    }

    public String getTool() {
        return toolName;
    }

    public String getType(String ref) {
        log.debug("getType(" + ref);
        return "Message";
    }

    /**
     * Builds a portal URL that opens the referenced message in the forums tool's
     * direct thread view. Returns null when no tool placement exists for the site
     * (the exception is deliberately swallowed — see inline comment).
     */
    // NOTE(review): getMessageById may return null here too, which would NPE on
    // m.getTopic(); that NPE would be caught by the catch block below — confirm intent.
    public String getUrl(String reference) {
        log.debug("getUrl(" + reference + ")");
        Map<String, String> params = new HashMap<String, String>();
        String msgId = EntityReference.getIdFromRefByKey(reference, "Message");
        Message m = messageForumsMessageManager.getMessageById(Long.valueOf(msgId));
        params.put("messageId", msgId);
        params.put("topicId", m.getTopic().getId().toString());
        log.debug("got topic: " + m.getTopic().getId().toString());
        //Topic topic = developerHelperService.cloneBean(m.getTopic(), 1, null);
        DiscussionTopic topic = discussionForumManager.getTopicById(m.getTopic().getId());
        params.put("forumId", topic.getOpenForum().getId().toString());
        String context = "/site/" + this.getSiteId(reference);
        log.debug("context: " + context);
        //seems not to work "/discussionForum/message/dfViewMessage"
        String path = "/discussionForum/message/dfViewThreadDirect";
        String url = null;
        try {
            url = developerHelperService.getToolViewURL("sakai.forums", path, params, context);
            log.debug("got url" + url);
            return url;
        } catch (Exception e) {
            //MSGCNTR this could happen if there is no tool placement
            log.warn("swallowing exception", e);
        }
        return url;
    }

    public boolean isContentFromReader(String reference) {
        return false;
    }

    /** A message is indexable when it exists and is not flagged deleted. */
    public boolean isForIndex(String reference) {
        String msgId = EntityReference.getIdFromRefByKey(reference, "Message");
        Message m = messageForumsMessageManager.getMessageById(Long.valueOf(msgId));
        if (m != null && !m.getDeleted()) {
            log.debug("we will index " + reference);
            return true;
        }
        return false;
    }

    /** Returns true when the reference's prefix matches this producer's tool name. */
    public boolean matches(String reference) {
        if (reference == null || "".equals(reference)) {
            return false;
        }
        try {
            String prefix = EntityReference.getPrefix(reference);
            log.debug("checking if " + prefix + " matches");
            if (toolName.equals(prefix))
                return true;
        } catch (Exception e) {
            // best-effort: an unparseable reference simply doesn't match
            log.warn("unable to parse reference: " + reference + ", " + e);
            if (log.isDebugEnabled()) {
                log.debug(e);
            }
        }
        return false;
    }

    public boolean matches(Event event) {
        // TODO Auto-generated method stub
        return matches(event.getResource());
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.directory.shared.kerberos.components; import java.nio.BufferOverflowException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.apache.directory.api.asn1.Asn1Object; import org.apache.directory.api.asn1.EncoderException; import org.apache.directory.api.asn1.ber.tlv.TLV; import org.apache.directory.api.asn1.ber.tlv.UniversalTag; import org.apache.directory.api.util.Strings; import org.apache.directory.server.i18n.I18n; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Store a list of addresses. * * The ASN.1 grammar is : * <pre> * -- NOTE: HostAddresses is always used as an OPTIONAL field and * -- should not be empty. 
* HostAddresses -- NOTE: subtly different from rfc1510, * -- but has a value mapping and encodes the same * ::= SEQUENCE OF HostAddress *</pre> * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a> */ public class HostAddresses implements Asn1Object { /** The logger */ private static final Logger LOG = LoggerFactory.getLogger( HostAddresses.class ); /** Speedup for logs */ private static final boolean IS_DEBUG = LOG.isDebugEnabled(); /** List of all HostAddress stored */ private List<HostAddress> addresses; // Storage for computed lengths private int addressesLength; /** * Creates a new instance of HostAddresses. */ public HostAddresses() { this.addresses = new ArrayList<>(); } /** * Creates a new instance of HostAddresses. * * @param addresses The associated addresses */ public HostAddresses( HostAddress[] addresses ) { if ( addresses == null ) { this.addresses = new ArrayList<>(); } else { this.addresses = Arrays.asList( addresses ); } } /** * Adds an {@link HostAddresses} to the list * @param hostAddress The address to add */ public void addHostAddress( HostAddress hostAddress ) { addresses.add( hostAddress ); } /** * Returns true if this {@link HostAddresses} contains a specified {@link HostAddress}. * * @param address The address we are looking for in the existing list * @return true if this {@link HostAddresses} contains a specified {@link HostAddress}. */ public boolean contains( HostAddress address ) { if ( addresses != null ) { return addresses.contains( address ); } return false; } /** * {@inheritDoc} */ @Override public int hashCode() { int hash = 37; if ( addresses != null ) { hash = hash * 17 + addresses.size(); hash = hash * 17 + addresses.hashCode(); } return hash; } /** * Returns true if two {@link HostAddresses} are equal. * * @param obj The {@link HostAddresses} we want to compare with the current one * @return true if two {@link HostAddresses} are equal. 
*/ @Override public boolean equals( Object obj ) { if ( this == obj ) { return true; } if ( !( obj instanceof HostAddresses ) ) { return false; } HostAddresses that = ( HostAddresses ) obj; // Addresses can't be null after creation if ( addresses.size() != that.addresses.size() ) { return false; } for ( int i = 0; i < addresses.size(); i++ ) { if ( !addresses.get( i ).equals( that.addresses.get( i ) ) ) { return false; } } return true; } /** * Returns the contained {@link HostAddress}s as an array. * * @return An array of {@link HostAddress}s. */ public HostAddress[] getAddresses() { return addresses.toArray( new HostAddress[0] ); } /** * Compute the hostAddresses length * <pre> * HostAddresses : * * 0x30 L1 hostAddresses sequence of HostAddresses * | * +--&gt; 0x30 L2[1] Hostaddress[1] * | * +--&gt; 0x30 L2[2] Hostaddress[2] * | * ... * | * +--&gt; 0x30 L2[n] Hostaddress[n] * * where L1 = sum( L2[1], l2[2], ..., L2[n] ) * </pre> */ public int computeLength() { // Compute the addresses length. addressesLength = 0; if ( ( addresses != null ) && !addresses.isEmpty() ) { for ( HostAddress hostAddress : addresses ) { int length = hostAddress.computeLength(); addressesLength += length; } } return 1 + TLV.getNbBytes( addressesLength ) + addressesLength; } /** * Encode the HostAddress message to a PDU. * <pre> * HostAddress : * * 0x30 LL * 0x30 LL hostaddress[1] * 0x30 LL hostaddress[1] * ... * 0x30 LL hostaddress[1] * </pre> * @param buffer The buffer where to put the PDU. It should have been allocated * before, with the right size. * @return The constructed PDU. 
*/ public ByteBuffer encode( ByteBuffer buffer ) throws EncoderException { if ( buffer == null ) { throw new EncoderException( I18n.err( I18n.ERR_148 ) ); } try { // The HostAddresses SEQ Tag buffer.put( UniversalTag.SEQUENCE.getValue() ); buffer.put( TLV.getBytes( addressesLength ) ); // The hostAddress list, if it's not empty if ( ( addresses != null ) && !addresses.isEmpty() ) { for ( HostAddress hostAddress : addresses ) { hostAddress.encode( buffer ); } } } catch ( BufferOverflowException boe ) { LOG.error( I18n.err( I18n.ERR_144, 1 + TLV.getNbBytes( addressesLength ) + addressesLength, buffer.capacity() ) ); throw new EncoderException( I18n.err( I18n.ERR_138 ), boe ); } if ( IS_DEBUG ) { LOG.debug( "HostAddresses encoding : {}", Strings.dumpBytes( buffer.array() ) ); LOG.debug( "HostAddresses initial value : {}", this ); } return buffer; } /** * @see Object#toString() */ public String toString() { StringBuilder sb = new StringBuilder(); boolean isFirst = true; for ( HostAddress hostAddress : addresses ) { if ( isFirst ) { isFirst = false; } else { sb.append( ", " ); } sb.append( hostAddress.toString() ); } return sb.toString(); } }
package com.aisparser; /** * 6-bit packed ASCII functions * @author Copyright 2006-2008 by Brian C. Lane <bcl@brianlane.com * All Rights Reserved * */ class SixbitsExhaustedException extends Exception { public SixbitsExhaustedException() {} public SixbitsExhaustedException( String str ) { super(str); } } /** * This class's methods are used to extract data from the 6-bit packed * ASCII string used by AIVDM/AIVDO AIS messages. * * init() should be called with a sixbit ASCII string. * * Up to 32 bits of data are fetched from the string by calling get() * * Use padBits() to set the number of padding bits at the end of the message, * it defaults to 0 if not set. * * */ public class Sixbit { private final int pow2_mask[] = { 0x00, 0x01, 0x03, 0x07, 0x0F, 0x1F, 0x3F }; private String bits; //!< raw 6-bit ASCII data string private int bitsIndex; //!< Index of next character private int remainder; //!< Remainder bits private int remainderLength; //!< Number of remainder bits private int padBits; //!< Number of padding bits at end /** * Totally empty constructor */ public Sixbit() {} /** * Initialize a 6-bit datastream structure * * This function initializes the state of the sixbit parser variables * */ public void init( String bits ) { this.bits = bits; this.bitsIndex = 0; this.remainder = 0; this.remainderLength = 0; this.padBits = 0; } /** * Set the bit padding value */ public void padBits( int num ) { this.padBits = num; } /** * Add more bits to the buffer */ public void add( String bits ) { this.bits += bits; } /* * Return the number of bits * * Takes into account the number of padding bits. */ public int bit_length() { return ((length() * 6) - this.padBits); } /** * Return the number of bytes in the sixbit string */ public int length() { return this.bits.length(); } /** * Convert an ASCII value to a 6-bit binary value * * This function checks the ASCII value to make sure it can be converted. 
* If not, it throws an IllegalArgumentException * Otherwise it returns the 6-bit binary value. * * @param ASCII character to convert * * This is used to convert the packed 6-bit value to a binary value. It * is not used to convert data from fields such as the name and * destination -- Use ais2ascii() instead. */ public int binfrom6bit( int ascii ) throws IllegalArgumentException { if( (ascii < 0x30) || (ascii > 0x77) || ((ascii > 0x57) && (ascii < 0x60)) ) throw new IllegalArgumentException("Illegal 6-bit ASCII value"); if( ascii < 0x60 ) return (ascii - 0x30) & 0x3F; else return (ascii - 0x38) & 0x3F; } /** Convert a binary value to a 6-bit ASCII value * * This function checks the binary value to make sure it can be converted. * If not, it throws an IllegalArgumentException. * Otherwise it returns the 6-bit ASCII value. * * @param value to convert * @returns 6-bit ASCII * */ public int binto6bit( int value ) throws IllegalArgumentException { if (value > 0x3F ) throw new IllegalArgumentException("Value is out of range (0-0x3F)"); if (value < 0x28) return value + 0x30; else return value + 0x38; } /** Convert a AIS 6-bit character to ASCII * * @param value 6-bit value to be converted * * return * - corresponding ASCII value (0x20-0x5F) * * This function is used to convert binary data to ASCII. This is * different from the 6-bit ASCII to binary conversion for VDM * messages; it is used for strings within the datastream itself. * eg. Ship Name, Callsign and Destination. */ public int ais2ascii( int value ) throws IllegalArgumentException { if (value > 0x3F ) throw new IllegalArgumentException("Value is out of range (0-0x3F)"); if( value < 0x20 ) return value + 0x40; else return value; } /** * Return 0-32 bits from a 6-bit ASCII stream * * @param numbits number of bits to return * * This method returns the requested number of bits to the caller. * It pulls the bits from the raw 6-bit ASCII as they are needed. 
* */ public long get( int numbits ) throws SixbitsExhaustedException { long result; int fetch_bits; result = 0; fetch_bits = numbits; while( fetch_bits > 0 ) { /* Is there anything left over from the last call? */ if( this.remainderLength > 0 ) { if( this.remainderLength <= fetch_bits ) { /* reminder is less than or equal to what is needed */ result = (result << 6) + this.remainder; fetch_bits -= this.remainderLength; this.remainder = 0; this.remainderLength = 0; } else { /* remainder is larger than what is needed Take the bits from the top of remainder */ result = result << fetch_bits; result += this.remainder >> (this.remainderLength - fetch_bits); /* Fixup remainder */ this.remainderLength -= fetch_bits; this.remainder &= pow2_mask[(int) this.remainderLength]; return result; } } /* Get the next block of 6 bits from the ASCII string */ if( this.bitsIndex < this.bits.length() ) { this.remainder = binfrom6bit( (int) bits.charAt(this.bitsIndex) ); this.bitsIndex++; if (this.bitsIndex == this.bits.length()) this.remainderLength = 6 - this.padBits; else this.remainderLength = 6; } else if (fetch_bits > 0) { /* Ran out of bits */ throw new SixbitsExhaustedException("Ran out of bits"); } else { return result; } } return result; } /** * Get an ASCII string from the 6-bit data stream * * @param length Number of characters to retrieve * * @return * String of the characters */ public String get_string( int length ) { char[] tmp_str = new char[length]; /* Get the 6-bit string, convert to ASCII */ for (int i=0; i < length; i++) { try { tmp_str[i] = (char) ais2ascii( (char) get( 6 )); } catch ( SixbitsExhaustedException e) { for (int j=i; j<length; j++) { tmp_str[j] = '@'; } break; } } return new String(tmp_str); } }
/*
 * Copyright 2015-2018 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.glowroot.central.repo;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;

import com.datastax.driver.core.BoundStatement;
import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Row;
import com.datastax.driver.core.utils.UUIDs;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Multimaps;
import com.google.common.collect.SetMultimap;
import com.google.common.primitives.Ints;
import com.google.common.util.concurrent.ListenableFuture;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.immutables.serial.Serial;
import org.immutables.value.Value;

import org.glowroot.central.repo.Common.NeedsRollup;
import org.glowroot.central.repo.Common.NeedsRollupFromChildren;
import org.glowroot.central.util.ClusterManager;
import org.glowroot.central.util.MoreFutures;
import org.glowroot.central.util.MoreFutures.DoRollup;
import org.glowroot.central.util.Session;
import org.glowroot.common.util.CaptureTimes;
import org.glowroot.common.util.Clock;
import org.glowroot.common.util.OnlyUsedByTests;
import org.glowroot.common.util.Styles;
import org.glowroot.common2.repo.ConfigRepository.RollupConfig;
import org.glowroot.common2.repo.util.Gauges;
import org.glowroot.wire.api.model.CollectorServiceOuterClass.GaugeValueMessage.GaugeValue;

import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static java.util.concurrent.TimeUnit.DAYS;
import static java.util.concurrent.TimeUnit.HOURS;
import static java.util.concurrent.TimeUnit.MINUTES;

/**
 * Cassandra-backed DAO for gauge values, with tiered time-based rollups.
 *
 * <p>Raw values land in {@code gauge_value_rollup_0}; levels {@code 1..n} are weighted averages
 * over progressively larger intervals. "Needs rollup" tables record which (agent, capture time,
 * gauge names) tuples still have pending rollup work; {@link #needsRollupCache1} suppresses
 * duplicate "needs rollup" inserts for level 1.
 */
public class GaugeValueDaoImpl implements GaugeValueDao {

    private final Session session;
    private final ConfigRepositoryImpl configRepository;
    private final ExecutorService asyncExecutor;
    private final Clock clock;

    private final GaugeNameDao gaugeNameDao;

    // index is rollupLevel
    private final ImmutableList<PreparedStatement> insertValuePS;
    private final ImmutableList<PreparedStatement> readValuePS;
    private final ImmutableList<PreparedStatement> readOldestCaptureTimePS;
    private final ImmutableList<PreparedStatement> readValueForRollupPS;
    private final PreparedStatement readValueForRollupFromChildPS;

    private final List<PreparedStatement> insertNeedsRollup;
    private final List<PreparedStatement> readNeedsRollup;
    private final List<PreparedStatement> deleteNeedsRollup;

    private final PreparedStatement insertNeedsRollupFromChild;
    private final PreparedStatement readNeedsRollupFromChild;
    private final PreparedStatement deleteNeedsRollupFromChild;

    // needs rollup caches are only to reduce pressure on the needs rollup tables by reducing
    // duplicate entries
    private final ConcurrentMap<NeedsRollupKey, ImmutableSet<String>> needsRollupCache1;

    /**
     * Creates the gauge value / needs-rollup tables (if absent) and prepares all statements.
     * Table count is driven by the configured rollup levels; expiration (TTL) per level comes
     * from central storage config, with level 0 sharing level 1's expiration.
     */
    GaugeValueDaoImpl(Session session, ConfigRepositoryImpl configRepository,
            ClusterManager clusterManager, ExecutorService asyncExecutor,
            int cassandraGcGraceSeconds, Clock clock) throws Exception {
        this.session = session;
        this.configRepository = configRepository;
        this.asyncExecutor = asyncExecutor;
        this.clock = clock;
        gaugeNameDao = new GaugeNameDao(session, configRepository, clock);

        int count = configRepository.getRollupConfigs().size();
        List<Integer> rollupExpirationHours = Lists
                .newArrayList(configRepository.getCentralStorageConfig().rollupExpirationHours());
        // prepend a copy of the first entry so index 0 (raw values) uses level 1's expiration
        rollupExpirationHours.add(0, rollupExpirationHours.get(0));

        List<PreparedStatement> insertValuePS = new ArrayList<>();
        List<PreparedStatement> readValuePS = new ArrayList<>();
        List<PreparedStatement> readOldestCaptureTimePS = new ArrayList<>();
        List<PreparedStatement> readValueForRollupPS = new ArrayList<>();
        for (int i = 0; i <= count; i++) {
            // name already has "[counter]" suffix when it is a counter
            session.createTableWithTWCS("create table if not exists gauge_value_rollup_" + i
                    + " (agent_rollup varchar, gauge_name varchar, capture_time timestamp, value"
                    + " double, weight bigint, primary key ((agent_rollup, gauge_name),"
                    + " capture_time))", rollupExpirationHours.get(i));
            insertValuePS.add(session.prepare("insert into gauge_value_rollup_" + i
                    + " (agent_rollup, gauge_name, capture_time, value, weight) values (?, ?, ?, ?,"
                    + " ?) using ttl ?"));
            readValuePS.add(session.prepare("select capture_time, value, weight from"
                    + " gauge_value_rollup_" + i + " where agent_rollup = ? and gauge_name = ? and"
                    + " capture_time >= ? and capture_time <= ?"));
            readOldestCaptureTimePS.add(session.prepare("select capture_time from"
                    + " gauge_value_rollup_" + i + " where agent_rollup = ? and gauge_name = ?"
                    + " limit 1"));
            readValueForRollupPS.add(session.prepare("select value, weight from gauge_value_rollup_"
                    + i + " where agent_rollup = ? and gauge_name = ? and capture_time > ? and"
                    + " capture_time <= ?"));
        }
        this.insertValuePS = ImmutableList.copyOf(insertValuePS);
        this.readValuePS = ImmutableList.copyOf(readValuePS);
        this.readOldestCaptureTimePS = ImmutableList.copyOf(readOldestCaptureTimePS);
        this.readValueForRollupPS = ImmutableList.copyOf(readValueForRollupPS);

        this.readValueForRollupFromChildPS = session.prepare("select value, weight from"
                + " gauge_value_rollup_1 where agent_rollup = ? and gauge_name = ? and"
                + " capture_time = ?");

        List<PreparedStatement> insertNeedsRollup = new ArrayList<>();
        List<PreparedStatement> readNeedsRollup = new ArrayList<>();
        List<PreparedStatement> deleteNeedsRollup = new ArrayList<>();
        for (int i = 1; i <= count; i++) {
            session.createTableWithLCS("create table if not exists gauge_needs_rollup_" + i
                    + " (agent_rollup varchar, capture_time timestamp, uniqueness timeuuid,"
                    + " gauge_names set<varchar>, primary key (agent_rollup, capture_time,"
                    + " uniqueness)) with gc_grace_seconds = " + cassandraGcGraceSeconds, true);
            insertNeedsRollup.add(session.prepare("insert into gauge_needs_rollup_" + i
                    + " (agent_rollup, capture_time, uniqueness, gauge_names) values (?, ?, ?, ?)"
                    + " using TTL ?"));
            readNeedsRollup.add(session.prepare("select capture_time, uniqueness, gauge_names from"
                    + " gauge_needs_rollup_" + i + " where agent_rollup = ?"));
            deleteNeedsRollup.add(session.prepare("delete from gauge_needs_rollup_" + i + " where"
                    + " agent_rollup = ? and capture_time = ? and uniqueness = ?"));
        }
        this.insertNeedsRollup = insertNeedsRollup;
        this.readNeedsRollup = readNeedsRollup;
        this.deleteNeedsRollup = deleteNeedsRollup;

        session.createTableWithLCS("create table if not exists gauge_needs_rollup_from_child"
                + " (agent_rollup varchar, capture_time timestamp, uniqueness timeuuid,"
                + " child_agent_rollup varchar, gauge_names set<varchar>, primary key"
                + " (agent_rollup, capture_time, uniqueness)) with gc_grace_seconds = "
                + cassandraGcGraceSeconds, true);
        insertNeedsRollupFromChild = session.prepare("insert into gauge_needs_rollup_from_child"
                + " (agent_rollup, capture_time, uniqueness, child_agent_rollup, gauge_names)"
                + " values (?, ?, ?, ?, ?) using TTL ?");
        readNeedsRollupFromChild = session.prepare("select capture_time, uniqueness,"
                + " child_agent_rollup, gauge_names from gauge_needs_rollup_from_child where"
                + " agent_rollup = ?");
        deleteNeedsRollupFromChild = session.prepare("delete from gauge_needs_rollup_from_child"
                + " where agent_rollup = ? and capture_time = ? and uniqueness = ?");

        needsRollupCache1 =
                clusterManager.createReplicatedMap("gaugeNeedsRollupCache1", 5, MINUTES);
    }

    @Override
    public void store(String agentId, List<GaugeValue> gaugeValues) throws Exception {
        store(agentId, AgentRollupIds.getAgentRollupIds(agentId), gaugeValues);
    }

    /**
     * Writes raw gauge values (level 0), registers gauge names for each meta rollup id, then
     * records level-1 "needs rollup" entries. The raw writes are awaited before the "needs
     * rollup" inserts so a rollup can never observe a record whose data is not yet written.
     */
    public void store(String agentId, List<String> agentRollupIdsForMeta,
            List<GaugeValue> gaugeValues) throws Exception {
        if (gaugeValues.isEmpty()) {
            return;
        }
        int ttl = getTTLs().get(0);
        long maxCaptureTime = 0;
        List<Future<?>> futures = new ArrayList<>();
        for (GaugeValue gaugeValue : gaugeValues) {
            BoundStatement boundStatement = insertValuePS.get(0).bind();
            String gaugeName = gaugeValue.getGaugeName();
            long captureTime = gaugeValue.getCaptureTime();
            maxCaptureTime = Math.max(captureTime, maxCaptureTime);
            int adjustedTTL = Common.getAdjustedTTL(ttl, captureTime, clock);
            int i = 0;
            boundStatement.setString(i++, agentId);
            boundStatement.setString(i++, gaugeName);
            boundStatement.setTimestamp(i++, new Date(captureTime));
            boundStatement.setDouble(i++, gaugeValue.getValue());
            boundStatement.setLong(i++, gaugeValue.getWeight());
            boundStatement.setInt(i++, adjustedTTL);
            futures.add(session.writeAsync(boundStatement));
            for (String agentRollupIdForMeta : agentRollupIdsForMeta) {
                futures.addAll(gaugeNameDao.insert(agentRollupIdForMeta, captureTime, gaugeName));
            }
        }
        // wait for success before inserting "needs rollup" records
        MoreFutures.waitForAll(futures);
        futures.clear();

        // insert into gauge_needs_rollup_1
        Map<NeedsRollupKey, ImmutableSet<String>> updatesForNeedsRollupCache1 = new HashMap<>();
        SetMultimap<Long, String> rollupCaptureTimes = getRollupCaptureTimes(gaugeValues);
        for (Map.Entry<Long, Set<String>> entry : Multimaps.asMap(rollupCaptureTimes).entrySet()) {
            Long captureTime = entry.getKey();
            Set<String> gaugeNames = entry.getValue();
            NeedsRollupKey needsRollupKey = ImmutableNeedsRollupKey.of(agentId, captureTime);
            ImmutableSet<String> needsRollupGaugeNames = needsRollupCache1.get(needsRollupKey);
            if (needsRollupGaugeNames == null) {
                // first insert for this key
                updatesForNeedsRollupCache1.put(needsRollupKey, ImmutableSet.copyOf(gaugeNames));
            } else if (needsRollupGaugeNames.containsAll(gaugeNames)) {
                // capture current time after getting data from cache to prevent race condition with
                // reading the data in Common.getNeedsRollupList()
                if (!Common.isOldEnoughToRollup(captureTime, clock.currentTimeMillis(),
                        configRepository.getRollupConfigs().get(0).intervalMillis())) {
                    // completely covered by prior inserts that haven't been rolled up yet so no
                    // need to re-insert same data
                    continue;
                }
            } else {
                // merge will maybe help prevent a few subsequent inserts
                Set<String> combined = new HashSet<>(needsRollupGaugeNames);
                combined.addAll(gaugeNames);
                // FIX: cache the merged set; previously this cached only gaugeNames, which
                // discarded the merge above (leaving "combined" unused) and defeated the
                // duplicate-suppression that this cache exists to provide
                updatesForNeedsRollupCache1.put(needsRollupKey, ImmutableSet.copyOf(combined));
            }
            BoundStatement boundStatement = insertNeedsRollup.get(0).bind();
            int adjustedTTL = Common.getAdjustedTTL(ttl, captureTime, clock);
            int needsRollupAdjustedTTL = Common.getNeedsRollupAdjustedTTL(adjustedTTL,
                    configRepository.getRollupConfigs());
            int i = 0;
            boundStatement.setString(i++, agentId);
            boundStatement.setTimestamp(i++, new Date(captureTime));
            boundStatement.setUUID(i++, UUIDs.timeBased());
            boundStatement.setSet(i++, gaugeNames);
            boundStatement.setInt(i++, needsRollupAdjustedTTL);
            futures.add(session.writeAsync(boundStatement));
        }
        MoreFutures.waitForAll(futures);
        // update the cache now that the above inserts were successful
        needsRollupCache1.putAll(updatesForNeedsRollupCache1);
    }

    /** Returns gauges active in the last 7 days (lookahead covers clock skew on meta records). */
    @Override
    public List<Gauge> getRecentlyActiveGauges(String agentRollupId) throws Exception {
        long now = clock.currentTimeMillis();
        long from = now - DAYS.toMillis(7);
        return getGauges(agentRollupId, from, now + DAYS.toMillis(365));
    }

    @Override
    public List<Gauge> getGauges(String agentRollupId, long from, long to) throws Exception {
        List<Gauge> gauges = new ArrayList<>();
        for (String gaugeName : gaugeNameDao.getGaugeNames(agentRollupId, from, to)) {
            gauges.add(Gauges.getGauge(gaugeName));
        }
        return gauges;
    }

    // from is INCLUSIVE
    @Override
    public List<GaugeValue> readGaugeValues(String agentRollupId, String gaugeName, long from,
            long to, int rollupLevel) throws Exception {
        BoundStatement boundStatement = readValuePS.get(rollupLevel).bind();
        int i = 0;
        boundStatement.setString(i++, agentRollupId);
        boundStatement.setString(i++, gaugeName);
        boundStatement.setTimestamp(i++, new Date(from));
        boundStatement.setTimestamp(i++, new Date(to));
        ResultSet results = session.read(boundStatement);
        List<GaugeValue> gaugeValues = new ArrayList<>();
        for (Row row : results) {
            i = 0;
            gaugeValues.add(GaugeValue.newBuilder()
                    .setCaptureTime(checkNotNull(row.getTimestamp(i++)).getTime())
                    .setValue(row.getDouble(i++))
                    .setWeight(row.getLong(i++))
                    .build());
        }
        return gaugeValues;
    }

    /** Returns {@link Long#MAX_VALUE} when no data exists at the given rollup level. */
    @Override
    public long getOldestCaptureTime(String agentRollupId, String gaugeName, int rollupLevel)
            throws Exception {
        BoundStatement boundStatement = readOldestCaptureTimePS.get(rollupLevel).bind();
        int i = 0;
        boundStatement.setString(i++, agentRollupId);
        boundStatement.setString(i++, gaugeName);
        ResultSet results = session.read(boundStatement);
        Row row = results.one();
        return row == null ? Long.MAX_VALUE : checkNotNull(row.getTimestamp(0)).getTime();
    }

    @Override
    public void rollup(String agentRollupId) throws Exception {
        rollup(agentRollupId, AgentRollupIds.getParent(agentRollupId),
                !agentRollupId.endsWith("::"));
    }

    // there is no rollup from children on 5-second gauge values
    //
    // child agent rollups should be processed before their parent agent rollup, since initial
    // parent rollup depends on the 1-minute child rollup
    public void rollup(String agentRollupId, @Nullable String parentAgentRollupId, boolean leaf)
            throws Exception {
        List<Integer> ttls = getTTLs();
        int rollupLevel;
        if (leaf) {
            rollupLevel = 1;
        } else {
            // non-leaf rollups seed level 1 from their children instead of from raw values
            rollupFromChildren(agentRollupId, parentAgentRollupId, ttls.get(1));
            rollupLevel = 2;
        }
        while (rollupLevel <= configRepository.getRollupConfigs().size()) {
            int ttl = ttls.get(rollupLevel);
            rollup(agentRollupId, parentAgentRollupId, rollupLevel, ttl);
            rollupLevel++;
        }
    }

    /** Maps each level-1 rollup bucket end time to the gauge names captured inside it. */
    private SetMultimap<Long, String> getRollupCaptureTimes(List<GaugeValue> gaugeValues) {
        SetMultimap<Long, String> rollupCaptureTimes = HashMultimap.create();
        List<RollupConfig> rollupConfigs = configRepository.getRollupConfigs();
        for (GaugeValue gaugeValue : gaugeValues) {
            String gaugeName = gaugeValue.getGaugeName();
            long captureTime = gaugeValue.getCaptureTime();
            long intervalMillis = rollupConfigs.get(0).intervalMillis();
            long rollupCaptureTime = CaptureTimes.getRollup(captureTime, intervalMillis);
            rollupCaptureTimes.put(rollupCaptureTime, gaugeName);
        }
        return rollupCaptureTimes;
    }

    /** Level-1 rollup for a parent agent rollup, aggregating its children's level-1 values. */
    private void rollupFromChildren(String agentRollupId, @Nullable String parentAgentRollupId,
            int ttl) throws Exception {
        final int rollupLevel = 1;
        List<NeedsRollupFromChildren> needsRollupFromChildrenList = Common
                .getNeedsRollupFromChildrenList(agentRollupId, readNeedsRollupFromChild, session);
        List<RollupConfig> rollupConfigs = configRepository.getRollupConfigs();
        long nextRollupIntervalMillis = rollupConfigs.get(rollupLevel).intervalMillis();
        for (NeedsRollupFromChildren needsRollupFromChildren : needsRollupFromChildrenList) {
            long captureTime = needsRollupFromChildren.getCaptureTime();
            int adjustedTTL = Common.getAdjustedTTL(ttl, captureTime, clock);
            List<ListenableFuture<?>> futures = new ArrayList<>();
            for (Map.Entry<String, Collection<String>> entry : needsRollupFromChildren.getKeys()
                    .asMap()
                    .entrySet()) {
                String gaugeName = entry.getKey();
                Collection<String> childAgentRollupIds = entry.getValue();
                futures.add(rollupOneFromChildren(rollupLevel, agentRollupId, gaugeName,
                        childAgentRollupIds, captureTime, adjustedTTL));
            }
            // wait for above async work to ensure rollup complete before proceeding
            MoreFutures.waitForAll(futures);

            int needsRollupAdjustedTTL =
                    Common.getNeedsRollupAdjustedTTL(adjustedTTL, rollupConfigs);
            if (parentAgentRollupId != null) {
                // insert needs to happen first before call to postRollup(), see method-level
                // comment on postRollup
                Common.insertNeedsRollupFromChild(agentRollupId, parentAgentRollupId,
                        insertNeedsRollupFromChild, needsRollupFromChildren, captureTime,
                        needsRollupAdjustedTTL, session);
            }
            Common.postRollup(agentRollupId, needsRollupFromChildren.getCaptureTime(),
                    needsRollupFromChildren.getKeys().keySet(),
                    needsRollupFromChildren.getUniquenessKeysForDeletion(),
                    nextRollupIntervalMillis, insertNeedsRollup.get(rollupLevel),
                    deleteNeedsRollupFromChild, needsRollupAdjustedTTL, session);
        }
    }

    /** Rolls pending work at {@code rollupLevel} up from level {@code rollupLevel - 1}. */
    private void rollup(String agentRollupId, @Nullable String parentAgentRollupId, int rollupLevel,
            int ttl) throws Exception {
        List<RollupConfig> rollupConfigs = configRepository.getRollupConfigs();
        long rollupIntervalMillis = rollupConfigs.get(rollupLevel - 1).intervalMillis();
        Collection<NeedsRollup> needsRollupList = Common.getNeedsRollupList(agentRollupId,
                rollupLevel, rollupIntervalMillis, readNeedsRollup, session, clock);
        Long nextRollupIntervalMillis = null;
        if (rollupLevel < rollupConfigs.size()) {
            nextRollupIntervalMillis = rollupConfigs.get(rollupLevel).intervalMillis();
        }
        for (NeedsRollup needsRollup : needsRollupList) {
            long captureTime = needsRollup.getCaptureTime();
            long from = captureTime - rollupIntervalMillis;
            int adjustedTTL = Common.getAdjustedTTL(ttl, captureTime, clock);
            Set<String> gaugeNames = needsRollup.getKeys();
            List<ListenableFuture<?>> futures = new ArrayList<>();
            for (String gaugeName : gaugeNames) {
                futures.add(rollupOne(rollupLevel, agentRollupId, gaugeName, from, captureTime,
                        adjustedTTL));
            }
            if (futures.isEmpty()) {
                // no rollups occurred, warning already logged inside rollupOne() above
                // this can happen there is an old "needs rollup" record that was created prior to
                // TTL was introduced in 0.9.6, and when the "last needs rollup" record wasn't
                // processed (also prior to 0.9.6), and when the corresponding old data has expired
                Common.postRollup(agentRollupId, needsRollup.getCaptureTime(), gaugeNames,
                        needsRollup.getUniquenessKeysForDeletion(), null, null,
                        deleteNeedsRollup.get(rollupLevel - 1), -1, session);
                continue;
            }
            // wait for above async work to ensure rollup complete before proceeding
            MoreFutures.waitForAll(futures);

            int needsRollupAdjustedTTL =
                    Common.getNeedsRollupAdjustedTTL(adjustedTTL, rollupConfigs);
            if (rollupLevel == 1 && parentAgentRollupId != null) {
                // insert needs to happen first before call to postRollup(), see method-level
                // comment on postRollup
                BoundStatement boundStatement = insertNeedsRollupFromChild.bind();
                int i = 0;
                boundStatement.setString(i++, parentAgentRollupId);
                boundStatement.setTimestamp(i++, new Date(captureTime));
                boundStatement.setUUID(i++, UUIDs.timeBased());
                boundStatement.setString(i++, agentRollupId);
                boundStatement.setSet(i++, gaugeNames);
                boundStatement.setInt(i++, needsRollupAdjustedTTL);
                session.write(boundStatement);
            }
            PreparedStatement insertNeedsRollup = nextRollupIntervalMillis == null ? null
                    : this.insertNeedsRollup.get(rollupLevel);
            PreparedStatement deleteNeedsRollup = this.deleteNeedsRollup.get(rollupLevel - 1);
            Common.postRollup(agentRollupId, needsRollup.getCaptureTime(), gaugeNames,
                    needsRollup.getUniquenessKeysForDeletion(), nextRollupIntervalMillis,
                    insertNeedsRollup, deleteNeedsRollup, needsRollupAdjustedTTL, session);
        }
    }

    /** Aggregates one gauge's level-1 values from all child agent rollups at one capture time. */
    private ListenableFuture<?> rollupOneFromChildren(int rollupLevel, String agentRollupId,
            String gaugeName, Collection<String> childAgentRollupIds, long captureTime,
            int adjustedTTL) throws Exception {
        List<ListenableFuture<ResultSet>> futures = new ArrayList<>();
        for (String childAgentRollupId : childAgentRollupIds) {
            BoundStatement boundStatement = readValueForRollupFromChildPS.bind();
            int i = 0;
            boundStatement.setString(i++, childAgentRollupId);
            boundStatement.setString(i++, gaugeName);
            boundStatement.setTimestamp(i++, new Date(captureTime));
            futures.add(session.readAsyncWarnIfNoRows(boundStatement, "no gauge value table"
                    + " records found for agentRollupId={}, gaugeName={}, captureTime={}, level={}",
                    childAgentRollupId, gaugeName, captureTime, rollupLevel));
        }
        return MoreFutures.rollupAsync(futures, asyncExecutor, new DoRollup() {
            @Override
            public ListenableFuture<?> execute(Iterable<Row> rows) throws Exception {
                return rollupOneFromRows(rollupLevel, agentRollupId, gaugeName, captureTime,
                        adjustedTTL, rows);
            }
        });
    }

    // from is non-inclusive
    private ListenableFuture<?> rollupOne(int rollupLevel, String agentRollupId, String gaugeName,
            long from, long to, int adjustedTTL) throws Exception {
        BoundStatement boundStatement = readValueForRollupPS.get(rollupLevel - 1).bind();
        int i = 0;
        boundStatement.setString(i++, agentRollupId);
        boundStatement.setString(i++, gaugeName);
        boundStatement.setTimestamp(i++, new Date(from));
        boundStatement.setTimestamp(i++, new Date(to));
        ListenableFuture<ResultSet> future = session.readAsyncWarnIfNoRows(boundStatement,
                "no gauge value table records found for agentRollupId={}, gaugeName={}, from={},"
                        + " to={}, level={}",
                agentRollupId, gaugeName, from, to, rollupLevel);
        return MoreFutures.rollupAsync(future, asyncExecutor, new DoRollup() {
            @Override
            public ListenableFuture<?> execute(Iterable<Row> rows) throws Exception {
                return rollupOneFromRows(rollupLevel, agentRollupId, gaugeName, to, adjustedTTL,
                        rows);
            }
        });
    }

    /** Writes the weighted average of {@code rows} as a single value at {@code rollupLevel}. */
    private ListenableFuture<?> rollupOneFromRows(int rollupLevel, String agentRollupId,
            String gaugeName, long to, int adjustedTTL, Iterable<Row> rows) throws Exception {
        double totalWeightedValue = 0;
        long totalWeight = 0;
        for (Row row : rows) {
            double value = row.getDouble(0);
            long weight = row.getLong(1);
            totalWeightedValue += value * weight;
            totalWeight += weight;
        }
        BoundStatement boundStatement = insertValuePS.get(rollupLevel).bind();
        int i = 0;
        boundStatement.setString(i++, agentRollupId);
        boundStatement.setString(i++, gaugeName);
        boundStatement.setTimestamp(i++, new Date(to));
        // individual gauge value weights cannot be zero, and rows is non-empty
        // (see callers of this method), so totalWeight is guaranteed non-zero
        checkState(totalWeight != 0);
        boundStatement.setDouble(i++, totalWeightedValue / totalWeight);
        boundStatement.setLong(i++, totalWeight);
        boundStatement.setInt(i++, adjustedTTL);
        return session.writeAsync(boundStatement);
    }

    /** TTLs in seconds per rollup level (index 0 = raw, sharing level 1's expiration). */
    private List<Integer> getTTLs() throws Exception {
        List<Integer> rollupExpirationHours = Lists
                .newArrayList(configRepository.getCentralStorageConfig().rollupExpirationHours());
        rollupExpirationHours.add(0, rollupExpirationHours.get(0));
        List<Integer> ttls = new ArrayList<>();
        for (long expirationHours : rollupExpirationHours) {
            ttls.add(Ints.saturatedCast(HOURS.toSeconds(expirationHours)));
        }
        return ttls;
    }

    @Override
    @OnlyUsedByTests
    public void truncateAll() throws Exception {
        for (int i = 0; i <= configRepository.getRollupConfigs().size(); i++) {
            session.updateSchemaWithRetry("truncate gauge_value_rollup_" + i);
        }
        for (int i = 1; i <= configRepository.getRollupConfigs().size(); i++) {
            session.updateSchemaWithRetry("truncate gauge_needs_rollup_" + i);
        }
        session.updateSchemaWithRetry("truncate gauge_name");
        session.updateSchemaWithRetry("truncate gauge_needs_rollup_from_child");
    }

    @Value.Immutable
    @Serial.Structural
    @Styles.AllParameters
    interface NeedsRollupKey extends Serializable {
        String agentRollupId();
        long captureTime();
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.accumulo.core.util;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;

import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.conf.AccumuloConfiguration;
import org.apache.accumulo.core.conf.ConfigurationCopy;
import org.apache.accumulo.core.conf.Property;
import org.apache.accumulo.core.data.ArrayByteSequence;
import org.apache.accumulo.core.data.ByteSequence;
import org.apache.accumulo.core.data.Column;
import org.apache.accumulo.core.data.ColumnUpdate;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.data.TableId;
import org.apache.accumulo.core.dataImpl.thrift.TMutation;
import org.apache.accumulo.core.file.FileSKVIterator;
import org.apache.accumulo.core.file.rfile.RFile.Reader;
import org.apache.commons.lang3.mutable.MutableLong;
import org.apache.hadoop.io.Text;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableSet;

/**
 * Utilities for parsing, validating, encoding and applying table locality group configuration:
 * mapping locality group names to their column family sets, escaping/unescaping family names for
 * the property format, and partitioning mutations by locality group.
 */
public class LocalityGroupUtil {

  private static final Logger log = LoggerFactory.getLogger(LocalityGroupUtil.class);

  // using an immutable set here for more efficient comparisons in LocalityGroupIterator
  public static final Set<ByteSequence> EMPTY_CF_SET = Set.of();

  /**
   * Create a set of families to be passed into the SortedKeyValueIterator seek call from a supplied
   * set of columns. We are using the ImmutableSet to enable faster comparisons down in the
   * LocalityGroupIterator.
   *
   * @param columns
   *          The set of columns
   * @return An immutable set of columns
   */
  public static Set<ByteSequence> families(Collection<Column> columns) {
    if (columns.isEmpty()) {
      return EMPTY_CF_SET;
    }
    var builder = ImmutableSet.<ByteSequence>builder();
    columns.forEach(c -> builder.add(new ArrayByteSequence(c.getColumnFamily())));
    return builder.build();
  }

  // Checked exception signaling an invalid locality group configuration (overlapping families,
  // undeclared groups, malformed escape sequences, ...).
  @SuppressWarnings("serial")
  public static class LocalityGroupConfigurationError extends AccumuloException {
    LocalityGroupConfigurationError(String why) {
      super(why);
    }
  }

  // True for the group list property itself or any per-group property under its prefix.
  public static boolean isLocalityGroupProperty(String prop) {
    return prop.startsWith(Property.TABLE_LOCALITY_GROUP_PREFIX.getKey())
        || prop.equals(Property.TABLE_LOCALITY_GROUPS.getKey());
  }

  // Validates locality group configuration by attempting a full parse; throws on error.
  public static void checkLocalityGroups(Iterable<Entry<String,String>> config)
      throws LocalityGroupConfigurationError {
    ConfigurationCopy cc = new ConfigurationCopy(config);
    if (cc.get(Property.TABLE_LOCALITY_GROUPS) != null) {
      getLocalityGroups(cc);
    }
  }

  // Like getLocalityGroups(), but logs and returns an empty map instead of propagating
  // configuration errors, so callers can proceed without locality groups.
  public static Map<String,Set<ByteSequence>>
      getLocalityGroupsIgnoringErrors(AccumuloConfiguration acuconf, TableId tableId) {
    try {
      return getLocalityGroups(acuconf);
    } catch (LocalityGroupConfigurationError | RuntimeException e) {
      log.warn("Failed to get locality group config for tableId:" + tableId
          + ", proceeding without locality groups.", e);
    }
    return Collections.emptyMap();
  }

  /**
   * Parses the configured locality groups into a map of group name to its set of column families.
   *
   * @throws LocalityGroupConfigurationError
   *           if a family appears in more than one group, or a group is listed but has no
   *           family property declared
   */
  public static Map<String,Set<ByteSequence>> getLocalityGroups(AccumuloConfiguration acuconf)
      throws LocalityGroupConfigurationError {
    Map<String,Set<ByteSequence>> result = new HashMap<>();
    // the group list property is a comma-separated list of group names
    String[] groups = acuconf.get(Property.TABLE_LOCALITY_GROUPS).split(",");
    for (String group : groups) {
      if (!group.isEmpty()) {
        result.put(group, new HashSet<>());
      }
    }
    // families seen so far across all groups, used to detect overlap
    HashSet<ByteSequence> all = new HashSet<>();
    for (Entry<String,String> entry : acuconf) {
      String property = entry.getKey();
      String value = entry.getValue();
      String prefix = Property.TABLE_LOCALITY_GROUP_PREFIX.getKey();
      if (property.startsWith(prefix)) {
        // this property configures a locality group, find out which one:
        String group = property.substring(prefix.length());
        String[] parts = group.split("\\.");
        group = parts[0];
        if (result.containsKey(group)) {
          // only the bare "<group>" property carries the family list (no sub-key)
          if (parts.length == 1) {
            Set<ByteSequence> colFamsSet = decodeColumnFamilies(value);
            if (!Collections.disjoint(all, colFamsSet)) {
              colFamsSet.retainAll(all);
              throw new LocalityGroupConfigurationError("Column families " + colFamsSet
                  + " in group " + group + " is already used by another locality group");
            }
            all.addAll(colFamsSet);
            result.put(group, colFamsSet);
          }
        }
      }
    }
    // every listed group must have had a family list declared above
    Set<Entry<String,Set<ByteSequence>>> es = result.entrySet();
    for (Entry<String,Set<ByteSequence>> entry : es) {
      if (entry.getValue().isEmpty()) {
        throw new LocalityGroupConfigurationError(
            "Locality group " + entry.getKey() + " specified but not declared");
      }
    }
    // result.put("", all);
    return result;
  }

  // Decodes a comma-separated list of escaped family names into a mutable set.
  public static Set<ByteSequence> decodeColumnFamilies(String colFams)
      throws LocalityGroupConfigurationError {
    HashSet<ByteSequence> colFamsSet = new HashSet<>();
    for (String family : colFams.split(",")) {
      ByteSequence cfbs = decodeColumnFamily(family);
      colFamsSet.add(cfbs);
    }
    return colFamsSet;
  }

  /**
   * Decodes a single escaped column family name. Inverse of {@code encodeColumnFamily}:
   * {@code \\} is a literal backslash and {@code \xHH} is a hex-encoded byte.
   */
  public static ByteSequence decodeColumnFamily(String colFam)
      throws LocalityGroupConfigurationError {
    byte[] output = new byte[colFam.length()];
    int pos = 0;

    for (int i = 0; i < colFam.length(); i++) {
      char c = colFam.charAt(i);

      if (c == '\\') {
        // next char must be 'x' or '\'
        i++;

        if (i >= colFam.length()) {
          throw new LocalityGroupConfigurationError("Expected 'x' or '\' after '\' in " + colFam);
        }

        char nc = colFam.charAt(i);

        switch (nc) {
          case '\\':
            output[pos++] = '\\';
            break;
          case 'x':
            // next two chars must be [0-9][0-9]
            i++;
            output[pos++] = (byte) (0xff & Integer.parseInt(colFam.substring(i, i + 2), 16));
            // consume the first hex digit here; the loop's i++ consumes the second
            i++;
            break;
          default:
            throw new LocalityGroupConfigurationError(
                "Expected 'x' or '\' after '\' in " + colFam);
        }
      } else {
        output[pos++] = (byte) (0xff & c);
      }
    }

    return new ArrayByteSequence(output, 0, pos);
  }

  // Encodes a set of families as a sorted, comma-separated, escaped list.
  public static String encodeColumnFamilies(Set<Text> colFams) {
    SortedSet<String> ecfs = new TreeSet<>();

    StringBuilder sb = new StringBuilder();

    for (Text text : colFams) {
      String ecf = encodeColumnFamily(sb, text.getBytes(), text.getLength());
      ecfs.add(ecf);
    }

    return Joiner.on(",").join(ecfs);
  }

  public static String encodeColumnFamily(ByteSequence bs) {
    if (bs.offset() != 0) {
      throw new IllegalArgumentException("The offset cannot be non-zero.");
    }
    return encodeColumnFamily(new StringBuilder(), bs.getBackingArray(), bs.length());
  }

  // Escapes a raw family: backslash -> "\\", printable (non-comma) chars as-is, everything else
  // as "\xHH". The caller-supplied StringBuilder is reset and reused to avoid reallocation.
  private static String encodeColumnFamily(StringBuilder sb, byte[] ba, int len) {
    sb.setLength(0);

    for (int i = 0; i < len; i++) {
      int c = 0xff & ba[i];
      if (c == '\\') {
        sb.append("\\\\");
      } else if (c >= 32 && c <= 126 && c != ',') {
        sb.append((char) c);
      } else {
        sb.append("\\x").append(String.format("%02X", c));
      }
    }

    return sb.toString();
  }

  // Lightweight Mutation view that shares a row and a subset of another mutation's updates;
  // used by Partitioner to split a mutation across locality groups without copying data.
  // Serialization/equality operations are intentionally unsupported.
  public static class PartitionedMutation extends Mutation {
    private byte[] row;
    private List<ColumnUpdate> updates;

    public PartitionedMutation(byte[] row, List<ColumnUpdate> updates) {
      this.row = row;
      this.updates = updates;
    }

    @Override
    public byte[] getRow() {
      return row;
    }

    @Override
    public List<ColumnUpdate> getUpdates() {
      return updates;
    }

    @Override
    public TMutation toThrift() {
      throw new UnsupportedOperationException();
    }

    @Override
    public int hashCode() {
      throw new UnsupportedOperationException();
    }

    @Override
    public boolean
    equals(Object o) {
      throw new UnsupportedOperationException();
    }

    @Override
    public boolean equals(Mutation m) {
      throw new UnsupportedOperationException();
    }
  }

  // Routes mutations/column updates to locality group ids. Id is the index of the group the
  // family belongs to, or groups.length for families not in any configured group (default group).
  public static class Partitioner {
    private Map<ByteSequence,Integer> colfamToLgidMap;
    private PreAllocatedArray<Map<ByteSequence,MutableLong>> groups;

    public Partitioner(PreAllocatedArray<Map<ByteSequence,MutableLong>> groups) {
      this.groups = groups;
      this.colfamToLgidMap = new HashMap<>();
      // invert groups: family -> index of the locality group containing it
      for (int i = 0; i < groups.length; i++) {
        for (ByteSequence cf : groups.get(i).keySet()) {
          colfamToLgidMap.put(cf, i);
        }
      }
    }

    /**
     * Distributes each mutation into {@code partitionedMutations} by locality group. A mutation
     * whose updates all fall in one group is added whole; otherwise it is split into one
     * {@link PartitionedMutation} per group touched.
     */
    public void partition(List<Mutation> mutations,
        PreAllocatedArray<List<Mutation>> partitionedMutations) {

      MutableByteSequence mbs = new MutableByteSequence(new byte[0], 0, 0);

      // scratch buckets, one per group plus one for the default group; reused per mutation
      PreAllocatedArray<List<ColumnUpdate>> parts = new PreAllocatedArray<>(groups.length + 1);

      for (Mutation mutation : mutations) {
        if (mutation.getUpdates().size() == 1) {
          // fast path: single update, no splitting needed
          int lgid = getLgid(mbs, mutation.getUpdates().get(0));
          partitionedMutations.get(lgid).add(mutation);
        } else {
          for (int i = 0; i < parts.length; i++) {
            parts.set(i, null);
          }

          int lgcount = 0;

          for (ColumnUpdate cu : mutation.getUpdates()) {
            int lgid = getLgid(mbs, cu);
            if (parts.get(lgid) == null) {
              parts.set(lgid, new ArrayList<>());
              lgcount++;
            }
            parts.get(lgid).add(cu);
          }

          if (lgcount == 1) {
            // all updates landed in one group: keep the original mutation intact
            for (int i = 0; i < parts.length; i++) {
              if (parts.get(i) != null) {
                partitionedMutations.get(i).add(mutation);
                break;
              }
            }
          } else {
            // updates span groups: emit one partial mutation per group touched
            for (int i = 0; i < parts.length; i++) {
              if (parts.get(i) != null) {
                partitionedMutations.get(i)
                    .add(new PartitionedMutation(mutation.getRow(), parts.get(i)));
              }
            }
          }
        }
      }
    }

    // Looks up the group id for an update's family, reusing mbs to avoid per-call allocation;
    // unknown families map to groups.length (the default group).
    private Integer getLgid(MutableByteSequence mbs, ColumnUpdate cu) {
      mbs.setArray(cu.getColumnFamily(), 0, cu.getColumnFamily().length);
      Integer lgid = colfamToLgidMap.get(mbs);
      if (lgid == null) {
        lgid = groups.length;
      }
      return lgid;
    }
  }

  /**
   * This method created to help seek an rfile for a locality group obtained from
   * {@link Reader#getLocalityGroupCF()}. This method can possibly return an empty list for the
   * default locality group. When this happens the default locality group needs to be seeked
   * differently. This method helps do that.
   *
   * <p>
   * For the default locality group will seek using the families of all other locality groups
   * non-inclusive.
   *
   * @see Reader#getLocalityGroupCF()
   */
  public static void seek(FileSKVIterator reader, Range range, String lgName,
      Map<String,ArrayList<ByteSequence>> localityGroupCF) throws IOException {

    Collection<ByteSequence> families;
    boolean inclusive;
    if (lgName == null) {
      // this is the default locality group, create a set of all families not in the default group
      Set<ByteSequence> nonDefaultFamilies = new HashSet<>();
      for (Entry<String,ArrayList<ByteSequence>> entry : localityGroupCF.entrySet()) {
        if (entry.getKey() != null) {
          nonDefaultFamilies.addAll(entry.getValue());
        }
      }

      families = nonDefaultFamilies;
      // exclude the other groups' families, leaving only the default group's data
      inclusive = false;
    } else {
      families = localityGroupCF.get(lgName);
      inclusive = true;
    }

    reader.seek(range, families, inclusive);
  }

  /**
   * Validates that locality groups are non-empty and share no column families.
   *
   * @throws IllegalArgumentException
   *           if a family appears in more than one group or a group is empty
   */
  public static void ensureNonOverlappingGroups(Map<String,Set<Text>> groups) {
    HashSet<Text> all = new HashSet<>();
    for (Entry<String,Set<Text>> entry : groups.entrySet()) {
      if (!Collections.disjoint(all, entry.getValue())) {
        throw new IllegalArgumentException(
            "Group " + entry.getKey() + " overlaps with another group");
      }

      if (entry.getValue().isEmpty()) {
        throw new IllegalArgumentException("Group " + entry.getKey() + " is empty");
      }

      all.addAll(entry.getValue());
    }
  }
}
package play.libs;

import com.ning.http.client.AsyncHttpClient;
import com.ning.http.client.AsyncCompletionHandler;
import com.ning.http.client.FluentCaseInsensitiveStringsMap;
import com.ning.http.client.PerRequestConfig;
import com.ning.http.client.RequestBuilderBase;
import com.ning.http.client.Realm.AuthScheme;
import com.ning.http.client.Realm.RealmBuilder;
import com.ning.http.client.FluentStringsMap;

import java.io.IOException;
import java.io.InputStream;
import java.io.File;
import java.net.MalformedURLException;
import java.net.URI;
import java.util.Collection;
import java.util.Map;
import java.util.HashMap;
import java.util.List;
import java.util.ArrayList;

import com.ning.http.util.AsyncHttpProviderUtils;
import org.jboss.netty.handler.codec.http.HttpHeaders;
import org.w3c.dom.Document;
import play.libs.F.Promise;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

/**
 * Asynchronous API to query web services, as an http client.
 *
 * The value returned is a {@code Promise<Response>}, and you should use Play's
 * asynchronous mechanisms to use this response.
 */
public class WS {

    /** Returns the shared underlying AsyncHttpClient managed by Play. */
    private static AsyncHttpClient client() {
        return play.api.libs.ws.WS.client();
    }

    /**
     * Prepare a new request. You can then construct it by chaining calls.
     *
     * @param url the URL to request
     */
    public static WSRequestHolder url(String url) {
        return new WSRequestHolder(url);
    }

    /**
     * Provides the bridge between Play and the underlying ning request.
     *
     * <p>Keeps a local {@code headers} mirror in sync with the headers passed to the
     * ning builder so that {@link #getAllHeaders()} and {@link #getHeader(String)}
     * reflect the request actually being built.
     */
    public static class WSRequest extends RequestBuilderBase<WSRequest> {

        // Local, case-insensitive mirror of the request headers (the ning builder
        // does not expose its own headers for reading).
        private FluentCaseInsensitiveStringsMap headers = new FluentCaseInsensitiveStringsMap();
        private String method;
        private String url;

        public WSRequest(String method) {
            super(WSRequest.class, method, false);
            this.method = method;
        }

        /** Attaches an authentication realm (preemptive) to the request. */
        private WSRequest auth(String username, String password, AuthScheme scheme) {
            this.setRealm((new RealmBuilder())
                .setScheme(scheme)
                .setPrincipal(username)
                .setPassword(password)
                .setUsePreemptiveAuth(true)
                .build());
            return this;
        }

        /**
         * Set an HTTP header.
         */
        @Override
        public WSRequest setHeader(String name, String value) {
            headers.replace(name, value);
            return super.setHeader(name, value);
        }

        /**
         * Add an HTTP header (used for headers with multiple values).
         */
        @Override
        public WSRequest addHeader(String name, String value) {
            if (value == null) {
                value = "";
            }
            headers.add(name, value);
            return super.addHeader(name, value);
        }

        /**
         * Defines the request headers.
         */
        @Override
        public WSRequest setHeaders(FluentCaseInsensitiveStringsMap hdrs) {
            // BUGFIX: previously this assigned `headers` back to itself (a no-op), so
            // the local mirror never picked up the new headers. Mirror `hdrs` instead.
            headers = (hdrs == null ? new FluentCaseInsensitiveStringsMap() : hdrs);
            return super.setHeaders(hdrs);
        }

        /**
         * Defines the request headers.
         */
        @Override
        public WSRequest setHeaders(Map<String, Collection<String>> hdrs) {
            // BUGFIX: previously this copied the OLD `headers` map instead of `hdrs`,
            // leaving the local mirror stale.
            headers = (hdrs == null ?
                new FluentCaseInsensitiveStringsMap() :
                new FluentCaseInsensitiveStringsMap(hdrs));
            return super.setHeaders(hdrs);
        }

        /**
         * Return the headers of the request being constructed.
         * Note: this is a live view of the internal header map, not a copy.
         */
        public Map<String, List<String>> getAllHeaders() {
            return headers;
        }

        /**
         * Return the values set for the given header, or an empty list if none.
         */
        public List<String> getHeader(String name) {
            List<String> hdrs = headers.get(name);
            if (hdrs == null) return new ArrayList<String>();
            return hdrs;
        }

        /** Return the HTTP method of this request. */
        public String getMethod() {
            return this.method;
        }

        @Override
        public WSRequest setUrl(String url) {
            this.url = url;
            return super.setUrl(url);
        }

        /** Return the URL of this request. */
        public String getUrl() {
            return this.url;
        }

        /**
         * Executes the request asynchronously, adapting the ning completion handler
         * to a Play {@code Promise<Response>}. IO failures during submission are
         * reported through the promise rather than thrown.
         */
        public Promise<Response> execute() {

            final scala.concurrent.Promise<Response> scalaPromise =
                scala.concurrent.Promise$.MODULE$.<Response>apply();
            try {
                WS.client().executeRequest(request, new AsyncCompletionHandler<com.ning.http.client.Response>() {
                    @Override
                    public com.ning.http.client.Response onCompleted(com.ning.http.client.Response response) {
                        final com.ning.http.client.Response ahcResponse = response;
                        scalaPromise.success(new Response(ahcResponse));
                        return response;
                    }
                    @Override
                    public void onThrowable(Throwable t) {
                        scalaPromise.failure(t);
                    }
                });
            } catch (IOException exception) {
                scalaPromise.failure(exception);
            }
            return new Promise<Response>(scalaPromise.future());
        }
    }

    /**
     * Provides the user facing API for building WS request.
     */
    public static class WSRequestHolder {

        private final String url;
        private Map<String, Collection<String>> headers = new HashMap<String, Collection<String>>();
        private Map<String, Collection<String>> queryParameters = new HashMap<String, Collection<String>>();

        private String username = null;
        private String password = null;
        private AuthScheme scheme = null;
        private SignatureCalculator calculator = null;

        // 0 means "no per-request timeout configured" (global client setting applies).
        private int timeout = 0;
        private Boolean followRedirects = null;

        public WSRequestHolder(String url) {
            this.url = url;
        }

        /**
         * Sets a header with the given name, this can be called repeatedly.
         *
         * @param name
         * @param value
         */
        public WSRequestHolder setHeader(String name, String value) {
            if (headers.containsKey(name)) {
                Collection<String> values = headers.get(name);
                values.add(value);
            } else {
                List<String> values = new ArrayList<String>();
                values.add(value);
                headers.put(name, values);
            }
            return this;
        }

        /**
         * Sets a query parameter with the given name, this can be called repeatedly.
         *
         * @param name
         * @param value
         */
        public WSRequestHolder setQueryParameter(String name, String value) {
            if (queryParameters.containsKey(name)) {
                Collection<String> values = queryParameters.get(name);
                values.add(value);
            } else {
                List<String> values = new ArrayList<String>();
                values.add(value);
                queryParameters.put(name, values);
            }
            return this;
        }

        /**
         * Sets the authentication header for the current request using BASIC authentication.
         *
         * @param username
         * @param password
         */
        public WSRequestHolder setAuth(String username, String password) {
            this.username = username;
            this.password = password;
            this.scheme = AuthScheme.BASIC;
            return this;
        }

        /**
         * Sets the authentication header for the current request.
         *
         * @param username
         * @param password
         * @param scheme authentication scheme
         */
        public WSRequestHolder setAuth(String username, String password, AuthScheme scheme) {
            this.username = username;
            this.password = password;
            this.scheme = scheme;
            return this;
        }

        /** Sets a signature calculator (for example: OAuth) used to sign the request. */
        public WSRequestHolder sign(SignatureCalculator calculator) {
            this.calculator = calculator;
            return this;
        }

        /**
         * Sets whether redirects (301, 302) should be followed automatically.
         *
         * @param followRedirects
         */
        public WSRequestHolder setFollowRedirects(Boolean followRedirects) {
            this.followRedirects = followRedirects;
            return this;
        }

        /**
         * Sets the request timeout in milliseconds.
         *
         * @param timeout
         */
        public WSRequestHolder setTimeout(int timeout) {
            this.timeout = timeout;
            return this;
        }

        /**
         * Set the content type. If the request body is a String, and no charset parameter
         * is included, then it will default to UTF-8.
         *
         * @param contentType The content type
         */
        public WSRequestHolder setContentType(String contentType) {
            return setHeader(HttpHeaders.Names.CONTENT_TYPE, contentType);
        }

        /**
         * @return the URL of the request.
         */
        public String getUrl() {
            return this.url;
        }

        /**
         * @return the headers (a copy to prevent side-effects).
         */
        public Map<String, Collection<String>> getHeaders() {
            return new HashMap<String, Collection<String>>(this.headers);
        }

        /**
         * @return the query parameters (a copy to prevent side-effects).
         */
        public Map<String, Collection<String>> getQueryParameters() {
            return new HashMap<String, Collection<String>>(this.queryParameters);
        }

        /**
         * @return the auth username, null if not an authenticated request.
         */
        public String getUsername() {
            return this.username;
        }

        /**
         * @return the auth password, null if not an authenticated request
         */
        public String getPassword() {
            return this.password;
        }

        /**
         * @return the auth scheme, null if not an authenticated request
         */
        public AuthScheme getScheme() {
            return this.scheme;
        }

        /**
         * @return the signature calculator (example: OAuth), null if none is set.
         */
        public SignatureCalculator getCalculator() {
            return this.calculator;
        }

        /**
         * @return the request timeout in milliseconds, 0 if none was set.
         */
        public int getTimeout() {
            return this.timeout;
        }

        /**
         * @return true if the request is configured to follow redirects, false if it is
         *         configured not to, null if nothing is configured and the global client
         *         preference should be used instead.
         */
        public Boolean getFollowRedirects() {
            return this.followRedirects;
        }

        /**
         * Perform a GET on the request asynchronously.
         */
        public Promise<Response> get() {
            return execute("GET");
        }

        /**
         * Perform a POST on the request asynchronously.
         *
         * @param body represented as String
         */
        public Promise<Response> post(String body) {
            return executeString("POST", body);
        }

        /**
         * Perform a PUT on the request asynchronously.
         *
         * @param body represented as String
         */
        public Promise<Response> put(String body) {
            return executeString("PUT", body);
        }

        /**
         * Perform a POST on the request asynchronously.
         *
         * @param body represented as JSON
         */
        public Promise<Response> post(JsonNode body) {
            return executeJson("POST", body);
        }

        /**
         * Perform a PUT on the request asynchronously.
         *
         * @param body represented as JSON
         */
        public Promise<Response> put(JsonNode body) {
            return executeJson("PUT", body);
        }

        /**
         * Perform a POST on the request asynchronously.
         *
         * @param body represented as an InputStream
         */
        public Promise<Response> post(InputStream body) {
            return executeIS("POST", body);
        }

        /**
         * Perform a PUT on the request asynchronously.
         *
         * @param body represented as an InputStream
         */
        public Promise<Response> put(InputStream body) {
            return executeIS("PUT", body);
        }

        /**
         * Perform a POST on the request asynchronously.
         *
         * @param body represented as a File
         */
        public Promise<Response> post(File body) {
            return executeFile("POST", body);
        }

        /**
         * Perform a PUT on the request asynchronously.
         *
         * @param body represented as a File
         */
        public Promise<Response> put(File body) {
            return executeFile("PUT", body);
        }

        /**
         * Perform a DELETE on the request asynchronously.
         */
        public Promise<Response> delete() {
            return execute("DELETE");
        }

        /**
         * Perform a HEAD on the request asynchronously.
         */
        public Promise<Response> head() {
            return execute("HEAD");
        }

        /**
         * Perform an OPTIONS on the request asynchronously.
         */
        public Promise<Response> options() {
            return execute("OPTIONS");
        }

        /**
         * Execute an arbitrary method on the request asynchronously.
         *
         * @param method The method to execute
         */
        public Promise<Response> execute(String method) {
            WSRequest req = new WSRequest(method).setUrl(url)
                .setHeaders(headers)
                .setQueryParameters(new FluentStringsMap(queryParameters));
            return execute(req);
        }

        /**
         * Executes a request carrying a String body, detecting the charset from the
         * Content-Type header (defaulting to text/plain; charset=utf-8).
         */
        private Promise<Response> executeString(String method, String body) {
            FluentCaseInsensitiveStringsMap headers = new FluentCaseInsensitiveStringsMap(this.headers);

            // Detect and maybe add charset
            String contentType = headers.getFirstValue(HttpHeaders.Names.CONTENT_TYPE);
            if (contentType == null) {
                contentType = "text/plain";
            }
            String charset = AsyncHttpProviderUtils.parseCharset(contentType);
            if (charset == null) {
                charset = "utf-8";
                headers.replace(HttpHeaders.Names.CONTENT_TYPE, contentType + "; charset=utf-8");
            }

            WSRequest req = new WSRequest(method).setBody(body)
                .setUrl(url)
                .setHeaders(headers)
                .setQueryParameters(new FluentStringsMap(queryParameters))
                .setBodyEncoding(charset);
            return execute(req);
        }

        /** Executes a request carrying a JSON body, always encoded as UTF-8. */
        private Promise<Response> executeJson(String method, JsonNode body) {
            WSRequest req = new WSRequest(method).setBody(Json.stringify(body))
                .setUrl(url)
                .setHeaders(headers)
                .setHeader(HttpHeaders.Names.CONTENT_TYPE, "application/json; charset=utf-8")
                .setQueryParameters(new FluentStringsMap(queryParameters))
                .setBodyEncoding("utf-8");
            return execute(req);
        }

        /** Executes a request whose body is streamed from an InputStream. */
        private Promise<Response> executeIS(String method, InputStream body) {
            WSRequest req = new WSRequest(method).setBody(body)
                .setUrl(url)
                .setHeaders(headers)
                .setQueryParameters(new FluentStringsMap(queryParameters));
            return execute(req);
        }

        /** Executes a request whose body is read from a File. */
        private Promise<Response> executeFile(String method, File body) {
            WSRequest req = new WSRequest(method).setBody(body)
                .setUrl(url)
                .setHeaders(headers)
                .setQueryParameters(new FluentStringsMap(queryParameters));
            return execute(req);
        }

        /**
         * Applies the holder's per-request configuration (timeout, redirects, auth,
         * signature) to the built request, then executes it.
         */
        private Promise<Response> execute(WSRequest req) {
            if (this.timeout > 0) {
                PerRequestConfig config = new PerRequestConfig();
                config.setRequestTimeoutInMs(this.timeout);
                req.setPerRequestConfig(config);
            }
            if (this.followRedirects != null) {
                req.setFollowRedirects(this.followRedirects);
            }
            if (this.username != null && this.password != null && this.scheme != null)
                req.auth(this.username, this.password, this.scheme);
            if (this.calculator != null)
                this.calculator.sign(req);
            return req.execute();
        }
    }

    /**
     * A WS Cookie.
     */
    public static interface Cookie {

        /**
         * Returns the underlying "native" object for the cookie.
         */
        public Object getUnderlying();

        public String getDomain();

        public String getName();

        public String getValue();

        public String getPath();

        public Integer getMaxAge();

        public Boolean isSecure();

        public Integer getVersion();

        // Cookie ports should not be used; cookies for a given host are shared across
        // all the ports on that host.
    }

    /**
     * The Ning implementation of a WS cookie: a thin delegating wrapper.
     */
    private static class NingCookie implements Cookie {

        private final com.ning.http.client.Cookie ahcCookie;

        public NingCookie(com.ning.http.client.Cookie ahcCookie) {
            this.ahcCookie = ahcCookie;
        }

        /**
         * Returns the underlying "native" object for the cookie.
         */
        public Object getUnderlying() {
            return ahcCookie;
        }

        public String getDomain() {
            return ahcCookie.getDomain();
        }

        public String getName() {
            return ahcCookie.getName();
        }

        public String getValue() {
            return ahcCookie.getValue();
        }

        public String getPath() {
            return ahcCookie.getPath();
        }

        public Integer getMaxAge() {
            return ahcCookie.getMaxAge();
        }

        public Boolean isSecure() {
            return ahcCookie.isSecure();
        }

        public Integer getVersion() {
            return ahcCookie.getVersion();
        }
    }

    /**
     * A WS response.
     */
    public static class Response {

        private com.ning.http.client.Response ahcResponse;

        public Response(com.ning.http.client.Response ahcResponse) {
            this.ahcResponse = ahcResponse;
        }

        /**
         * Get the HTTP status code of the response
         */
        public int getStatus() {
            return ahcResponse.getStatusCode();
        }

        /**
         * Get the HTTP status text of the response
         */
        public String getStatusText() {
            return ahcResponse.getStatusText();
        }

        /**
         * Get the given HTTP header of the response
         */
        public String getHeader(String key) {
            return ahcResponse.getHeader(key);
        }

        /**
         * Get all the cookies.
         */
        public List<Cookie> getCookies() {
            List<Cookie> cookieList = new ArrayList<Cookie>();
            for (com.ning.http.client.Cookie ahcCookie : ahcResponse.getCookies()) {
                cookieList.add(new NingCookie(ahcCookie));
            }
            return cookieList;
        }

        /**
         * Get only one cookie, using the cookie name.
         */
        public Cookie getCookie(String name) {
            for (com.ning.http.client.Cookie ahcCookie : ahcResponse.getCookies()) {
                // safe -- cookie.getName() will never return null
                if (ahcCookie.getName().equals(name)) {
                    return new NingCookie(ahcCookie);
                }
            }
            return null;
        }

        /**
         * Get the response body as a string. If the charset is not specified, this
         * defaults to ISO-8859-1 for text sub mime types, as per RFC-2616 sec 3.7.1,
         * otherwise it defaults to UTF-8.
         */
        public String getBody() {
            try {
                // RFC-2616#3.7.1 states that any text/* mime type should default to ISO-8859-1
                // charset if not explicitly set, while Play's default encoding is UTF-8. So,
                // use UTF-8 if charset is not explicitly set and content type is not text/*,
                // otherwise default to ISO-8859-1
                String contentType = ahcResponse.getContentType();
                if (contentType == null) {
                    // As defined by RFC-2616#7.2.1
                    contentType = "application/octet-stream";
                }
                String charset = AsyncHttpProviderUtils.parseCharset(contentType);

                if (charset != null) {
                    return ahcResponse.getResponseBody(charset);
                } else if (contentType.startsWith("text/")) {
                    return ahcResponse.getResponseBody(AsyncHttpProviderUtils.DEFAULT_CHARSET);
                } else {
                    return ahcResponse.getResponseBody("utf-8");
                }
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }

        /**
         * Get the response body as a {@link Document DOM document}
         * @return a DOM document
         */
        public Document asXml() {
            try {
                return play.libs.XML.fromInputStream(ahcResponse.getResponseBodyAsStream(), "utf-8");
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }

        /**
         * Get the response body as a {@link com.fasterxml.jackson.databind.JsonNode}
         * @return the json response
         */
        public JsonNode asJson() {
            try {
                // Jackson will automatically detect the correct encoding according to the
                // rules in RFC-4627
                return Json.parse(ahcResponse.getResponseBodyAsStream());
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }

        /**
         * Get the response body as a stream
         * @return The stream to read the response body from
         */
        public InputStream getBodyAsStream() {
            try {
                return ahcResponse.getResponseBodyAsStream();
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }

        /**
         * Get the response body as a byte array
         * @return The byte array
         */
        public byte[] asByteArray() {
            try {
                return ahcResponse.getResponseBodyAsBytes();
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }

        /**
         * Return the request {@link java.net.URI}. Note that if the request got redirected,
         * the value of the {@link java.net.URI} will be the last valid redirect url.
         *
         * @return the request {@link java.net.URI}.
         */
        public URI getUri() {
            try {
                return ahcResponse.getUri();
            } catch (MalformedURLException e) {
                throw new RuntimeException(e);
            }
        }
    }

    /**
     * Sign a WS call.
     */
    public static interface SignatureCalculator {

        /**
         * Sign a request
         */
        public void sign(WSRequest request);
    }
}
/*
 * Copyright 2009-2017 Aconex
 *
 * Licensed under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */

package io.pcp.parfait.dropwizard.metricadapters;

import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.when;

import java.util.Map;

import com.codahale.metrics.Sampling;
import com.codahale.metrics.Snapshot;
import io.pcp.parfait.Monitorable;
import io.pcp.parfait.ValueSemantics;
import com.google.common.collect.Maps;
import org.hamcrest.Matchers;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;

/**
 * Unit tests for SamplingAdapter: verifies that each statistic exposed by a
 * Codahale {@link Snapshot} (min/max/mean/median/stddev/percentiles) is published
 * as a correctly-named, correctly-described Monitorable with the expected value
 * semantics, and that {@code updateMonitorables()} refreshes each value.
 */
@RunWith(MockitoJUnitRunner.class)
@SuppressWarnings("unchecked")
public class SamplingAdapterTest {

    private static final String NAME = "NAME";
    private static final String DESCRIPTION = "DESCRIPTION";
    // Suffixes of the per-statistic Monitorable names published by the adapter.
    private static final String MIN = "min";
    private static final String MAX = "max";
    private static final String MEAN = "mean";
    private static final String MEDIAN = "median";
    private static final String STDDEV = "stddev";
    private static final String SEVENTY_FIFTH = "seventyfifth";
    private static final String NINETY_FIFTH = "ninetyfifth";
    private static final String NINETY_EIGHTH = "ninetyeighth";
    private static final String NINETY_NINETH = "ninetynineth";
    private static final String THREE_NINES = "threenines";
    // Fixed: use uppercase 'L' long-literal suffix — lowercase 'l' is easily misread as '1'.
    // NOTE(review): INITIAL_MAX < INITIAL_MIN here; harmless for a mocked Snapshot,
    // but the values are intentionally arbitrary, not a realistic distribution.
    private static final long INITIAL_MAX = 11L;
    private static final long INITIAL_MIN = 22L;
    private static final Double INITIAL_MEDIAN = 12.34;
    private static final Double INITIAL_STDDEV = 56.78;
    private static final Double INITIAL_MEAN = 90.12;
    private static final Double INITIAL_75th = 75.75;
    private static final Double INITIAL_95th = 95.95;
    private static final Double INITIAL_98th = 98.98;
    private static final Double INITIAL_99th = 99.99;
    private static final Double INITIAL_THREE_NINES = 999.999;

    private SamplingAdapter adapter;

    @Mock
    private Sampling samplingMetric;
    @Mock
    private Snapshot snapshot;

    @Before
    public void setUp() {
        // Stub the mocked snapshot with known values for every statistic the adapter reads.
        when(samplingMetric.getSnapshot()).thenReturn(snapshot);
        when(snapshot.getMax()).thenReturn(INITIAL_MAX);
        when(snapshot.getMin()).thenReturn(INITIAL_MIN);
        when(snapshot.getMedian()).thenReturn(INITIAL_MEDIAN);
        when(snapshot.getStdDev()).thenReturn(INITIAL_STDDEV);
        when(snapshot.getMean()).thenReturn(INITIAL_MEAN);
        when(snapshot.get75thPercentile()).thenReturn(INITIAL_75th);
        when(snapshot.get95thPercentile()).thenReturn(INITIAL_95th);
        when(snapshot.get98thPercentile()).thenReturn(INITIAL_98th);
        when(snapshot.get99thPercentile()).thenReturn(INITIAL_99th);
        when(snapshot.get999thPercentile()).thenReturn(INITIAL_THREE_NINES);
        adapter = new SamplingAdapter(samplingMetric, NAME, DESCRIPTION);
    }

    // Indexes the adapter's Monitorables by the last dotted segment of their name
    // (e.g. "NAME.min" -> key "min") for convenient lookup in assertions.
    private Map<String, Monitorable> extractMonitorables(SamplingAdapter adapter) {
        Map<String, Monitorable> monitorables = Maps.newHashMap();
        for (Monitorable monitorable : adapter.getMonitorables()) {
            final String name = monitorable.getName();
            monitorables.put(name.substring(name.lastIndexOf('.') + 1), monitorable);
        }
        return monitorables;
    }

    @Test
    public void shouldPublishMinMetric() {
        final Monitorable<Long> minMonitorable = extractMonitorables(adapter).get(MIN);
        assertThat(minMonitorable, notNullValue());
        assertThat(minMonitorable.getDescription(), is(DESCRIPTION + " - Minimum"));
        assertThat(minMonitorable.getSemantics(), is(ValueSemantics.FREE_RUNNING));
        assertThat(minMonitorable.get(), is(INITIAL_MIN));
    }

    @Test
    public void shouldPublishMaxMetric() {
        final Monitorable<Long> maxMonitorable = extractMonitorables(adapter).get(MAX);
        assertThat(maxMonitorable, notNullValue());
        assertThat(maxMonitorable.getDescription(), is(DESCRIPTION + " - Maximum"));
        assertThat(maxMonitorable.getSemantics(), is(ValueSemantics.MONOTONICALLY_INCREASING));
        assertThat(maxMonitorable.get(), is(INITIAL_MAX));
    }

    @Test
    public void shouldPublishMeanMetric() {
        final Monitorable<Double> meanMonitorable = extractMonitorables(adapter).get(MEAN);
        assertThat(meanMonitorable, notNullValue());
        assertThat(meanMonitorable.getDescription(), is(DESCRIPTION + " - Mean"));
        assertThat(meanMonitorable.getSemantics(), is(ValueSemantics.FREE_RUNNING));
        assertThat(meanMonitorable.get(), is(INITIAL_MEAN));
    }

    @Test
    public void shouldPublishMedianMetric() {
        final Monitorable<Double> medianMonitorable = extractMonitorables(adapter).get(MEDIAN);
        assertThat(medianMonitorable, notNullValue());
        assertThat(medianMonitorable.getDescription(), is(DESCRIPTION + " - Median"));
        assertThat(medianMonitorable.getSemantics(), is(ValueSemantics.FREE_RUNNING));
        assertThat(medianMonitorable.get(), is(INITIAL_MEDIAN));
    }

    @Test
    public void shouldPublishStdDevMetric() {
        final Monitorable<Double> standardDevMetric = extractMonitorables(adapter).get(STDDEV);
        assertThat(standardDevMetric, notNullValue());
        assertThat(standardDevMetric.getDescription(), is(DESCRIPTION + " - Standard Deviation"));
        assertThat(standardDevMetric.getSemantics(), is(ValueSemantics.FREE_RUNNING));
        assertThat(standardDevMetric.get(), is(INITIAL_STDDEV));
    }

    @Test
    public void shouldPublish75thPercentileMetric() {
        final Monitorable<Double> seventyFifthMetric = extractMonitorables(adapter).get(SEVENTY_FIFTH);
        assertThat(seventyFifthMetric, notNullValue());
        assertThat(seventyFifthMetric.getDescription(), is(DESCRIPTION + " - 75th Percentile of recent data"));
        assertThat(seventyFifthMetric.getSemantics(), is(ValueSemantics.FREE_RUNNING));
        assertThat(seventyFifthMetric.get(), is(INITIAL_75th));
    }

    @Test
    public void shouldPublish95thPercentileMetric() {
        final Monitorable<Double> ninetyFifthMetric = extractMonitorables(adapter).get(NINETY_FIFTH);
        assertThat(ninetyFifthMetric, notNullValue());
        assertThat(ninetyFifthMetric.getDescription(), is(DESCRIPTION + " - 95th Percentile of recent data"));
        assertThat(ninetyFifthMetric.getSemantics(), is(ValueSemantics.FREE_RUNNING));
        assertThat(ninetyFifthMetric.get(), is(INITIAL_95th));
    }

    @Test
    public void shouldPublish98thPercentileMetric() {
        final Monitorable<Double> ninetyEigthMetric = extractMonitorables(adapter).get(NINETY_EIGHTH);
        assertThat(ninetyEigthMetric, notNullValue());
        assertThat(ninetyEigthMetric.getDescription(), is(DESCRIPTION + " - 98th Percentile of recent data"));
        assertThat(ninetyEigthMetric.getSemantics(), is(ValueSemantics.FREE_RUNNING));
        assertThat(ninetyEigthMetric.get(), is(INITIAL_98th));
    }

    @Test
    public void shouldPublish99thPercentileMetric() {
        final Monitorable<Double> ninetyNinthMetric = extractMonitorables(adapter).get(NINETY_NINETH);
        assertThat(ninetyNinthMetric, notNullValue());
        assertThat(ninetyNinthMetric.getDescription(), is(DESCRIPTION + " - 99th Percentile of recent data"));
        assertThat(ninetyNinthMetric.getSemantics(), is(ValueSemantics.FREE_RUNNING));
        assertThat(ninetyNinthMetric.get(), is(INITIAL_99th));
    }

    @Test
    public void shouldPublish999thPercentileMetric() {
        final Monitorable<Double> threeNinesMetric = extractMonitorables(adapter).get(THREE_NINES);
        assertThat(threeNinesMetric, notNullValue());
        assertThat(threeNinesMetric.getDescription(), is(DESCRIPTION + " - 99.9th Percentile of recent data"));
        assertThat(threeNinesMetric.getSemantics(), is(ValueSemantics.FREE_RUNNING));
        assertThat(threeNinesMetric.get(), is(INITIAL_THREE_NINES));
    }

    @Test
    public void shouldUpdateMinMetric() {
        long newMin = INITIAL_MIN - 5;
        when(snapshot.getMin()).thenReturn(newMin);
        adapter.updateMonitorables();
        assertThat(extractMonitorables(adapter).get(MIN).get(), Matchers.<Object>is(newMin));
    }

    @Test
    public void shouldUpdateMaxMetric() {
        long newMax = INITIAL_MAX + 5;
        when(snapshot.getMax()).thenReturn(newMax);
        adapter.updateMonitorables();
        assertThat(extractMonitorables(adapter).get(MAX).get(), Matchers.<Object>is(newMax));
    }

    @Test
    public void shouldUpdateMeanMetric() {
        double newMean = INITIAL_MEAN + 5;
        when(snapshot.getMean()).thenReturn(newMean);
        adapter.updateMonitorables();
        assertThat(extractMonitorables(adapter).get(MEAN).get(), Matchers.<Object>is(newMean));
    }

    @Test
    public void shouldUpdateMedianMetric() {
        double newMedian = INITIAL_MEDIAN + 5;
        when(snapshot.getMedian()).thenReturn(newMedian);
        adapter.updateMonitorables();
        assertThat(extractMonitorables(adapter).get(MEDIAN).get(), Matchers.<Object>is(newMedian));
    }

    @Test
    public void shouldUpdateStdDevMetric() {
        double newStdDev = INITIAL_STDDEV + 5;
        when(snapshot.getStdDev()).thenReturn(newStdDev);
        adapter.updateMonitorables();
        assertThat(extractMonitorables(adapter).get(STDDEV).get(), Matchers.<Object>is(newStdDev));
    }

    @Test
    public void shouldUpdate75thMetric() {
        double new75thMetric = INITIAL_75th + 5;
        when(snapshot.get75thPercentile()).thenReturn(new75thMetric);
        adapter.updateMonitorables();
        assertThat(extractMonitorables(adapter).get(SEVENTY_FIFTH).get(), Matchers.<Object>is(new75thMetric));
    }

    @Test
    public void shouldUpdate95thMetric() {
        double new95thMetric = INITIAL_95th + 5;
        when(snapshot.get95thPercentile()).thenReturn(new95thMetric);
        adapter.updateMonitorables();
        assertThat(extractMonitorables(adapter).get(NINETY_FIFTH).get(), Matchers.<Object>is(new95thMetric));
    }

    @Test
    public void shouldUpdate98thMetric() {
        double new98thMetric = INITIAL_98th + 5;
        when(snapshot.get98thPercentile()).thenReturn(new98thMetric);
        adapter.updateMonitorables();
        assertThat(extractMonitorables(adapter).get(NINETY_EIGHTH).get(), Matchers.<Object>is(new98thMetric));
    }

    @Test
    public void shouldUpdate99thMetric() {
        double new99thMetric = INITIAL_99th + 5;
        when(snapshot.get99thPercentile()).thenReturn(new99thMetric);
        adapter.updateMonitorables();
        assertThat(extractMonitorables(adapter).get(NINETY_NINETH).get(), Matchers.<Object>is(new99thMetric));
    }

    @Test
    public void shouldUpdate999thMetric() {
        double new999thMetric = INITIAL_THREE_NINES + 5;
        when(snapshot.get999thPercentile()).thenReturn(new999thMetric);
        adapter.updateMonitorables();
        assertThat(extractMonitorables(adapter).get(THREE_NINES).get(), Matchers.<Object>is(new999thMetric));
    }
}
/* * Zed Attack Proxy (ZAP) and its related class files. * * ZAP is an HTTP/HTTPS proxy for assessing web application security. * * Copyright 2015 The ZAP Development Team * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.zaproxy.zap.extension.pscanrulesAlpha; import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; import net.htmlparser.jericho.Source; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.parosproxy.paros.Constant; import org.parosproxy.paros.core.scanner.Alert; import org.parosproxy.paros.network.HttpHeader; import org.parosproxy.paros.network.HttpMessage; import org.parosproxy.paros.network.HttpRequestHeader; import org.zaproxy.addon.commonlib.CommonAlertTag; import org.zaproxy.addon.commonlib.http.HttpDateUtils; import org.zaproxy.zap.extension.pscan.PluginPassiveScanner; /** * Detect "storable" and "cacheable" reponses. "Storable" implies that the response can be stored in * some manner by the caching server, even if it is not served in response to any requests. * "Cacheable" responses are responses that are served by the caching server in response to some * request. 
Unlike "CacheControlScanner", this rule does not attempt to determine if the various * cache settings are "incorrectly" set (since that depends on the response contents, and on the * context), but instead, looks at the conditions defined in rfc7234 to determine if a given request * and response are storable by rfc7234 compliant cache servers, and subsequently retrievable from * the cache (ie, "cacheable"): * * <p>A cache MUST NOT store a response to any request, unless: o The request method is understood * by the cache and defined as being cacheable, and o the response status code is understood by the * cache, and o the "no-store" cache directive (see Section 5.2) does not appear in request or * response header fields, and o the "private" response directive (see Section 5.2.2.6) does not * appear in the response, if the cache is shared, and o the Authorization header field (see Section * 4.2 of [RFC7235]) does not appear in the request, if the cache is shared, unless the response * explicitly allows it (see Section 3.2), and o the response either: * contains an Expires header * field (see Section 5.3), or * contains a max-age response directive (see Section 5.2.2.8), or * * contains a s-maxage response directive (see Section 5.2.2.9) and the cache is shared, or * * contains a Cache Control Extension (see Section 5.2.3) that allows it to be cached, or * has a * status code that is defined as cacheable by default (see Section 4.2.2), or * contains a public * response directive (see Section 5.2.2.5). Note that any of the requirements listed above can be * overridden by a cache-control extension; see Section 5.2.3. 
* * <p>When presented with a request, a cache MUST NOT reuse a stored response, unless: o The * presented effective request URI (Section 5.5 of [RFC7230]) and that of the stored response match, * and o the request method associated with the stored response allows it to be used for the * presented request, and o selecting header fields nominated by the stored response (if any) match * those presented (see Section 4.1), and o the presented request does not contain the no-cache * pragma (Section 5.4), nor the no-cache cache directive (Section 5.2.1), unless the stored * response is successfully validated (Section 4.3), and o the stored response does not contain the * no-cache cache directive (Section 5.2.2.2), unless it is successfully validated (Section 4.3), * and o the stored response is either: * fresh (see Section 4.2), or * allowed to be served stale * (see Section 4.2.4), or * successfully validated (see Section 4.3). Note that any of the * requirements listed above can be overridden by a cache-control extension; see Section 5.2.3. 
* * @author 70pointer@gmail.com */ public class CacheableScanRule extends PluginPassiveScanner { private static final String MESSAGE_PREFIX_STORABILITY_CACHEABILITY = "pscanalpha.storabilitycacheability."; private static final String MESSAGE_PREFIX_NONSTORABLE = "pscanalpha.nonstorable."; private static final String MESSAGE_PREFIX_STORABLE_NONCACHEABLE = "pscanalpha.storablenoncacheable."; private static final String MESSAGE_PREFIX_STORABLE_CACHEABLE = "pscanalpha.storablecacheable."; private static final long SECONDS_IN_YEAR = TimeUnit.SECONDS.convert(365, TimeUnit.DAYS); private static final int PLUGIN_ID = 10049; private static final Map<String, String> ALERT_TAGS = CommonAlertTag.toMap(CommonAlertTag.WSTG_V42_ATHN_06_CACHE_WEAKNESS); private static final Logger logger = LogManager.getLogger(CacheableScanRule.class); @Override public void scanHttpResponseReceive(HttpMessage msg, int id, Source source) { // TODO: standardise the logic in the case of duplicate / conflicting headers. try { logger.debug("Checking URL {} for storability", msg.getRequestHeader().getURI()); // storability: is the request method understood by the cache and defined as being // cacheable? String method = msg.getRequestHeader().getMethod(); String methodUpper = method.toUpperCase(); if (!(methodUpper.equals(HttpRequestHeader.GET) || methodUpper.equals(HttpRequestHeader.HEAD) || methodUpper.equals(HttpRequestHeader.POST))) { // non-cacheable method ==> non-storable logger.debug( "{} is not storable due to the use of the non-cacheable request method '{}'", msg.getRequestHeader().getURI(), method); alertNonStorable(msg, id, method + " "); return; } // is the response status code "understood" by the cache? // this is somewhat implementation specific, so lets assume that a cache "understands" // all 1XX, 2XX, 3XX, 4XX, and 5XX response classes for now. 
// this logic will allow us to detect if the response is storable by "some" compliant // caching server int responseClass = msg.getResponseHeader().getStatusCode() / 100; if ((responseClass != 1) && (responseClass != 2) && (responseClass != 3) && (responseClass != 4) && (responseClass != 5)) { logger.debug( "{} is not storable due to the use of a HTTP response class [{}] that we do not 'understand' (we 'understand' 1XX, 2XX, 3XX, 4XX, and 5XX response classes)", msg.getRequestHeader().getURI(), responseClass); alertNonStorable(msg, id, String.valueOf(msg.getResponseHeader().getStatusCode())); return; } // does the "no-store" cache directive appear in request or response header fields? // 1: check the Pragma request header (for HTTP 1.0 caches) // 2: check the Pragma response header (for HTTP 1.0 caches) // 3: check the Cache-Control request header (for HTTP 1.1 caches) // 4: check the Cache-Control response header (for HTTP 1.1 caches) List<String> headers = new ArrayList<>(); headers.addAll(msg.getRequestHeader().getHeaderValues(HttpHeader.PRAGMA)); headers.addAll(msg.getResponseHeader().getHeaderValues(HttpHeader.PRAGMA)); headers.addAll(msg.getRequestHeader().getHeaderValues(HttpHeader.CACHE_CONTROL)); headers.addAll(msg.getResponseHeader().getHeaderValues(HttpHeader.CACHE_CONTROL)); for (String directive : headers) { for (String directiveToken : directive.split(" ")) { // strip off any trailing comma if (directiveToken.endsWith(",")) directiveToken = directiveToken.substring(0, directiveToken.length() - 1); logger.trace("Looking for 'no-store' in [{}]", directiveToken); if (directiveToken.toLowerCase().equals("no-store")) { logger.debug( "{} is not storable due to the use of HTTP caching directive 'no-store' in the request or response", msg.getRequestHeader().getURI()); alertNonStorable(msg, id, directiveToken); return; } } } // does the "private" response directive appear in the response, if the cache is shared // check the Cache-Control response header only 
(for HTTP 1.1 caches) List<String> responseHeadersCacheControl = msg.getResponseHeader().getHeaderValues(HttpHeader.CACHE_CONTROL); if (!responseHeadersCacheControl.isEmpty()) { for (String directive : responseHeadersCacheControl) { for (String directiveToken : directive.split(" ")) { // strip off any trailing comma if (directiveToken.endsWith(",")) directiveToken = directiveToken.substring(0, directiveToken.length() - 1); logger.trace("Looking for 'private' in [{}]", directiveToken); if (directiveToken.toLowerCase().equals("private")) { logger.debug( "{} is not storable due to the use of HTTP caching directive 'private' in the response", msg.getRequestHeader().getURI()); alertNonStorable(msg, id, directiveToken); return; } } } } // does the Authorization header field appear in the request, if the cache is shared // (which we assume it is for now) // if so, does the response explicitly allow it to be cached? (see rfc7234 section 3.2) // Note: this logic defines if an initial request is storable. A second request for the // same URL // may or may not be actually served from the cache, depending on other criteria, such // as whether the cached response is // considered stale (based on the values of s-maxage and other values). This is in // accordance with rfc7234 section 3.2. 
List<String> authHeaders = msg.getRequestHeader().getHeaderValues(HttpHeader.AUTHORIZATION); if (!authHeaders.isEmpty()) { // there is an authorization header // look for "must-revalidate", "public", and "s-maxage", in the response, since // these permit // a request with an "Authorization" request header to be cached if (!responseHeadersCacheControl.isEmpty()) { boolean authorizedIsStorable = false; for (String directive : responseHeadersCacheControl) { for (String directiveToken : directive.split(" ")) { // strip off any trailing comma if (directiveToken.endsWith(",")) directiveToken = directiveToken.substring(0, directiveToken.length() - 1); logger.trace( "Looking for 'must-revalidate', 'public', 's-maxage' in [{}]", directiveToken); if ((directiveToken.toLowerCase().equals("must-revalidate")) || (directiveToken.toLowerCase().equals("public")) || (directiveToken.toLowerCase().startsWith("s-maxage="))) { authorizedIsStorable = true; break; } } } // is the request with an authorisation header allowed, based on the response // headers? 
if (!authorizedIsStorable) { logger.debug( "{} is not storable due to the use of the 'Authorisation' request header, without a compensatory 'must-revalidate', 'public', or 's-maxage' directive in the response", msg.getRequestHeader().getURI()); alertNonStorable(msg, id, HttpHeader.AUTHORIZATION + ":"); return; } } else { logger.debug( "{} is not storable due to the use of the 'Authorisation' request header, without a compensatory 'must-revalidate', 'public', or 's-maxage' directive in the response (no 'Cache-Control' directive was noted)", msg.getRequestHeader().getURI()); alertNonStorable(msg, id, HttpHeader.AUTHORIZATION + ":"); return; } } // in addition to the checks above, just one of the following needs to be true for the // response to be storable /* * the response * contains an Expires header field (see Section 5.3), or * contains a max-age response directive (see Section 5.2.2.8), or * contains a s-maxage response directive (see Section 5.2.2.9) and the cache is shared, or * contains a Cache Control Extension (see Section 5.2.3) that allows it to be cached, or * has a status code that is defined as cacheable by default (see Section 4.2.2), or * contains a public response directive (see Section 5.2.2.5). */ // TODO: replace "Expires" with some defined constant. Can't find one right now though. // Ho Hum. List<String> expires = msg.getResponseHeader().getHeaderValues("Expires"); if (!expires.isEmpty()) logger.debug( "{} *is* storable due to the basic checks, and the presence of the 'Expires' header in the response", msg.getRequestHeader().getURI()); // grab this for later. Not needed for "storability" checks. 
List<String> dates = msg.getResponseHeader().getHeaderValues("Date"); String maxAge = null, sMaxAge = null, publicDirective = null; if (!responseHeadersCacheControl.isEmpty()) { for (String directive : responseHeadersCacheControl) { for (String directiveToken : directive.split(" ")) { // strip off any trailing comma if (directiveToken.endsWith(",")) directiveToken = directiveToken.substring(0, directiveToken.length() - 1); logger.trace( "Looking for 'max-age', 's-maxage', 'public' in [{}]", directiveToken); if (directiveToken.toLowerCase().startsWith("max-age=")) { logger.debug( "{} *is* storable due to the basic checks, and the presence of the 'max-age' caching directive in the response", msg.getRequestHeader().getURI()); maxAge = directiveToken; } if (directiveToken .toLowerCase() .startsWith("s-maxage=")) { // for a shared cache.. logger.debug( "{} *is* storable due to the basic checks, and the presence of the 's-maxage' caching directive in the response", msg.getRequestHeader().getURI()); sMaxAge = directiveToken; } if (directiveToken.toLowerCase().equals("public")) { logger.debug( "{} *is* storable due to the basic checks, and the presence of the 'public' caching directive in the response", msg.getRequestHeader().getURI()); publicDirective = directiveToken; } } } } // TODO: implement checks here for known (implementation specific) Cache Control // Extensions that would // allow the response to be cached. 
// rfc7231 defines the following response codes as cacheable by default boolean statusCodeCacheable = false; int response = msg.getResponseHeader().getStatusCode(); if ((response == 200) || (response == 203) || (response == 204) || (response == 206) || (response == 300) || (response == 301) || (response == 404) || (response == 405) || (response == 410) || (response == 414) || (response == 501)) { statusCodeCacheable = true; logger.debug( "{} *is* storable due to the basic checks, and the presence of a cacheable response status code (200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501)", msg.getRequestHeader().getURI()); } if (expires.isEmpty() && maxAge == null && sMaxAge == null && statusCodeCacheable == false && publicDirective == null) { logger.debug( "{} is not storable due to the absence of any of an 'Expires' header, 'max-age' directive, 's-maxage' directive, 'public' directive, or cacheable response status code (200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501) in the response", msg.getRequestHeader().getURI()); // we raise the alert with the status code as evidence, because all the other // conditions are "absent", rather "present" (ie, it is the only possible evidence // we can show in this case). alertNonStorable(msg, id, String.valueOf(response)); return; } // at this point, we *know* that the response is storable. // so check if the content is retrievable from the cache (i.e. 
"cacheable") /* * When presented with a request, a cache MUST NOT reuse a stored * response, unless: * o The presented effective request URI (Section 5.5 of [RFC7230]) and * that of the stored response match, and * o the request method associated with the stored response allows it * to be used for the presented request, and * o selecting header fields nominated by the stored response (if any) * match those presented (see Section 4.1), and * o the presented request does not contain the no-cache pragma * (Section 5.4), nor the no-cache cache directive (Section 5.2.1), * unless the stored response is successfully validated * (Section 4.3), and * o the stored response does not contain the no-cache cache directive * (Section 5.2.2.2), unless it is successfully validated * (Section 4.3), and * o the stored response is either: * * fresh (see Section 4.2), or * * allowed to be served stale (see Section 4.2.4), or * * successfully validated (see Section 4.3). * Note that any of the requirements listed above can be overridden by a * cache-control extension; see Section 5.2.3. */ // 1: we assume that the presented effective request URI matches that of the stored // response in the cache // 2: we assume that the presented request method is compatible with the request method // of the stored response // 3: we assume that the presented selecting header fields match the selecting header // fields nominated by the stored response (if any) // 4: we assume that the presented request does not contain the no-cache pragma, nor the // no-cache cache directive // check if the stored response does not contain the no-cache cache directive, unless it // is successfully validated // note: we cannot (passively or actively) check the re-validation process, and can only // assume that it will properly // respond with details of whether the cache server can serve the cached contents or // not. 
In any event, this decision is made by the origin // server, and is not at the discretion of the cache server, so we do not concern // ourselves with it here. headers = msg.getResponseHeader().getHeaderValues(HttpHeader.CACHE_CONTROL); if (!headers.isEmpty()) { for (String directive : headers) { for (String directiveToken : directive.split(" ")) { // strip off any trailing comma if (directiveToken.endsWith(",")) directiveToken = directiveToken.substring(0, directiveToken.length() - 1); logger.trace("Looking for 'no-cache' in [{}]", directiveToken); // Note: if the directive looked like "Cache-Control: no-cache #field-name" // (with the optional field name argument, with no comma separating them), // then the "no-cache" directive only applies to the field name (response // header) in question, and not the entire contents. // In this case, the remainder of the contents may be served without // validation. The logic below is consistent with this requirement. if (directiveToken.toLowerCase().equals("no-cache")) { logger.debug( "{} is not retrievable from the cache (cacheable) due to the use of the unqualified HTTP caching directive 'no-cache' in the response", msg.getRequestHeader().getURI()); alertStorableNonCacheable(msg, id, directiveToken); return; } } } } // is the stored response fresh? // Note that fresh = freshness lifetime > current age long lifetime = -1; boolean lifetimeFound = false; String freshEvidence = null; String otherInfo = null; // 1: calculate the freshness lifetime of the request, using the following checks, with // the following priority, as specified by rfc7234. 
// 1a:Get the "s-maxage" response directive value (if duplicates exist, the values are // invalid) if (!responseHeadersCacheControl.isEmpty()) { int lifetimesFound = 0; for (String directive : responseHeadersCacheControl) { for (String directiveToken : directive.split(" ")) { // strip off any trailing comma if (directiveToken.endsWith(",")) directiveToken = directiveToken.substring(0, directiveToken.length() - 1); logger.trace("Looking for 's-maxage' in [{}]", directiveToken); if (directiveToken.toLowerCase().startsWith("s-maxage=")) { logger.debug( "{} has a caching lifetime defined by an HTTP caching directive 's-maxage' ", msg.getRequestHeader().getURI()); lifetimeFound = true; lifetimesFound++; // get the portion of the string after "s-maxage=" lifetime = Long.parseLong(directiveToken.substring("s-maxage=".length())); freshEvidence = directiveToken; } } } // if duplicates exist, the values are invalid. as per rfc7234. if (lifetimesFound > 1) { lifetimeFound = false; lifetime = -1; freshEvidence = null; logger.debug( "{} had multiple caching lifetimes defined by an HTTP caching directive 's-maxage'. 
Invalidating all of these!", msg.getRequestHeader().getURI()); } } // 1b:Get the "max-age" response directive value (if duplicates exist, the values are // invalid) if (!lifetimeFound) { if (!responseHeadersCacheControl.isEmpty()) { int lifetimesFound = 0; for (String directive : responseHeadersCacheControl) { for (String directiveToken : directive.split(" ")) { // strip off any trailing comma if (directiveToken.endsWith(",")) directiveToken = directiveToken.substring(0, directiveToken.length() - 1); logger.trace("Looking for 'max-age' in [{}]", directiveToken); if (directiveToken.toLowerCase().startsWith("max-age=")) { logger.debug( "{} has a caching lifetime defined by an HTTP caching directive 'max-age' ", msg.getRequestHeader().getURI()); lifetimeFound = true; lifetimesFound++; // get the portion of the string after "maxage=" // Split on comma and use 0th item in case there weren't spaces: // Cache-Control: max-age=7776000,private try { lifetime = Long.parseLong( directiveToken.split(",")[0].substring( "max-age=".length())); } catch (NumberFormatException nfe) { lifetimeFound = false; lifetimesFound--; logger.debug( "Could not parse max-age to establish lifetime. Perhaps the value exceeds Long.MAX_VALUE or contains non-number characters:{}", directiveToken); } freshEvidence = directiveToken; } } } // if duplicates exist, the values are invalid. as per rfc7234. if (lifetimesFound > 1) { lifetimeFound = false; lifetime = -1; freshEvidence = null; logger.debug( "{} had multiple caching lifetimes defined by an HTTP caching directive 'max-age'. Invalidating all of these!", msg.getRequestHeader().getURI()); } } } // 1c: Get the "Expires" response header value - "Date" response header field. 
("Date" // is optional if the origin has no clock, or returned a 1XX or 5XX response, else // mandatory) if (!lifetimeFound) { String expiresHeader = null; String dateHeader = null; if (!expires.isEmpty()) { // Expires can be absent, or take the form of "Thu, 27 Nov 2014 12:21:57 GMT", // "-1", "0", etc. // Invalid dates are treated as "expired" int expiresHeadersFound = 0; for (String directive : expires) { logger.debug( "{} has a caching lifetime expiry defined by an HTTP response header 'Expires'", msg.getRequestHeader().getURI()); expiresHeadersFound++; expiresHeader = directive; freshEvidence = directive; } // if duplicates exist, the values are invalid. as per rfc7234. if (expiresHeadersFound > 1) { expiresHeader = null; logger.debug( "{} had multiple caching lifetime expirys defined by an HTTP response header 'Expires'. Invalidating all of these!", msg.getRequestHeader().getURI()); } else { // we now have a single "expiry". // Now it is time to get the "date" for the request, so we can subtract the // "date" from the "expiry" to get the "lifetime". if (!dates.isEmpty()) { int dateHeadersFound = 0; for (String directive : dates) { logger.debug( "{} has a caching lifetime date defined by an HTTP response header 'Date'", msg.getRequestHeader().getURI()); dateHeadersFound++; dateHeader = directive; } // if duplicates exist, the values are invalid. as per rfc7234. if (dateHeadersFound > 1) { dateHeader = null; logger.debug( "{} had multiple caching lifetime dates defined by an HTTP response header 'Date'. Invalidating all of these!", msg.getRequestHeader().getURI()); } else { // we have one expiry, and one date. Yippee.. Are they valid tough?? // both dates can be invalid, or have one of 3 formats, all of which // MUST be supported! 
Date expiresDate = parseDate(expiresHeader); if (expiresDate != null) { Date dateDate = parseDate(dateHeader); if (dateDate != null) { // calculate the lifetime = Expires - Date lifetimeFound = true; lifetime = (expiresDate.getTime() - dateDate.getTime()) / 1000; // there is multiple parts to the evidence in this case (the // Expiry, and the Date, but lets show the Expiry) freshEvidence = expiresHeader; logger.debug( "{} had an 'Expires' date and a 'Date' date, which were used to calculate the lifetime of the request", msg.getRequestHeader().getURI()); } else { // the "Date" date is not valid. Treat it as "expired" logger.debug( "{} had an invalid caching lifetime date defined by an HTTP response header 'Date'. Ignoring the 'Expires' header for the purposes of lifetime calculation.", msg.getRequestHeader().getURI()); lifetime = -1; } } else { // the expires date is not valid. Treat it as "expired" // (will not result in a "cacheable" alert, so the evidence is // not needed, in fact logger.debug( "{} had an invalid caching lifetime expiry date defined by an HTTP response header 'Expiry'. Assuming an historic/ expired lifetime.", msg.getRequestHeader().getURI()); lifetimeFound = true; lifetime = 0; freshEvidence = expiresHeader; } } } else { // "Dates" is not defined. Nothing to do! logger.debug( "{} has a caching lifetime expiry defined by an HTTP response header 'Expires', but no 'Date' header to subtract from it", msg.getRequestHeader().getURI()); } } } else { // "Expires" is not defined. Nothing to do! logger.debug( "{} has no caching lifetime expiry defined by an HTTP response header 'Expires'", msg.getRequestHeader().getURI()); } } // 1d: Use a heuristic to determine a "plausible" expiration time. This is // implementation specific, and the implementation is permitted to be liberal. 
// for the purposes of this exercise, lets assume the cache chooses a "plausible" // expiration of 1 year (expressed in seconds) if (!lifetimeFound) { logger.debug( "{} has no caching lifetime expiry of any form, so assuming that it is set 'heuristically' to 1 year (as a form of worst case)", msg.getRequestHeader().getURI()); lifetimeFound = true; lifetime = SECONDS_IN_YEAR; // a liberal heuristic was assumed, for which no actual evidence exists freshEvidence = null; otherInfo = Constant.messages.getString( MESSAGE_PREFIX_STORABLE_CACHEABLE + "otherinfo.liberallifetimeheuristic"); } logger.debug( "{} has a caching lifetime of {}", msg.getRequestHeader().getURI(), lifetime); // 2: calculate the current age of the request // Note that since we are not necessarily testing via a cache, the "Age" header may // not be set (this is set by the caching server, not by the web server) // so we can only possibly get the "apparent_age", and not the "corrected_age_value" // documented in rfc7234. // In any event, this is not an issue, because in the worst case, the user could be // sending the first request for a given URL, placing // the response in the cache, with an age approaching 0 (depending on network delay). // By this logic, let's not even try to check the "apparent_age" (since it depends on // our network, and could be completely different for other users) // and let's assume that in at least some cases, the "age" can be 0 (the most extreme // case, from the point of view of "freshness"). // so "freshness" depends purely on the defined lifetime, in practice. long age = 0; // so after all that, is the response fresh or not? if (lifetime > age) { // fresh, so it can be retrieved from the cache logger.debug( "{} is retrievable from the cache (cacheable), since it is fresh", msg.getRequestHeader().getURI()); alertStorableCacheable(msg, id, freshEvidence, otherInfo); return; } else { // stale! // is the stored response allowed to be served stale? 
// if the following are not present, the response *can* be served stale.. // Note: this area of the RFC is vague at best (and somewhat contradictory), so this // area may need to be reviewed once the RFC has been updated // (the version used is rfc7234 from June 2014) /* "must-revalidate" - OK (fairly explicit) "proxy-revalidate" - OK (fairly explicit) "s-maxage" - see rfc7234, section 3.2 "max-age" - inferred, based on the case for "s-maxage" */ boolean staleRetrieveAllowed = true; String doNotRetrieveStaleEvidence = null; if (!responseHeadersCacheControl.isEmpty()) { for (String directive : responseHeadersCacheControl) { for (String directiveToken : directive.split(" ")) { // strip off any trailing comma if (directiveToken.endsWith(",")) directiveToken = directiveToken.substring(0, directiveToken.length() - 1); logger.trace( "Looking for 'must-revalidate', 'proxy-revalidate', 's-maxage', 'max-age' in [{}]", directiveToken); if ((directiveToken.toLowerCase().equals("must-revalidate")) || (directiveToken.toLowerCase().equals("proxy-revalidate")) || (directiveToken.toLowerCase().startsWith("s-maxage=")) || (directiveToken.toLowerCase().startsWith("max-age="))) { staleRetrieveAllowed = false; doNotRetrieveStaleEvidence = directiveToken; break; } } } } // TODO: check for any known Cache Control Extensions here, before making a final // call on the retrievability of the cached data. 
if (staleRetrieveAllowed) { // no directives were configured to prevent stale responses being retrieved // (without validation) alertStorableCacheable( msg, id, "", Constant.messages.getString( MESSAGE_PREFIX_STORABLE_CACHEABLE + "otherinfo.staleretrievenotblocked")); } else { // the directives do not allow stale responses to be retrieved // we saw just one other scenario where this could happen: where the response // was cached, but the "no-cache" response directive was specified alertStorableNonCacheable(msg, id, doNotRetrieveStaleEvidence); } } } catch (Exception e) { logger.error( "An error occurred while checking a URI [{}] for cacheability", msg.getRequestHeader().getURI(), e); } } private static Date parseDate(String dateStr) { ZonedDateTime dateTime = HttpDateUtils.parse(dateStr); if (dateTime != null) { return new Date(dateTime.toInstant().toEpochMilli()); } return null; } @Override public int getPluginId() { return PLUGIN_ID; } @Override public String getName() { return Constant.messages.getString(MESSAGE_PREFIX_STORABILITY_CACHEABILITY + "name"); } @Override public Map<String, String> getAlertTags() { return ALERT_TAGS; } /** * raise an alert for a non-storable response * * @param msg * @param id * @param evidence */ public void alertNonStorable(HttpMessage msg, int id, String evidence) { newAlert() .setName(Constant.messages.getString(MESSAGE_PREFIX_NONSTORABLE + "name")) .setRisk(Alert.RISK_INFO) .setConfidence(Alert.CONFIDENCE_MEDIUM) .setDescription(Constant.messages.getString(MESSAGE_PREFIX_NONSTORABLE + "desc")) .setSolution(Constant.messages.getString(MESSAGE_PREFIX_NONSTORABLE + "soln")) .setReference(Constant.messages.getString(MESSAGE_PREFIX_NONSTORABLE + "refs")) .setEvidence(evidence) .setCweId(524) // CWE-524: Information Exposure Through Caching .setWascId(13) // WASC-13: Information Leakage .raise(); } /** * raise an alert for a storable but non-cacheable response * * @param msg * @param id * @param evidence */ public void 
alertStorableNonCacheable(HttpMessage msg, int id, String evidence) { newAlert() .setName(Constant.messages.getString(MESSAGE_PREFIX_STORABLE_NONCACHEABLE + "name")) .setRisk(Alert.RISK_INFO) .setConfidence(Alert.CONFIDENCE_MEDIUM) .setDescription( Constant.messages.getString(MESSAGE_PREFIX_STORABLE_NONCACHEABLE + "desc")) .setSolution( Constant.messages.getString(MESSAGE_PREFIX_STORABLE_NONCACHEABLE + "soln")) .setReference( Constant.messages.getString(MESSAGE_PREFIX_STORABLE_NONCACHEABLE + "refs")) .setEvidence(evidence) .setCweId(524) // CWE-524: Information Exposure Through Caching .setWascId(13) // WASC-13: Information Leakage .raise(); } /** * raise an alert for a storable and cacheable (retrievable from the cache) response * * @param msg * @param id * @param evidence * @param otherInfo */ public void alertStorableCacheable(HttpMessage msg, int id, String evidence, String otherInfo) { newAlert() .setName(Constant.messages.getString(MESSAGE_PREFIX_STORABLE_CACHEABLE + "name")) .setRisk(Alert.RISK_INFO) .setConfidence(Alert.CONFIDENCE_MEDIUM) .setDescription( Constant.messages.getString(MESSAGE_PREFIX_STORABLE_CACHEABLE + "desc")) .setOtherInfo(otherInfo) .setSolution( Constant.messages.getString(MESSAGE_PREFIX_STORABLE_CACHEABLE + "soln")) .setReference( Constant.messages.getString(MESSAGE_PREFIX_STORABLE_CACHEABLE + "refs")) .setEvidence(evidence) .setCweId(524) // CWE-524: Information Exposure Through Caching .setWascId(13) // WASC-13: Information Leakage .raise(); } }
/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. =======================================================================*/ // This class has been generated, DO NOT EDIT! package org.tensorflow.op.nn; import java.util.Arrays; import org.tensorflow.GraphOperation; import org.tensorflow.Operand; import org.tensorflow.Operation; import org.tensorflow.OperationBuilder; import org.tensorflow.Output; import org.tensorflow.op.RawOp; import org.tensorflow.op.RawOpInputs; import org.tensorflow.op.Scope; import org.tensorflow.op.annotation.Endpoint; import org.tensorflow.op.annotation.OpInputsMetadata; import org.tensorflow.op.annotation.OpMetadata; import org.tensorflow.op.annotation.Operator; import org.tensorflow.proto.framework.DataType; import org.tensorflow.types.TFloat32; import org.tensorflow.types.family.TNumber; /** * Quantized Instance normalization. 
 *
 * @param <T> data type for {@code y} output
 */
@OpMetadata(
    opType = QuantizedInstanceNorm.OP_NAME,
    inputsClass = QuantizedInstanceNorm.Inputs.class
)
@Operator(
    group = "nn"
)
public final class QuantizedInstanceNorm<T extends TNumber> extends RawOp {
  /**
   * The name of this op, as known by TensorFlow core engine
   */
  public static final String OP_NAME = "QuantizedInstanceNorm";

  private Output<T> y;

  private Output<TFloat32> yMin;

  private Output<TFloat32> yMax;

  /** Wraps an already-built {@code QuantizedInstanceNorm} operation and binds its outputs. */
  public QuantizedInstanceNorm(Operation operation) {
    super(operation, OP_NAME);
    int outputIdx = 0;
    y = operation.output(outputIdx++);
    yMin = operation.output(outputIdx++);
    yMax = operation.output(outputIdx++);
  }

  /**
   * Factory method to create a class wrapping a new QuantizedInstanceNorm operation.
   *
   * @param scope current scope
   * @param x A 4D input Tensor.
   * @param xMin The value represented by the lowest quantized input.
   * @param xMax The value represented by the highest quantized input.
   * @param options carries optional attribute values
   * @param <T> data type for {@code QuantizedInstanceNorm} output and operands
   * @return a new instance of QuantizedInstanceNorm
   */
  @Endpoint(
      describeByClass = true
  )
  public static <T extends TNumber> QuantizedInstanceNorm<T> create(Scope scope, Operand<T> x,
      Operand<TFloat32> xMin, Operand<TFloat32> xMax, Options... options) {
    OperationBuilder opBuilder = scope.opBuilder(OP_NAME, "QuantizedInstanceNorm");
    opBuilder.addInput(x.asOutput());
    opBuilder.addInput(xMin.asOutput());
    opBuilder.addInput(xMax.asOutput());
    if (options != null) {
      for (Options opts : options) {
        if (opts.outputRangeGiven != null) {
          opBuilder.setAttr("output_range_given", opts.outputRangeGiven);
        }
        if (opts.givenYMin != null) {
          opBuilder.setAttr("given_y_min", opts.givenYMin);
        }
        if (opts.givenYMax != null) {
          opBuilder.setAttr("given_y_max", opts.givenYMax);
        }
        if (opts.varianceEpsilon != null) {
          opBuilder.setAttr("variance_epsilon", opts.varianceEpsilon);
        }
        if (opts.minSeparation != null) {
          opBuilder.setAttr("min_separation", opts.minSeparation);
        }
      }
    }
    return new QuantizedInstanceNorm<>(opBuilder.build());
  }

  /**
   * Sets the outputRangeGiven option.
   *
   * @param outputRangeGiven If True, {@code given_y_min} and {@code given_y_max} are used as the
   * output range. Otherwise, the implementation computes the output range.
   * @return this Options instance.
   */
  public static Options outputRangeGiven(Boolean outputRangeGiven) {
    return new Options().outputRangeGiven(outputRangeGiven);
  }

  /**
   * Sets the givenYMin option.
   *
   * @param givenYMin Output in {@code y_min} if {@code output_range_given} is True.
   * @return this Options instance.
   */
  public static Options givenYMin(Float givenYMin) {
    return new Options().givenYMin(givenYMin);
  }

  /**
   * Sets the givenYMax option.
   *
   * @param givenYMax Output in {@code y_max} if {@code output_range_given} is True.
   * @return this Options instance.
   */
  public static Options givenYMax(Float givenYMax) {
    return new Options().givenYMax(givenYMax);
  }

  /**
   * Sets the varianceEpsilon option.
   *
   * @param varianceEpsilon A small float number to avoid dividing by 0.
   * @return this Options instance.
   */
  public static Options varianceEpsilon(Float varianceEpsilon) {
    return new Options().varianceEpsilon(varianceEpsilon);
  }

  /**
   * Sets the minSeparation option.
   *
   * @param minSeparation Minimum value of {@code y_max - y_min}
   * @return this Options instance.
   */
  public static Options minSeparation(Float minSeparation) {
    return new Options().minSeparation(minSeparation);
  }

  /**
   * Gets y.
   * A 4D Tensor.
   * @return y.
   */
  public Output<T> y() {
    return y;
  }

  /**
   * Gets yMin.
   * The value represented by the lowest quantized output.
   * @return yMin.
   */
  public Output<TFloat32> yMin() {
    return yMin;
  }

  /**
   * Gets yMax.
   * The value represented by the highest quantized output.
   * @return yMax.
   */
  public Output<TFloat32> yMax() {
    return yMax;
  }

  /**
   * Optional attributes for {@link org.tensorflow.op.nn.QuantizedInstanceNorm}
   */
  public static class Options {
    private Boolean outputRangeGiven;

    private Float givenYMin;

    private Float givenYMax;

    private Float varianceEpsilon;

    private Float minSeparation;

    private Options() {
    }

    /**
     * Sets the outputRangeGiven option.
     *
     * @param outputRangeGiven If True, {@code given_y_min} and {@code given_y_max} are used as
     * the output range. Otherwise, the implementation computes the output range.
     * @return this Options instance.
     */
    public Options outputRangeGiven(Boolean outputRangeGiven) {
      this.outputRangeGiven = outputRangeGiven;
      return this;
    }

    /**
     * Sets the givenYMin option.
     *
     * @param givenYMin Output in {@code y_min} if {@code output_range_given} is True.
     * @return this Options instance.
     */
    public Options givenYMin(Float givenYMin) {
      this.givenYMin = givenYMin;
      return this;
    }

    /**
     * Sets the givenYMax option.
     *
     * @param givenYMax Output in {@code y_max} if {@code output_range_given} is True.
     * @return this Options instance.
     */
    public Options givenYMax(Float givenYMax) {
      this.givenYMax = givenYMax;
      return this;
    }

    /**
     * Sets the varianceEpsilon option.
     *
     * @param varianceEpsilon A small float number to avoid dividing by 0.
     * @return this Options instance.
     */
    public Options varianceEpsilon(Float varianceEpsilon) {
      this.varianceEpsilon = varianceEpsilon;
      return this;
    }

    /**
     * Sets the minSeparation option.
     *
     * @param minSeparation Minimum value of {@code y_max - y_min}
     * @return this Options instance.
     */
    public Options minSeparation(Float minSeparation) {
      this.minSeparation = minSeparation;
      return this;
    }
  }

  @OpInputsMetadata(
      outputsClass = QuantizedInstanceNorm.class
  )
  public static class Inputs<T extends TNumber> extends RawOpInputs<QuantizedInstanceNorm<T>> {
    /**
     * A 4D input Tensor.
     */
    public final Operand<T> x;

    /**
     * The value represented by the lowest quantized input.
     */
    public final Operand<TFloat32> xMin;

    /**
     * The value represented by the highest quantized input.
     */
    public final Operand<TFloat32> xMax;

    /**
     * The T attribute
     */
    public final DataType T;

    /**
     * If True, `given_y_min` and `given_y_max` are used as the output range. Otherwise,
     * the implementation computes the output range.
     */
    public final boolean outputRangeGiven;

    /**
     * Output in `y_min` if `output_range_given` is True.
     */
    public final float givenYMin;

    /**
     * Output in `y_max` if `output_range_given` is True.
     */
    public final float givenYMax;

    /**
     * A small float number to avoid dividing by 0.
     */
    public final float varianceEpsilon;

    /**
     * Minimum value of `y_max - y_min`
     */
    public final float minSeparation;

    /** Reads the op's inputs and attributes from an existing graph operation. */
    public Inputs(GraphOperation op) {
      super(new QuantizedInstanceNorm<>(op), op, Arrays.asList("T", "output_range_given", "given_y_min", "given_y_max", "variance_epsilon", "min_separation"));
      int inputIndex = 0;
      x = (Operand<T>) op.input(inputIndex++);
      xMin = (Operand<TFloat32>) op.input(inputIndex++);
      xMax = (Operand<TFloat32>) op.input(inputIndex++);
      T = op.attributes().getAttrType("T");
      outputRangeGiven = op.attributes().getAttrBool("output_range_given");
      givenYMin = op.attributes().getAttrFloat("given_y_min");
      givenYMax = op.attributes().getAttrFloat("given_y_max");
      varianceEpsilon = op.attributes().getAttrFloat("variance_epsilon");
      minSeparation = op.attributes().getAttrFloat("min_separation");
    }
  }
}
/*
 * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.opsworks.waiters;

import com.amazonaws.annotation.SdkInternalApi;
import com.amazonaws.waiters.WaiterAcceptor;
import com.amazonaws.waiters.WaiterState;
import com.amazonaws.waiters.AcceptorPathMatcher;
import com.amazonaws.services.opsworks.model.*;
import com.fasterxml.jackson.databind.JsonNode;
import com.amazonaws.jmespath.*;

import java.io.IOException;

import javax.annotation.Generated;

/**
 * Waiter acceptors for the "InstanceStopped" waiter: SUCCESS when every
 * instance in a {@link DescribeInstancesResult} reports status "stopped",
 * FAILURE as soon as any instance reports one of the non-stopping statuses
 * (booting, online, pending, rebooting, requested, running_setup,
 * setup_failed, start_failed, stop_failed).
 */
@SdkInternalApi
@Generated("com.amazonaws:aws-java-sdk-code-generator")
class InstanceStopped {

    /**
     * JMESPath expression {@code Instances[].Status}: projects the status of
     * every instance in the result. Identical for all matchers below, so it
     * is built once and shared.
     */
    private static final JmesPathExpression STATUS_AST = new JmesPathProjection(new JmesPathFlatten(new JmesPathField("Instances")),
            new JmesPathField("Status"));

    /**
     * Parses a hard-coded JSON literal into the expected-result node.
     *
     * @param json
     *        JSON text of the expected value, e.g. {@code "\"stopped\""}
     * @return the parsed node
     * @throws RuntimeException
     *         if parsing fails (cannot happen for the literals used here)
     */
    private static JsonNode parseExpected(String json) {
        try {
            return ObjectMapperSingleton.getObjectMapper().readTree(json);
        } catch (IOException ioe) {
            throw new RuntimeException(ioe);
        }
    }

    /**
     * Serializes the result to a JSON tree and evaluates the shared
     * {@code Instances[].Status} expression against it.
     *
     * @param result
     *        result of the DescribeInstances operation
     * @return the projected list of instance statuses
     */
    private static JsonNode statuses(DescribeInstancesResult result) {
        JsonNode queryNode = ObjectMapperSingleton.getObjectMapper().valueToTree(result);
        return STATUS_AST.accept(new JmesPathEvaluationVisitor(), queryNode);
    }

    /**
     * SUCCESS acceptor: matches when ALL instance statuses equal "stopped".
     */
    static class IsStoppedMatcher extends WaiterAcceptor<DescribeInstancesResult> {

        private static final JsonNode expectedResult = parseExpected("\"stopped\"");

        /**
         * Takes the result and determines whether the state of the resource matches the expected state. To determine
         * the current state of the resource, JmesPath expression is evaluated and compared against the expected result.
         *
         * @param result
         *        Corresponding result of the operation
         * @return True if current state of the resource matches the expected state, False otherwise
         */
        @Override
        public boolean matches(DescribeInstancesResult result) {
            return AcceptorPathMatcher.pathAll(expectedResult, statuses(result));
        }

        /**
         * Represents the current waiter state in the case where resource state matches the expected state
         *
         * @return Corresponding state of the waiter
         */
        @Override
        public WaiterState getState() {
            return WaiterState.SUCCESS;
        }
    }

    /**
     * Shared base for the FAILURE acceptors: matches when ANY instance status
     * equals the given status, and reports {@link WaiterState#FAILURE}.
     * Factored out because every failure matcher differed only in its
     * expected status literal.
     */
    private abstract static class FailureOnAnyStatusMatcher extends WaiterAcceptor<DescribeInstancesResult> {

        private final JsonNode expectedResult;

        FailureOnAnyStatusMatcher(String status) {
            this.expectedResult = parseExpected("\"" + status + "\"");
        }

        /**
         * Takes the result and determines whether the state of the resource matches the expected state. To determine
         * the current state of the resource, JmesPath expression is evaluated and compared against the expected result.
         *
         * @param result
         *        Corresponding result of the operation
         * @return True if current state of the resource matches the expected state, False otherwise
         */
        @Override
        public boolean matches(DescribeInstancesResult result) {
            return AcceptorPathMatcher.pathAny(expectedResult, statuses(result));
        }

        /**
         * Represents the current waiter state in the case where resource state matches the expected state
         *
         * @return Corresponding state of the waiter
         */
        @Override
        public WaiterState getState() {
            return WaiterState.FAILURE;
        }
    }

    /** FAILURE when any instance status is "booting". */
    static class IsBootingMatcher extends FailureOnAnyStatusMatcher {
        IsBootingMatcher() {
            super("booting");
        }
    }

    /** FAILURE when any instance status is "online". */
    static class IsOnlineMatcher extends FailureOnAnyStatusMatcher {
        IsOnlineMatcher() {
            super("online");
        }
    }

    /** FAILURE when any instance status is "pending". */
    static class IsPendingMatcher extends FailureOnAnyStatusMatcher {
        IsPendingMatcher() {
            super("pending");
        }
    }

    /** FAILURE when any instance status is "rebooting". */
    static class IsRebootingMatcher extends FailureOnAnyStatusMatcher {
        IsRebootingMatcher() {
            super("rebooting");
        }
    }

    /** FAILURE when any instance status is "requested". */
    static class IsRequestedMatcher extends FailureOnAnyStatusMatcher {
        IsRequestedMatcher() {
            super("requested");
        }
    }

    /** FAILURE when any instance status is "running_setup". */
    static class IsRunning_setupMatcher extends FailureOnAnyStatusMatcher {
        IsRunning_setupMatcher() {
            super("running_setup");
        }
    }

    /** FAILURE when any instance status is "setup_failed". */
    static class IsSetup_failedMatcher extends FailureOnAnyStatusMatcher {
        IsSetup_failedMatcher() {
            super("setup_failed");
        }
    }

    /** FAILURE when any instance status is "start_failed". */
    static class IsStart_failedMatcher extends FailureOnAnyStatusMatcher {
        IsStart_failedMatcher() {
            super("start_failed");
        }
    }

    /** FAILURE when any instance status is "stop_failed". */
    static class IsStop_failedMatcher extends FailureOnAnyStatusMatcher {
        IsStop_failedMatcher() {
            super("stop_failed");
        }
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.query;

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.FieldNamesFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;

import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.Objects;

/**
 * Constructs a query that only matches documents in which the given field has a value.
 * The field name may be a pattern; it is expanded against the index mappings at query time.
 */
public class ExistsQueryBuilder extends AbstractQueryBuilder<ExistsQueryBuilder> {
    // Query name registered in the query DSL, e.g. {"exists": {"field": "user"}}
    public static final String NAME = "exists";

    // The single supported DSL parameter: the field (or field pattern) to check.
    public static final ParseField FIELD_FIELD = new ParseField("field");

    private final String fieldName;

    /**
     * @param fieldName the field (or pattern) that must have a value for a document to match
     * @throws IllegalArgumentException if {@code fieldName} is null or empty
     */
    public ExistsQueryBuilder(String fieldName) {
        if (Strings.isEmpty(fieldName)) {
            throw new IllegalArgumentException("field name is null or empty");
        }
        this.fieldName = fieldName;
    }

    /**
     * Read from a stream.
     */
    public ExistsQueryBuilder(StreamInput in) throws IOException {
        super(in);
        fieldName = in.readString();
    }

    @Override
    protected void doWriteTo(StreamOutput out) throws IOException {
        // Superclass already serialized boost/query name; only the field name remains.
        out.writeString(fieldName);
    }

    /**
     * @return the field name that has to exist for this query to match
     */
    public String fieldName() {
        return this.fieldName;
    }

    @Override
    protected void doXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(NAME);
        builder.field(FIELD_FIELD.getPreferredName(), fieldName);
        printBoostAndQueryName(builder);
        builder.endObject();
    }

    /**
     * Parses the {@code exists} query DSL object into a builder.
     *
     * @throws ParsingException if an unsupported parameter or token is encountered,
     *         or if the mandatory {@code field} parameter is missing
     */
    public static ExistsQueryBuilder fromXContent(XContentParser parser) throws IOException {
        String fieldPattern = null;
        String queryName = null;
        float boost = AbstractQueryBuilder.DEFAULT_BOOST;

        XContentParser.Token token;
        String currentFieldName = null;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (token.isValue()) {
                if (FIELD_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                    fieldPattern = parser.text();
                } else if (AbstractQueryBuilder.NAME_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                    queryName = parser.text();
                } else if (AbstractQueryBuilder.BOOST_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                    boost = parser.floatValue();
                } else {
                    throw new ParsingException(parser.getTokenLocation(), "[" + ExistsQueryBuilder.NAME +
                            "] query does not support [" + currentFieldName + "]");
                }
            } else {
                throw new ParsingException(parser.getTokenLocation(), "[" + ExistsQueryBuilder.NAME +
                        "] unknown token [" + token + "] after [" + currentFieldName + "]");
            }
        }

        if (fieldPattern == null) {
            throw new ParsingException(parser.getTokenLocation(), "[" + ExistsQueryBuilder.NAME +
                    "] must be provided with a [field]");
        }
        ExistsQueryBuilder builder = new ExistsQueryBuilder(fieldPattern);
        builder.queryName(queryName);
        builder.boost(boost);
        return builder;
    }

    @Override
    protected Query doToQuery(QueryShardContext context) throws IOException {
        return newFilter(context, fieldName);
    }

    /**
     * Builds the Lucene query for the given field pattern. Expands the pattern
     * against the mappings, then delegates per field; indices created before
     * 6.1.0 fall back to {@code _field_names} term queries.
     */
    public static Query newFilter(QueryShardContext context, String fieldPattern) {

        final FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType =
                (FieldNamesFieldMapper.FieldNamesFieldType) context.getMapperService().fullName(FieldNamesFieldMapper.NAME);
        if (fieldNamesFieldType == null) {
            // can only happen when no types exist, so no docs exist either
            return Queries.newMatchNoDocsQuery("Missing types in \"" + NAME + "\" query.");
        }

        final Collection<String> fields;
        if (context.getObjectMapper(fieldPattern) != null) {
            // the _field_names field also indexes objects, so we don't have to
            // do any more work to support exists queries on whole objects
            fields = Collections.singleton(fieldPattern);
        } else {
            fields = context.simpleMatchToIndexNames(fieldPattern);
        }

        if (context.indexVersionCreated().before(Version.V_6_1_0)) {
            return newLegacyExistsQuery(context, fields);
        }

        if (fields.size() == 1) {
            String field = fields.iterator().next();
            return newFieldExistsQuery(context, field);
        }

        // Multiple matched fields: a document exists-matches if ANY of them has a value.
        BooleanQuery.Builder boolFilterBuilder = new BooleanQuery.Builder();
        for (String field : fields) {
            boolFilterBuilder.add(newFieldExistsQuery(context, field), BooleanClause.Occur.SHOULD);
        }
        return new ConstantScoreQuery(boolFilterBuilder.build());
    }

    // Pre-6.1.0 path: exists is answered via the _field_names meta field.
    private static Query newLegacyExistsQuery(QueryShardContext context, Collection<String> fields) {
        // We create TermsQuery directly here rather than using FieldNamesFieldType.termsQuery()
        // so we don't end up with deprecation warnings
        if (fields.size() == 1) {
            Query filter = newLegacyExistsQuery(context, fields.iterator().next());
            return new ConstantScoreQuery(filter);
        }

        BooleanQuery.Builder boolFilterBuilder = new BooleanQuery.Builder();
        for (String field : fields) {
            Query filter = newLegacyExistsQuery(context, field);
            boolFilterBuilder.add(filter, BooleanClause.Occur.SHOULD);
        }
        return new ConstantScoreQuery(boolFilterBuilder.build());
    }

    // Single-field legacy query: term lookup of the (mapped) field name in _field_names.
    private static Query newLegacyExistsQuery(QueryShardContext context, String field) {
        MappedFieldType fieldType = context.fieldMapper(field);
        String fieldName = fieldType != null ? fieldType.name() : field;
        return new TermQuery(new Term(FieldNamesFieldMapper.NAME, fieldName));
    }

    // Modern (>= 6.1.0) per-field exists query; delegates to the field type itself.
    private static Query newFieldExistsQuery(QueryShardContext context, String field) {
        MappedFieldType fieldType = context.getMapperService().fullName(field);
        if (fieldType == null) {
            // The field does not exist as a leaf but could be an object so
            // check for an object mapper
            if (context.getObjectMapper(field) != null) {
                return newObjectFieldExistsQuery(context, field);
            }
            return Queries.newMatchNoDocsQuery("No field \"" + field + "\" exists in mappings.");
        }
        Query filter = fieldType.existsQuery(context);
        return new ConstantScoreQuery(filter);
    }

    // An object "exists" if any of its sub-fields exists.
    private static Query newObjectFieldExistsQuery(QueryShardContext context, String objField) {
        BooleanQuery.Builder booleanQuery = new BooleanQuery.Builder();
        Collection<String> fields = context.simpleMatchToIndexNames(objField + ".*");
        for (String field : fields) {
            Query existsQuery = context.getMapperService().fullName(field).existsQuery(context);
            booleanQuery.add(existsQuery, Occur.SHOULD);
        }
        return new ConstantScoreQuery(booleanQuery.build());
    }

    @Override
    protected int doHashCode() {
        return Objects.hash(fieldName);
    }

    @Override
    protected boolean doEquals(ExistsQueryBuilder other) {
        return Objects.equals(fieldName, other.fieldName);
    }

    @Override
    public String getWriteableName() {
        return NAME;
    }
}
package com.itrustcambodia.push.task; import java.io.IOException; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import javapns.communication.exceptions.CommunicationException; import javapns.communication.exceptions.KeystoreException; import javapns.notification.Payload; import javapns.notification.PushNotificationPayload; import org.quartz.DisallowConcurrentExecution; import org.quartz.JobExecutionContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.dao.EmptyResultDataAccessException; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.core.simple.SimpleJdbcInsert; import com.google.android.gcm.server.Message; import com.google.android.gcm.server.MulticastResult; import com.google.android.gcm.server.Result; import com.google.android.gcm.server.Sender; import com.google.gson.Gson; import com.google.gson.JsonSyntaxException; import com.itrustcambodia.pluggable.core.AbstractWebApplication; import com.itrustcambodia.pluggable.database.EntityRowMapper; import com.itrustcambodia.pluggable.quartz.Job; import com.itrustcambodia.pluggable.quartz.Scheduled; import com.itrustcambodia.pluggable.utilities.TableUtilities; import com.itrustcambodia.push.entity.Application; import com.itrustcambodia.push.entity.City; import com.itrustcambodia.push.entity.Country; import com.itrustcambodia.push.entity.Device; import com.itrustcambodia.push.entity.History; import com.itrustcambodia.push.entity.Manufacture; import com.itrustcambodia.push.entity.Model; import com.itrustcambodia.push.entity.Platform; import com.itrustcambodia.push.entity.Queue; import com.itrustcambodia.push.entity.QueueDevice; import com.itrustcambodia.push.entity.Version; @DisallowConcurrentExecution @Scheduled(cron = "0/2 * * * * ?", description = "Push Job") public class Push extends Job { private static final Logger LOGGER = 
LoggerFactory.getLogger(Push.class); @Override public void process(AbstractWebApplication application, JobExecutionContext context) { JdbcTemplate jdbcTemplate = application.getJdbcTemplate(); Gson gson = application.getGson(); Queue queue = null; try { queue = jdbcTemplate.queryForObject("select * from " + TableUtilities.getTableName(Queue.class) + " where " + Queue.QUEUE_DATE + " <= now() " + " order by " + Queue.QUEUE_DATE + " asc limit 1", new EntityRowMapper<Queue>(Queue.class)); } catch (EmptyResultDataAccessException e) { } if (queue != null) { Map<Long, String> models = new HashMap<Long, String>(); for (Model model : jdbcTemplate.query("select * from " + TableUtilities.getTableName(Model.class), new EntityRowMapper<Model>(Model.class))) { models.put(model.getId(), model.getName()); } Map<Long, String> manufactures = new HashMap<Long, String>(); for (Manufacture manufacture : jdbcTemplate.query("select * from " + TableUtilities.getTableName(Manufacture.class), new EntityRowMapper<Manufacture>(Manufacture.class))) { manufactures.put(manufacture.getId(), manufacture.getName()); } Map<Long, String> versions = new HashMap<Long, String>(); for (Version version : jdbcTemplate.query("select * from " + TableUtilities.getTableName(Version.class), new EntityRowMapper<Version>(Version.class))) { versions.put(version.getId(), version.getName()); } Map<Long, String> platforms = new HashMap<Long, String>(); for (Platform platform : jdbcTemplate.query("select * from " + TableUtilities.getTableName(Platform.class), new EntityRowMapper<Platform>(Platform.class))) { platforms.put(platform.getId(), platform.getName()); } Map<Long, String> cities = new HashMap<Long, String>(); for (City city : jdbcTemplate.query("select * from " + TableUtilities.getTableName(City.class), new EntityRowMapper<City>(City.class))) { cities.put(city.getId(), city.getName()); } Map<Long, String> countries = new HashMap<Long, String>(); for (Country country : jdbcTemplate.query("select * from " + 
TableUtilities.getTableName(Country.class), new EntityRowMapper<Country>(Country.class))) { countries.put(country.getId(), country.getName()); } LOGGER.info("broadcast message : queue id '{}' message '{}'", queue.getId(), queue.getMessage()); SimpleJdbcInsert insert = new SimpleJdbcInsert(jdbcTemplate); insert.withTableName(TableUtilities.getTableName(History.class)); while (true) { try { Thread.sleep(3000); } catch (InterruptedException e1) { } List<Device> devices = jdbcTemplate.query("select device.* from " + TableUtilities.getTableName(Device.class) + " device inner join " + TableUtilities.getTableName(QueueDevice.class) + " push_queue_device on device." + Device.ID + " = push_queue_device." + QueueDevice.DEVICE_ID + " where push_queue_device." + QueueDevice.QUEUE_ID + " = ? and device." + Device.FLAG + " = ? limit 1000", new EntityRowMapper<Device>(Device.class), queue.getId(), Device.Flag.ACTIVE); if (devices == null || devices.isEmpty()) { jdbcTemplate.update("delete from " + TableUtilities.getTableName(Queue.class) + " where " + Queue.ID + " = ?", queue.getId()); break; } Map<Long, List<Device>> androids = new HashMap<Long, List<Device>>(); Map<Long, List<Device>> ioss = new HashMap<Long, List<Device>>(); List<Device> cancels = new ArrayList<Device>(); for (Device device : devices) { if (platforms.get(device.getPlatformId()).equals(Device.Platform.ANDROID)) { if (androids.get(device.getApplicationId()) == null) { androids.put(device.getApplicationId(), new ArrayList<Device>()); } androids.get(device.getApplicationId()).add(device); } else if (platforms.get(device.getPlatformId()).equals(Device.Platform.IOS)) { if (ioss.get(device.getApplicationId()) == null) { ioss.put(device.getApplicationId(), new ArrayList<Device>()); } ioss.get(device.getApplicationId()).add(device); } else { cancels.add(device); } } if (!cancels.isEmpty()) { for (Device device : cancels) { Map<String, Object> fields = new HashMap<String, Object>(); fields.put(History.IP, 
device.getIp()); fields.put(History.MANUFACTURE, manufactures.get(device.getManufactureId())); fields.put(History.MESSAGE, queue.getMessage()); fields.put(History.MODEL, models.get(device.getModelId())); fields.put(History.PLATFORM, platforms.get(device.getId())); fields.put(History.QUEUE_DATE, queue.getQueueDate()); fields.put(History.SENT_DATE, new Date()); fields.put(History.TOKEN, device.getToken()); fields.put(History.CITY, cities.get(device.getCityId())); fields.put(History.COUNTRY, countries.get(device.getCountryId())); fields.put(History.VERSION, versions.get(device.getVersionId())); fields.put(History.STATUS, History.Status.CANCEL); fields.put(History.USER_ID, queue.getUserId()); fields.put(History.APPLICATION, jdbcTemplate.queryForObject("select " + Application.NAME + " from " + TableUtilities.getTableName(Application.class) + " where " + Application.ID + " = ?", String.class, device.getApplicationId())); insert.execute(fields); jdbcTemplate.update("delete from " + TableUtilities.getTableName(QueueDevice.class) + " where " + QueueDevice.DEVICE_ID + " = ? 
and " + QueueDevice.QUEUE_ID + " = ?", device.getId(), queue.getId()); } } if (!ioss.isEmpty()) { for (Entry<Long, List<Device>> entry : ioss.entrySet()) { Long applicationId = entry.getKey(); String iOSPushCertificate = jdbcTemplate.queryForObject("select " + Application.IOS_PUSH_CERTIFICATE + " from " + TableUtilities.getTableName(Application.class) + " where " + Application.ID + " = ?", String.class, applicationId); String iOSPushCertificatePassword = jdbcTemplate.queryForObject("select " + Application.IOS_PUSH_CERTIFICATE_PASSWORD + " from " + TableUtilities.getTableName(Application.class) + " where " + Application.ID + " = ?", String.class, applicationId); List<String> tokens = new ArrayList<String>(entry.getValue().size()); for (Device device : entry.getValue()) { tokens.add(device.getToken()); } if (!tokens.isEmpty()) { try { Payload payload = PushNotificationPayload.fromJSON(queue.getMessage()); javapns.Push.payload(payload, gson.fromJson(iOSPushCertificate, byte[].class), iOSPushCertificatePassword, true, tokens); for (Device device : entry.getValue()) { Map<String, Object> fields = new HashMap<String, Object>(); fields.put(History.IP, device.getIp()); fields.put(History.MANUFACTURE, manufactures.get(device.getManufactureId())); fields.put(History.MESSAGE, queue.getMessage()); fields.put(History.MODEL, models.get(device.getModelId())); fields.put(History.PLATFORM, platforms.get(device.getPlatformId())); fields.put(History.QUEUE_DATE, queue.getQueueDate()); fields.put(History.SENT_DATE, new Date()); fields.put(History.CITY, cities.get(device.getCityId())); fields.put(History.COUNTRY, countries.get(device.getCountryId())); fields.put(History.TOKEN, device.getToken()); fields.put(History.VERSION, versions.get(device.getVersionId())); fields.put(History.USER_ID, queue.getUserId()); fields.put(History.APPLICATION, jdbcTemplate.queryForObject("select " + Application.NAME + " from " + TableUtilities.getTableName(Application.class) + " where " + Application.ID + " 
= ?", String.class, device.getApplicationId())); fields.put(History.STATUS, History.Status.SUCCESS); LOGGER.info("success {}", device.getToken()); insert.execute(fields); jdbcTemplate.update("delete from " + TableUtilities.getTableName(QueueDevice.class) + " where " + QueueDevice.DEVICE_ID + " = ? and " + QueueDevice.QUEUE_ID + " = ?", device.getId(), queue.getId()); } } catch (JsonSyntaxException e) { for (Device device : entry.getValue()) { Map<String, Object> fields = new HashMap<String, Object>(); fields.put(History.IP, device.getIp()); fields.put(History.MANUFACTURE, manufactures.get(device.getManufactureId())); fields.put(History.MESSAGE, queue.getMessage()); fields.put(History.MODEL, models.get(device.getModelId())); fields.put(History.PLATFORM, platforms.get(device.getPlatformId())); fields.put(History.QUEUE_DATE, queue.getQueueDate()); fields.put(History.SENT_DATE, new Date()); fields.put(History.CITY, cities.get(device.getCityId())); fields.put(History.COUNTRY, countries.get(device.getCountryId())); fields.put(History.TOKEN, device.getToken()); fields.put(History.VERSION, versions.get(device.getVersionId())); fields.put(History.USER_ID, queue.getUserId()); fields.put(History.APPLICATION, jdbcTemplate.queryForObject("select " + Application.NAME + " from " + TableUtilities.getTableName(Application.class) + " where " + Application.ID + " = ?", String.class, device.getApplicationId())); fields.put(History.STATUS, History.Status.ERROR); LOGGER.info("failed {}", device.getToken()); insert.execute(fields); jdbcTemplate.update("delete from " + TableUtilities.getTableName(QueueDevice.class) + " where " + QueueDevice.DEVICE_ID + " = ? 
and " + QueueDevice.QUEUE_ID + " = ?", device.getId(), queue.getId()); } LOGGER.info("message format problem {}", e.getMessage()); } catch (CommunicationException e) { LOGGER.info("communication link problem {}", e.getMessage()); } catch (KeystoreException e) { LOGGER.info("certificate invalid problem {}", e.getMessage()); for (Device device : entry.getValue()) { Map<String, Object> fields = new HashMap<String, Object>(); fields.put(History.IP, device.getIp()); fields.put(History.MANUFACTURE, manufactures.get(device.getManufactureId())); fields.put(History.MESSAGE, queue.getMessage()); fields.put(History.MODEL, models.get(device.getModelId())); fields.put(History.PLATFORM, platforms.get(device.getPlatformId())); fields.put(History.QUEUE_DATE, queue.getQueueDate()); fields.put(History.SENT_DATE, new Date()); fields.put(History.CITY, cities.get(device.getCityId())); fields.put(History.COUNTRY, countries.get(device.getCountryId())); fields.put(History.TOKEN, device.getToken()); fields.put(History.VERSION, versions.get(device.getVersionId())); fields.put(History.USER_ID, queue.getUserId()); fields.put(History.APPLICATION, jdbcTemplate.queryForObject("select " + Application.NAME + " from " + TableUtilities.getTableName(Application.class) + " where " + Application.ID + " = ?", String.class, device.getApplicationId())); fields.put(History.STATUS, History.Status.ERROR); LOGGER.info("failed {}", device.getToken()); insert.execute(fields); jdbcTemplate.update("delete from " + TableUtilities.getTableName(QueueDevice.class) + " where " + QueueDevice.DEVICE_ID + " = ? 
and " + QueueDevice.QUEUE_ID + " = ?", device.getId(), queue.getId()); } } } } } if (!androids.isEmpty()) { Message message = gson.fromJson(queue.getMessage(), Message.class); for (Entry<Long, List<Device>> entry : androids.entrySet()) { Long applicationId = entry.getKey(); String key = jdbcTemplate.queryForObject("select " + Application.ANDROID_API_KEY + " from " + TableUtilities.getTableName(Application.class) + " where " + Application.ID + " = ?", String.class, applicationId); List<String> tokens = new ArrayList<String>(entry.getValue().size()); for (Device device : entry.getValue()) { tokens.add(device.getToken()); } Sender sender = new Sender(key); MulticastResult results = null; try { results = sender.send(message, tokens, 3); } catch (IOException e) { LOGGER.info("push error {}", e.getMessage()); } if (results != null && results.getResults() != null && !results.getResults().isEmpty()) { for (int i = 0; i < results.getResults().size(); i++) { Result result = results.getResults().get(i); Device device = entry.getValue().get(i); Map<String, Object> fields = new HashMap<String, Object>(); fields.put(History.IP, device.getIp()); fields.put(History.MANUFACTURE, manufactures.get(device.getManufactureId())); fields.put(History.MESSAGE, queue.getMessage()); fields.put(History.MODEL, models.get(device.getModelId())); fields.put(History.PLATFORM, platforms.get(device.getPlatformId())); fields.put(History.QUEUE_DATE, queue.getQueueDate()); fields.put(History.SENT_DATE, new Date()); fields.put(History.CITY, cities.get(device.getCityId())); fields.put(History.COUNTRY, countries.get(device.getCountryId())); fields.put(History.TOKEN, device.getToken()); fields.put(History.VERSION, versions.get(device.getVersionId())); fields.put(History.USER_ID, queue.getUserId()); fields.put(History.APPLICATION, jdbcTemplate.queryForObject("select " + Application.NAME + " from " + TableUtilities.getTableName(Application.class) + " where " + Application.ID + " = ?", String.class, 
device.getApplicationId())); if (result.getMessageId() == null) { fields.put(History.STATUS, History.Status.ERROR); jdbcTemplate.update("update " + TableUtilities.getTableName(Device.class) + " set " + Device.FLAG + " = ? where " + Device.ID + " = ?", Device.Flag.DELETE, device.getId()); LOGGER.info("failed {}", device.getToken()); } else { fields.put(History.STATUS, History.Status.SUCCESS); LOGGER.info("success {}", device.getToken()); } insert.execute(fields); jdbcTemplate.update("delete from " + TableUtilities.getTableName(QueueDevice.class) + " where " + QueueDevice.DEVICE_ID + " = ? and " + QueueDevice.QUEUE_ID + " = ?", device.getId(), queue.getId()); } } } } } } } }
/* * Copyright (C) 2007-2008 Esmertec AG. Copyright (C) 2007-2008 The Android Open * Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package info.guardianproject.otr.app.im.service; import info.guardianproject.otr.app.im.IChatSessionManager; import info.guardianproject.otr.app.im.IConnectionListener; import info.guardianproject.otr.app.im.IContactListManager; import info.guardianproject.otr.app.im.IInvitationListener; import info.guardianproject.otr.app.im.app.ImApp; import info.guardianproject.otr.app.im.engine.ChatGroupManager; import info.guardianproject.otr.app.im.engine.ConnectionListener; import info.guardianproject.otr.app.im.engine.Contact; import info.guardianproject.otr.app.im.engine.ContactListManager; import info.guardianproject.otr.app.im.engine.ImConnection; import info.guardianproject.otr.app.im.engine.ImErrorInfo; import info.guardianproject.otr.app.im.engine.ImException; import info.guardianproject.otr.app.im.engine.Invitation; import info.guardianproject.otr.app.im.engine.InvitationListener; import info.guardianproject.otr.app.im.engine.Presence; import info.guardianproject.otr.app.im.provider.Imps; import info.guardianproject.util.Debug; import java.util.HashMap; import java.util.Map; import android.content.ContentResolver; import android.content.ContentUris; import android.content.ContentValues; import android.database.Cursor; import android.net.Uri; import android.os.RemoteCallbackList; import android.os.RemoteException; import 
android.util.Log;

/**
 * Remote-callable (Binder/AIDL Stub) wrapper around a single {@link ImConnection}
 * engine object. It mirrors the engine's connection state into
 * {@code mConnectionState}, persists/clears session cookies and account status
 * through the {@code Imps} content provider, and fans engine callbacks out to
 * registered {@link IConnectionListener}s via a {@link RemoteCallbackList}.
 */
public class ImConnectionAdapter extends info.guardianproject.otr.app.im.IImConnection.Stub {

    // Columns fetched when reading saved session cookies; the two COLUMN_*
    // constants below index into this projection.
    private static final String[] SESSION_COOKIE_PROJECTION = { Imps.SessionCookies.NAME,
                                                                Imps.SessionCookies.VALUE, };

    private static final int COLUMN_SESSION_COOKIE_NAME = 0;
    private static final int COLUMN_SESSION_COOKIE_VALUE = 1;

    // The wrapped engine connection (the "adaptee").
    ImConnection mConnection;
    private ConnectionListenerAdapter mConnectionListener;
    // Only non-null when the connection reports CAPABILITY_GROUP_CHAT.
    private InvitationListenerAdapter mInvitationListener;

    // Remote listeners interested in state/presence changes; RemoteCallbackList
    // handles dead-process cleanup for us.
    final RemoteCallbackList<IConnectionListener> mRemoteConnListeners = new RemoteCallbackList<IConnectionListener>();

    ChatSessionManagerAdapter mChatSessionManager;
    ContactListManagerAdapter mContactListManager;
    // Group-chat manager; null when the connection lacks group-chat capability.
    ChatGroupManager mGroupManager;
    RemoteImService mService;

    long mProviderId = -1;
    long mAccountId = -1;
    // Whether contact lists should be loaded automatically after login.
    boolean mAutoLoadContacts;
    // Locally tracked connection state, one of the ImConnection state constants.
    int mConnectionState = ImConnection.DISCONNECTED;

    /**
     * Wraps {@code connection} for the given provider/account, hooks up the
     * engine connection listener, and (if the engine supports group chat) the
     * invitation listener, then creates the chat-session and contact-list
     * manager adapters.
     */
    public ImConnectionAdapter(long providerId, long accountId, ImConnection connection,
                               RemoteImService service) {
        mProviderId = providerId;
        mAccountId = accountId;
        mConnection = connection;
        mService = service;
        mConnectionListener = new ConnectionListenerAdapter();
        mConnection.addConnectionListener(mConnectionListener);
        if ((connection.getCapability() & ImConnection.CAPABILITY_GROUP_CHAT) != 0) {
            mGroupManager = mConnection.getChatGroupManager();
            mInvitationListener = new InvitationListenerAdapter();
            mGroupManager.setInvitationListener(mInvitationListener);
        }
        mChatSessionManager = new ChatSessionManagerAdapter(this);
        mContactListManager = new ContactListManagerAdapter(this);
    }

    /** Returns the wrapped engine connection. */
    public ImConnection getAdaptee() {
        return mConnection;
    }

    /** Returns the owning service (used as an Android context by callers). */
    public RemoteImService getContext() {
        return mService;
    }

    public long getProviderId() {
        return mProviderId;
    }

    public long getAccountId() {
        return mAccountId;
    }

    public int[] getSupportedPresenceStatus() {
        return mConnection.getSupportedPresenceStatus();
    }

    /** Forwards a network-type change notification to the engine. */
    public void networkTypeChanged() {
        mConnection.networkTypeChanged();
    }

    /**
     * Attempts to re-establish a previous session from saved cookies.
     * Only does anything when the engine supports session re-establishment and
     * a cookie was previously saved; an invalid cookie is cleared.
     */
    void reestablishSession() {
        mConnectionState = ImConnection.LOGGING_IN;
        ContentResolver cr = mService.getContentResolver();
        if ((mConnection.getCapability() & ImConnection.CAPABILITY_SESSION_REESTABLISHMENT) != 0) {
            Map<String, String> cookie = querySessionCookie(cr);
            if (cookie != null) {
                RemoteImService.debug("re-establish session");
                try {
                    mConnection.reestablishSessionAsync(cookie);
                } catch (IllegalArgumentException e) {
                    RemoteImService.debug("Invalid session cookie, probably modified by others.");
                    clearSessionCookie(cr);
                }
            }
        }
    }

    /** Builds the per-provider/per-account content URI for session cookies. */
    private Uri getSessionCookiesUri() {
        Uri.Builder builder = Imps.SessionCookies.CONTENT_URI_SESSION_COOKIES_BY.buildUpon();
        ContentUris.appendId(builder, mProviderId);
        ContentUris.appendId(builder, mAccountId);
        return builder.build();
    }

    /**
     * Starts an asynchronous login, wrapped so unexpected exceptions are
     * reported via {@link Debug#wrapExceptions}.
     */
    public void login(final String passwordTemp, final boolean autoLoadContacts,
                      final boolean retry) {
        Debug.wrapExceptions(new Runnable() {
            @Override
            public void run() {
                do_login(passwordTemp, autoLoadContacts, retry);
            }
        });
    }

    /** Actual login work: records the auto-load flag and kicks off loginAsync. */
    public void do_login(String passwordTemp, boolean autoLoadContacts, boolean retry) {
        mAutoLoadContacts = autoLoadContacts;
        mConnectionState = ImConnection.LOGGING_IN;
        mConnection.loginAsync(mAccountId, passwordTemp, mProviderId, retry);
    }

    /**
     * Restores the presence state/status message persisted in provider settings
     * and pushes it to the engine. A stored state of -1 means "nothing saved".
     */
    private void loadSavedPresence () {
        ContentResolver cr = mService.getContentResolver();
        // Imps.ProviderSettings.setPresence(cr, mProviderId, status, statusText);
        int presenceState = Imps.ProviderSettings.getIntValue(cr, mProviderId,
                Imps.ProviderSettings.PRESENCE_STATE);
        String presenceStatusMessage = Imps.ProviderSettings.getStringValue(cr, mProviderId,
                Imps.ProviderSettings.PRESENCE_STATUS_MESSAGE);
        if (presenceState != -1) {
            Presence presence = new Presence();
            presence.setStatus(presenceState);
            presence.setStatusText(presenceStatusMessage);
            try {
                mConnection.updateUserPresenceAsync(presence);
            } catch (ImException e) {
                Log.e(ImApp.LOG_TAG,"unable able to update presence",e);
            }
        }
    }

    @Override
    public void sendHeartbeat() throws RemoteException {
        mConnection.sendHeartbeat(mService.getHeartbeatInterval());
    }

    @Override
    public void setProxy(String type, String host, int port) throws RemoteException {
        mConnection.setProxy(type, host, port);
    }

    /**
     * Reads the saved session cookie name/value pairs for this account.
     *
     * @return the cookie map, or null when the query fails or no rows exist.
     */
    private HashMap<String, String> querySessionCookie(ContentResolver cr) {
        Cursor c = cr.query(getSessionCookiesUri(), SESSION_COOKIE_PROJECTION, null, null, null);
        if (c == null) {
            return null;
        }
        HashMap<String, String> cookie = null;
        if (c.getCount() > 0) {
            cookie = new HashMap<String, String>();
            while (c.moveToNext()) {
                cookie.put(c.getString(COLUMN_SESSION_COOKIE_NAME),
                        c.getString(COLUMN_SESSION_COOKIE_VALUE));
            }
        }
        c.close();
        return cookie;
    }

    public void logout() {
        mConnectionState = ImConnection.LOGGING_OUT;
        mConnection.logout();
    }

    /**
     * Cancels an in-flight login. Synchronized so the state check and the
     * transition to LOGGING_OUT are atomic with respect to onStateChanged().
     */
    public synchronized void cancelLogin() {
        if (mConnectionState >= ImConnection.LOGGED_IN) {
            // too late
            return;
        }
        mConnectionState = ImConnection.LOGGING_OUT;
        mConnection.logout();
    }

    void suspend() {
        mConnectionState = ImConnection.SUSPENDING;
        mConnection.suspend();
    }

    public void registerConnectionListener(IConnectionListener listener) {
        if (listener != null) {
            mRemoteConnListeners.register(listener);
        }
    }

    public void unregisterConnectionListener(IConnectionListener listener) {
        if (listener != null) {
            mRemoteConnListeners.unregister(listener);
        }
    }

    /** No-op unless group chat is supported (mInvitationListener is null otherwise). */
    public void setInvitationListener(IInvitationListener listener) {
        if (mInvitationListener != null) {
            mInvitationListener.mRemoteListener = listener;
        }
    }

    public IChatSessionManager getChatSessionManager() {
        return mChatSessionManager;
    }

    public IContactListManager getContactListManager() {
        return mContactListManager;
    }

    public int getChatSessionCount() {
        if (mChatSessionManager == null) {
            return 0;
        }
        return mChatSessionManager.getChatSessionCount();
    }

    public Contact getLoginUser() {
        return mConnection.getLoginUser();
    }

    public Presence getUserPresence() {
        return mConnection.getUserPresence();
    }

    /**
     * Asks the engine to update the user's presence.
     *
     * @return ImErrorInfo.NO_ERROR on success, otherwise the engine error code.
     */
    public int updateUserPresence(Presence newPresence) {
        try {
            mConnection.updateUserPresenceAsync(newPresence);
        } catch (ImException e) {
            return e.getImError().getCode();
        }
        return ImErrorInfo.NO_ERROR;
    }

    public int getState() {
        return mConnectionState;
    }

    public void rejectInvitation(long id) {
        handleInvitation(id, false);
    }

    public void acceptInvitation(long id) {
        handleInvitation(id, true);
    }

    /**
     * Looks up the stored invitation row by id and accepts or rejects it via
     * the group manager. The DB status update is still a TODO (see below).
     */
    private void handleInvitation(long id, boolean accept) {
        if (mGroupManager == null) {
            return;
        }
        ContentResolver cr = mService.getContentResolver();
        Cursor c = cr.query(ContentUris.withAppendedId(Imps.Invitation.CONTENT_URI, id), null,
                null, null, null);
        if (c == null) {
            return;
        }
        if (c.moveToFirst()) {
            String inviteId = c.getString(c.getColumnIndexOrThrow(Imps.Invitation.INVITE_ID));
            int status;
            if (accept) {
                mGroupManager.acceptInvitationAsync(inviteId);
                status = Imps.Invitation.STATUS_ACCEPTED;
            } else {
                mGroupManager.rejectInvitationAsync(inviteId);
                status = Imps.Invitation.STATUS_REJECTED;
            }
            // TODO c.updateInt(c.getColumnIndexOrThrow(Imps.Invitation.STATUS), status);
            // c.commitUpdates();
        }
        c.close();
    }

    /** Persists the engine's current session context as cookie rows. */
    void saveSessionCookie(ContentResolver cr) {
        Map<String, String> cookies = mConnection.getSessionContext();
        int i = 0;
        ContentValues[] valuesList = new ContentValues[cookies.size()];
        for (Map.Entry<String, String> entry : cookies.entrySet()) {
            ContentValues values = new ContentValues(2);
            values.put(Imps.SessionCookies.NAME, entry.getKey());
            values.put(Imps.SessionCookies.VALUE, entry.getValue());
            valuesList[i++] = values;
        }
        cr.bulkInsert(getSessionCookiesUri(), valuesList);
    }

    /** Deletes all saved session cookies for this account. */
    void clearSessionCookie(ContentResolver cr) {
        cr.delete(getSessionCookiesUri(), null, null);
    }

    /**
     * Writes the current presence + connection status into the AccountStatus
     * table, converting engine state constants to DB constants first.
     */
    void updateAccountStatusInDb() {
        Presence p = getUserPresence();
        int presenceStatus = Imps.Presence.OFFLINE;
        int connectionStatus = convertConnStateForDb(mConnectionState);
        if (p != null) {
            presenceStatus = ContactListManagerAdapter.convertPresenceStatus(p);
        }
        ContentResolver cr = mService.getContentResolver();
        Uri uri = Imps.AccountStatus.CONTENT_URI;
        ContentValues values = new ContentValues();
        values.put(Imps.AccountStatus.ACCOUNT, mAccountId);
        values.put(Imps.AccountStatus.PRESENCE_STATUS, presenceStatus);
        values.put(Imps.AccountStatus.CONNECTION_STATUS, connectionStatus);
        cr.insert(uri, values);
    }

    /** Maps an ImConnection state constant to an Imps.ConnectionStatus DB value. */
    private static int convertConnStateForDb(int state) {
        switch (state) {
        case ImConnection.DISCONNECTED:
        case ImConnection.LOGGING_OUT:
            return Imps.ConnectionStatus.OFFLINE;
        case ImConnection.LOGGING_IN:
            return Imps.ConnectionStatus.CONNECTING;
        case ImConnection.LOGGED_IN:
            return Imps.ConnectionStatus.ONLINE;
        case ImConnection.SUSPENDED:
        case ImConnection.SUSPENDING:
            return Imps.ConnectionStatus.SUSPENDED;
        default:
            return Imps.ConnectionStatus.OFFLINE;
        }
    }

    /**
     * Engine-side listener: mirrors state changes into this adapter, persists
     * side effects (cookies, account status), and rebroadcasts to remote
     * listeners.
     */
    final class ConnectionListenerAdapter implements ConnectionListener {
        public void onStateChanged(final int state, final ImErrorInfo error) {
            synchronized (this) {
                if (state == ImConnection.LOGGED_IN
                    && mConnectionState == ImConnection.LOGGING_OUT) {
                    // A bit tricky here. The engine did login successfully
                    // but the notification comes a bit late; user has already
                    // issued a cancelLogin() and that cannot be undone. Here
                    // we have to ignore the LOGGED_IN event and wait for
                    // the upcoming DISCONNECTED.
                    return;
                }
                // DISCONNECTED is recorded later, after cleanup below.
                if (state != ImConnection.DISCONNECTED) {
                    mConnectionState = state;
                }
            }
            ContentResolver cr = mService.getContentResolver();
            if (state == ImConnection.LOGGED_IN) {
                if ((mConnection.getCapability() & ImConnection.CAPABILITY_SESSION_REESTABLISHMENT) != 0) {
                    saveSessionCookie(cr);
                }
                if (mAutoLoadContacts
                    && mContactListManager.getState() != ContactListManager.LISTS_LOADED) {
                    mContactListManager.loadContactLists();
                }
                // Flush any messages queued while we were offline.
                for (ChatSessionAdapter session : mChatSessionManager.mActiveChatSessionAdapters
                        .values()) {
                    session.sendPostponedMessages();
                }
                // mService.getStatusBarNotifier().notifyLoggedIn(mProviderId, mAccountId);
                loadSavedPresence();
            } else if (state == ImConnection.DISCONNECTED) {
                clearSessionCookie(cr);
                // mContactListManager might still be null if we fail
                // immediately in loginAsync (say, an invalid host URL)
                if (mContactListManager != null) {
                    mContactListManager.clearOnLogout();
                }
                mConnectionState = state;
            } else if (state == ImConnection.SUSPENDED && error != null) {
                // re-establish failed, schedule to retry
                mService.scheduleReconnect(5000);
            }
            updateAccountStatusInDb();
            final int N = mRemoteConnListeners.beginBroadcast();
            for (int i = 0; i < N; i++) {
                IConnectionListener listener = mRemoteConnListeners.getBroadcastItem(i);
                try {
                    listener.onStateChanged(ImConnectionAdapter.this, state, error);
                } catch (RemoteException e) {
                    // The RemoteCallbackList will take care of removing the
                    // dead listeners.
                }
            }
            mRemoteConnListeners.finishBroadcast();
            if (state == ImConnection.DISCONNECTED) {
                // NOTE: if this logic is changed, the logic in ImApp.MyConnListener must be changed to match
                mService.removeConnection(ImConnectionAdapter.this);
            }
        }

        public void onUserPresenceUpdated() {
            updateAccountStatusInDb();
            final int N = mRemoteConnListeners.beginBroadcast();
            for (int i = 0; i < N; i++) {
                IConnectionListener listener = mRemoteConnListeners.getBroadcastItem(i);
                try {
                    listener.onUserPresenceUpdated(ImConnectionAdapter.this);
                } catch (RemoteException e) {
                    // The RemoteCallbackList will take care of removing the
                    // dead listeners.
                }
            }
            mRemoteConnListeners.finishBroadcast();
        }

        public void onUpdatePresenceError(final ImErrorInfo error) {
            final int N = mRemoteConnListeners.beginBroadcast();
            for (int i = 0; i < N; i++) {
                IConnectionListener listener = mRemoteConnListeners.getBroadcastItem(i);
                try {
                    listener.onUpdatePresenceError(ImConnectionAdapter.this, error);
                } catch (RemoteException e) {
                    // The RemoteCallbackList will take care of removing the
                    // dead listeners.
                }
            }
            mRemoteConnListeners.finishBroadcast();
        }
    }

    /**
     * Engine-side invitation listener: records each group invitation in the
     * content provider, then either notifies the remote listener or, failing
     * that, raises a status-bar notification.
     */
    final class InvitationListenerAdapter implements InvitationListener {
        IInvitationListener mRemoteListener;

        public void onGroupInvitation(Invitation invitation) {
            String sender = invitation.getSender().getUser();
            ContentValues values = new ContentValues(7);
            values.put(Imps.Invitation.PROVIDER, mProviderId);
            values.put(Imps.Invitation.ACCOUNT, mAccountId);
            values.put(Imps.Invitation.INVITE_ID, invitation.getInviteID());
            values.put(Imps.Invitation.SENDER, sender);
            values.put(Imps.Invitation.GROUP_NAME, invitation.getGroupAddress().getUser());
            values.put(Imps.Invitation.NOTE, invitation.getReason());
            values.put(Imps.Invitation.STATUS, Imps.Invitation.STATUS_PENDING);
            ContentResolver resolver = mService.getContentResolver();
            Uri uri = resolver.insert(Imps.Invitation.CONTENT_URI, values);
            long id = ContentUris.parseId(uri);
            try {
                if (mRemoteListener != null) {
                    mRemoteListener.onGroupInvitation(id);
                    return;
                }
            } catch (RemoteException e) {
                RemoteImService.debug("onGroupInvitation: dead listener " + mRemoteListener
                                      + "; removing", e);
                mRemoteListener = null;
            }
            // No listener registered or failed to notify the listener, send a
            // notification instead.
            mService.getStatusBarNotifier().notifyGroupInvitation(mProviderId, mAccountId, id,
                    sender);
        }
    }
}
/*
 * Copyright (C) 2015 Zhang Rui <bbcallen@gmail.com>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package tv.danmaku.ijk.media.player;

import android.annotation.TargetApi;
import android.content.Context;
import android.net.Uri;
import android.os.Build;
import android.view.Surface;
import android.view.SurfaceHolder;

import java.io.FileDescriptor;
import java.io.IOException;
import java.util.Map;

import tv.danmaku.ijk.media.player.misc.IMediaDataSource;
import tv.danmaku.ijk.media.player.misc.ITrackInfo;

/**
 * Forwarding proxy for an {@link IMediaPlayer}: every call is delegated to the
 * wrapped back-end player. Listener setters re-wrap the supplied listener so
 * that callbacks report this proxy (not the back-end player) as the source.
 */
public class MediaPlayerProxy implements IMediaPlayer {
    // The wrapped player all calls are forwarded to.
    protected final IMediaPlayer mBackEndMediaPlayer;

    /** Wraps the given back-end player; all IMediaPlayer calls delegate to it. */
    public MediaPlayerProxy(IMediaPlayer backEndMediaPlayer) {
        mBackEndMediaPlayer = backEndMediaPlayer;
    }

    /** Returns the wrapped back-end player. */
    public IMediaPlayer getInternalMediaPlayer() {
        return mBackEndMediaPlayer;
    }

    @Override
    public void setDisplay(SurfaceHolder sh) {
        mBackEndMediaPlayer.setDisplay(sh);
    }

    @TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH)
    @Override
    public void setSurface(Surface surface) {
        mBackEndMediaPlayer.setSurface(surface);
    }

    @Override
    public void setDataSource(Context context, Uri uri)
            throws IOException, IllegalArgumentException, SecurityException, IllegalStateException {
        mBackEndMediaPlayer.setDataSource(context, uri);
    }

    @TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH)
    @Override
    public void setDataSource(Context context, Uri uri, Map<String, String> headers)
            throws IOException, IllegalArgumentException, SecurityException, IllegalStateException {
        mBackEndMediaPlayer.setDataSource(context, uri, headers);
    }

    @Override
    public void setDataSource(FileDescriptor fd)
            throws IOException, IllegalArgumentException, IllegalStateException {
        mBackEndMediaPlayer.setDataSource(fd);
    }

    @Override
    public void setDataSource(String path)
            throws IOException, IllegalArgumentException, SecurityException, IllegalStateException {
        mBackEndMediaPlayer.setDataSource(path);
    }

    @Override
    public void setDataSource(IMediaDataSource mediaDataSource) {
        mBackEndMediaPlayer.setDataSource(mediaDataSource);
    }

    @Override
    public String getDataSource() {
        return mBackEndMediaPlayer.getDataSource();
    }

    @Override
    public void prepareAsync() throws IllegalStateException {
        mBackEndMediaPlayer.prepareAsync();
    }

    @Override
    public void start() throws IllegalStateException {
        mBackEndMediaPlayer.start();
    }

    @Override
    public void stop() throws IllegalStateException {
        mBackEndMediaPlayer.stop();
    }

    @Override
    public void pause() throws IllegalStateException {
        mBackEndMediaPlayer.pause();
    }

    @Override
    public void setScreenOnWhilePlaying(boolean screenOn) {
        mBackEndMediaPlayer.setScreenOnWhilePlaying(screenOn);
    }

    @Override
    public int getVideoWidth() {
        return mBackEndMediaPlayer.getVideoWidth();
    }

    @Override
    public int getVideoHeight() {
        return mBackEndMediaPlayer.getVideoHeight();
    }

    @Override
    public boolean isPlaying() {
        return mBackEndMediaPlayer.isPlaying();
    }

    @Override
    public void seekTo(long msec) throws IllegalStateException {
        mBackEndMediaPlayer.seekTo(msec);
    }

    @Override
    public long getCurrentPosition() {
        return mBackEndMediaPlayer.getCurrentPosition();
    }

    @Override
    public long getDuration() {
        return mBackEndMediaPlayer.getDuration();
    }

    @Override
    public void release() {
        mBackEndMediaPlayer.release();
    }

    @Override
    public void reset() {
        mBackEndMediaPlayer.reset();
    }

    @Override
    public void setVolume(float leftVolume, float rightVolume) {
        mBackEndMediaPlayer.setVolume(leftVolume, rightVolume);
    }

    @Override
    public int getAudioSessionId() {
        return mBackEndMediaPlayer.getAudioSessionId();
    }

    @Override
    public MediaInfo getMediaInfo() {
        return mBackEndMediaPlayer.getMediaInfo();
    }

    // Intentionally a no-op in the proxy: the flag is not forwarded.
    @Override
    public void setLogEnabled(boolean enable) {
    }

    // Hard-coded: the proxy never reports itself playable, regardless of the
    // back-end player.
    @Override
    public boolean isPlayable() {
        return false;
    }

    /**
     * Wraps the listener so onPrepared reports this proxy as the player.
     * Passing null clears the back-end listener.
     */
    @Override
    public void setOnPreparedListener(OnPreparedListener listener) {
        if (listener != null) {
            final OnPreparedListener finalListener = listener;
            mBackEndMediaPlayer.setOnPreparedListener(new OnPreparedListener() {
                @Override
                public void onPrepared(IMediaPlayer mp) {
                    finalListener.onPrepared(MediaPlayerProxy.this);
                }
            });
        } else {
            mBackEndMediaPlayer.setOnPreparedListener(null);
        }
    }

    /** Same re-wrapping pattern as setOnPreparedListener, for completion events. */
    @Override
    public void setOnCompletionListener(OnCompletionListener listener) {
        if (listener != null) {
            final OnCompletionListener finalListener = listener;
            mBackEndMediaPlayer.setOnCompletionListener(new OnCompletionListener() {
                @Override
                public void onCompletion(IMediaPlayer mp) {
                    finalListener.onCompletion(MediaPlayerProxy.this);
                }
            });
        } else {
            mBackEndMediaPlayer.setOnCompletionListener(null);
        }
    }

    /** Re-wraps buffering-update callbacks to report this proxy as the source. */
    @Override
    public void setOnBufferingUpdateListener(OnBufferingUpdateListener listener) {
        if (listener != null) {
            final OnBufferingUpdateListener finalListener = listener;
            mBackEndMediaPlayer.setOnBufferingUpdateListener(new OnBufferingUpdateListener() {
                @Override
                public void onBufferingUpdate(IMediaPlayer mp, int percent) {
                    finalListener.onBufferingUpdate(MediaPlayerProxy.this, percent);
                }
            });
        } else {
            mBackEndMediaPlayer.setOnBufferingUpdateListener(null);
        }
    }

    /** Re-wraps seek-complete callbacks to report this proxy as the source. */
    @Override
    public void setOnSeekCompleteListener(OnSeekCompleteListener listener) {
        if (listener != null) {
            final OnSeekCompleteListener finalListener = listener;
            mBackEndMediaPlayer.setOnSeekCompleteListener(new OnSeekCompleteListener() {
                @Override
                public void onSeekComplete(IMediaPlayer mp) {
                    finalListener.onSeekComplete(MediaPlayerProxy.this);
                }
            });
        } else {
            mBackEndMediaPlayer.setOnSeekCompleteListener(null);
        }
    }

    /** Re-wraps video-size callbacks to report this proxy as the source. */
    @Override
    public void setOnVideoSizeChangedListener(OnVideoSizeChangedListener listener) {
        if (listener != null) {
            final OnVideoSizeChangedListener finalListener = listener;
            mBackEndMediaPlayer.setOnVideoSizeChangedListener(new OnVideoSizeChangedListener() {
                @Override
                public void onVideoSizeChanged(IMediaPlayer mp, int width, int height,
                        int sar_num, int sar_den) {
                    finalListener.onVideoSizeChanged(MediaPlayerProxy.this, width, height,
                            sar_num, sar_den);
                }
            });
        } else {
            mBackEndMediaPlayer.setOnVideoSizeChangedListener(null);
        }
    }

    /** Re-wraps error callbacks; the listener's boolean result is forwarded. */
    @Override
    public void setOnErrorListener(OnErrorListener listener) {
        if (listener != null) {
            final OnErrorListener finalListener = listener;
            mBackEndMediaPlayer.setOnErrorListener(new OnErrorListener() {
                @Override
                public boolean onError(IMediaPlayer mp, int what, int extra) {
                    return finalListener.onError(MediaPlayerProxy.this, what, extra);
                }
            });
        } else {
            mBackEndMediaPlayer.setOnErrorListener(null);
        }
    }

    /** Re-wraps info callbacks; the listener's boolean result is forwarded. */
    @Override
    public void setOnInfoListener(OnInfoListener listener) {
        if (listener != null) {
            final OnInfoListener finalListener = listener;
            mBackEndMediaPlayer.setOnInfoListener(new OnInfoListener() {
                @Override
                public boolean onInfo(IMediaPlayer mp, int what, int extra) {
                    return finalListener.onInfo(MediaPlayerProxy.this, what, extra);
                }
            });
        } else {
            mBackEndMediaPlayer.setOnInfoListener(null);
        }
    }

    @Override
    public void setAudioStreamType(int streamtype) {
        mBackEndMediaPlayer.setAudioStreamType(streamtype);
    }

    @Override
    public void setKeepInBackground(boolean keepInBackground) {
        mBackEndMediaPlayer.setKeepInBackground(keepInBackground);
    }

    @Override
    public int getVideoSarNum() {
        return mBackEndMediaPlayer.getVideoSarNum();
    }

    @Override
    public int getVideoSarDen() {
        return mBackEndMediaPlayer.getVideoSarDen();
    }

    @Override
    public void setWakeMode(Context context, int mode) {
        mBackEndMediaPlayer.setWakeMode(context, mode);
    }

    @Override
    public ITrackInfo[] getTrackInfo() {
        return mBackEndMediaPlayer.getTrackInfo();
    }

    @Override
    public void setLooping(boolean looping) {
        mBackEndMediaPlayer.setLooping(looping);
    }

    @Override
    public boolean isLooping() {
        return mBackEndMediaPlayer.isLooping();
    }
}
/* Author: M. Serhat Urtis */

import java.awt.image.BufferedImage;
import java.io.*;
import java.util.*;

import javax.imageio.ImageIO;

//16-Color
/*Color indexes
0:Black 1:DarkRed 2:DarkGreen 3:DarkYellow 4:DarkBlue 5:DarkMagenta 6:DarkCyan 7:LightGray
8:Gray 9:Red 10:Green 11:Yellow 12:Blue 13:Magenta 14:Cyan 15:White */

/**
 * Maps the 16 classic terminal color names to RGB component values.
 *
 * A palette is chosen by name in the constructor: "Windows", "MAC", "RISCOS",
 * or any other string for the built-in default table. The special name
 * "eightBit" selects no fixed table at all; instead each channel value is
 * generated on demand with a small random jitter (see {@link #getRGBVal}).
 */
public class Colors {

    //8bit flag (public for compatibility with existing callers)
    public boolean eightBit;

    /** Color names in palette-index order (see class comment above). */
    private static final String[] COLOR_NAMES = {
        "Black", "DarkRed", "DarkGreen", "DarkYellow", "DarkBlue", "DarkMagenta",
        "DarkCyan", "LightGray", "Gray", "Red", "Green", "Yellow", "Blue",
        "Magenta", "Cyan", "White"
    };

    /**
     * Which channels participate in each of the six hues, in index order:
     * (Dark)Red, (Dark)Green, (Dark)Yellow, (Dark)Blue, (Dark)Magenta, (Dark)Cyan.
     * Used only by the randomized "eightBit" mode.
     */
    private static final String[] HUE_CHANNELS = {"R", "G", "RG", "B", "RB", "GB"};

    private static final int[][] WINDOWS_PALETTE = {
        {0, 0, 0},       {128, 0, 0},     {0, 128, 0},     {180, 180, 0},
        {0, 0, 128},     {128, 0, 128},   {0, 180, 180},   {192, 192, 192},
        {128, 128, 128}, {255, 0, 0},     {0, 255, 0},     {255, 255, 0},
        {0, 0, 255},     {255, 0, 255},   {0, 255, 255},   {255, 255, 255}
    };

    // NOTE(review): DarkRed and DarkYellow are identical here (255,102,0) —
    // copied faithfully from the original table; confirm whether intentional.
    private static final int[][] MAC_PALETTE = {
        {0, 0, 0},       {255, 102, 0},   {0, 102, 0},     {255, 102, 0},
        {51, 0, 153},    {128, 0, 128},   {0, 153, 255},   {136, 136, 136},
        {68, 68, 68},    {221, 0, 0},     {0, 170, 0},     {255, 255, 0},
        {0, 0, 204},     {255, 0, 153},   {0, 153, 255},   {255, 255, 255}
    };

    private static final int[][] RISCOS_PALETTE = {
        {0, 0, 0},       {128, 0, 0},     {0, 128, 0},     {160, 160, 0},
        {0, 0, 128},     {128, 0, 128},   {0, 160, 160},   {192, 192, 192},
        {128, 128, 128}, {221, 0, 0},     {0, 204, 0},     {255, 187, 0},
        {0, 68, 153},    {255, 0, 255},   {0, 187, 255},   {255, 255, 255}
    };

    // Identical to WINDOWS_PALETTE except DarkCyan (0,128,128 vs 0,180,180).
    private static final int[][] DEFAULT_PALETTE = {
        {0, 0, 0},       {128, 0, 0},     {0, 128, 0},     {180, 180, 0},
        {0, 0, 128},     {128, 0, 128},   {0, 128, 128},   {192, 192, 192},
        {128, 128, 128}, {255, 0, 0},     {0, 255, 0},     {255, 255, 0},
        {0, 0, 255},     {255, 0, 255},   {0, 255, 255},   {255, 255, 255}
    };

    //Color,R,G,B
    private int[][] colorPalette = new int[16][3];

    /**
     * Selects a palette by name. "eightBit" enables randomized generation;
     * "Windows", "MAC" and "RISCOS" select fixed tables; any other name
     * falls back to the default table.
     *
     * @param palette palette name (case-sensitive)
     */
    public Colors(String palette) {
        int[][] chosen;
        switch (palette) {
            case "eightBit":
                eightBit = true;
                return; // table stays all-zero; values are generated per call
            case "Windows": chosen = WINDOWS_PALETTE; break;
            case "MAC":     chosen = MAC_PALETTE;     break;
            case "RISCOS":  chosen = RISCOS_PALETTE;  break;
            default:        chosen = DEFAULT_PALETTE; break;
        }
        eightBit = false;
        // Deep-copy so the shared static tables can never be mutated.
        for (int i = 0; i < 16; i++) {
            colorPalette[i] = Arrays.copyOf(chosen[i], 3);
        }
    }

    /**
     * Returns one RGB component (0-255) of the named color.
     *
     * In fixed-palette mode this is a straight table lookup. In "eightBit"
     * mode the value is randomized per call: dark hues yield 175 or 191 on
     * their active channels, bright hues 223 or 239, White is always 239,
     * LightGray jitters in 150-159 and Gray in 50-59; inactive channels,
     * Black, unknown color names and unknown channel characters yield 0.
     *
     * @param color one of the 16 color names (e.g. "DarkRed", "White")
     * @param RGB   channel selector: 'R', 'G' or 'B' (any other character
     *              selects R in fixed-palette mode, yields 0 in eightBit mode)
     * @return the channel value, or 0 for unknown inputs
     */
    public int getRGBVal(String color, char RGB) {
        int idx = colorIndex(color);
        if (idx <= 0) {
            return 0; // Black, or an unrecognized color name
        }
        if (eightBit) {
            return randomizedValue(idx, RGB);
        }
        // Unknown channel characters historically fell back to the R column.
        int channel = (RGB == 'G') ? 1 : (RGB == 'B') ? 2 : 0;
        return colorPalette[idx][channel];
    }

    /** Index of a color name in {@link #COLOR_NAMES}, or -1 when unknown. */
    private static int colorIndex(String color) {
        for (int i = 0; i < COLOR_NAMES.length; i++) {
            if (COLOR_NAMES[i].equals(color)) {
                return i;
            }
        }
        return -1;
    }

    /**
     * Generates the randomized channel value used by the "eightBit" palette.
     * Value ranges reproduce the original hand-written cases exactly:
     * dark hues 16*(11|12)-1 = 175/191, bright hues 16*(14|15)-1 = 223/239,
     * White fixed at 16*15-1 = 239, grays jitter within a 10-value band.
     */
    private static int randomizedValue(int idx, char RGB) {
        Random clRand = new Random();
        switch (idx) {
            case 7:  // LightGray: 150-159 on every channel
                return isRgbChar(RGB) ? 150 + clRand.nextInt(10) : 0;
            case 8:  // Gray: 50-59 on every channel
                return isRgbChar(RGB) ? 50 + clRand.nextInt(10) : 0;
            case 15: // White: the original's nextInt(1) was always 0, so 239
                return isRgbChar(RGB) ? 16 * 15 - 1 : 0;
            default: {
                boolean dark = idx <= 6; // indexes 1-6 dark, 9-14 bright
                String active = HUE_CHANNELS[idx - (dark ? 1 : 9)];
                if (active.indexOf(RGB) < 0) {
                    return 0; // this channel does not participate in the hue
                }
                int base = dark ? 11 : 14;
                return 16 * (clRand.nextInt(2) + base) - 1;
            }
        }
    }

    /** True when the channel selector is one of the three valid characters. */
    private static boolean isRgbChar(char c) {
        return c == 'R' || c == 'G' || c == 'B';
    }
}
/* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.krazy.kcfw.modules.act.rest.diagram.services;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import org.activiti.engine.HistoryService;
import org.activiti.engine.RepositoryService;
import org.activiti.engine.RuntimeService;
import org.activiti.engine.history.HistoricActivityInstance;
import org.activiti.engine.impl.persistence.entity.ProcessDefinitionEntity;
import org.activiti.engine.impl.pvm.PvmTransition;
import org.activiti.engine.impl.pvm.process.ActivityImpl;
import org.activiti.engine.runtime.ProcessInstance;
import org.apache.shiro.authz.annotation.RequiresUser;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;

/**
 * REST resource that reports which activities and sequence flows of a running
 * Activiti process instance should be highlighted on its diagram: the
 * currently active activity ids plus the historical flows already traversed.
 */
@RestController
public class ProcessInstanceHighlightsResource {

    @Autowired
    private RuntimeService runtimeService;
    @Autowired
    private RepositoryService repositoryService;
    @Autowired
    private HistoryService historyService;

    protected ObjectMapper objectMapper = new ObjectMapper();

    /**
     * GET endpoint returning a JSON object of the form
     * {processInstanceId, processDefinitionId, activities[], flows[]}.
     *
     * @param processInstanceId id of the running process instance
     * @return JSON with highlighted activity ids and flow ids; on any failure
     *         the arrays may be empty (see NOTE below)
     */
    @RequiresUser
    @RequestMapping(value = "/act/service/process-instance/{processInstanceId}/highlights", method = RequestMethod.GET, produces = "application/json")
    public ObjectNode getHighlighted(@PathVariable String processInstanceId) {

        ObjectNode responseJSON = objectMapper.createObjectNode();
        responseJSON.put("processInstanceId", processInstanceId);

        ArrayNode activitiesArray = objectMapper.createArrayNode();
        ArrayNode flowsArray = objectMapper.createArrayNode();

        try {
            // Look up the live instance; NOTE(review): if the instance has
            // already finished, singleResult() returns null and the NPE below
            // is silently swallowed by the catch — caller then gets empty arrays.
            ProcessInstance processInstance = runtimeService.createProcessInstanceQuery().processInstanceId(processInstanceId).singleResult();
            ProcessDefinitionEntity processDefinition = (ProcessDefinitionEntity) repositoryService.getProcessDefinition(processInstance
                    .getProcessDefinitionId());
            responseJSON.put("processDefinitionId", processInstance.getProcessDefinitionId());

            List<String> highLightedActivities = runtimeService.getActiveActivityIds(processInstanceId);
            List<String> highLightedFlows = getHighLightedFlows(processDefinition, processInstanceId);

            for (String activityId : highLightedActivities) {
                activitiesArray.add(activityId);
            }

            for (String flow : highLightedFlows) {
                flowsArray.add(flow);
            }

        } catch (Exception e) {
            // NOTE(review): errors are swallowed and only dumped to stderr;
            // the response still succeeds with empty arrays. Consider a logger.
            e.printStackTrace();
        }

        responseJSON.put("activities", activitiesArray);
        responseJSON.put("flows", flowsArray);
        return responseJSON;
    }

    /**
     * Collects the ids of all historically traversed sequence flows of the
     * given process instance.
     *
     * @param processDefinition deployed definition of the instance's process
     * @param processInstanceId id of the instance to inspect
     * @return ids of the flows to highlight
     */
    private List<String> getHighLightedFlows(ProcessDefinitionEntity processDefinition, String processInstanceId) {
        List<String> highLightedFlows = new ArrayList<String>();

        List<HistoricActivityInstance> historicActivityInstances = historyService.createHistoricActivityInstanceQuery()
                .processInstanceId(processInstanceId)
                //order by startime asc is not correct. use default order is correct.
                //.orderByHistoricActivityInstanceStartTime().asc()/*.orderByActivityId().asc()*/
                .list();

        // Use a LinkedList so the recursive walker can consume entries from
        // the front as activities are processed.
        LinkedList<HistoricActivityInstance> hisActInstList = new LinkedList<HistoricActivityInstance>();
        hisActInstList.addAll(historicActivityInstances);

        getHighlightedFlows(processDefinition.getActivities(), hisActInstList, highLightedFlows);

        return highLightedFlows;
    }

    /**
     * getHighlightedFlows
     *
     * code logic: 1. Loop all activities by id asc order; 2. Check each activity's outgoing transitions and eventBoundery outgoing transitions, if
     * outgoing transitions's destination.id is in other executed activityIds, add this transition to highLightedFlows List; 3. But if activity is not
     * a parallelGateway or inclusiveGateway, only choose the earliest flow.
     *
     * @param activityList    all activities of the (sub)process being walked
     * @param hisActInstList  remaining historic activity instances; consumed
     *                        destructively from the front as the walk proceeds
     * @param highLightedFlows accumulator for the resulting flow ids
     */
    private void getHighlightedFlows(List<ActivityImpl> activityList, LinkedList<HistoricActivityInstance> hisActInstList,
            List<String> highLightedFlows) {
        //check out startEvents in activityList
        List<ActivityImpl> startEventActList = new ArrayList<ActivityImpl>();
        Map<String, ActivityImpl> activityMap = new HashMap<String, ActivityImpl>(activityList.size());
        for (ActivityImpl activity : activityList) {
            activityMap.put(activity.getId(), activity);
            String actType = (String) activity.getProperty("type");
            if (actType != null && actType.toLowerCase().indexOf("startevent") >= 0) {
                startEventActList.add(activity);
            }
        }

        // This code works around a bug:
        // ACT-1728 — if the process instance was started by a callActivity, the
        // startEvent activity is missing from the ACT_HI_ACTINST table.
        // Logic: check whether the first historic activity is a startEvent; if
        // not, find and highlight the startEvent's outgoing flow explicitly.
        HistoricActivityInstance firstHistActInst = hisActInstList.getFirst();
        String firstActType = (String) firstHistActInst.getActivityType();
        if (firstActType != null && firstActType.toLowerCase().indexOf("startevent") < 0) {
            PvmTransition startTrans = getStartTransaction(startEventActList, firstHistActInst);
            if (startTrans != null) {
                highLightedFlows.add(startTrans.getId());
            }
        }

        while (!hisActInstList.isEmpty()) {
            HistoricActivityInstance histActInst = hisActInstList.removeFirst();
            ActivityImpl activity = activityMap.get(histActInst.getActivityId());
            if (activity != null) {
                boolean isParallel = false;
                String type = histActInst.getActivityType();
                if ("parallelGateway".equals(type) || "inclusiveGateway".equals(type)) {
                    isParallel = true;
                } else if ("subProcess".equals(histActInst.getActivityType())) {
                    // Recurse into the sub-process; the shared hisActInstList is
                    // consumed by the recursive call as well.
                    getHighlightedFlows(activity.getActivities(), hisActInstList, highLightedFlows);
                }

                // Consider both normal outgoing transitions and those leaving
                // boundary events attached to this activity.
                List<PvmTransition> allOutgoingTrans = new ArrayList<PvmTransition>();
                allOutgoingTrans.addAll(activity.getOutgoingTransitions());
                allOutgoingTrans.addAll(getBoundaryEventOutgoingTransitions(activity));
                List<String> activityHighLightedFlowIds = getHighlightedFlows(allOutgoingTrans, hisActInstList, isParallel);
                highLightedFlows.addAll(activityHighLightedFlowIds);
            }
        }
    }

    /**
     * Check out the outgoing transition connected to firstActInst from startEventActList
     *
     * @param startEventActList candidate start-event activities
     * @param firstActInst      first executed historic activity instance
     * @return the matching transition, or null when none connects
     */
    private PvmTransition getStartTransaction(List<ActivityImpl> startEventActList, HistoricActivityInstance firstActInst) {
        for (ActivityImpl startEventAct : startEventActList) {
            for (PvmTransition trans : startEventAct.getOutgoingTransitions()) {
                if (trans.getDestination().getId().equals(firstActInst.getActivityId())) {
                    return trans;
                }
            }
        }
        return null;
    }

    /**
     * Collects the outgoing transitions of all boundary events attached to
     * the given activity (nested activities whose type contains "boundary").
     *
     * @param activity activity whose boundary events are inspected
     * @return outgoing transitions of its boundary events (possibly empty)
     */
    private List<PvmTransition> getBoundaryEventOutgoingTransitions(ActivityImpl activity) {
        List<PvmTransition> boundaryTrans = new ArrayList<PvmTransition>();
        for (ActivityImpl subActivity : activity.getActivities()) {
            String type = (String) subActivity.getProperty("type");
            if (type != null && type.toLowerCase().indexOf("boundary") >= 0) {
                boundaryTrans.addAll(subActivity.getOutgoingTransitions());
            }
        }
        return boundaryTrans;
    }

    /**
     * find out single activity's highlighted flowIds
     *
     * @param pvmTransitionList outgoing transitions of one activity
     * @param hisActInstList    remaining historic activity instances
     * @param isParallel        if true all matching flows are returned; if
     *                          false (exclusiveGateway, boundary event on a
     *                          task, ...) only the earliest matching flow is
     * @return flow ids to highlight for this activity
     */
    private List<String> getHighlightedFlows(List<PvmTransition> pvmTransitionList, LinkedList<HistoricActivityInstance> hisActInstList,
            boolean isParallel) {
        List<String> highLightedFlowIds = new ArrayList<String>();

        PvmTransition earliestTrans = null;
        HistoricActivityInstance earliestHisActInst = null;

        for (PvmTransition pvmTransition : pvmTransitionList) {

            String destActId = pvmTransition.getDestination().getId();
            HistoricActivityInstance destHisActInst = findHisActInst(hisActInstList, destActId);
            if (destHisActInst != null) {
                if (isParallel) {
                    highLightedFlowIds.add(pvmTransition.getId());
                } else if (earliestHisActInst == null || (earliestHisActInst.getId().compareTo(destHisActInst.getId()) > 0)) {
                    // NOTE(review): "earliest" is decided by lexicographic id
                    // comparison of the historic instance ids — presumably ids
                    // are monotonically assigned; verify against the engine.
                    earliestTrans = pvmTransition;
                    earliestHisActInst = destHisActInst;
                }
            }
        }

        if ((!isParallel) && earliestTrans != null) {
            highLightedFlowIds.add(earliestTrans.getId());
        }

        return highLightedFlowIds;
    }

    /**
     * Finds the first historic activity instance whose activity id equals
     * actId, or null when none remains in the list.
     */
    private HistoricActivityInstance findHisActInst(LinkedList<HistoricActivityInstance> hisActInstList, String actId) {
        for (HistoricActivityInstance hisActInst : hisActInstList) {
            if (hisActInst.getActivityId().equals(actId)) {
                return hisActInst;
            }
        }
        return null;
    }
}
package com.horowitz.mickey.ocr;

import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import javax.imageio.ImageIO;

import com.horowitz.mickey.ImageComparator;
import com.horowitz.mickey.ImageManager;
import com.horowitz.mickey.Pixel;
import com.horowitz.mickey.SimilarityImageComparator;
import com.horowitz.mickey.common.MyImageIO;

/**
 * Template-matching OCR for short numeric strips (digits plus an optional
 * slash, e.g. "27194/549823"): slides over the input image, matching each
 * position against pre-loaded digit template bitmaps.
 */
public class OCRB {

  // Comparator used to decide whether a template occurs in a sub-image.
  private ImageComparator     _comparator;
  // Templates: indexes 0-9 are the digit glyphs, index 10 is the slash glyph.
  private List<BufferedImage> _digits;
  // Smallest/largest template dimensions, computed once in the constructor.
  private int                 _minWidth;
  private int                 _maxWidth;
  private int                 _maxHeight;

  /**
   * Creates an OCR using the default similarity comparator.
   *
   * @param prefix template filename prefix; files "<prefix>0.bmp".."<prefix>9.bmp"
   *               and "<prefix>slash.bmp" must be resolvable via ImageManager
   * @throws IOException if any template image cannot be read
   */
  public OCRB(String prefix) throws IOException {
    this(prefix, new SimilarityImageComparator(0.04, 2000));
  }

  /**
   * Creates an OCR with a caller-supplied comparator and loads the eleven
   * template bitmaps (ten digits and a slash).
   *
   * @param prefix     template filename prefix
   * @param comparator image comparator used for template matching
   * @throws IOException if any template image cannot be read
   */
  public OCRB(String prefix, ImageComparator comparator) throws IOException {
    _comparator = comparator;
    _digits = new ArrayList<BufferedImage>(10);
    for (int i = 0; i < 10; i++) {
      _digits.add(ImageIO.read(ImageManager.getImageURL(prefix + i + ".bmp")));
    }
    _digits.add(ImageIO.read(ImageManager.getImageURL(prefix + "slash" + ".bmp")));

    // Record the min/max template sizes; scanImage uses them as window bounds.
    _minWidth = Integer.MAX_VALUE;
    _maxWidth = 0;
    _maxHeight = 0;
    for (BufferedImage bi : _digits) {
      int w = bi.getWidth();
      int h = bi.getHeight();
      if (w > _maxWidth)
        _maxWidth = w;
      if (w < _minWidth)
        _minWidth = w;
      if (h > _maxHeight)
        _maxHeight = h;
    }
  }

  /**
   * Debug helper: dumps an intermediate sub-image to "subimage<n>.png".
   * Deliberately disabled via the constant-false guard; flip to enable.
   */
  private void writeImage(BufferedImage image, int n) {
    if (false)
      try {
        MyImageIO.write(image, "PNG", new File("subimage" + n + ".png"));
      } catch (IOException e) {
        e.printStackTrace();
      }
  }

  /**
   * Scans the image left to right and returns the recognized text.
   *
   * Algorithm: repeatedly take a window up to _maxWidth wide from the left
   * edge and test every template against it. Exactly one match consumes that
   * template's width and appends its character; no match shifts the window
   * right by one pixel; multiple matches are currently ambiguous and skipped
   * (see the commented-out resolution code below).
   *
   * @param image strip to recognize; expected to contain only digits/slashes
   * @return the recognized string (digits and '/'), possibly empty
   */
  public String scanImage(BufferedImage image) {
    BufferedImage subimage = image.getSubimage(0, 0, image.getWidth(), image.getHeight());
    writeImage(subimage, 1);
    // subimage = cutEdges(subimage, _foreground);
    // writeImage(subimage, 2);
    BufferedImage subimage2 = subimage.getSubimage(0, 0, subimage.getWidth(), subimage.getHeight());
    String result = "";
    int w = _maxWidth;
    int wmin = _minWidth;
    // int h = masks.getMaxHeight();
    while (subimage.getWidth() >= wmin) {
      // we have space to work
      int ww = w;
      if (subimage.getWidth() < w) {
        ww = subimage.getWidth();
      }
      subimage2 = subimage.getSubimage(0, 0, ww, subimage.getHeight());
      writeImage(subimage2, 101);

      // Try every template against the current window; stop early as soon as
      // a second template also matches (ambiguous position).
      List<Integer> found = new ArrayList<Integer>();
      for (int i = 0; i < _digits.size(); i++) {
        BufferedImage bi = _digits.get(i);
        Pixel p = _comparator.findImage(bi, subimage2);
        if (p != null) {
          found.add(i);
        }
        if (found.size() > 1) {
          // not good
          break;
        }
      }

      if (found.size() == 1) {
        // yahoooo — unambiguous match; index 10 is the slash glyph
        Integer m = found.get(0);
        result += ("" + (m < 10 ? m : "/"));
        // cut the chunk and move forward
        if (subimage.getWidth() - _digits.get(m).getWidth() <= 0) {
          // it's over
          break;
        }
        subimage = subimage.getSubimage(0 + _digits.get(m).getWidth(), 0, subimage.getWidth() - _digits.get(m).getWidth(), subimage.getHeight());
        writeImage(subimage, 102);
      } else if (found.isEmpty()) {
        // No template matched: advance one pixel and retry.
        int howMuchToTheRight = 1; // or w
        if (subimage.getWidth() - howMuchToTheRight >= wmin) {
          subimage = subimage.getSubimage(0 + howMuchToTheRight, 0, subimage.getWidth() - howMuchToTheRight, subimage.getHeight());
          writeImage(subimage, 103);
        } else {
          // we're done
          break;
        }
      } else {
        // Two or more templates matched the same window — ambiguous.
        // NOTE(review): currently only reported to stderr and the loop
        // continues with the same window, which can loop forever if the
        // ambiguity persists; the disabled code below was a draft resolution.
        //SKIP FOR NOW
        System.err.println(found);
        /*
        // size is 2 or more -> not good!!!
        // skip for now
        // WAIT WAIT WAIT
        String name = found.get(0).getName();
        boolean same = true;
        for (Mask mask : found) {
          if (!mask.getName().equals(name)) {
            same = false;
            break;
          }
        }
        if (same) {
          // Phew
          result += name;
          Mask m = found.get(0);
          if (subimage.getWidth() - m.getWidth() <= 0) {
            // it's over
            break;
          }
          subimage = subimage.getSubimage(0 + m.getWidth(), 0, subimage.getWidth() - m.getWidth(), subimage.getHeight());
          writeImage(subimage, 102);
        } else {
          System.out.println("UH OH!!!");
          break;
        }*/
      }
    }// while

    return result;
  }

  /*
  private BufferedImage cutEdges(BufferedImage image, Color foreground) {
    BufferedImage subimage;
    // cut north
    boolean lineClean = true;
    int yStart = 0;
    for (int y = 0; y < image.getHeight(); y++) {
      for (int x = 0; x < image.getWidth(); x++) {
        int diff = compareTwoColors(image.getRGB(x, y), foreground.getRGB());
        if (diff <= 1100) {
          // found one, line not clean
          lineClean = false;
          break;
        }
      }
      if (!lineClean) {
        yStart = y;
        // enough
        break;
      }
    }
    subimage = image.getSubimage(0, yStart, image.getWidth(), image.getHeight() - yStart);
    writeImage(subimage, 3);
    // cut south
    lineClean = true;
    yStart = subimage.getHeight() - 1;
    for (int y = subimage.getHeight() - 1; y >= 0; y--) {
      for (int x = 0; x < subimage.getWidth(); x++) {
        int diff = compareTwoColors(subimage.getRGB(x, y), foreground.getRGB());
        if (diff <= 1100) {
          // found one, line not clean
          lineClean = false;
          break;
        }
      }
      if (!lineClean) {
        yStart = y;
        // enough
        break;
      }
    }
    subimage = subimage.getSubimage(0, 0, subimage.getWidth(), yStart + 1);
    writeImage(subimage, 4);
    // cut west
    boolean colClean = true;
    int xStart = 0;
    for (int xx = 0; xx < subimage.getWidth(); xx++) {
      for (int y = 0; y < subimage.getHeight(); y++) {
        int diff = compareTwoColors(subimage.getRGB(xx, y), foreground.getRGB());
        if (diff <= 1100) {
          // found one, line not clean
          colClean = false;
          break;
        }
      }
      if (!colClean) {
        xStart = xx;
        if (xStart > 0)
          xStart--;
        // enough
        break;
      }
    }
    subimage = subimage.getSubimage(xStart, 0, subimage.getWidth() - xStart, subimage.getHeight());
    writeImage(subimage, 5);
    // cut east
    colClean = true;
    xStart = subimage.getWidth() - 1;
    for (int xx = subimage.getWidth() - 1; xx >= 0; xx--) {
      for (int y = 0; y < subimage.getHeight(); y++) {
        int diff = compareTwoColors(subimage.getRGB(xx, y), foreground.getRGB());
        if (diff <= 1100) {
          // found one, line not clean
          colClean = false;
          break;
        }
      }
      if (!colClean) {
        xStart = xx;
        if (xStart < subimage.getWidth() - 1)
          xStart++;
        // enough
        break;
      }
    }
    subimage = subimage.getSubimage(0, 0, xStart + 1, subimage.getHeight());
    writeImage(subimage, 6);
    return subimage;
  }
  */

  /** Manual smoke test against a few known fixture images. */
  public static void main(String[] args) {
    try {
      OCRB ocr = new OCRB("digit");
      testImage(ocr, "test_253012.bmp", "253012");
      testImage(ocr, "test_415592.bmp", "415592");
      testImage(ocr, "test_102088.bmp", "102088");
      ocr = new OCRB("g");
      testImage(ocr, "test_27194549823.bmp", "27194549823");
    } catch (IOException e) {
      e.printStackTrace();
    }
  }

  /**
   * Runs the OCR on one fixture and prints "ok"/"KO" depending on whether the
   * recognized text matches the expected value.
   */
  private static void testImage(OCRB ocr, String filename, String expectedText) throws IOException {
    BufferedImage image = ImageIO.read(ImageManager.getImageURL(filename));
    String res = ocr.scanImage(image);
    System.out.println("testing " + filename);
    System.out.println(expectedText);
    System.out.println(res);
    System.out.println(expectedText.equals(res) ? "ok" : "KO");
    System.out.println();
  }
}
package org.jwaf.agent.persistence.repository; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.stream.Collectors; import javax.ejb.LocalBean; import javax.ejb.Stateless; import javax.inject.Inject; import org.jwaf.agent.AgentState; import org.jwaf.agent.exceptions.AgentNotFound; import org.jwaf.agent.exceptions.AgentStateChangeFailed; import org.jwaf.agent.persistence.entity.AgentEntity; import org.jwaf.agent.persistence.entity.AgentEntityView; import org.jwaf.agent.persistence.entity.AgentIdentifier; import org.jwaf.common.data.mongo.annotations.MorphiaAdvancedDatastore; import org.jwaf.message.persistence.entity.ACLMessage; import org.mongodb.morphia.AdvancedDatastore; import org.mongodb.morphia.query.Query; import org.mongodb.morphia.query.UpdateOperations; import org.mongodb.morphia.query.UpdateResults; import org.slf4j.Logger; import com.mongodb.DuplicateKeyException; /** * A repository bean that contains methods for crud operations on {@link AgentEntity} * and for transactions that change agent's state. 
*/ @Stateless @LocalBean public class AgentRepository { private static final int MAX_RETRIES = 50; @Inject @MorphiaAdvancedDatastore private AdvancedDatastore ds; @Inject private Logger log; // crud public AgentEntity findAgent(String agentName) { return find(agentName); } public AgentEntityView findView(String agentName) { AgentEntityView ret = basicQuery(agentName) .retrievedFields(true, "aid", "type", "state") .get(); if(ret == null) { throw new AgentNotFound(); } else { return ret; } } public void create(AgentEntity agent) { ds.insert(agent); } public AgentEntity remove(String agentName) { return ds.findAndDelete(basicQuery(agentName)); } // activation public boolean activateSingleThreaded(String agentName) { return activateSingleThreaded(agentName, null); } public boolean activateSingleThreaded(String agentName, ACLMessage message) { // try at most MAX_RETRIES times for(int i=0; i<MAX_RETRIES; i++) { // retrieve previous state and hasNewMessages flag AgentEntity agent = basicQuery(agentName) .retrievedFields(true, "state", "hasNewMessages") .get(); assertAgentExists(agent, agentName); String prevState = agent.getState(); boolean alreadyHasMessages = agent.hasNewMessages(); boolean newMessageAvailable = message != null; boolean activated = false; Query<AgentEntity> query = basicQuery(agentName) .field("state").equal(prevState); UpdateOperations<AgentEntity> updates = ds.createUpdateOperations(AgentEntity.class); if(newMessageAvailable) { updates.add("messages", message); } // if prevState was PASSIVE if(AgentState.PASSIVE.equals(prevState)) { if(alreadyHasMessages || newMessageAvailable) { if(!newMessageAvailable) { // if no new messages available, ensure that we already have messages waiting query.field("hasNewMessages").equal(true); } // activate agent updates.set("state", AgentState.ACTIVE) .set("activeInstances", 1); activated = true; } else { // no messages waiting, no reason to activate return false; } } // if agent is about to be activated if(activated) 
{ updates.set("hasNewMessages", false); } else if(newMessageAvailable) { updates.set("hasNewMessages", true); } UpdateResults res = ds.update(query, updates); // if updated return, else retry if(res.getUpdatedCount() > 0) { return activated; } else { log.warn("Activation of agent <{}> failed due to state change, try count {}, will retry at most {} times." + " Expected: {}" ,agentName, i+1, MAX_RETRIES, query); } } // if unsuccessful after MAX_RETRIES times log.error("Activation of agent <{}> failed due to state change after {} retries, aborting.", agentName); throw new AgentStateChangeFailed("Unable to activate agent."); } public boolean passivateSingleThreaded(String agentName) { // try at most MAX_RETRIES times for(int i=0; i<MAX_RETRIES; i++) { // retrieve hasNewMessages flag AgentEntity agent = basicQuery(agentName) .retrievedFields(true, "hasNewMessages") .get(); assertAgentExists(agent, agentName); boolean hasNewMessages = agent.hasNewMessages(); Query<AgentEntity> query = basicQuery(agentName) .field("hasNewMessages").equal(hasNewMessages); UpdateOperations<AgentEntity> updates = ds.createUpdateOperations(AgentEntity.class); boolean passivated; // if agent has new messages if(hasNewMessages) { // set flag to false and don't passivate updates.set("hasNewMessages", false); passivated = false; } else { // else do passivate updates.set("state", AgentState.PASSIVE) .set("activeInstances", 0); passivated = true; } UpdateResults res = ds.update(query, updates); // if updated return, else retry if(res.getUpdatedCount() > 0) { return passivated; } else { log.warn("Passivation of agent <{}> failed due to state change, try count {}, will retry at most {} times." 
+ " Expected: {}" ,agentName, i+1, MAX_RETRIES, query); } } // if unsuccessful after MAX_RETRIES times log.error("Passivation of agent <{}> failed due to state change after {} retries, aborting.", agentName); throw new AgentStateChangeFailed("Unable to passivate agent."); } public Integer activateMultiThreadedInstance(String agentName) { UpdateOperations<AgentEntity> updates = ds.createUpdateOperations(AgentEntity.class) .inc("activeInstances") .set("state", AgentState.ACTIVE); AgentEntity agent = ds.findAndModify(basicQuery(agentName), updates, false); return agent.getActiveInstances(); } public Integer deactivateMultiThreadedInstance(String agentName) { UpdateOperations<AgentEntity> updates = ds.createUpdateOperations(AgentEntity.class) .dec("activeInstances"); AgentEntity agent = ds.findAndModify(basicQuery(agentName), updates, false); passivateMultiThreaded(agentName); return agent.getActiveInstances(); } private void passivateMultiThreaded(String agentName) { Query<AgentEntity> query = basicQuery(agentName) .field("activeInstances").equal(0); UpdateOperations<AgentEntity> updates = ds.createUpdateOperations(AgentEntity.class) .set("state", AgentState.PASSIVE); ds.update(query, updates); } public void forcePassivate(String agentName) { UpdateOperations<AgentEntity> updates = ds.createUpdateOperations(AgentEntity.class) .set("state", AgentState.PASSIVE) .set("activeInstances", 0); ds.update(basicQuery(agentName), updates); } // messages public void putToInbox(String agentName, ACLMessage message) { UpdateOperations<AgentEntity> updates = ds.createUpdateOperations(AgentEntity.class) .add("messages", message) .set("hasNewMessages", true); ds.update(basicQuery(agentName), updates); } public void putBackToInbox(String agentName, ACLMessage message) { // bind message to agent UpdateOperations<AgentEntity> agentUpdates = ds.createUpdateOperations(AgentEntity.class) .add("messages", message); ds.update(basicQuery(agentName), agentUpdates); saveMessageBackToDB(message); 
} private void saveMessageBackToDB(ACLMessage message) { // try at most MAX_RETRIES times for(int i=0; i<MAX_RETRIES; i++) { // increase unreadCount or insert if(ds.exists(message) != null) { Query<ACLMessage> query = ds.find(ACLMessage.class, "_id", message.getId()); UpdateOperations<ACLMessage> updates = ds.createUpdateOperations(ACLMessage.class) .inc("unreadCount"); UpdateResults res = ds.update(query, updates); // if updated return, else retry if(res.getUpdatedCount() > 0) { return; } } else { try { message.setUnreadCount(1); ds.insert(message); return; } catch(DuplicateKeyException e) {/*retry*/} } } // if unsuccessful after MAX_RETRIES times log.error("Putting a message back to inbox failed after {} attempts.", MAX_RETRIES); throw new AgentStateChangeFailed("Unable to put a message back to inbox."); } public List<ACLMessage> retrieveFromInbox(String agentName) { UpdateOperations<AgentEntity> updates = ds.createUpdateOperations(AgentEntity.class) .set("messages", Collections.<ACLMessage>emptyList()) .set("hasNewMessages", false); // clear messages, set hasNewMessages to false and return old AgentEntity version AgentEntity agent = ds.findAndModify(basicQuery(agentName), updates, true); assertAgentExists(agent, agentName); return new ArrayList<ACLMessage>(agent.getMessages()); } public void removeFromInbox(String agentName, List<ACLMessage> messages) { if(messages.isEmpty()) return; UpdateOperations<AgentEntity> updates = ds.createUpdateOperations(AgentEntity.class) .removeAll("messages", messages) .set("hasNewMessages", false); ds.update(basicQuery(agentName), updates); } public List<String> getMessageIDs(String agentName) { Query<AgentEntity> query = basicQuery(agentName).retrievedFields(true, "messages"); UpdateOperations<AgentEntity> updates = ds.createUpdateOperations(AgentEntity.class) .set("hasNewMessages", false); return ds.findAndModify(query, updates).getMessages() .stream() .map(m->m.getId()) .collect(Collectors.toList()); } public boolean 
hasNewMessages(String agentName) { return basicQuery(agentName) .retrievedFields(true, "hasNewMessages") .get() .hasNewMessages(); } public void ignoreNewMessages(String agentName) { UpdateOperations<AgentEntity> updates = ds.createUpdateOperations(AgentEntity.class) .set("hasNewMessages", false); ds.update(basicQuery(agentName), updates); } // data query public Integer getActiveInstances(String agentName) { return basicQuery(agentName) .retrievedFields(true, "activeInstances") .get() .getActiveInstances(); } public boolean containsAgent(AgentIdentifier aid) { return containsAgent(aid.getName()); } public boolean containsAgent(String agentName) { return basicQuery(agentName).countAll() > 0; } // transport public AgentEntity depart(String agentName) { UpdateOperations<AgentEntity> updates = ds.createUpdateOperations(AgentEntity.class) .set("state", AgentState.IN_TRANSIT); return ds.findAndModify(basicQuery(agentName), updates, true); } public List<ACLMessage> completeDeparture(String agentName) { AgentEntity agent = ds.findAndDelete(basicQuery(agentName)); return new ArrayList<ACLMessage>(agent.getMessages()); } private AgentEntity find(String agentName) { return basicQuery(agentName).get(); } private Query<AgentEntity> basicQuery(String agentName) { return ds.find(AgentEntity.class, "aid", new AgentIdentifier(agentName)); } private void assertAgentExists(AgentEntity agent, String agentName) { if(agent == null) { throw new AgentNotFound("Agent not found: <"+agentName+">"); } } }
/**
 * $RCSfile: ,v $
 * $Revision: $
 * $Date: $
 *
 * Copyright (C) 2004-2011 Jive Software. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package net.java.sipmack.softphone.gui;

import java.util.ArrayList;
import java.util.List;

import net.java.sipmack.common.AlertManager;
import net.java.sipmack.common.Log;
import net.java.sipmack.sip.Call;
import net.java.sipmack.sip.Interlocutor;
import net.java.sipmack.sip.InterlocutorUI;
import net.java.sipmack.softphone.listeners.InterlocutorListener;
import net.java.sipmack.events.UserActionListener;

/**
 * The <code>GuiManager</code> class that Manage all the actions and Events of
 * User Interface.
 *
 * @author Thiago Rocha Camargo (thiago@jivesoftware.com)
 * @version 1.0, 20/07/2006
 */
public class GuiManager implements GuiCallback, DefaultGuiManager {

    /** Interlocutors (call participants) currently known to the GUI. */
    private List<InterlocutorUI> interlocutors = new ArrayList<InterlocutorUI>();

    /** Plays/stops alert sounds (ring tones etc.). */
    private AlertManager alertManager = new AlertManager();

    /** Handlers notified of user actions (dial, answer, hangup, ...). */
    public List<UserActionListener> actionHandlers = new ArrayList<UserActionListener>();

    /** Listeners notified when interlocutors are added/removed. */
    public List<InterlocutorListener> interlocutorListeners = new ArrayList<InterlocutorListener>();

    /** Whether incoming calls should be answered automatically. */
    private boolean autoAnswer = false;

    /**
     * Constructor of the class. Instantiate DTMFSounds and create the GUI
     */
    public GuiManager() {
    }

    /**
     * Loads the config form SIPConfig class
     */
    public void loadConfig() {
    }

    /**
     * Sets the actionListener
     *
     * @param ual UserActionListener that will handle actions
     */
    public void addUserActionListener(UserActionListener ual) {
        actionHandlers.add(ual);
    }

    /**
     * Adds an InterlocutorListener
     *
     * @param interlocutorListener listener to add
     */
    public void addInterlocutorListener(InterlocutorListener interlocutorListener) {
        interlocutorListeners.add(interlocutorListener);
    }

    /**
     * Removes an InterlocutorListener
     *
     * @param interlocutorListener listener to remove
     */
    public void removeInterlocutorListener(InterlocutorListener interlocutorListener) {
        interlocutorListeners.remove(interlocutorListener);
    }

    /**
     * Add a new interlocutor.
     *
     * @param interlocutorUI InterlocutorUI to be added.
     */
    // Parameter renamed from "interlocutors", which shadowed the field of the
    // same name and made `this.interlocutors.add(interlocutors)` easy to misread.
    public synchronized void addInterlocutor(InterlocutorUI interlocutorUI) {
        interlocutorUI.setCallback(this);
        this.interlocutors.add(interlocutorUI);

        for (InterlocutorListener interlocutorListener : interlocutorListeners) {
            interlocutorListener.interlocutorAdded(interlocutorUI);
        }
    }

    /**
     * Update the interlocutor
     *
     * @param interlocutorUI To be updated
     */
    public void update(InterlocutorUI interlocutorUI) {
    }

    /**
     * Returns the current interlocutors
     *
     * @return List&lt;InterlocutorUI&gt;
     */
    public List<InterlocutorUI> getInterlocutors() {
        return interlocutors;
    }

    /**
     * Counts the current interlocutors number
     *
     * @return number of interlocutors
     */
    public int countInterlocutors() {
        return interlocutors.size();
    }

    /**
     * Remove an interlocutor
     *
     * @param interlocutorUI To be removed
     */
    public synchronized void remove(InterlocutorUI interlocutorUI) {
        interlocutors.remove(interlocutorUI);

        for (InterlocutorListener interlocutorListener : interlocutorListeners) {
            interlocutorListener.interlocutorRemoved(interlocutorUI);
        }
    }

    /**
     * Start to play a wav.
     *
     * @param alertResourceName The wav to be played
     */
    public void startAlert(String alertResourceName) {
        try {
            alertManager.startAlert(alertResourceName);
        } catch (Throwable ex) {
            // OK, no one cares really
        }
    }

    /**
     * Stop to play a wav.
     *
     * @param alertResourceName The wav to be stop
     */
    public void stopAlert(String alertResourceName) {
        try {
            alertManager.stopAlert(alertResourceName);
        } catch (Throwable ex) {
            // OK, no one cares really
        }
    }

    /**
     * Stop all waves.
     */
    public void stopAllAlerts() {
        try {
            alertManager.stopAllAlerts();
        } catch (Throwable ex) {
            // OK, no one cares really
        }
    }

    /**
     * Answer the current ringing call.
     *
     * @return true if at least one ringing incoming call was answered
     */
    public boolean answer() {
        if (interlocutors.size() < 1) {
            Log.debug("answer", "No interlocutors");
            return false;
        }
        boolean found = false;
        for (InterlocutorUI interlocutor : interlocutors) {
            Interlocutor inter = (Interlocutor) interlocutor;
            // Only answer calls that are incoming AND currently alerting (ringing).
            if (!inter.getCall().isIncoming()
                    || !inter.getCall().getState().equals(Call.ALERTING)) continue;
            found = true;
            for (UserActionListener ual : actionHandlers) {
                ual.handleAnswerRequest(inter);
            }
        }
        Log.debug("answer", "Answered");
        return found;
    }

    /**
     * Hold all current calls. In fact it holds all medias depending of the
     * server.
     */
    public void holdAll() {
        if (interlocutors.size() < 1) {
            Log.debug("hold", "No interlocutors");
            return;
        }
        for (InterlocutorUI interlocutor : interlocutors) {
            boolean mic = interlocutor.onHoldMic(), cam = interlocutor.onHoldCam();
            // NOTE(review): mic state is toggled (!mic) while cam is passed
            // through unchanged — looks intentional but worth confirming.
            for (UserActionListener ual : actionHandlers) {
                ual.handleHold(interlocutor, !mic, cam);
            }
        }
    }

    /**
     * Hold current call of associated interlocutor. In fact it holds all medias
     * depending of the server.
     *
     * @param interlocutor interlocutor that will be holded
     */
    public void hold(InterlocutorUI interlocutor) {
        boolean mic = interlocutor.onHoldMic(), cam = interlocutor.onHoldCam();
        for (UserActionListener ual : actionHandlers) {
            ual.handleHold(interlocutor, !mic, cam);
        }
    }

    /**
     * Mute all current calls.
     *
     * @param mic mute state to apply
     */
    public void muteAll(boolean mic) {
        if (interlocutors.size() < 1) {
            Log.debug("mute", "No interlocutors");
            return;
        }
        for (InterlocutorUI interlocutor : interlocutors) {
            for (UserActionListener ual : actionHandlers) {
                ual.handleMute(interlocutor, mic);
            }
        }
    }

    /**
     * Mute the current call associated with the informed interlocutor.
     *
     * @param interlocutor the interlocutor to mute
     * @param mic          mute state to apply
     */
    public void mute(InterlocutorUI interlocutor, boolean mic) {
        for (UserActionListener ual : actionHandlers) {
            ual.handleMute(interlocutor, mic);
        }
    }

    /**
     * Send a DTMF Tone to all current calls.
     *
     * @param digit DTMF digit to be sent
     */
    public void sendDTMF(String digit) {
        if (interlocutors.size() < 1) {
            Log.debug("sendDTMF", "No interlocutors");
            return;
        }
        // Tones are always sent to the first interlocutor in the list.
        int selectedRow = 0;
        Interlocutor inter = (Interlocutor) interlocutors.get(selectedRow);
        for (UserActionListener ual : actionHandlers) {
            ual.handleDTMF(inter, digit);
        }
    }

    /**
     * Dial a number
     *
     * @param callee Number to be called
     */
    public void dial(String callee) {
        for (UserActionListener ual : actionHandlers) {
            ual.handleDialRequest(callee);
        }
    }

    /**
     * Hangup all current calls.
     *
     * @return true unless there were no interlocutors
     */
    public boolean hangupAll() {
        if (interlocutors.size() < 1) {
            Log.debug("hangup", "No interlocutors");
            return false;
        }
        // Index-based loop kept deliberately: hangup handlers may mutate the
        // interlocutor list, and a for-each iterator would then throw
        // ConcurrentModificationException.
        Interlocutor inter;
        for (int i = 0; i < interlocutors.size(); i++) {
            inter = (Interlocutor) interlocutors.get(i);
            for (UserActionListener ual : actionHandlers) {
                ual.handleHangupRequest(inter);
            }
        }
        return true;
    }

    /**
     * Hangup the call associated with the informed InterlocutorUI.
     *
     * @param interlocutorUI the interlocutor whose call is terminated
     * @return true only if every registered handler accepted the request
     */
    public boolean hangup(InterlocutorUI interlocutorUI) {
        boolean result = true;
        for (UserActionListener ual : actionHandlers) {
            // Simplified from the original nested ternary
            // (handle ? result ? true : false : false), which is exactly AND.
            result = ual.handleHangupRequest((Interlocutor) interlocutorUI) && result;
        }
        return result;
    }

    /**
     * Set the autoAnswer option
     *
     * @param value The value to be set
     */
    public void setAutoAnswer(boolean value) {
        autoAnswer = value;
    }

    /**
     * Get the autoAnswer option
     *
     * @return The value
     */
    public boolean getAutoAnswer() {
        return autoAnswer;
    }
}
package org.daisy.pipeline.client.models;

import java.io.File;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

import org.daisy.pipeline.client.Pipeline2Exception;
import org.daisy.pipeline.client.Pipeline2Logger;
import org.daisy.pipeline.client.filestorage.JobStorage;
import org.daisy.pipeline.client.models.Argument;
import org.daisy.pipeline.client.utils.XML;
import org.daisy.pipeline.client.utils.XPath;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;

/** An argument of type "string" */
public class Argument {

    /** The name of the option. This isn't necessarily unique; since inputs and options can have the same name. */
    private String name;

    /** This is the value from the px:role="name" in the script documentation. */
    private String nicename;

    /** A description of the option. */
    private String desc;

    /** whether or not this option is required */
    private Boolean required;

    /** whether or not multiple selections can be made */
    private Boolean sequence;

    /** MIME types accepted (only relevant if type=anyDirURI or anyFileURI) */
    private List<String> mediaTypes;

    /** Options with a output value of "result" or "temp" will only be included when the framework is running in local mode. */
    private Output output;

    public enum Output { result, temp };

    /** Type of underlying option. Either "input", "option" or "output". ("parameters" currently not supported) */
    private Kind kind;

    public enum Kind { input, /*parameters,*/ option, output };

    /** whether or not the ordering matters (only relevant if sequence==true) */
    private Boolean ordered;

    /** XSD type or custom data type */
    private String type;

    /** The default value for this argument */
    private String defaultValue;

    // Raw XML node this argument was built from; parsed lazily on first access.
    private Node argumentNode;
    private boolean lazyLoaded = false;

    // Current values. null means "unset"; an empty list means "cleared".
    private List<String> values = null;

    /** Create option instance from option node.
     *
     * @param argumentNode the XML
     * @throws Pipeline2Exception thrown if an error occurs
     */
    public Argument(Node argumentNode) throws Pipeline2Exception {
        this.argumentNode = argumentNode;
    }

    /**
     * Parses argumentNode into the fields of this object, once.
     * Invoked from every accessor; a no-op after the first successful run.
     */
    private void lazyLoad() {
        if (!lazyLoaded && argumentNode != null) {
            try {
                this.name = parseTypeString(XPath.selectText("@name", argumentNode, XPath.dp2ns));

                this.nicename = parseTypeString(XPath.selectText("@nicename", argumentNode, XPath.dp2ns));
                if (this.nicename == null || "".equals(this.nicename))
                    this.nicename = this.name;

                this.desc = XPath.selectText("@desc", argumentNode, XPath.dp2ns);
                if (this.desc == null)
                    this.desc = "";

                this.required = parseTypeBoolean(XPath.selectText("@required", argumentNode, XPath.dp2ns));
                if (this.required == null)
                    this.required = true;

                this.sequence = parseTypeBoolean(XPath.selectText("@sequence", argumentNode, XPath.dp2ns));
                if (this.sequence == null)
                    this.sequence = false;

                this.mediaTypes = parseTypeMediaTypes(XPath.selectText("@mediaType", argumentNode, XPath.dp2ns));

                try {
                    this.output = Output.valueOf(parseTypeString(XPath.selectText("@outputType", argumentNode, XPath.dp2ns)));
                } catch (IllegalArgumentException e) {
                    // BUGFIX: previously cleared `kind` here (copy/paste from the
                    // Kind.valueOf block below); an unparseable @outputType must
                    // leave `output` null, not `kind`.
                    this.output = null;
                } catch (NullPointerException e) {
                    this.output = null; // BUGFIX: same as above (missing @outputType)
                }

                try {
                    this.kind = Kind.valueOf(argumentNode.getLocalName());
                    // TODO "parameters": how to determine that a port is a parameter port?
                } catch (IllegalArgumentException e) {
                    this.kind = null;
                } catch (NullPointerException e) {
                    this.kind = null;
                }

                this.ordered = parseTypeBoolean(XPath.selectText("@ordered", argumentNode, XPath.dp2ns));
                if (this.ordered == null)
                    this.ordered = true;

                this.type = parseTypeString(XPath.selectText("@type", argumentNode, XPath.dp2ns));
                if (this.type == null)
                    this.type = "string";

                if (this.kind == Kind.input || this.kind == Kind.output) {
                    // Ports are always file URIs; sequences without an explicit
                    // @required default to optional, and XML is the default media type.
                    this.type = "anyFileURI";
                    if (this.sequence && parseTypeBoolean(XPath.selectText("@required", argumentNode, XPath.dp2ns)) == null)
                        this.required = false;
                    if (this.mediaTypes.size() == 0)
                        this.mediaTypes.add("application/xml");
                }

                if (this.kind == Kind.output && this.output == null) {
                    this.output = Output.result;
                }

                this.defaultValue = XPath.selectText("@default", argumentNode, XPath.dp2ns);

                List<Node> valueNodes = XPath.selectNodes("d:item", argumentNode, XPath.dp2ns);
                if (valueNodes.isEmpty()) {
                    // Single value as text content.
                    String value = XPath.selectText("text()", argumentNode, XPath.dp2ns);
                    if (value != null && !"".equals(value)) {
                        this.values = new ArrayList<String>();
                        this.values.add(normalizeValue(value));
                    }
                } else {
                    // Multiple values as d:item/@value.
                    this.values = new ArrayList<String>();
                    for (Node valueNode : valueNodes) {
                        String value = XPath.selectText("@value", valueNode, XPath.dp2ns);
                        this.values.add(normalizeValue(value));
                    }
                }

            } catch (Pipeline2Exception e) {
                Pipeline2Logger.logger().error("Failed to parse argument node", e);
            }
            lazyLoaded = true;
        }
    }

    /**
     * Normalizes a value according to this argument's type:
     * URIs are syntax-normalized, booleans are lower-cased.
     *
     * @param value the raw value (may be null)
     * @return the normalized value, or null when type or value is null
     */
    private String normalizeValue(String value) {
        if (type == null || value == null) {
            return null;
        }
        switch (this.type) {
        case "anyFileURI":
        case "anyDirURI":
        case "anyURI":
            try {
                URI uri = new URI(value);
                uri = uri.normalize();
                value = uri.toString();
                break;
            } catch (URISyntaxException e) {
                Pipeline2Logger.logger().warn("Unable to parse URI", e);
            }
            break;
        case "boolean":
            value = value.toLowerCase();
        }
        return value;
    }

    /** Helper function for the Script(Document) constructor */
    private static String parseTypeString(String string) {
        if (!(string instanceof String))
            return null;
        string = string.replaceAll("\"", "'").replaceAll("\\n", " ");
        if ("".equals(string))
            return null;
        else
            return string;
    }

    /** Helper function for the Script(Document) constructor */
    private static Boolean parseTypeBoolean(String bool) {
        if (!(bool instanceof String))
            return null;
        if ("false".equals(bool))
            return false;
        if ("true".equals(bool))
            return true;
        return null;
    }

    /** Helper function for the Script(Document) constructor */
    private static List<String> parseTypeMediaTypes(String mediaTypesString) {
        if (!(mediaTypesString instanceof String))
            return new ArrayList<String>();
        mediaTypesString = parseTypeString(mediaTypesString);
        String[] mediaTypes = (mediaTypesString == null ? "" : mediaTypesString).split(" ");
        List<String> mediaTypesList = new ArrayList<String>();
        for (String mediaType : mediaTypes) {
            if ("".equals(mediaType))
                continue;
            if ("text/xml".equals(mediaType))
                mediaTypesList.add("application/xml");
            else
                mediaTypesList.add(mediaType);
        }
        return mediaTypesList;
    }

    /**
     * Returns the number of values defined for the option or input.
     *
     * @return number of values
     */
    public int size() {
        lazyLoad();
        if (values == null) {
            return 0;
        } else {
            return values.size();
        }
    }

    /**
     * Unset the given option or input.
     *
     * This is different from clearing the option in that it will no longer be defined.
     *
     * An option that is cleared but not unset is submitted as an empty list of
     * values to the Web API. An option that is unset are not submitted to the Web API,
     * which leaves the Web API or the Pipeline 2 script free to use a default value.
     */
    public void unset() {
        lazyLoad();
        if (values != null) {
            values.clear();
        }
        values = null;
    }

    /**
     * Whether the option or input is defined (set).
     *
     * @return True if the argument is defined/set. False otherwise.
     */
    public boolean isDefined() {
        lazyLoad();
        return values != null;
    }

    /**
     * Clear the given option or input.
     *
     * This is different from unsetting the option in that it will still be defined.
     *
     * An option that is cleared but not unset is submitted as an empty list of
     * values to the Web API. An option that is unset are not submitted to the Web API,
     * which leaves the Web API or the Pipeline 2 script free to use a default value.
     */
    public void clear() {
        lazyLoad();
        if (values == null) {
            values = new ArrayList<String>();
        } else {
            values.clear();
        }
    }

    /** Replace the value at the given position with the provided Integer value.
     *
     * @param position The position
     * @param value the value to use */
    public void set(int position, Integer value) {
        if (value == null) { clear(); }
        else { set(position, value + ""); }
    }

    /** Replace the value at the given position with the provided Long value.
     *
     * @param position The position
     * @param value the value to use */
    public void set(int position, Long value) {
        if (value == null) { clear(); }
        else { set(position, value + ""); }
    }

    /** Replace the value at the given position with the provided Double value.
     *
     * @param position The position
     * @param value the value to use */
    public void set(int position, Double value) {
        if (value == null) { clear(); }
        else { set(position, value + ""); }
    }

    /** Replace the value at the given position with the provided Boolean value.
     *
     * @param position The position
     * @param value the value to use */
    public void set(int position, Boolean value) {
        if (value == null) { clear(); }
        else { set(position, value + ""); }
    }

    /** Replace the value at the given position with the provided File value.
     *
     * @param position The position
     * @param file the file to use
     * @param context the job context */
    public void set(int position, File file, JobStorage context) {
        if (file == null) {
            clear();
        } else {
            context.addContextFile(file, file.getName());
            set(position, context.getContextFilePath(file));
        }
    }

    /** Replace the value at the given position with the provided String value.
     *
     * Silently does nothing when the position is out of range or no values are set.
     *
     * @param position The position
     * @param value the value to use */
    public void set(int position, String value) {
        if (value == null) {
            clear();
        } else {
            lazyLoad();
            if (values != null && values.size() > position) {
                values.set(position, value);
            }
        }
    }

    /** Replace the value with the provided Integer value.
     *
     * @param value the value to use */
    public void set(Integer value) {
        if (value == null) { clear(); }
        else { set(value + ""); }
    }

    /** Replace the value with the provided Long value.
     *
     * @param value the value to use */
    public void set(Long value) {
        if (value == null) { clear(); }
        else { set(value + ""); }
    }

    /** Replace the value with the provided Double value.
     *
     * @param value the value to use */
    public void set(Double value) {
        if (value == null) { clear(); }
        else { set(value + ""); }
    }

    /** Replace the value with the provided Boolean value.
     *
     * @param value the value to use */
    public void set(Boolean value) {
        if (value == null) { clear(); }
        else { set(value + ""); }
    }

    /** Replace the value with the provided File value.
     *
     * @param file the file to use
     * @param context the job context */
    public void set(File file, JobStorage context) {
        if (file == null) {
            clear();
        } else if (getOutput() != null) {
            // Output arguments refer to files outside the job context.
            set(file.toURI().toString());
        } else {
            context.addContextFile(file, file.getName());
            set(context.getContextFilePath(file));
        }
    }

    /** Replace the value with the provided String value.
     *
     * @param value the value to use */
    public void set(String value) {
        clear();
        if (value != null) {
            if (values == null) {
                values = new ArrayList<String>();
            }
            values.add(normalizeValue(value));
        }
    }

    /** Replace the values with all the provided String values.
     *
     * @param values the value to use */
    public void setAll(Collection<String> values) {
        clear();
        if (this.values == null) {
            this.values = new ArrayList<String>();
        }
        for (String value : values) {
            this.values.add(normalizeValue(value));
        }
    }

    /** Add to the list of values the provided Integer value.
     *
     * @param value the value to use */
    public void add(Integer value) {
        if (value != null) { add(value + ""); }
    }

    /** Add to the list of values the provided Long value.
     *
     * @param value the value to use */
    public void add(Long value) {
        if (value != null) { add(value + ""); }
    }

    /** Add to the list of values the provided Double value.
     *
     * @param value the value to use */
    public void add(Double value) {
        if (value != null) { add(value + ""); }
    }

    /** Add to the list of values the provided Boolean value.
     *
     * @param value the value to use */
    public void add(Boolean value) {
        if (value != null) { add(value + ""); }
    }

    /** Add to the list of values the provided File value.
     *
     * @param file the file to use
     * @param context the job context */
    public void add(File file, JobStorage context) {
        if (file != null) {
            lazyLoad();
            context.addContextFile(file, file.getName());
            add(context.getContextFilePath(file));
        }
    }

    /** Add to the list of values the provided String value.
     *
     * @param value the value to use */
    public void add(String value) {
        if (value != null) {
            lazyLoad();
            if (this.values == null) {
                this.values = new ArrayList<String>();
            }
            values.add(normalizeValue(value));
        }
    }

    /** Add to the list of values all the provided String values.
     *
     * @param values the values to use */
    public void addAll(Collection<String> values) {
        if (values != null) {
            lazyLoad();
            if (this.values == null) {
                this.values = new ArrayList<String>();
            }
            for (String value : values) {
                // BUGFIX: previously `values.add(...)` appended the normalized
                // values back into the *parameter* collection while iterating it
                // (ConcurrentModificationException, and this.values was never
                // extended). Must target this.values, as setAll does.
                this.values.add(normalizeValue(value));
            }
        }
    }

    /** Remove all occurences of the provided Integer value from the list of values.
     *
     * @param value the value to use */
    public void remove(Integer value) {
        if (value != null) { remove(value + ""); }
    }

    /** Remove all occurences of the provided Long value from the list of values.
     *
     * @param value the value to use */
    public void remove(Long value) {
        if (value != null) { remove(value + ""); }
    }

    /** Remove all occurences of the provided Double value from the list of values.
     *
     * @param value the value to use */
    public void remove(Double value) {
        if (value != null) { remove(value + ""); }
    }

    /** Remove all occurences of the provided Boolean value from the list of values.
     *
     * @param value the value to use */
    public void remove(Boolean value) {
        if (value != null) { remove(value + ""); }
    }

    /** Remove all occurences of the provided File value from the list of values.
     *
     * @param file the file to use
     * @param context the job context */
    public void remove(File file, JobStorage context) {
        if (file != null) {
            remove(context.getContextFilePath(file));
        }
    }

    /** Remove all occurences of the provided String value from the list of values.
     *
     * @param value the value to use */
    public void remove(String value) {
        if (value != null && values != null) {
            // Iterate backwards so removals do not shift unvisited indexes.
            for (int i = values.size() - 1; i >= 0; i--) {
                if (value.equals(values.get(i))) {
                    values.remove(i);
                }
            }
        }
    }

    /** Remove the first occurences of all the provided String values from the list of values.
     *
     * @param values the value to use */
    public void removeAll(Collection<String> values) {
        if (values != null && this.values != null) {
            this.values.removeAll(values);
        }
    }

    /** Get the value as a Integer.
     *
     * Returns the first value if there are more than one.
     * Returns null if the value cannot be parsed as a Integer, or if the value is not set.
     *
     * @return the value as a Integer */
    public Integer getAsInteger() {
        lazyLoad();
        try {
            return Integer.parseInt(get());
        } catch (Exception e) {
            return null;
        }
    }

    /** Get the value as a Long.
     *
     * Returns the first value if there are more than one.
     * Returns null if the value cannot be parsed as a Long, or if the value is not set.
     *
     * @return the value as a Long */
    public Long getAsLong() {
        lazyLoad();
        try {
            return Long.parseLong(get());
        } catch (Exception e) {
            return null;
        }
    }

    /** Get the value as a Double.
     *
     * Returns the first value if there are more than one.
     * Returns null if the value cannot be parsed as a Double, or if the value is not set.
     *
     * @return the value as a Double */
    public Double getAsDouble() {
        lazyLoad();
        try {
            return Double.parseDouble(get());
        } catch (Exception e) {
            return null;
        }
    }

    /** Get the value as a Boolean.
     *
     * Returns the first value if there are more than one.
     * Returns null if the value cannot be parsed as a Boolean, or if the value is not set.
     *
     * @return the value as a Boolean */
    public Boolean getAsBoolean() {
        lazyLoad();
        String value = get();
        if (value != null && ("true".equals(value.toLowerCase()) || "false".equals(value.toLowerCase()))) {
            return Boolean.parseBoolean(get());
        } else {
            return null;
        }
    }

    /** Get the value as a File.
     *
     * Returns the first value if there are more than one.
     * Returns null if the value cannot be parsed as a File, or if the value is not set.
     *
     * @param context the job context
     * @return the value as a File */
    public File getAsFile(JobStorage context) {
        lazyLoad();
        if (values == null || values.size() == 0) {
            return null;
        } else {
            return context.getContextFile(values.get(0));
        }
    }

    /** Get the value as a String.
     *
     * Returns the first value if there are more than one.
     * Returns null if the value is not set.
     * If the option or input is a sequence, you should use {@link #getAsList() getAsList} to get all values instead.
     *
     * @return the value as a String */
    public String get() {
        lazyLoad();
        if (values == null || values.size() == 0) {
            return null;
        } else {
            return values.get(0);
        }
    }

    /** Get all the values as a List of Strings.
     *
     * @return null if the value is not set. */
    public List<String> getAsList() {
        lazyLoad();
        return values;
    }

    /** Get all the values as a List of Files.
     *
     * @param context the job context
     * @return null if any of the values cannot be parsed as a File, or if the value is not set. */
    public List<File> getAsFileList(JobStorage context) {
        lazyLoad();
        if (values != null) {
            List<File> contextFiles = new ArrayList<File>();
            for (String value : values) {
                File contextFile = context.getContextFile(value);
                contextFiles.add(contextFile);
            }
            return contextFiles;
        } else {
            return null;
        }
    }

    /**
     * Move a value from one position in the value list to another.
     *
     * @param from which value to move
     * @param to which position to move the value
     */
    public void moveTo(int from, int to) {
        lazyLoad();
        if (values == null) {
            return;
        }
        if (from < 0 || from >= values.size()) {
            return;
        }
        if (to < 0 || to >= values.size()) {
            return;
        }
        // Rotating the sublist [min,max] by one moves the element while
        // shifting everything in between by a single position.
        int shiftDistance = -1;
        if (from > to) {
            int rememberMe = from;
            from = to;
            to = rememberMe;
            shiftDistance = 1;
        }
        Collections.rotate(values.subList(from, to + 1), shiftDistance);
    }

    /** Get the default value as a Integer.
     *
     * Returns null if the value cannot be parsed as a Integer, or if the value is not set.
     *
     * @return the default value as a Integer */
    public Integer getDefaultValueAsInteger() {
        lazyLoad();
        try {
            return Integer.parseInt(getDefaultValue());
        } catch (Exception e) {
            return null;
        }
    }

    /** Get the default value as a Long.
     *
     * Returns null if the value cannot be parsed as a Long, or if the value is not set.
     *
     * @return the default value as a Long */
    public Long getDefaultValueAsLong() {
        lazyLoad();
        try {
            return Long.parseLong(getDefaultValue());
        } catch (Exception e) {
            return null;
        }
    }

    /** Get the default value as a Double.
     *
     * Returns null if the value cannot be parsed as a Double, or if the value is not set.
     *
     * @return the default value as a Double */
    public Double getDefaultValueAsDouble() {
        lazyLoad();
        try {
            return Double.parseDouble(getDefaultValue());
        } catch (Exception e) {
            return null;
        }
    }

    /** Get the default value as a Boolean.
     *
     * Returns null if the value cannot be parsed as a Boolean, or if the value is not set.
     *
     * @return the default value as a Boolean */
    public Boolean getDefaultValueAsBoolean() {
        lazyLoad();
        String value = getDefaultValue();
        if (value != null && ("true".equals(value.toLowerCase()) || "false".equals(value.toLowerCase()))) {
            return Boolean.parseBoolean(getDefaultValue());
        } else {
            return null;
        }
    }

    /** Get the default value as a File.
     *
     * Returns null if the value cannot be parsed as a File, or if the value is not set.
     *
     * @param context the job context
     * @return the default value as a File */
    public File getDefaultValueAsFile(JobStorage context) {
        lazyLoad();
        String value = getDefaultValue();
        if (value == null) {
            return null;
        } else {
            return context.getContextFile(value);
        }
    }

    /** Get the default value as a String.
     *
     * Returns null if the value is not set.
     *
     * @return the default value as a String */
    public String getDefaultValue() {
        lazyLoad();
        return defaultValue;
    }

    // getters and setters to ensure lazy loading
    public String getName() { lazyLoad(); return name; }
    public String getNicename() { lazyLoad(); return nicename; }
    public String getDesc() { lazyLoad(); return desc; }
    public Boolean getRequired() { lazyLoad(); return required; }
    public Boolean getSequence() { lazyLoad(); return sequence; }
    public List<String> getMediaTypes() { lazyLoad(); return mediaTypes; }
    public Output getOutput() { lazyLoad(); return output; }
    public Kind getKind() { lazyLoad(); return this.kind; }
    public Boolean getOrdered() { lazyLoad(); return ordered; }
    public String getType() { lazyLoad(); return type; }
    public void setName(String name) { lazyLoad(); this.name = name; }
    public void setNicename(String nicename) { lazyLoad(); this.nicename = nicename; }
    public void setDesc(String desc) { lazyLoad(); this.desc = desc; }
    public void setRequired(Boolean required) { lazyLoad(); this.required = required; }
    public void setSequence(Boolean sequence) { lazyLoad(); this.sequence = sequence; }
    public void setMediaTypes(List<String> mediaTypes) { lazyLoad(); this.mediaTypes = mediaTypes; }
    public void setOutput(Output output) { lazyLoad(); this.output = output; }
    public void setKind(Kind kind) { lazyLoad(); this.kind = kind; }
    public void setOrdered(Boolean ordered) { lazyLoad(); this.ordered = ordered; }
    public void setType(String type) { lazyLoad(); this.type = type; }

    /**
     * Serializes this argument to its XML representation
     * (element name = kind, values as text content or d:item children).
     *
     * @return a new Document in the Pipeline 2 data namespace
     */
    public Document toXml() {
        lazyLoad();

        Document argDoc = XML.getXml("<" + kind + " xmlns=\"http://www.daisy.org/ns/pipeline/data\"/>");
        Element argElem = argDoc.getDocumentElement();

        if (name != null) {
            argElem.setAttribute("name", name);
        }
        if (nicename != null) {
            argElem.setAttribute("nicename", nicename);
        }
        if (desc != null) {
            argElem.setAttribute("desc", desc);
        }
        if (required != null) {
            argElem.setAttribute("required", required + "");
        }
        if (sequence != null) {
            argElem.setAttribute("sequence", sequence + "");
        }
        if (mediaTypes != null) {
            String mediaTypesJoined = "";
            for (int i = 0; i < mediaTypes.size(); i++) {
                if (i > 0) {
                    mediaTypesJoined += " ";
                }
                mediaTypesJoined += mediaTypes.get(i);
            }
            argElem.setAttribute("mediaType", mediaTypesJoined);
        }
        if (output != null) {
            argElem.setAttribute("outputType", output + "");
        }
        if (ordered != null) {
            argElem.setAttribute("ordered", ordered + "");
        }
        if (type != null) {
            argElem.setAttribute("type", type);
        }

        if (values == null) {
            // do nothing

        } else if (values.size() == 1 && values.get(0).length() > 0 && !sequence) {
            // single non-sequence value: stored as text content
            argElem.setTextContent(values.get(0));

        } else {
            for (String value : values) {
                Element item = argDoc.createElementNS(XPath.dp2ns.get("d"), "item");
                item.setAttribute("value", value);
                argElem.appendChild(item);
            }
        }

        return argDoc;
    }
}
/* * Copyright 2006-2012 The Scriptella Project Team. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package scriptella.util; import java.io.BufferedOutputStream; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.ByteArrayOutputStream; import java.io.Closeable; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.Reader; import java.io.StringReader; import java.io.UnsupportedEncodingException; import java.io.Writer; import java.net.MalformedURLException; import java.net.URL; import java.net.URLConnection; import java.util.regex.Pattern; /** * I/O utility methods. * * @author Fyodor Kupolov * @version 1.0 */ public final class IOUtils { //Singleton private IOUtils() { } private static final int DEFAULT_BUFFER_SIZE_FOR_STRINGS = 1024; /** * Default value of maximum stream size for arrays conversion */ static final long MAX_LENGTH = 1024 * 10000; //10Mb /** * Silently closes data. * * @param closeable data to close */ public static void closeSilently(Closeable closeable) { if (closeable != null) { try { closeable.close(); } catch (Exception e) { ExceptionUtils.ignoreThrowable(e); } } } /** * Silently closes a collection of objects. * * @param closeables iterable closeables. Null value allowed. * @see #closeSilently(java.io.Closeable) */ public static void closeSilently(Iterable<? 
extends Closeable> closeables) { if (closeables != null) { for (Closeable closeable : closeables) { closeSilently(closeable); } } } /** * Loads a reader content into a string. * <p><em>Note:</em>The content length is limited by {@link #MAX_LENGTH} characters. * * @param reader reader to load content from. Closed at the end of the operation. * @return string representation of reader content. */ public static String toString(Reader reader) throws IOException { return toString(reader, MAX_LENGTH); } /** * Loads a reader content into a string. * * @param reader reader to load content from. Closed at the end of the operation. * @param maxLength max number of characters to read before throwing a Content Too Long Exception. * @return string representation of reader content. */ public static String toString(final Reader reader, final long maxLength) throws IOException { char cb[] = new char[4096]; StringBuilder sb = new StringBuilder(cb.length); long len = 0; try { for (int n; (n = reader.read(cb)) >= 0;) { len += n; if (len > maxLength) { throw new IOException("Content too long to fit in memory"); } sb.append(cb, 0, n); } } finally { closeSilently(reader); } return sb.toString(); } /** * Loads an input stream content into a byte array. * <p><em>Note:</em>The content length is limited by {@link #MAX_LENGTH} bytes. * * @param is stream to load. Closed at the end of the operation. * @return stream bytes * @throws IOException if I/O error occurs or stream length exceeds the {@link #MAX_LENGTH}. */ public static byte[] toByteArray(InputStream is) throws IOException { return toByteArray(is, MAX_LENGTH); } /** * Loads an input stream content into a byte array. * * @param is stream to load. Closed at the end of the operation. * @param maxLength maxLength max number of bytes to read before throwing a Content Too Long Exception. 
* @return stream bytes * @throws IOException */ public static byte[] toByteArray(InputStream is, long maxLength) throws IOException { byte b[] = new byte[4096]; ByteArrayOutputStream os = new ByteArrayOutputStream(b.length); long len = 0; try { for (int n; (n = is.read(b)) >= 0;) { len += n; if (len > maxLength) { throw new IOException("Content too long to fit in memory"); } os.write(b, 0, n); } } finally { closeSilently(is); } return os.toByteArray(); } /** * Opens output stream for specified URL. * <p>This method is a helper for url.openConnection().getOutputStream(). * Additionally a file: URLs are supported, * see <a href="http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4191800"> * FileURLConnection doesn't implement getOutputStream()</a> * * @param url URL to open an output stream. * @return output stream for URL. * @throws IOException if an I/O error occurs while creating the output stream. */ public static OutputStream getOutputStream(final URL url) throws IOException { if ("file".equals(url.getProtocol())) { return new FileOutputStream(url.getFile()); } else { final URLConnection con = url.openConnection(); con.setDoOutput(true); //Use a proxy to read input on close. Required for some servers. return new BufferedOutputStream(con.getOutputStream()) { @Override public void close() throws IOException { super.close(); IOUtils.closeSilently(con.getInputStream()); } }; } } /** * @return buffered reader for specified input stream. * @see #getReader(java.io.InputStream, String, boolean) */ public static Reader getReader(final InputStream is, final String enc) throws UnsupportedEncodingException { return getReader(is, enc, true); } /** * Returns reader for specified input stream and charset name. * * @param is source input stream. * @param enc charset name, null means default. * @param buffered true if buffered reader should be used. * @return reader for inputstream. 
* @throws UnsupportedEncodingException If the named charset is not supported */ public static Reader getReader(final InputStream is, final String enc, final boolean buffered) throws UnsupportedEncodingException { Reader r = enc == null ? new InputStreamReader(is) : new InputStreamReader(is, enc); return buffered ? new BufferedReader(r) : r; } /** * Optionally makes a buffered reader from the specified one. * <p>If specified reader is buffered the object is returned unchanged. * * @param reader reader to convert. * @return buffered reader. */ public static BufferedReader asBuffered(Reader reader) { if (reader == null) { throw new IllegalArgumentException("Reader cannot be null"); } //Performance optimization. If content is in memory - use smaller buffer if (reader instanceof StringReader) { return new BufferedReader(reader, DEFAULT_BUFFER_SIZE_FOR_STRINGS); } return (reader instanceof BufferedReader ? (BufferedReader) reader : new BufferedReader(reader)); } /** * Optionally makes a buffered writer from the specified one. * <p>If specified writer is buffered the object is returned unchanged. * * @param writer writer to convert. * @return buffered writer. */ public static BufferedWriter asBuffered(Writer writer) { if (writer == null) { throw new IllegalArgumentException("Writer cannot be null"); } return (writer instanceof BufferedWriter ? (BufferedWriter) writer : new BufferedWriter(writer)); } /** * @return buffered writer for specified output stream. * @see #getWriter(java.io.OutputStream, String, boolean) */ public static Writer getWriter(final OutputStream os, final String enc) throws IOException { return getWriter(os, enc, true); } /** * Returns writer for specified output stream and charset name. * * @param os source output stream. * @param enc charset name, null means default. * @param buffered true if buffered reader should be used. * @return reader for inputstream. 
* @throws UnsupportedEncodingException If the named charset is not supported */ public static Writer getWriter(final OutputStream os, final String enc, final boolean buffered) throws IOException { Writer w = enc == null ? new OutputStreamWriter(os) : new OutputStreamWriter(os, enc); return buffered ? new BufferedWriter(w) : w; } /** * A replacement for a deprecated File.toURL() method. * * @param file file to convert to URL. * @return URL representing the file location. * @throws MalformedURLException If a protocol handler for the URL could not be found, * or if some other error occurred while constructing the URL. */ public static URL toUrl(File file) throws MalformedURLException { return file.toURI().toURL(); } //Windows path matcher. Examples: //Matches : C: C:/ D:/prj/file.etl //No match: C:// D:test private static final Pattern WINDOWS_PATH = Pattern.compile("[a-zA-Z]\\:/?([^/]|$).*"); /** * Resolves specified uri to a specified base URL. * <p>This method use {@link URL#URL(URL,String)} constructor and handles additional checks * if URL cannot be resolved by a standard mechanism. * <p>Typical example is handling windows absolute paths with forward slashes. * These paths are malformed URIs, but Scriptella recognize them and converts to URL * if no protocol handler has been registered. * <p>In future we can add support for default URL stream handlers in addition to the ones * supported by the JRE. * * @param base base URL to use for resulution. * @param uri a relative or an absolute URI. * @return URL resolved relatively to the base URL. * @throws java.net.MalformedURLException if specified URI cannot be resolved. */ public static URL resolve(URL base, String uri) throws MalformedURLException { try { return new URL(base, uri); } catch (MalformedURLException e) { //if windows path, e.g. DRIVE:/, see CR #5029 if (WINDOWS_PATH.matcher(uri).matches()) { //Add file:/ prefix and create URL return new URL("file:/" + uri); } else { //otherwise rethrow throw e; } } } }
/*******************************************************************************
 *
 * Pentaho Big Data
 *
 * Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
 *
 *******************************************************************************
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 ******************************************************************************/

package org.pentaho.di.job;

import org.pentaho.di.cluster.SlaveServer;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.encryption.Encr;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;

import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.Serializable;
import java.lang.reflect.Array;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;

/**
 * Reflection-based XML (de)serializer for job entry metadata. Walks all non-final,
 * non-static, non-transient fields of an object (including inherited ones) and reads
 * or writes them as nested XML tags, recursing into arrays, collections and complex
 * user types. {@code @Password}-annotated String fields are encrypted/decrypted via
 * {@link Encr}.
 */
public class JobEntrySerializationHelper implements Serializable {
  private static final long serialVersionUID = -3924431164206698711L;

  // Per-level indentation unit used by indent(); see write().
  private static final String INDENT_STRING = " ";

  /**
   * This method will perform the work that used to be done by hand in each kettle input meta for: readData(Node node).
   * We handle all primitive types, complex user types, arrays, lists and any number of nested object levels, via
   * recursion of this method.
   *
   * @param object
   *          The object to be persisted
   * @param node
   *          The node to 'attach' our XML to
   */
  public static void read( Object object, Node node ) {
    // get this classes declared fields, public, private, protected, package, everything, but not super
    Field[] declaredFields = getAllDeclaredFields( object.getClass() );

    for ( Field field : declaredFields ) {
      // ignore fields which are final, static or transient
      if ( Modifier.isFinal( field.getModifiers() ) || Modifier.isStatic( field.getModifiers() )
          || Modifier.isTransient( field.getModifiers() ) ) {
        continue;
      }
      // if the field is not accessible (private), we'll open it up so we can operate on it
      boolean accessible = field.isAccessible();
      if ( !accessible ) {
        field.setAccessible( true );
      }
      try {
        // check if we're going to try to read an array
        if ( field.getType().isArray() ) {
          try {
            // get the node (if available) for the field
            Node fieldNode = XMLHandler.getSubNode( node, field.getName() );
            if ( fieldNode == null ) {
              // doesn't exist (this is possible if fields were empty/null when persisted)
              continue;
            }
            // get the Java classname for the array elements
            String fieldClassName = XMLHandler.getTagAttribute( fieldNode, "class" );
            Class<?> clazz = null;
            // primitive types require special handling: Class.forName cannot resolve them
            if ( fieldClassName.equals( "boolean" ) ) {
              clazz = boolean.class;
            } else if ( fieldClassName.equals( "int" ) ) {
              clazz = int.class;
            } else if ( fieldClassName.equals( "float" ) ) {
              clazz = float.class;
            } else if ( fieldClassName.equals( "double" ) ) {
              clazz = double.class;
            } else if ( fieldClassName.equals( "long" ) ) {
              clazz = long.class;
            } else {
              // normal, non primitive array class
              clazz = Class.forName( fieldClassName );
            }

            // get the child nodes for the field
            NodeList childrenNodes = fieldNode.getChildNodes();

            // create a new, appropriately sized array
            int arrayLength = 0;
            for ( int i = 0; i < childrenNodes.getLength(); i++ ) {
              Node child = childrenNodes.item( i );
              // ignore TEXT_NODE, they'll cause us to have a larger count than reality, even if they are empty
              if ( child.getNodeType() != Node.TEXT_NODE ) {
                arrayLength++;
              }
            }

            // create a new instance of our array
            Object array = Array.newInstance( clazz, arrayLength );
            // set the new array on the field (on object, passed in)
            field.set( object, array );

            int arrayIndex = 0;
            for ( int i = 0; i < childrenNodes.getLength(); i++ ) {
              Node child = childrenNodes.item( i );
              if ( child.getNodeType() == Node.TEXT_NODE ) {
                continue;
              }
              // roll through all of our array elements setting them as encountered;
              // simple types are rebuilt from the "value" attribute, complex types recurse
              if ( String.class.isAssignableFrom( clazz ) || Number.class.isAssignableFrom( clazz ) ) {
                Constructor<?> constructor = clazz.getConstructor( String.class );
                Object instance = constructor.newInstance( XMLHandler.getTagAttribute( child, "value" ) );
                Array.set( array, arrayIndex++, instance );
              } else if ( Boolean.class.isAssignableFrom( clazz ) || boolean.class.isAssignableFrom( clazz ) ) {
                Object value = Boolean.valueOf( XMLHandler.getTagAttribute( child, "value" ) );
                Array.set( array, arrayIndex++, value );
              } else if ( Integer.class.isAssignableFrom( clazz ) || int.class.isAssignableFrom( clazz ) ) {
                Object value = Integer.valueOf( XMLHandler.getTagAttribute( child, "value" ) );
                Array.set( array, arrayIndex++, value );
              } else if ( Float.class.isAssignableFrom( clazz ) || float.class.isAssignableFrom( clazz ) ) {
                Object value = Float.valueOf( XMLHandler.getTagAttribute( child, "value" ) );
                Array.set( array, arrayIndex++, value );
              } else if ( Double.class.isAssignableFrom( clazz ) || double.class.isAssignableFrom( clazz ) ) {
                Object value = Double.valueOf( XMLHandler.getTagAttribute( child, "value" ) );
                Array.set( array, arrayIndex++, value );
              } else if ( Long.class.isAssignableFrom( clazz ) || long.class.isAssignableFrom( clazz ) ) {
                Object value = Long.valueOf( XMLHandler.getTagAttribute( child, "value" ) );
                Array.set( array, arrayIndex++, value );
              } else {
                // create an instance of 'fieldClassName'
                Object instance = clazz.newInstance();
                // add the instance to the array
                Array.set( array, arrayIndex++, instance );
                // read child, the same way as the parent
                read( instance, child );
              }
            }
          } catch ( Throwable t ) {
            t.printStackTrace(); // TODO: log this
          }
        } else if ( Collection.class.isAssignableFrom( field.getType() ) ) {
          // handle collections
          try {
            // get the node (if available) for the field
            Node fieldNode = XMLHandler.getSubNode( node, field.getName() );
            if ( fieldNode == null ) {
              // doesn't exist (this is possible if fields were empty/null when persisted)
              continue;
            }
            // get the Java classname for the array elements
            String fieldClassName = XMLHandler.getTagAttribute( fieldNode, "class" );
            Class<?> clazz = Class.forName( fieldClassName );

            // create a new, appropriately sized array, we already know it's a collection
            // NOTE(review): instantiates the declared field type, which must therefore be a
            // concrete collection class with a no-arg constructor - confirm for new callers
            @SuppressWarnings( "unchecked" )
            Collection<Object> collection = (Collection<Object>) field.getType().newInstance();
            field.set( object, collection );

            // iterate over all of the array elements and add them one by one as encountered
            NodeList childrenNodes = fieldNode.getChildNodes();
            for ( int i = 0; i < childrenNodes.getLength(); i++ ) {
              Node child = childrenNodes.item( i );
              if ( child.getNodeType() == Node.TEXT_NODE ) {
                continue;
              }
              // create an instance of 'fieldClassName'
              if ( String.class.isAssignableFrom( clazz ) || Number.class.isAssignableFrom( clazz )
                  || Boolean.class.isAssignableFrom( clazz ) ) {
                Constructor<?> constructor = clazz.getConstructor( String.class );
                Object instance = constructor.newInstance( XMLHandler.getTagAttribute( child, "value" ) );
                collection.add( instance );
              } else {
                // read child, the same way as the parent
                Object instance = clazz.newInstance();
                // add the instance to the array
                collection.add( instance );
                read( instance, child );
              }
            }
          } catch ( Throwable t ) {
            t.printStackTrace(); // TODO: log this
          }
        } else {
          // we're handling a regular field (not an array or list)
          try {
            String value = XMLHandler.getTagValue( node, field.getName() );
            if ( value == null ) {
              continue;
            }
            if ( field.isAnnotationPresent( Password.class ) ) {
              value = Encr.decryptPasswordOptionallyEncrypted( value );
            }
            // System.out.println("Setting " + field.getName() + "(" + field.getType().getSimpleName() + ") = " + value
            // + " on: " + object.getClass().getName());
            if ( field.getType().isPrimitive() && "".equals( value ) ) {
              // skip setting of primitives if we see null
              continue;
            } else if ( "".equals( value ) ) {
              field.set( object, value );
            } else if ( field.getType().isPrimitive() ) {
              // special primitive handling
              if ( double.class.isAssignableFrom( field.getType() ) ) {
                field.set( object, Double.parseDouble( value ) );
              } else if ( float.class.isAssignableFrom( field.getType() ) ) {
                field.set( object, Float.parseFloat( value ) );
              } else if ( long.class.isAssignableFrom( field.getType() ) ) {
                field.set( object, Long.parseLong( value ) );
              } else if ( int.class.isAssignableFrom( field.getType() ) ) {
                field.set( object, Integer.parseInt( value ) );
              } else if ( byte.class.isAssignableFrom( field.getType() ) ) {
                // NOTE(review): sets a byte[] on a byte-typed field; this looks like it
                // would throw IllegalArgumentException (swallowed below) - confirm intent
                field.set( object, value.getBytes() );
              } else if ( boolean.class.isAssignableFrom( field.getType() ) ) {
                field.set( object, "true".equalsIgnoreCase( value ) );
              }
            } else if ( String.class.isAssignableFrom( field.getType() ) || Number.class.isAssignableFrom( field.getType() )
                || Boolean.class.isAssignableFrom( field.getType() ) ) {
              Constructor<?> constructor = field.getType().getConstructor( String.class );
              Object instance = constructor.newInstance( value );
              field.set( object, instance );
            } else {
              // we don't know what we're handling, but we'll give it a shot
              Node fieldNode = XMLHandler.getSubNode( node, field.getName() );
              if ( fieldNode == null ) {
                // doesn't exist (this is possible if fields were empty/null when persisted)
                continue;
              }
              // get the Java classname for the array elements
              String fieldClassName = XMLHandler.getTagAttribute( fieldNode, "class" );
              Class<?> clazz = Class.forName( fieldClassName );
              Object instance = clazz.newInstance();
              field.set( object, instance );
              read( instance, fieldNode );
            }
          } catch ( Throwable t ) {
            // TODO: log this
            t.printStackTrace();
          }
        }
      } finally {
        // restore the original accessibility flag
        if ( !accessible ) {
          field.setAccessible( false );
        }
      }
    }
  }

  /**
   * This method will perform the work that used to be done by hand in each kettle input meta for: getXML(). We handle
   * all primitive types, complex user types, arrays, lists and any number of nested object levels, via recursion of
   * this method.
   *
   * @param object
   *          the object whose fields are serialized (null is a no-op)
   * @param indentLevel
   *          current nesting depth, used only for pretty-printing
   * @param buffer
   *          destination the XML is appended to
   */
  public static void write( Object object, int indentLevel, StringBuffer buffer ) {
    // don't even attempt to persist
    if ( object == null ) {
      return;
    }

    // get this classes declared fields, public, private, protected, package, everything, but not super
    Field[] declaredFields = getAllDeclaredFields( object.getClass() );

    for ( Field field : declaredFields ) {
      // ignore fields which are final, static or transient
      if ( Modifier.isFinal( field.getModifiers() ) || Modifier.isStatic( field.getModifiers() )
          || Modifier.isTransient( field.getModifiers() ) ) {
        continue;
      }
      // if the field is not accessible (private), we'll open it up so we can operate on it
      boolean accessible = field.isAccessible();
      if ( !accessible ) {
        field.setAccessible( true );
      }
      try {
        Object fieldValue = field.get( object );
        // no value? null? skip it!
        if ( fieldValue == null || "".equals( fieldValue ) ) {
          continue;
        }
        // encrypt password fields on the way out (mirrors the decrypt in read())
        if ( field.isAnnotationPresent( Password.class ) && String.class.isAssignableFrom( field.getType() ) ) {
          fieldValue = Encr.encryptPasswordIfNotUsingVariables( String.class.cast( fieldValue ) );
        }
        if ( field.getType().isPrimitive() || String.class.isAssignableFrom( field.getType() )
            || Number.class.isAssignableFrom( field.getType() ) || Boolean.class.isAssignableFrom( field.getType() ) ) {
          // simple scalar: one <name>value</name> tag
          indent( buffer, indentLevel );
          buffer.append( XMLHandler.addTagValue( field.getName(), fieldValue.toString() ) );
        } else if ( field.getType().isArray() ) {
          // write array values
          int length = Array.getLength( fieldValue );
          // open node (add class name attribute)
          indent( buffer, indentLevel );
          buffer.append( "<" + field.getName() + " class=\"" + fieldValue.getClass().getComponentType().getName() + "\">" )
              .append( Const.CR );
          for ( int i = 0; i < length; i++ ) {
            Object childObject = Array.get( fieldValue, i );
            // handle all strings/numbers
            if ( String.class.isAssignableFrom( childObject.getClass() )
                || Number.class.isAssignableFrom( childObject.getClass() ) ) {
              indent( buffer, indentLevel + 1 );
              buffer.append( "<" ).append( fieldValue.getClass().getComponentType().getSimpleName() );
              buffer.append( " value=\"" + childObject.toString() + "\"/>" ).append( Const.CR );
            } else if ( Boolean.class.isAssignableFrom( childObject.getClass() )
                || boolean.class.isAssignableFrom( childObject.getClass() ) ) {
              // handle booleans (special case)
              indent( buffer, indentLevel + 1 );
              buffer.append( "<" ).append( fieldValue.getClass().getComponentType().getSimpleName() );
              buffer.append( " value=\"" + childObject.toString() + "\"/>" ).append( Const.CR );
            } else {
              // array element is a user defined/complex type, recurse into it
              indent( buffer, indentLevel + 1 );
              buffer.append( "<" + fieldValue.getClass().getComponentType().getSimpleName() + ">" ).append( Const.CR );
              write( childObject, indentLevel + 1, buffer );
              indent( buffer, indentLevel + 1 );
              buffer.append( "</" + fieldValue.getClass().getComponentType().getSimpleName() + ">" ).append( Const.CR );
            }
          }
          // close node
          // NOTE(review): unlike the collection branch below this uses a literal leading
          // space instead of indent() - looks inconsistent, confirm before changing
          buffer.append( " </" + field.getName() + ">" ).append( Const.CR );
        } else if ( Collection.class.isAssignableFrom( field.getType() ) ) {
          // write collection values
          Collection<?> collection = (Collection<?>) fieldValue;
          if ( collection.size() == 0 ) {
            continue;
          }
          // element class is taken from the first element; assumes a homogeneous collection
          Class<?> listClass = collection.iterator().next().getClass();
          // open node (add class name attribute)
          indent( buffer, indentLevel );
          buffer.append( "<" + field.getName() + " class=\"" + listClass.getName() + "\">" ).append( Const.CR );
          for ( Object childObject : collection ) {
            // handle all strings/numbers
            if ( String.class.isAssignableFrom( childObject.getClass() )
                || Number.class.isAssignableFrom( childObject.getClass() ) ) {
              indent( buffer, indentLevel + 1 );
              buffer.append( "<" ).append( listClass.getSimpleName() );
              buffer.append( " value=\"" + childObject.toString() + "\"/>" ).append( Const.CR );
            } else if ( Boolean.class.isAssignableFrom( childObject.getClass() )
                || boolean.class.isAssignableFrom( childObject.getClass() ) ) {
              // handle booleans (special case)
              indent( buffer, indentLevel + 1 );
              buffer.append( "<" ).append( listClass.getSimpleName() );
              buffer.append( " value=\"" + childObject.toString() + "\"/>" ).append( Const.CR );
            } else {
              // array element is a user defined/complex type, recurse into it
              indent( buffer, indentLevel + 1 );
              buffer.append( "<" + listClass.getSimpleName() + ">" ).append( Const.CR );
              write( childObject, indentLevel + 1, buffer );
              indent( buffer, indentLevel + 1 );
              buffer.append( "</" + listClass.getSimpleName() + ">" ).append( Const.CR );
            }
          }
          // close node
          indent( buffer, indentLevel );
          buffer.append( "</" + field.getName() + ">" ).append( Const.CR );
        } else {
          // if we don't now what it is, let's treat it like a first class citizen and try to write it out
          // open node (add class name attribute)
          indent( buffer, indentLevel );
          buffer.append( "<" + field.getName() + " class=\"" + fieldValue.getClass().getName() + "\">" ).append( Const.CR );
          write( fieldValue, indentLevel + 1, buffer );
          // close node
          indent( buffer, indentLevel );
          buffer.append( "</" + field.getName() + ">" ).append( Const.CR );
        }
      } catch ( Throwable t ) {
        t.printStackTrace(); // TODO: log this
      } finally {
        // restore the original accessibility flag
        if ( !accessible ) {
          field.setAccessible( false );
        }
      }
    }
  }

  /**
   * Get all declared fields of the provided class including any inherited class fields.
   *
   * @param aClass
   *          Class to look up fields for
   * @return All declared fields for the class provided
   */
  private static Field[] getAllDeclaredFields( Class<?> aClass ) {
    List<Field> fields = new ArrayList<Field>();
    // walk up the hierarchy so inherited fields are included too
    while ( aClass != null ) {
      fields.addAll( Arrays.asList( aClass.getDeclaredFields() ) );
      aClass = aClass.getSuperclass();
    }
    return fields.toArray( new Field[0] );
  }

  /**
   * Handle saving of the input (object) to the kettle repository using the most simple method available, by calling
   * write and then saving the xml as an attribute.
   *
   * @param object
   *          object to serialize
   * @param rep
   *          repository to save into
   * @param id_job
   *          owning job id
   * @param id_jobentry
   *          owning job entry id
   * @throws KettleException
   *           if the repository write fails
   */
  public static void saveRep( Object object, Repository rep, ObjectId id_job, ObjectId id_jobentry )
    throws KettleException {
    // serialize the object into a <job-xml> wrapper and store it as a single attribute
    StringBuffer sb = new StringBuffer( 1024 );
    sb.append( "<job-xml>" );
    write( object, 0, sb );
    sb.append( "</job-xml>" );
    rep.saveJobEntryAttribute( id_job, id_jobentry, "job-xml", sb.toString() );
  }

  /**
   * Handle reading of the input (object) from the kettle repository by getting the xml from the repository attribute
   * string and then re-hydrate the object with our already existing read method.
   *
   * @param object
   *          object to populate
   * @param rep
   *          repository to load from
   * @param id_job
   *          owning job id
   * @param databases
   *          available databases (unused here; kept for API compatibility)
   * @param slaveServers
   *          available slave servers (unused here; kept for API compatibility)
   * @throws KettleException
   *           if the XML cannot be parsed or read
   */
  public static void loadRep( Object object, Repository rep, ObjectId id_job, List<DatabaseMeta> databases,
      List<SlaveServer> slaveServers ) throws KettleException {
    try {
      String xml = rep.getJobEntryAttributeString( id_job, "job-xml" );
      // NOTE(review): xml.getBytes() uses the platform default charset - confirm the
      // repository round-trips this consistently across JVM configurations
      ByteArrayInputStream bais = new ByteArrayInputStream( xml.getBytes() );
      Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse( bais );
      read( object, doc.getDocumentElement() );
    } catch ( ParserConfigurationException ex ) {
      throw new KettleException( ex.getMessage(), ex );
    } catch ( SAXException ex ) {
      throw new KettleException( ex.getMessage(), ex );
    } catch ( IOException ex ) {
      throw new KettleException( ex.getMessage(), ex );
    }
  }

  // Appends indentLevel copies of INDENT_STRING to the buffer (pretty-printing only).
  private static void indent( StringBuffer sb, int indentLevel ) {
    for ( int i = 0; i < indentLevel; i++ ) {
      sb.append( INDENT_STRING );
    }
  }
}
/*
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */
package org.apache.qpid.mina;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.org.apache.mina.common.*;
import org.wso2.org.apache.mina.transport.socket.nio.SocketConnector;
import org.wso2.org.apache.mina.transport.socket.nio.SocketConnectorConfig;
import org.wso2.org.apache.mina.transport.socket.nio.SocketSessionConfig;

import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.concurrent.CountDownLatch;

import junit.framework.TestCase;

/**
 * Throughput test that writes a stream of fixed-size chunks to a MINA socket
 * (the echo server from AcceptorTest) and measures total/average round-trip
 * times as the echoed chunks come back.
 */
public class WriterTest extends TestCase
{
    private static final Log _logger = LogFactory.getLog(WriterTest.class);

    /**
     * The actual writer workload. run() blasts _chunkCount buffers of _chunkSize
     * bytes at the session, then blocks on _notifier until the nested
     * WriterHandler has seen every chunk echoed back.
     */
    private static class RunnableWriterTest implements Runnable
    {
        // NOTE(review): 'Logger' is not imported anywhere in this file, and the outer
        // class passes a commons-logging 'Log' to the constructor below - this looks
        // like a broken migration from another logging API; confirm before relying on it.
        private Logger _logger;

        private IoSession _session;

        private long _startTime;

        // per-chunk receipt timestamps, indexed by chunk number
        private long[] _chunkTimes;

        private int _chunkCount = 500000;

        private int _chunkSize = 1024;

        // counted down by WriterHandler once all chunks have been received
        private CountDownLatch _notifier;

        public RunnableWriterTest(Logger logger)
        {
            _logger = logger;
        }

        public void run()
        {
            _startTime = System.currentTimeMillis();
            _notifier = new CountDownLatch(1);
            // each chunk starts with a check byte (i mod 128) followed by filler (0x58)
            for (int i = 0; i < _chunkCount; i++)
            {
                ByteBuffer buf = ByteBuffer.allocate(_chunkSize, false);
                byte check = (byte) (i % 128);
                buf.put(check);
                buf.fill((byte)88, buf.remaining());
                buf.flip();
                _session.write(buf);
            }

            try
            {
                _logger.info("All buffers sent; waiting for receipt from server");
                _notifier.await();
            }
            catch (InterruptedException e)
            {
                // NOTE(review): interrupt is swallowed without re-interrupting the thread
            }

            _logger.info("Completed");
            long totalTime = System.currentTimeMillis() - _startTime;
            _logger.info("Total time: " + totalTime);
            _logger.info("MB per second: " + (_chunkSize * _chunkCount)/totalTime);
            long lastChunkTime = _startTime;
            double average = 0;
            // running pairwise average of inter-chunk deltas (not an arithmetic mean)
            for (int i = 0; i < _chunkTimes.length; i++)
            {
                if (i == 0)
                {
                    average = _chunkTimes[i] - _startTime;
                }
                else
                {
                    long delta = _chunkTimes[i] - lastChunkTime;
                    if (delta != 0)
                    {
                        average = (average + delta)/2;
                    }
                }

                lastChunkTime = _chunkTimes[i];
            }

            _logger.info("Average chunk time: " + average + "ms");
            CloseFuture cf = _session.close();
            cf.join();
        }

        /**
         * Receives the echoed chunks, validates check bytes and records receipt
         * times. Handles chunks that arrive split across multiple reads via
         * _partialBytesRead.
         */
        private class WriterHandler extends IoHandlerAdapter
        {
            private int _chunksReceived = 0;

            // bytes of the current chunk left over from the previous read
            private int _partialBytesRead = 0;

            // NOTE(review): written but never read back - appears vestigial
            private byte _partialCheckNumber;

            private int _totalBytesReceived = 0;

            public void messageReceived(IoSession session, Object message) throws Exception
            {
                ByteBuffer result = (ByteBuffer) message;
                _totalBytesReceived += result.remaining();
                int size = result.remaining();
                long now = System.currentTimeMillis();
                // first finish off any chunk that was partially read last time
                if (_partialBytesRead > 0)
                {
                    int offset = _chunkSize - _partialBytesRead;
                    if (size >= offset)
                    {
                        _chunkTimes[_chunksReceived++] = now;
                        result.position(offset);
                    }
                    else
                    {
                        // have not read even one chunk, including the previous partial bytes
                        _partialBytesRead += size;
                        return;
                    }
                }

                int chunkCount = result.remaining()/_chunkSize;
                for (int i = 0; i < chunkCount; i++)
                {
                    _chunkTimes[_chunksReceived++] = now;
                    byte check = result.get();
                    _logger.debug("Check number " + check + " read");
                    if (check != (byte)((_chunksReceived - 1)%128))
                    {
                        _logger.error("Check number " + check + " read when expected " + (_chunksReceived%128));
                    }

                    _logger.debug("Chunk times recorded");
                    try
                    {
                        // skip the filler bytes of this chunk
                        result.skip(_chunkSize - 1);
                    }
                    catch (IllegalArgumentException e)
                    {
                        _logger.error("Position was: " + result.position());
                        _logger.error("Tried to skip to: " + (_chunkSize * i));
                        _logger.error("limit was; " + result.limit());
                    }
                }

                _logger.debug("Chunks received now " + _chunksReceived);
                _logger.debug("Bytes received: " + _totalBytesReceived);
                // whatever is left is the start of the next chunk
                _partialBytesRead = result.remaining();

                if (_partialBytesRead > 0)
                {
                    _partialCheckNumber = result.get();
                }

                if (_chunksReceived >= _chunkCount)
                {
                    // all chunks echoed back: release run()
                    _notifier.countDown();
                }
            }

            public void exceptionCaught(IoSession session, Throwable cause) throws Exception
            {
                _logger.error("Error: " + cause, cause);
            }
        }

        /**
         * Connects to the local echo server, starts the writer thread with the
         * given chunk size and waits for it to finish.
         *
         * @param chunkSize size in bytes of each chunk written
         */
        public void startWriter(int chunkSize) throws IOException, InterruptedException
        {
            _chunkSize = chunkSize;
            IoConnector ioConnector = null;
            ioConnector = new SocketConnector();

            SocketConnectorConfig cfg = (SocketConnectorConfig) ioConnector.getDefaultConfig();
            cfg.setThreadModel(ThreadModel.MANUAL);
            SocketSessionConfig scfg = (SocketSessionConfig) cfg.getSessionConfig();
            scfg.setTcpNoDelay(true);
            scfg.setSendBufferSize(32768);
            scfg.setReceiveBufferSize(32768);

            final InetSocketAddress address = new InetSocketAddress("localhost", AcceptorTest.PORT);
            _logger.info("Attempting connection to " + address);
            ConnectFuture future = ioConnector.connect(address, new WriterHandler());
            // wait for connection to complete
            future.join();
            _logger.info("Connection completed");
            // we call getSession which throws an IOException if there has been an error connecting
            _session = future.getSession();
            _chunkTimes = new long[_chunkCount];
            Thread t = new Thread(this);
            t.start();
            t.join();
            _logger.info("Test completed");
        }
    }

    private RunnableWriterTest _runnableWriterTest = new RunnableWriterTest(_logger);

    public void test1k() throws IOException, InterruptedException
    {
        _logger.info("Starting 1k test");
        _runnableWriterTest.startWriter(1024);
    }

    public void test2k() throws IOException, InterruptedException
    {
        _logger.info("Starting 2k test");
        _runnableWriterTest.startWriter(2048);
    }

    public void test4k() throws IOException, InterruptedException
    {
        _logger.info("Starting 4k test");
        _runnableWriterTest.startWriter(4096);
    }

    public void test8k() throws IOException, InterruptedException
    {
        _logger.info("Starting 8k test");
        _runnableWriterTest.startWriter(8192);
    }

    public void test16k() throws IOException, InterruptedException
    {
        _logger.info("Starting 16k test");
        _runnableWriterTest.startWriter(16384);
    }

    public void test32k() throws IOException, InterruptedException
    {
        _logger.info("Starting 32k test");
        _runnableWriterTest.startWriter(32768);
    }

    // Standalone entry point: runs only the 8k variant by default.
    public static void main(String[] args) throws IOException, InterruptedException
    {
        WriterTest w = new WriterTest();
        //w.test1k();
        //w.test2k();
        //w.test4k();
        w.test8k();
        //w.test16k();
        //w.test32k();
    }

    public static junit.framework.Test suite()
    {
        return new junit.framework.TestSuite(WriterTest.class);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.test.spring.junit5;

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;

import org.apache.camel.CamelContext;
import org.apache.camel.spring.SpringCamelContext;
import org.apache.camel.test.ExcludingPackageScanClassResolver;
import org.apache.camel.test.junit5.CamelTestSupport;
import org.apache.camel.util.IOHelper;
import org.apache.camel.util.ObjectHelper;
import org.junit.jupiter.api.AfterEach;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.support.RootBeanDefinition;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.AbstractApplicationContext;
import org.springframework.context.support.GenericApplicationContext;

import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.fail;

/**
 * Base test-class for classic Spring application such as standalone, web applications.
 * Do <tt>not</tt> use this class for Spring Boot testing, instead use <code>@CamelSpringBootTest</code>.
 * <p/>
 * Subclasses provide the Spring application context via {@link #createApplicationContext()};
 * this class manages its lifecycle around each test (or per test class, when
 * <tt>isCreateCamelContextPerClass()</tt> is enabled).
 */
public abstract class CamelSpringTestSupport extends CamelTestSupport {

    // Shares one application context across all tests of a class when
    // isCreateCamelContextPerClass() is true (one context per test thread).
    protected static ThreadLocal<AbstractApplicationContext> threadAppContext = new ThreadLocal<>();
    // Guards context creation and the global SpringCamelContext.setNoStart flag,
    // which is static and therefore must not be toggled by two tests at once.
    protected static Object lock = new Object();

    private static final Logger LOG = LoggerFactory.getLogger(CamelSpringTestSupport.class);

    // The Spring application context used by the currently running test.
    protected AbstractApplicationContext applicationContext;

    /**
     * Creates the Spring application context for the test.
     * When {@link #activeProfiles()} returns profiles, the returned context must
     * NOT yet be refreshed/active (e.g. pass refresh = false to the constructor).
     */
    protected abstract AbstractApplicationContext createApplicationContext();

    @Override
    public void postProcessTest() throws Exception {
        // In per-class mode the shared context is picked up from the thread-local
        // so this test instance sees the same context as the previous tests.
        if (isCreateCamelContextPerClass()) {
            applicationContext = threadAppContext.get();
        }
        super.postProcessTest();
    }

    @Override
    public void doPreSetup() throws Exception {
        if (!"true".equalsIgnoreCase(System.getProperty("skipStartingCamelContext"))) {
            // tell camel-spring it should not trigger starting CamelContext, since we do that later
            // after we are finished setting up the unit test
            synchronized (lock) {
                SpringCamelContext.setNoStart(true);
                if (isCreateCamelContextPerClass()) {
                    // lazily create and cache the context for the whole test class
                    applicationContext = threadAppContext.get();
                    if (applicationContext == null) {
                        applicationContext = doCreateApplicationContext();
                        threadAppContext.set(applicationContext);
                    }
                } else {
                    // fresh context per test
                    applicationContext = doCreateApplicationContext();
                }
                // restore the global flag so later CamelContexts start normally
                SpringCamelContext.setNoStart(false);
            }
        } else {
            LOG.info("Skipping starting CamelContext as system property skipStartingCamelContext is set to be true.");
        }
    }

    /**
     * Creates the context via the subclass hook, applies active profiles (which
     * requires a not-yet-refreshed context), and ensures it ends up refreshed.
     */
    private AbstractApplicationContext doCreateApplicationContext() {
        AbstractApplicationContext context = createApplicationContext();
        assertNotNull(context, "Should have created a valid Spring application context");

        String[] profiles = activeProfiles();
        if (profiles != null && profiles.length > 0) {
            // the context must not be active
            if (context.isActive()) {
                // profiles cannot be changed on an already refreshed context
                throw new IllegalStateException("Cannot active profiles: " + Arrays.asList(profiles) + " on active Spring application context: " + context + ". The code in your createApplicationContext() method should be adjusted to create the application context with refresh = false as parameter");
            }
            LOG.info("Spring activating profiles: {}", Arrays.asList(profiles));
            context.getEnvironment().setActiveProfiles(profiles);
        }

        // ensure the context has been refreshed at least once
        if (!context.isActive()) {
            context.refresh();
        }

        return context;
    }

    @Override
    @AfterEach
    public void tearDown() throws Exception {
        super.tearDown();
        // per-test contexts are closed here; per-class contexts stay alive in the
        // thread-local until doPostTearDown()
        if (!isCreateCamelContextPerClass()) {
            IOHelper.close(applicationContext);
            applicationContext = null;
        }
    }

    @Override
    public void doPostTearDown() throws Exception {
        super.doPostTearDown();
        // close and clear the class-wide shared context, if any
        if (threadAppContext.get() != null) {
            IOHelper.close(threadAppContext.get());
            threadAppContext.remove();
        }
    }

    /**
     * Create a parent context that initializes a
     * {@link org.apache.camel.spi.PackageScanClassResolver} to exclude a set of given classes from
     * being resolved. Typically this is used at test time to exclude certain routes,
     * which might otherwise be just noisy, from being discovered and initialized.
     * <p/>
     * To use this filtering mechanism it is necessary to provide the
     * {@link org.springframework.context.ApplicationContext} returned from here as the parent context to
     * your test context e.g.
     *
     * <pre>
     * protected AbstractXmlApplicationContext createApplicationContext() {
     *     return new ClassPathXmlApplicationContext(new String[] {&quot;test-context.xml&quot;}, getRouteExcludingApplicationContext());
     * }
     * </pre>
     *
     * This will, in turn, call the template methods <code>excludedRoutes</code>
     * and <code>excludedRoute</code> to determine the classes to be excluded from scanning.
     *
     * @return ApplicationContext a parent {@link org.springframework.context.ApplicationContext} configured
     *         to exclude certain classes from package scanning
     */
    protected ApplicationContext getRouteExcludingApplicationContext() {
        GenericApplicationContext routeExcludingContext = new GenericApplicationContext();
        routeExcludingContext.registerBeanDefinition("excludingResolver", new RootBeanDefinition(ExcludingPackageScanClassResolver.class));
        routeExcludingContext.refresh();

        ExcludingPackageScanClassResolver excludingResolver = routeExcludingContext.getBean("excludingResolver", ExcludingPackageScanClassResolver.class);
        List<Class<?>> excluded = Arrays.asList(excludeRoutes());
        excludingResolver.setExcludedClasses(new HashSet<>(excluded));

        return routeExcludingContext;
    }

    /**
     * Template method used to exclude {@link org.apache.camel.Route} from the test time context
     * route scanning. Defaults to wrapping the single class from {@link #excludeRoute()}.
     *
     * @return Class[] the classes to be excluded from test time context route scanning
     */
    protected Class<?>[] excludeRoutes() {
        Class<?> excludedRoute = excludeRoute();
        return excludedRoute != null ? new Class[] {excludedRoute} : new Class[0];
    }

    /**
     * Template method used to exclude a {@link org.apache.camel.Route} from the test camel context.
     *
     * @return the route class to exclude, or <tt>null</tt> (default) to exclude none
     */
    protected Class<?> excludeRoute() {
        return null;
    }

    /**
     * Looks up the mandatory spring bean of the given name and type, failing if
     * it is not present or the correct type.
     *
     * @param type the expected bean type
     * @param name the bean name to look up
     * @return the bean cast to the requested type (test fails otherwise)
     */
    public <T> T getMandatoryBean(Class<T> type, String name) {
        Object value = applicationContext.getBean(name);
        assertNotNull(value, "No spring bean found for name <" + name + ">");
        if (type.isInstance(value)) {
            return type.cast(value);
        } else {
            fail("Spring bean <" + name + "> is not an instanceof " + type.getName() + " but is of type " + ObjectHelper.className(value));
            // unreachable in practice: fail(...) throws
            return null;
        }
    }

    /**
     * Which active profiles should be used.
     * <p/>
     * <b>Important:</b> When using active profiles, then the code in {@link #createApplicationContext()} should create
     * the Spring {@link org.springframework.context.support.AbstractApplicationContext} without refreshing. For example creating an
     * {@link org.springframework.context.support.ClassPathXmlApplicationContext} you would need to pass in
     * <tt>false</tt> in the refresh parameter, in the constructor.
     * Camel will thrown an {@link IllegalStateException} if this is not correct stating this problem.
     * The reason is that we cannot active profiles <b>after</b> a Spring application context has already
     * been refreshed, and is active.
     *
     * @return an array of active profiles to use, use <tt>null</tt> to not use any active profiles.
     */
    protected String[] activeProfiles() {
        return null;
    }

    @Override
    protected CamelContext createCamelContext() throws Exception {
        // don't start the SpringCamelContext here; the test support base class
        // starts it once the unit test is fully set up
        return SpringCamelContext.springCamelContext(applicationContext, false);
    }
}
package com.zac4j.yoda.data.model;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import java.util.HashMap;
import java.util.Map;

/**
 * Jackson-bound data model for a status/post payload.
 * <p>
 * Plain mutable bean: one field per known JSON property, with any properties
 * not listed below collected into {@link #getAdditionalProperties()} via the
 * {@code @JsonAnyGetter}/{@code @JsonAnySetter} pair.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
@JsonPropertyOrder({
    "created_at", "id", "mid", "idstr", "text", "textLength", "source_allowclick",
    "source_type", "source", "favorited", "truncated", "in_reply_to_status_id",
    "in_reply_to_user_id", "in_reply_to_screen_name"
})
public class Fuck {

  @JsonProperty("created_at") private String createdAt;
  @JsonProperty("id") private Long id;
  @JsonProperty("mid") private String mid;
  @JsonProperty("idstr") private String idstr;
  @JsonProperty("text") private String text;
  @JsonProperty("textLength") private Integer textLength;
  @JsonProperty("source_allowclick") private Integer sourceAllowclick;
  @JsonProperty("source_type") private Integer sourceType;
  @JsonProperty("source") private String source;
  @JsonProperty("favorited") private Boolean favorited;
  @JsonProperty("truncated") private Boolean truncated;
  @JsonProperty("in_reply_to_status_id") private String inReplyToStatusId;
  @JsonProperty("in_reply_to_user_id") private String inReplyToUserId;
  @JsonProperty("in_reply_to_screen_name") private String inReplyToScreenName;

  // Catch-all bucket for JSON properties without a dedicated field.
  @JsonIgnore private Map<String, Object> additionalProperties = new HashMap<String, Object>();

  @JsonProperty("created_at")
  public String getCreatedAt() {
    return createdAt;
  }

  @JsonProperty("created_at")
  public void setCreatedAt(String value) {
    this.createdAt = value;
  }

  @JsonProperty("id")
  public Long getId() {
    return id;
  }

  @JsonProperty("id")
  public void setId(Long value) {
    this.id = value;
  }

  @JsonProperty("mid")
  public String getMid() {
    return mid;
  }

  @JsonProperty("mid")
  public void setMid(String value) {
    this.mid = value;
  }

  @JsonProperty("idstr")
  public String getIdstr() {
    return idstr;
  }

  @JsonProperty("idstr")
  public void setIdstr(String value) {
    this.idstr = value;
  }

  @JsonProperty("text")
  public String getText() {
    return text;
  }

  @JsonProperty("text")
  public void setText(String value) {
    this.text = value;
  }

  @JsonProperty("textLength")
  public Integer getTextLength() {
    return textLength;
  }

  @JsonProperty("textLength")
  public void setTextLength(Integer value) {
    this.textLength = value;
  }

  @JsonProperty("source_allowclick")
  public Integer getSourceAllowclick() {
    return sourceAllowclick;
  }

  @JsonProperty("source_allowclick")
  public void setSourceAllowclick(Integer value) {
    this.sourceAllowclick = value;
  }

  @JsonProperty("source_type")
  public Integer getSourceType() {
    return sourceType;
  }

  @JsonProperty("source_type")
  public void setSourceType(Integer value) {
    this.sourceType = value;
  }

  @JsonProperty("source")
  public String getSource() {
    return source;
  }

  @JsonProperty("source")
  public void setSource(String value) {
    this.source = value;
  }

  @JsonProperty("favorited")
  public Boolean getFavorited() {
    return favorited;
  }

  @JsonProperty("favorited")
  public void setFavorited(Boolean value) {
    this.favorited = value;
  }

  @JsonProperty("truncated")
  public Boolean getTruncated() {
    return truncated;
  }

  @JsonProperty("truncated")
  public void setTruncated(Boolean value) {
    this.truncated = value;
  }

  @JsonProperty("in_reply_to_status_id")
  public String getInReplyToStatusId() {
    return inReplyToStatusId;
  }

  @JsonProperty("in_reply_to_status_id")
  public void setInReplyToStatusId(String value) {
    this.inReplyToStatusId = value;
  }

  @JsonProperty("in_reply_to_user_id")
  public String getInReplyToUserId() {
    return inReplyToUserId;
  }

  @JsonProperty("in_reply_to_user_id")
  public void setInReplyToUserId(String value) {
    this.inReplyToUserId = value;
  }

  @JsonProperty("in_reply_to_screen_name")
  public String getInReplyToScreenName() {
    return inReplyToScreenName;
  }

  @JsonProperty("in_reply_to_screen_name")
  public void setInReplyToScreenName(String value) {
    this.inReplyToScreenName = value;
  }

  @JsonAnyGetter
  public Map<String, Object> getAdditionalProperties() {
    return this.additionalProperties;
  }

  @JsonAnySetter
  public void setAdditionalProperty(String name, Object value) {
    this.additionalProperties.put(name, value);
  }

  /** Same textual form as the original concatenation-based implementation. */
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("Fuck{");
    sb.append("createdAt='").append(createdAt).append('\'');
    sb.append(", id=").append(id);
    sb.append(", mid='").append(mid).append('\'');
    sb.append(", idstr='").append(idstr).append('\'');
    sb.append(", text='").append(text).append('\'');
    sb.append(", textLength=").append(textLength);
    sb.append(", sourceAllowclick=").append(sourceAllowclick);
    sb.append(", sourceType=").append(sourceType);
    sb.append(", source='").append(source).append('\'');
    sb.append(", favorited=").append(favorited);
    sb.append(", truncated=").append(truncated);
    sb.append(", inReplyToStatusId='").append(inReplyToStatusId).append('\'');
    sb.append(", inReplyToUserId='").append(inReplyToUserId).append('\'');
    sb.append(", inReplyToScreenName='").append(inReplyToScreenName).append('\'');
    sb.append(", additionalProperties=").append(additionalProperties);
    sb.append('}');
    return sb.toString();
  }
}
/**
 * Copyright (C) 2014-2015 LinkedIn Corp. (pinot-core@linkedin.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.linkedin.pinot.core.data.manager.realtime;

import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.commons.io.FileUtils;
import org.apache.helix.ZNRecord;
import org.apache.helix.store.zk.ZkHelixPropertyStore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.linkedin.pinot.common.config.AbstractTableConfig;
import com.linkedin.pinot.common.data.Schema;
import com.linkedin.pinot.common.metadata.ZKMetadataProvider;
import com.linkedin.pinot.common.metadata.instance.InstanceZKMetadata;
import com.linkedin.pinot.common.metadata.segment.IndexLoadingConfigMetadata;
import com.linkedin.pinot.common.metadata.segment.RealtimeSegmentZKMetadata;
import com.linkedin.pinot.common.metadata.segment.SegmentZKMetadata;
import com.linkedin.pinot.common.segment.ReadMode;
import com.linkedin.pinot.common.segment.SegmentMetadata;
import com.linkedin.pinot.common.utils.CommonConstants;
import com.linkedin.pinot.common.utils.CommonConstants.Segment.Realtime.Status;
import com.linkedin.pinot.common.utils.NamedThreadFactory;
import com.linkedin.pinot.common.utils.helix.PinotHelixPropertyStoreZnRecordProvider;
import com.linkedin.pinot.core.data.manager.config.TableDataManagerConfig;
import com.linkedin.pinot.core.data.manager.offline.OfflineSegmentDataManager;
import com.linkedin.pinot.core.data.manager.offline.SegmentDataManager;
import com.linkedin.pinot.core.data.manager.offline.TableDataManager;
import com.linkedin.pinot.core.indexsegment.IndexSegment;
import com.linkedin.pinot.core.indexsegment.columnar.ColumnarSegmentLoader;
import com.yammer.metrics.Metrics;
import com.yammer.metrics.core.Counter;

/**
 * Table-level manager for realtime segments: loads completed segments from
 * disk, creates realtime segment data managers for in-flight ones, and
 * reference-counts segment handles handed out to queries so a segment is only
 * destroyed and deleted once no reader holds it.
 */
public class RealtimeTableDataManager implements TableDataManager {
  // Not final/static on purpose: init() re-binds it to a per-table logger name.
  private Logger LOGGER = LoggerFactory.getLogger(RealtimeTableDataManager.class);

  // Single lock guarding segment add/remove; readers do an unlocked pre-check
  // first (see NOTE(review) below).
  private final Object _globalLock = new Object();
  private boolean _isStarted = false;
  private File _indexDir;
  private ReadMode _readMode;
  private TableDataManagerConfig _tableDataManagerConfig;
  private String _tableDataDir;
  private int _numberOfTableQueryExecutorThreads;
  private IndexLoadingConfigMetadata _indexLoadingConfigMetadata;
  private ExecutorService _queryExecutorService;

  // NOTE(review): plain HashMaps/ArrayLists accessed both inside and outside
  // synchronized(_globalLock) blocks (e.g. containsKey pre-checks, getSegment).
  // This is double-checked locking without safe publication — confirm intended.
  private final Map<String, SegmentDataManager> _segmentsMap = new HashMap<String, SegmentDataManager>();
  private final ExecutorService _segmentAsyncExecutorService = Executors
      .newSingleThreadExecutor(new NamedThreadFactory("SegmentAsyncExecutorService"));
  private final List<String> _activeSegments = new ArrayList<String>();
  private final List<String> _loadingSegments = new ArrayList<String>();
  // Per-segment reader reference counts; segment is destroyed when count hits 1
  // and the last reader returns it.
  private Map<String, AtomicInteger> _referenceCounts = new HashMap<String, AtomicInteger>();
  private ZkHelixPropertyStore<ZNRecord> _helixPropertyStore;
  private String _tableName;

  // Class-scoped counters; init() replaces them with table-name-prefixed ones.
  private Counter _currentNumberOfSegments = Metrics.newCounter(RealtimeTableDataManager.class,
      CommonConstants.Metric.Server.CURRENT_NUMBER_OF_SEGMENTS);
  private Counter _currentNumberOfDocuments = Metrics.newCounter(RealtimeTableDataManager.class,
      CommonConstants.Metric.Server.CURRENT_NUMBER_OF_DOCUMENTS);
  private Counter _numDeletedSegments = Metrics.newCounter(RealtimeTableDataManager.class,
      CommonConstants.Metric.Server.NUMBER_OF_DELETED_SEGMENTS);

  /**
   * Reads table config: data dir (created if missing), query executor sizing
   * (fixed pool when a positive thread count is configured, cached otherwise),
   * read mode and index-loading config. Re-binds logger and metrics counters to
   * table-specific names.
   */
  @Override
  public void init(TableDataManagerConfig tableDataManagerConfig) {
    _tableDataManagerConfig = tableDataManagerConfig;
    _tableName = _tableDataManagerConfig.getTableName();
    LOGGER = LoggerFactory.getLogger(_tableName + "-RealtimeTableDataManager");
    _currentNumberOfSegments = Metrics.newCounter(RealtimeTableDataManager.class,
        _tableName + "-" + CommonConstants.Metric.Server.CURRENT_NUMBER_OF_SEGMENTS);
    _currentNumberOfDocuments = Metrics.newCounter(RealtimeTableDataManager.class,
        _tableName + "-" + CommonConstants.Metric.Server.CURRENT_NUMBER_OF_DOCUMENTS);
    _numDeletedSegments = Metrics.newCounter(RealtimeTableDataManager.class,
        _tableName + "-" + CommonConstants.Metric.Server.NUMBER_OF_DELETED_SEGMENTS);

    _tableDataDir = _tableDataManagerConfig.getDataDir();
    if (!new File(_tableDataDir).exists()) {
      new File(_tableDataDir).mkdirs();
    }
    _numberOfTableQueryExecutorThreads = _tableDataManagerConfig.getNumberOfTableQueryExecutorThreads();
    if (_numberOfTableQueryExecutorThreads > 0) {
      _queryExecutorService = Executors.newFixedThreadPool(_numberOfTableQueryExecutorThreads, new NamedThreadFactory(
          "parallel-query-executor-" + _tableName));
    } else {
      _queryExecutorService =
          Executors.newCachedThreadPool(new NamedThreadFactory("parallel-query-executor-" + _tableName));
    }
    _readMode = ReadMode.valueOf(_tableDataManagerConfig.getReadMode());
    _indexLoadingConfigMetadata = _tableDataManagerConfig.getIndexLoadingConfigMetadata();
    LOGGER.info("Initialized RealtimeTableDataManager: table : " + _tableName + " with :\n\tData Directory: "
        + _tableDataDir + "\n\tRead Mode : " + _readMode + "\n\tQuery Exeutor with "
        + ((_numberOfTableQueryExecutorThreads > 0) ? _numberOfTableQueryExecutorThreads : "cached") + " threads");
  }

  /** Idempotent start: (re)derives index dir and read mode, creates the dir if needed. */
  @Override
  public void start() {
    if (_isStarted) {
      LOGGER.warn("RealtimeTableDataManager is already started.");
      return;
    }
    _indexDir = new File(_tableDataManagerConfig.getDataDir());
    _readMode = ReadMode.valueOf(_tableDataManagerConfig.getReadMode());
    if (!_indexDir.exists()) {
      if (!_indexDir.mkdir()) {
        LOGGER.error("could not create data dir");
      }
    }
    _isStarted = true;
  }

  /** Shuts down both executors; no-op (with warning) when not started. */
  @Override
  public void shutDown() {
    LOGGER.info("Trying to shutdown table : " + _tableName);
    if (_isStarted) {
      _queryExecutorService.shutdown();
      _segmentAsyncExecutorService.shutdown();
      _tableDataManagerConfig = null;
      _isStarted = false;
    } else {
      LOGGER.warn("Already shutDown table : " + _tableName);
    }
  }

  /**
   * Callback from a realtime segment that finished consuming: persists its ZK
   * metadata and moves it from loading to active.
   */
  public void notify(RealtimeSegmentZKMetadata metadata) {
    ZKMetadataProvider.setRealtimeSegmentZKMetadata(_helixPropertyStore, metadata);
    markSegmentAsLoaded(metadata.getSegmentName());
  }

  @Override
  public boolean isStarted() {
    return _isStarted;
  }

  /** Realtime tables need the full overload below; metadata alone is insufficient. */
  @Override
  public void addSegment(SegmentZKMetadata segmentMetadata) throws Exception {
    throw new UnsupportedOperationException("Cannot add realtime segment with just SegmentZKMetadata");
  }

  /**
   * Adds a realtime segment. A segment already completed (on-disk dir exists and
   * status DONE) is loaded like an offline segment; otherwise a new
   * RealtimeSegmentDataManager is created from the schema stored in ZK.
   * Double-checked under the global lock so each segment is registered once.
   * Non-realtime metadata is silently ignored.
   */
  @Override
  public void addSegment(ZkHelixPropertyStore<ZNRecord> propertyStore, AbstractTableConfig tableConfig,
      InstanceZKMetadata instanceZKMetadata, SegmentZKMetadata segmentZKMetadata) throws Exception {
    this._helixPropertyStore = propertyStore;
    String segmentId = segmentZKMetadata.getSegmentName();
    if (segmentZKMetadata instanceof RealtimeSegmentZKMetadata) {
      if (new File(_indexDir, segmentId).exists()
          && ((RealtimeSegmentZKMetadata) segmentZKMetadata).getStatus() == Status.DONE) {
        // segment already exists on file, simply load it and add it to the map
        if (!_segmentsMap.containsKey(segmentId)) {
          synchronized (getGlobalLock()) {
            if (!_segmentsMap.containsKey(segmentId)) {
              IndexSegment segment =
                  ColumnarSegmentLoader.load(new File(_indexDir, segmentId), _readMode, _indexLoadingConfigMetadata);
              _segmentsMap.put(segmentId, new OfflineSegmentDataManager(segment));
              markSegmentAsLoaded(segmentId);
              _referenceCounts.put(segmentId, new AtomicInteger(1));
            }
          }
        }
      } else {
        if (!_segmentsMap.containsKey(segmentId)) {
          synchronized (getGlobalLock()) {
            if (!_segmentsMap.containsKey(segmentId)) {
              // this is a new segment, lets create an instance of RealtimeSegmentDataManager
              PinotHelixPropertyStoreZnRecordProvider propertyStoreHelper =
                  PinotHelixPropertyStoreZnRecordProvider.forSchema(propertyStore);
              ZNRecord record = propertyStoreHelper.get(tableConfig.getValidationConfig().getSchemaName());
              LOGGER.info("found schema {} ", tableConfig.getValidationConfig().getSchemaName());
              SegmentDataManager manager =
                  new RealtimeSegmentDataManager((RealtimeSegmentZKMetadata) segmentZKMetadata, tableConfig,
                      instanceZKMetadata, this, _indexDir.getAbsolutePath(), _readMode, Schema.fromZNRecord(record));
              LOGGER.info("Initialize RealtimeSegmentDataManager - " + segmentId);
              _segmentsMap.put(segmentId, manager);
              _loadingSegments.add(segmentId);
              _referenceCounts.put(segmentId, new AtomicInteger(1));
            }
          }
        }
      }
    }
  }

  public void updateStatus() {
  }

  @Override
  public void addSegment(IndexSegment indexSegmentToAdd) {
    throw new UnsupportedOperationException("Not supported addSegment(IndexSegment) in RealtimeTableDataManager");
  }

  @Override
  public void addSegment(SegmentMetadata segmentMetaToAdd) throws Exception {
    throw new UnsupportedOperationException("Not supported addSegment(SegmentMetadata) in RealtimeTableDataManager");
  }

  /** Removal is reference-count driven: this just releases one reference. */
  @Override
  public void removeSegment(String segmentToRemove) {
    decrementCount(segmentToRemove);
  }

  // NOTE(review): NPEs if the segment is unknown; callers appear to only call
  // this for segments present in _segmentsMap.
  public void incrementCount(final String segmentId) {
    _referenceCounts.get(segmentId).incrementAndGet();
  }

  /** Moves a segment from the loading list to the active list and bumps metrics. */
  private void markSegmentAsLoaded(String segmentId) {
    _currentNumberOfSegments.inc();
    if (_segmentsMap.containsKey(segmentId)) {
      _currentNumberOfDocuments.inc(_segmentsMap.get(segmentId).getSegment().getTotalDocs());
    }
    _loadingSegments.remove(segmentId);
    if (!_activeSegments.contains(segmentId)) {
      _activeSegments.add(segmentId);
    }
  }

  /**
   * Releases one reference. When only the manager's own reference remains
   * (count == 1), the segment is unregistered, destroyed, and its on-disk
   * directory deleted asynchronously; otherwise the count is just decremented.
   */
  public void decrementCount(final String segmentId) {
    if (!_referenceCounts.containsKey(segmentId)) {
      LOGGER.warn("Received command to delete unexisting segment - " + segmentId);
      return;
    }
    AtomicInteger count = _referenceCounts.get(segmentId);
    if (count.get() == 1) {
      SegmentDataManager segment = null;
      synchronized (getGlobalLock()) {
        // re-check under the lock before tearing down
        if (count.get() == 1) {
          segment = _segmentsMap.remove(segmentId);
          _activeSegments.remove(segmentId);
          _referenceCounts.remove(segmentId);
        }
      }
      if (segment != null) {
        _currentNumberOfSegments.dec();
        _currentNumberOfDocuments.dec(segment.getSegment().getTotalDocs());
        _numDeletedSegments.inc();
        segment.getSegment().destroy();
      }
      LOGGER.info("Segment " + segmentId + " has been deleted");
      // delete the index directory off the query path
      _segmentAsyncExecutorService.execute(new Runnable() {
        @Override
        public void run() {
          FileUtils.deleteQuietly(new File(_tableDataDir, segmentId));
          LOGGER.info("The index directory for the segment " + segmentId + " has been deleted");
        }
      });
    } else {
      count.decrementAndGet();
    }
  }

  /** Returns all segments, taking one reference on each (release via returnSegmentReader). */
  @Override
  public List<SegmentDataManager> getAllSegments() {
    List<SegmentDataManager> ret = new ArrayList<SegmentDataManager>();
    synchronized (getGlobalLock()) {
      for (SegmentDataManager segment : _segmentsMap.values()) {
        incrementCount(segment.getSegmentName());
        ret.add(segment);
      }
    }
    return ret;
  }

  /** Returns the requested segments that exist, taking one reference on each. */
  @Override
  public List<SegmentDataManager> getSegments(List<String> segmentList) {
    List<SegmentDataManager> ret = new ArrayList<SegmentDataManager>();
    synchronized (getGlobalLock()) {
      for (String segmentName : segmentList) {
        if (_segmentsMap.containsKey(segmentName)) {
          incrementCount(segmentName);
          ret.add(_segmentsMap.get(segmentName));
        }
      }
    }
    return ret;
  }

  /**
   * Returns one segment (taking a reference) or null if unknown.
   * NOTE(review): this check-then-get runs without the global lock — confirm
   * the race with concurrent removal is acceptable.
   */
  @Override
  public SegmentDataManager getSegment(String segmentName) {
    if (_segmentsMap.containsKey(segmentName)) {
      incrementCount(segmentName);
      return _segmentsMap.get(segmentName);
    } else {
      return null;
    }
  }

  /** Releases one reference for each segment in the list. */
  @Override
  public void returnSegmentReaders(List<String> segmentList) {
    synchronized (getGlobalLock()) {
      for (String segmentId : segmentList) {
        returnSegmentReader(segmentId);
      }
    }
  }

  @Override
  public ExecutorService getExecutorService() {
    return _queryExecutorService;
  }

  public Object getGlobalLock() {
    return _globalLock;
  }

  /** Releases one reference on a single segment. */
  @Override
  public void returnSegmentReader(String segmentId) {
    decrementCount(segmentId);
  }
}
package org.xcolab.client.modeling.models.ui;

import edu.mit.cci.roma.client.MetaData;
import edu.mit.cci.roma.client.Scenario;
import edu.mit.cci.roma.client.Simulation;
import edu.mit.cci.roma.client.Tuple;
import edu.mit.cci.roma.client.Variable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xcolab.client.modeling.StaticModelingContext;
import org.xcolab.client.modeling.pojo.IModelInputGroup;
import org.xcolab.client.modeling.pojo.IModelInputItem;
import org.xcolab.util.json.JsonUtil;
import org.xcolab.util.json.NullsafeJsonArrayBuilder;
import org.xcolab.util.json.NullsafeJsonObjectBuilder;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import javax.json.Json;
import javax.json.JsonObjectBuilder;

/**
 * Singleton factory that builds the UI display model (inputs, outputs, groups)
 * for a simulation, and converts variables/metadata to JSON.
 */
public class ModelUIFactory {

    private static final ModelUIFactory ourInstance = new ModelUIFactory();

    private static final Logger _log = LoggerFactory.getLogger(ModelUIFactory.class);

    private ModelUIFactory() {
    }

    public static ModelUIFactory getInstance() {
        return ourInstance;
    }

    /**
     * Helper function, returns variable for a scenario given its associated metadata.
     *
     * @param s       the scenario whose variables are searched
     * @param md      the metadata to match against each variable
     * @param isInput whether to search the scenario's input set (true) or output set (false)
     * @return the first matching variable, or null if none matches
     */
    public static Variable getVariableForMetaData(Scenario s, MetaData md, boolean isInput) {
        Variable result = null;
        for (Variable var : (isInput ? s.getInputSet() : s.getOutputSet())) {
            if (var.getMetaData().equals(md)) {
                result = var;
                break;
            }
        }
        return result;
    }

    /**
     * Serializes a variable to JSON: its id, its metadata (via
     * {@link #convertToJson(MetaData)}) and the values of each tuple.
     */
    public static JsonObjectBuilder convertToJson(Variable var) {
        NullsafeJsonArrayBuilder valuesArray = JsonUtil.nullsafe(Json.createArrayBuilder());
        for (Tuple val : var.getValue()) {
            valuesArray.addArray(val.getValues());
        }
        return JsonUtil.nullsafe(Json.createObjectBuilder())
                .add("id", var.getId())
                .add("metaData", convertToJson(var.getMetaData()))
                .add("values", valuesArray);
    }

    /**
     * Serializes metadata to JSON. The optional indexing metadata is added only
     * when present (as its id).
     */
    public static JsonObjectBuilder convertToJson(MetaData md) {
        final NullsafeJsonObjectBuilder jsonBuilder = JsonUtil.nullsafe(Json.createObjectBuilder())
                .add("id", md.getId())
                .add("name", md.getName())
                .add("description", md.getDescription())
                .add("externalInfo", md.getExternalInfo())
                .add("internalName", md.getInternalName())
                .add("varType", md.getVarType().name())
                .add("isIndex", md.getIndex())
                .addArray("categories", md.getCategories())
                .addArray("labels", md.getLabels())
                .addArray("default", md.getDefault())
                .addArray("max", md.getMax())
                .addArray("min", md.getMin())
                .addArray("units", md.getUnits())
                .add("varContext", md.getVarContext().name())
                .addArray("profiles", md.getProfile());
        if (md.getIndexingMetaData() != null) {
            jsonBuilder.add("indexingMetaData", md.getIndexingMetaData().getId());
        }
        return jsonBuilder;
    }

    /**
     * Returns the layout information for the model.
     */
    public ModelDisplay getDisplay(Simulation s) throws IllegalUIConfigurationException, IOException {
        return new ModelDisplay(s);
    }

    /**
     * Returns the layout information for the model, and also sets the scenario
     * on the display container (enabling variable retrieval functions through the
     * display classes).
     */
    public ModelDisplay getDisplay(Scenario s) throws IllegalUIConfigurationException, IOException {
        return new ModelDisplay(s);
    }

    /**
     * Package scoped helper function, used to build the output layout classes for the Simulation.
     * Indexed outputs are grouped into chart items (FREE vars by name, RANGE vars
     * by their second label); scalar outputs each get their own display item.
     */
    List<ModelOutputDisplayItem> parseOutputs(Simulation s) {
        Map<String, ModelOutputDisplayItem> found = new HashMap<>();
        for (MetaData md : s.getOutputs()) {
            if (md.getVarContext() == MetaData.VarContext.INDEXED) {
                ModelOutputIndexedDisplayItem item;
                if (md.getVarType() == MetaData.VarType.FREE) {
                    item = (ModelOutputIndexedDisplayItem) found.get(md.getName());
                    if (item == null) {
                        item = new ModelOutputIndexedDisplayItem(s, md.getName());
                        item.setChartType(ModelOutputChartType.FREE);
                        found.put(md.getName(), item);
                    }
                    item.addSeriesData(md);
                } else if (md.getVarType() == MetaData.VarType.RANGE) {
                    if (md.getLabels().length < 2) {
                        // BUGFIX: previously only warned and then dereferenced
                        // getLabels()[1] anyway, which is guaranteed to throw
                        // ArrayIndexOutOfBoundsException here; skip the metadata
                        // instead since no series key can be derived.
                        _log.warn("Metadata {} only has one element", md.getName());
                        continue;
                    }
                    item = (ModelOutputIndexedDisplayItem) found.get(md.getLabels()[1]);
                    if (item == null) {
                        item = new ModelOutputIndexedDisplayItem(s, md.getLabels()[1]);
                        item.setChartType(ModelOutputChartType.TIME_SERIES);
                        found.put(md.getLabels()[1], item);
                    }
                    item.addSeriesData(md);
                } else {
                    _log.warn("Unknown variable type {}", md.getVarType());
                    continue;
                }
                if (item.getIndex() == null) {
                    item.setIndex(md.getIndexingMetaData());
                }
            } else if (md.getVarContext() == MetaData.VarContext.SCALAR) {
                found.put(md.getName(), new ModelOutputScalarDisplayItem(s, md));
            }
        }
        return new ArrayList<>(found.values());
    }

    /**
     * Recursive call to process groups. Removes every metadata claimed by this
     * group (and, recursively, its children) from {@code bareMetaData} so the
     * caller can handle leftovers individually.
     *
     * @return the display item for the group, or null when it could not be built
     */
    private ModelInputGroupDisplayItem processGroup(IModelInputGroup group, Set<MetaData> bareMetaData,
            Simulation simulation) throws IllegalUIConfigurationException, IOException {
        for (IModelInputItem item : StaticModelingContext.getModelingClient()
                .getInputItems(group)) {
            final MetaData metaData = StaticModelingContext.getModelingClient().getMetaData(item);
            bareMetaData.remove(metaData);
        }
        ModelInputGroupDisplayItem result;
        try {
            result = new ModelInputGroupDisplayItem(simulation, group);
        } catch (IOException e) {
            _log.error("", e);
            return null;
        }
        for (IModelInputGroup g : StaticModelingContext.getModelingClient().getChildGroups(group)) {
            // processGroup returns null on failure; don't register a null child
            ModelInputGroupDisplayItem child = processGroup(g, bareMetaData, simulation);
            if (child != null) {
                result.addChildGroup(child);
            }
        }
        return result;
    }

    /**
     * Package scoped helper function, used to build the input layout classes for the Simulation.
     * Top-level groups are processed recursively; any metadata not claimed by a
     * group is added as an individual item (defaulting to a text field).
     */
    public List<ModelInputDisplayItem> parseInputs(Simulation s)
            throws IllegalUIConfigurationException, IOException {
        List<ModelInputDisplayItem> result = new ArrayList<>();
        Set<MetaData> inputs = new HashSet<>(s.getInputs());
        for (IModelInputGroup group : StaticModelingContext.getModelingClient().getInputGroups(s)) {
            if (group.getParentGroupId() <= 0) {
                // processGroup returns null on failure; don't add a null entry
                ModelInputGroupDisplayItem groupItem = processGroup(group, inputs, s);
                if (groupItem != null) {
                    result.add(groupItem);
                }
            }
        }
        //any left overs
        for (MetaData md : inputs) {
            try {
                IModelInputItem item = StaticModelingContext.getModelingClient()
                        .getItemForMetaData(s.getId(), md);
                ModelInputDisplayItem toAdd = item == null
                        ? ModelInputIndividualDisplayItem.create(s, md, ModelInputWidgetType.TEXT_FIELD)
                        : getInputItem(item);
                result.add(toAdd);
            } catch (IOException e) {
                _log.error("", e);
            }
        }
        return result;
    }

    /**
     * Wraps a single input item in its display class; returns null (after
     * logging) when construction fails with an IOException.
     */
    public ModelInputDisplayItem getInputItem(IModelInputItem item) {
        try {
            return new ModelInputIndividualDisplayItem(item);
        } catch (IOException e) {
            _log.error("", e);
        }
        return null;
    }

    /**
     * Wraps an input group in its display class; returns null (after logging)
     * when construction fails with an IOException.
     */
    public ModelInputGroupDisplayItem getGroupItem(Simulation simulation, IModelInputGroup item) {
        try {
            return new ModelInputGroupDisplayItem(simulation, item);
        } catch (IOException e) {
            _log.error("", e);
        }
        return null;
    }
}
package org.zv.fintrack.command; import java.util.Date; import java.util.List; import java.util.Locale; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.zv.common.mvc.CommandSupport; import org.zv.common.mvc.Resource; import org.zv.fintrack.ejb.api.CategoryDao; import org.zv.fintrack.ejb.api.ExpenseDao; import org.zv.fintrack.pd.Category; import org.zv.fintrack.pd.Expense; import org.zv.fintrack.util.Messages; /** * Handle list request. * * @author Arvid Juskaitis */ public class ExpenseCommand extends CommandSupport { /** * Selected expense for editing, deleting. */ private Expense expense = new Expense(); /** * Error message */ private String error; /** * Message */ private String message; /** * Selected income/expense record id for editing, deleting. */ private Integer preinitId; /** * Session bean, injected by MVC. */ private CategoryDao categoryDao; /** * Session bean, injected by MVC. */ private ExpenseDao expenseDao; @Resource(name = "ejb/CategoryDao") public void setCategoryDao(CategoryDao categoryDao) { this.categoryDao = categoryDao; } @Resource(name = "ejb/ExpenseDao") public void setExpenseDao(ExpenseDao expenseDao) { this.expenseDao = expenseDao; } /** * Put initial values for html elements and return without populating lists. * * @param request * @param response * @return view to forward */ public String execute(HttpServletRequest request, HttpServletResponse response) { // Set default values expense.setCreateDate(new Date()); // Forward to the view return "expense"; } /** * Handle submit. 
* * @param request * @param response * @return view to forward */ public String save(HttpServletRequest request, HttpServletResponse response) { // do validation long days = Math.abs(new Date().getTime() - expense.getCreateDate().getTime())/1000/3660/24; if (days > 365) { error = Messages.getMessageResourceString("messages", Locale.US, "error.date.range", new Long[] { days }); return "expense"; } if (expense.getCategoryId() == null) { error = Messages.getMessageResourceString("messages", Locale.US, "error.no.category", null); return "expense"; } if (expense.getAmount() == 0.0) { error = Messages.getMessageResourceString("messages", Locale.US, "error.amount.min", new Float[] { 0.0f }); return "expense"; } // persist/update entity if (preinitId == null) { expense.setUserId(request.getUserPrincipal().getName()); expenseDao.save(expense); message = Messages.getMessageResourceString("messages", Locale.US, "status.added.expense", null); // Reset to default expense = new Expense(); return execute(request, response); } else { expense.setExpenseId(preinitId); expense.setUserId(request.getUserPrincipal().getName()); expenseDao.save(expense); message = Messages.getMessageResourceString("messages", Locale.US, "status.updated.expense", new Integer[] {preinitId}); } return "expense"; } /** * Edit selected expense record. * * @param request * @param response * @return view to forward */ public String edit(HttpServletRequest request, HttpServletResponse response) { if (preinitId == null) { throw new RuntimeException("item id is not set"); } expense = expenseDao.getById(preinitId); return "expense"; } /** * Prompt for deletion of expense record. * * @param request * @param response * @return view to forward */ public String delete(HttpServletRequest request, HttpServletResponse response) { if (preinitId == null) { throw new RuntimeException("item id is not set"); } expense = expenseDao.getById(preinitId); return "expense-delete"; } /** * Do actual deletion of expense record. 
* * @param request * @param response * @return view to forward */ public String doDelete(HttpServletRequest request, HttpServletResponse response) { if (preinitId == null) { throw new RuntimeException("item id is not set"); } expenseDao.delete(preinitId); expense = new Expense(); message = Messages.getMessageResourceString("messages", Locale.US, "status.deleted.expense", new Integer[] {preinitId}); return "expense-delete"; } public List<Category> getCategories() { return categoryDao.getAll(); } public Expense getExpense() { return expense; } public void setCreateDate(Date createDate) { expense.setCreateDate(createDate); } public void setCategoryId(String categoryId) { expense.setCategoryId(categoryId); } public void setAmount(float amount) { expense.setAmount(amount); } public void setDescr(String descr) { expense.setDescr(descr); } public String getError() { return error; } public String getMessage() { return message; } public Integer getPreinitId() { return preinitId; } public void setPreinitId(Integer preinitId) { this.preinitId = preinitId; } }